diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
new file mode 100644
index 00000000..be8edda5
--- /dev/null
+++ b/.devcontainer/devcontainer.json
@@ -0,0 +1,25 @@
+// For format details, see https://aka.ms/devcontainer.json. For config options, see the
+// README at: https://github.com/devcontainers/templates/tree/main/src/typescript-node
+{
+ "name": "Node.js & TypeScript",
+ "image": "mcr.microsoft.com/devcontainers/typescript-node:20-bullseye",
+ "features": {
+ "ghcr.io/devcontainers/features/node:1": {},
+ "ghcr.io/devcontainers-contrib/features/typescript:2.0.14": {}
+ },
+
+ // Features to add to the dev container. More info: https://containers.dev/features.
+ // "features": {},
+
+ // Use 'forwardPorts' to make a list of ports inside the container available locally.
+ // "forwardPorts": [],
+
+ // Use 'postCreateCommand' to run commands after the container is created.
+ "postCreateCommand": "./.devcontainer/postCreateCommand.sh",
+
+ // Configure tool-specific properties.
+ // "customizations": {},
+
+ // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
+ // "remoteUser": "root"
+}
\ No newline at end of file
diff --git a/.devcontainer/postCreateCommand.sh b/.devcontainer/postCreateCommand.sh
new file mode 100644
index 00000000..adc964d2
--- /dev/null
+++ b/.devcontainer/postCreateCommand.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+npm install
\ No newline at end of file
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 892592bd..aaaf3457 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -4,38 +4,40 @@ on:
tags:
- '*'
pull_request:
+ workflow_dispatch:
jobs:
build:
- if: github.event_name == 'pull_request'
+ if: github.event_name == 'pull_request' || github.event_name == 'workflow_dispatch'
runs-on: ubuntu-latest
steps:
- name: Checkout
- uses: actions/checkout@v2
- - uses: actions/setup-node@v2
+ uses: actions/checkout@v4
+ - uses: actions/setup-node@v3
with:
- node-version: '16'
+ node-version: '20'
- name: Test JUnit test import
uses: ./
- if: endsWith(github.ref, 'main') == false
with:
check_name: Example JUnit Test Report
report_paths: '**/surefire-reports/TEST-*.xml'
+ include_passed: true
+ detailed_summary: true
summary: '
Application (src/applications) |
---|
test |
'
check_title_template: '{{SUITE_NAME}} | {{TEST_NAME}}'
+ annotate_only: ${{ github.event_name == 'workflow_dispatch' }}
- name: Test PyTest test import
uses: ./
- if: endsWith(github.ref, 'main') == false
with:
check_name: Example Pytest Report
report_paths: test_results/python/report.xml
include_passed: true
detailed_summary: true
+ annotate_only: ${{ github.event_name == 'workflow_dispatch' }}
- name: Test Multi test import
uses: ./
- if: endsWith(github.ref, 'main') == false
with:
check_name: |-
Example Multi JUnit Test Report
@@ -51,6 +53,7 @@ jobs:
\n
transformers: |
[{"searchValue":"::","replaceValue":"/"}]
+ annotate_only: ${{ github.event_name == 'workflow_dispatch' }}
- name: Install NPM
run: |
@@ -66,18 +69,18 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
- uses: actions/checkout@v2
+ uses: actions/checkout@v4
- name: "Build Changelog"
id: github_release
- uses: mikepenz/release-changelog-builder-action@v2
+ uses: mikepenz/release-changelog-builder-action@v4
with:
configuration: ".github/config/configuration.json"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Create Release
- uses: softprops/action-gh-release@6034af24fba4e5a8e975aaa6056554efe4c794d0
+ uses: mikepenz/action-gh-release@v1
with:
body: ${{steps.github_release.outputs.changelog}}
prerelease: ${{ contains(github.ref, '-rc') || contains(github.ref, '-b') || contains(github.ref, '-a') }}
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index a85b3f2a..995fe686 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -39,7 +39,7 @@ jobs:
steps:
- name: Checkout repository
- uses: actions/checkout@v2
+ uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
diff --git a/README.md b/README.md
index 9f8050f4..76d5dcde 100644
--- a/README.md
+++ b/README.md
@@ -59,12 +59,12 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout Code
- uses: actions/checkout@v1
+ uses: actions/checkout@v4
- name: Build and Run Tests
run: # execute your tests generating test results
- name: Publish Test Report
- uses: mikepenz/action-junit-report@v3
- if: always() # always run even if the previous step fails
+ uses: mikepenz/action-junit-report@v4
+ if: success() || failure() # always run even if the previous step fails
with:
report_paths: '**/build/test-results/test/TEST-*.xml'
```
@@ -73,7 +73,7 @@ jobs:
| **Input** | **Description** |
|----------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| `report_paths` | **Required**. [Glob](https://github.com/actions/toolkit/tree/master/packages/glob) expression to junit report paths. The default is `**/junit-reports/TEST-*.xml`. |
+| `report_paths` | Optional. [Glob](https://github.com/actions/toolkit/tree/master/packages/glob) expression matching JUnit report paths. Defaults to: `**/junit-reports/TEST-*.xml`. |
| `token` | Optional. GitHub token for creating a check run. Set to `${{ github.token }}` by default. |
| `test_files_prefix` | Optional. Prepends the provided prefix to test file paths within the report when annotating on GitHub. |
| `exclude_sources` | Optional. Provide `,` separated array of folders to ignore for source lookup. Defaults to: `/build/,/__pycache__/` |
@@ -82,6 +82,7 @@ jobs:
| `commit` | Optional. The commit SHA to update the status. This is useful when you run it with `workflow_run`. |
| `fail_on_failure` | Optional. Fail the build in case of a test failure. |
| `require_tests` | Optional. Fail if no tests are found. |
+| `require_passed_tests` | Optional. Fail if no passed tests are found. (This is stricter than `require_tests`, which accepts skipped tests.) |
| `include_passed` | Optional. By default the action will skip passed items for the annotations. Enable this flag to include them. |
| `check_retries` | Optional. If a testcase is retried, ignore the original failure. |
| `check_title_template` | Optional. Template to configure the title format. Placeholders: {{FILE_NAME}}, {{SUITE_NAME}}, {{TEST_NAME}}. |
@@ -96,6 +97,41 @@ jobs:
| `job_name` | Optional. Specify the name of a check to update |
| `annotations_limit` | Optional. Specify the limit for annotations. This will also interrupt parsing all test-suites if the limit is reached. Defaults to: `No Limit`. |
+Common `report_paths`:
+
+- Surefire: `**/target/surefire-reports/TEST-*.xml`
+- sbt: `**/target/test-reports/*.xml`
+
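+For instance, to publish Surefire results (a sketch; adjust the glob to your project structure):
+
+```yaml
+- name: Publish Test Report
+  uses: mikepenz/action-junit-report@v4
+  if: success() || failure()
+  with:
+    report_paths: '**/target/surefire-reports/TEST-*.xml'
+```
+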
+Increase Node Heap Memory
+
+If you encounter an out-of-memory error from Node, such as
+
+```
+FATAL ERROR: Ineffective mark-compacts near heap limit Allocation failed - JavaScript heap out of memory
+```
+
+you can increase the memory allocation by setting the `NODE_OPTIONS` environment variable:
+
+```yaml
+- name: Publish Test Report
+ uses: mikepenz/action-junit-report@v4
+ env:
+ NODE_OPTIONS: "--max_old_space_size=4096"
+ if: success() || failure() # always run even if the previous step fails
+ with:
+ report_paths: '**/build/test-results/test/TEST-*.xml'
+```
+
### Action outputs
After action execution it will return the test counts as output.
@@ -116,7 +152,14 @@ A full set list of possible output values for this action.
### PR run permissions
-For [security reasons], the github token used for `pull_request` workflows is [marked as read-only].
+The action requires `write` permission on checks. If the GitHub Actions token is `read-only` (a repository-level setting), enable `write` permission via:
+
+```yml
+permissions:
+ checks: write
+```
+
+Additionally, for [security reasons], the GitHub token used for `pull_request` workflows is [marked as read-only].
If you want to post checks to a PR from an external repository, you will need to use a separate workflow
which has a read/write token, or use a PAT with elevated permissions.
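+
+A sketch of such a separate workflow, assuming the PR workflow uploads its reports as an artifact named `test-results` (the triggering workflow name, artifact name, and report glob are illustrative):
+
+```yaml
+name: report
+on:
+  workflow_run:
+    workflows: [test]        # name of the workflow that runs the tests
+    types: [completed]
+permissions:
+  checks: write
+  actions: read              # needed to download artifacts from the triggering run
+jobs:
+  report:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Download test results
+        uses: actions/download-artifact@v4
+        with:
+          name: test-results                   # hypothetical artifact uploaded by the PR workflow
+          run-id: ${{ github.event.workflow_run.id }}
+          github-token: ${{ github.token }}
+      - name: Publish Test Report
+        uses: mikepenz/action-junit-report@v4
+        with:
+          commit: ${{ github.event.workflow_run.head_sha }}  # attach the check to the PR's head commit
+          report_paths: '**/TEST-*.xml'
+```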
@@ -227,7 +270,7 @@ Original idea and GitHub Actions by: https://github.com/ScaCap/action-surefire-r
## License
- Copyright (C) 2022 Mike Penz
+ Copyright (C) 2023 Mike Penz
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
diff --git a/__tests__/testParser.test.ts b/__tests__/testParser.test.ts
index 00d831ed..0465c36e 100644
--- a/__tests__/testParser.test.ts
+++ b/__tests__/testParser.test.ts
@@ -1,4 +1,4 @@
-import { resolveFileAndLine, resolvePath, parseFile, Transformer } from '../src/testParser'
+import {resolveFileAndLine, resolvePath, parseFile, Transformer, parseTestReports} from '../src/testParser'
/**
* Original test cases:
@@ -11,33 +11,33 @@ import { resolveFileAndLine, resolvePath, parseFile, Transformer } from '../src/
jest.setTimeout(30000)
describe('resolveFileAndLine', () => {
- it('should default to 1 if no line found', async () => {
- const { fileName, line } = await resolveFileAndLine(null, null, 'someClassName', 'not a stacktrace');
- expect(fileName).toBe('someClassName');
- expect(line).toBe(1);
- });
-
- it('should parse correctly fileName and line for a Java file', async () => {
- const { fileName, line } = await resolveFileAndLine(
- null,
- null,
- 'action.surefire.report.email.EmailAddressTest',
- `
+ it('should default to 1 if no line found', async () => {
+ const {fileName, line} = await resolveFileAndLine(null, null, 'someClassName', 'not a stacktrace')
+ expect(fileName).toBe('someClassName')
+ expect(line).toBe(1)
+ })
+
+ it('should parse correctly fileName and line for a Java file', async () => {
+ const {fileName, line} = await resolveFileAndLine(
+ null,
+ null,
+ 'action.surefire.report.email.EmailAddressTest',
+ `
action.surefire.report.email.InvalidEmailAddressException: Invalid email address 'user@ñandú.com.ar'
at action.surefire.report.email.EmailAddressTest.expectException(EmailAddressTest.java:74)
at action.surefire.report.email.EmailAddressTest.shouldNotContainInternationalizedHostNames(EmailAddressTest.java:39)
`
- );
- expect(fileName).toBe('EmailAddressTest');
- expect(line).toBe(39);
- });
-
- it('should parse correctly fileName and line for a Kotlin file', async () => {
- const { fileName, line } = await resolveFileAndLine(
- null,
- null,
- 'action.surefire.report.calc.CalcUtilsTest',
- `
+ )
+ expect(fileName).toBe('EmailAddressTest')
+ expect(line).toBe(39)
+ })
+
+ it('should parse correctly fileName and line for a Kotlin file', async () => {
+ const {fileName, line} = await resolveFileAndLine(
+ null,
+ null,
+ 'action.surefire.report.calc.CalcUtilsTest',
+ `
java.lang.AssertionError: unexpected exception type thrown; expected: but was:
at action.surefire.report.calc.CalcUtilsTest.test error handling(CalcUtilsTest.kt:27)
Caused by: java.lang.IllegalArgumentException: Amount must have max 2 non-zero decimal places
@@ -45,17 +45,17 @@ Caused by: java.lang.IllegalArgumentException: Amount must have max 2 non-zero d
at action.surefire.report.calc.CalcUtilsTest.access$scale(CalcUtilsTest.kt:9)
at action.surefire.report.calc.CalcUtilsTest.test error handling(CalcUtilsTest.kt:27)
`
- );
- expect(fileName).toBe('CalcUtilsTest');
- expect(line).toBe(27);
- });
-
- it('should parse correctly fileName and line for extended stacktrace', async () => {
- const { fileName, line } = await resolveFileAndLine(
- null,
- null,
- 'action.surefire.report.calc.StringUtilsTest',
- `
+ )
+ expect(fileName).toBe('CalcUtilsTest')
+ expect(line).toBe(27)
+ })
+
+ it('should parse correctly fileName and line for extended stacktrace', async () => {
+ const {fileName, line} = await resolveFileAndLine(
+ null,
+ null,
+ 'action.surefire.report.calc.StringUtilsTest',
+ `
java.lang.AssertionError:
Expected: (an instance of java.lang.IllegalArgumentException and exception with message a string containing "This is unexpected")
@@ -69,17 +69,17 @@ Stacktrace was: java.lang.IllegalArgumentException: Input='' didn't match condit
at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159)
at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:418)
`
- );
- expect(fileName).toBe('StringUtilsTest');
- expect(line).toBe(26);
- });
-
- it('should parse correctly fileName and line for pytest', async () => {
- const { fileName, line } = await resolveFileAndLine(
- 'test.py',
- null,
- 'anything',
- `
+ )
+ expect(fileName).toBe('StringUtilsTest')
+ expect(line).toBe(26)
+ })
+
+ it('should parse correctly fileName and line for pytest', async () => {
+ const {fileName, line} = await resolveFileAndLine(
+ 'test.py',
+ null,
+ 'anything',
+ `
def
test_with_error():
event = { 'attr': 'test'}
@@ -88,653 +88,1029 @@ E AttributeError: 'dict' object has no attribute 'attr'
test.py:14: AttributeError
`
- );
- expect(fileName).toBe('test.py');
- expect(line).toBe(14);
- });
-
- it('should parse correctly line number for rust tests', async () => {
- const { fileName, line } = await resolveFileAndLine(
- null,
- null,
- 'project',
- `thread 'project::admission_webhook_tests::it_should_be_possible_to_update_projects' panicked at 'boom', tests/project/admission_webhook_tests.rs:48:38
+ )
+ expect(fileName).toBe('test.py')
+ expect(line).toBe(14)
+ })
+
+ it('should parse correctly line number for rust tests', async () => {
+ const {fileName, line} = await resolveFileAndLine(
+ null,
+ null,
+ 'project',
+ `thread 'project::admission_webhook_tests::it_should_be_possible_to_update_projects' panicked at 'boom', tests/project/admission_webhook_tests.rs:48:38
note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace
`
- );
- expect(line).toBe(48);
- expect(fileName).toBe('tests/project/admission_webhook_tests.rs');
- });
+ )
+ expect(line).toBe(48)
+ expect(fileName).toBe('tests/project/admission_webhook_tests.rs')
+ })
it('should parse correctly line number for rust tests 2', async () => {
- const { fileName, line } = await resolveFileAndLine(
+ const {fileName, line} = await resolveFileAndLine(
+ null,
null,
- null,
'project::manifest_secrets',
`thread 'project::manifest_secrets::it_should_skip_annotated_manifests' panicked at 'assertion failed: \`(left == right)\`\\n" +
' left: \`0\`,\\n' +
" right: \`42\`: all manifests should be skipped', tests/project/manifest_secrets.rs:305:5
`
- );
- expect(line).toBe(305);
- expect(fileName).toBe('tests/project/manifest_secrets.rs');
- });
-});
+ )
+ expect(line).toBe(305)
+ expect(fileName).toBe('tests/project/manifest_secrets.rs')
+ })
+})
describe('resolvePath', () => {
- it('should find correct file for Java fileName', async () => {
- const path = await resolvePath('EmailAddressTest', ['/build/', '/__pycache__/']);
- expect(path).toBe(
- 'test_results/tests/email/src/test/java/action/surefire/report/email/EmailAddressTest.java'
- );
- });
+ it('should find correct file for Java fileName', async () => {
+ const path = await resolvePath('EmailAddressTest', ['/build/', '/__pycache__/'])
+ expect(path).toBe('test_results/tests/email/src/test/java/action/surefire/report/email/EmailAddressTest.java')
+ })
- it('should find correct file for Kotlin fileName', async () => {
- const path = await resolvePath('CalcUtilsTest', ['/build/', '/__pycache__/']);
- expect(path).toBe('test_results/tests/utils/src/test/java/action/surefire/report/calc/CalcUtilsTest.kt');
- });
+ it('should find correct file for Kotlin fileName', async () => {
+ const path = await resolvePath('CalcUtilsTest', ['/build/', '/__pycache__/'])
+ expect(path).toBe('test_results/tests/utils/src/test/java/action/surefire/report/calc/CalcUtilsTest.kt')
+ })
- it('should find correct file with a relative path', async () => {
- const path = await resolvePath('./test_results/CalcUtilsTest.kt', ['/build/', '/__pycache__/']);
- expect(path).toBe('test_results/CalcUtilsTest.kt');
+ it('should find correct file with a relative path', async () => {
+ const path = await resolvePath('./test_results/CalcUtilsTest.kt', ['/build/', '/__pycache__/'])
+ expect(path).toBe('test_results/CalcUtilsTest.kt')
})
-});
+})
describe('parseFile', () => {
- it('should parse CalcUtils results', async () => {
- const { totalCount, skipped, annotations } = await parseFile(
- 'test_results/tests/utils/target/surefire-reports/TEST-action.surefire.report.calc.CalcUtilsTest.xml'
- );
-
- expect(totalCount).toBe(2);
- expect(skipped).toBe(0);
- expect(annotations).toStrictEqual([
- {
- path: 'test_results/tests/utils/src/test/java/action/surefire/report/calc/CalcUtilsTest.kt',
- start_line: 27,
- end_line: 27,
- start_column: 0,
- end_column: 0,
- annotation_level: 'failure',
- title: 'CalcUtilsTest.test error handling',
- message:
- 'unexpected exception type thrown; expected: but was:',
- raw_details:
- 'java.lang.AssertionError: unexpected exception type thrown; expected: but was:\n\tat action.surefire.report.calc.CalcUtilsTest.test error handling(CalcUtilsTest.kt:27)\nCaused by: java.lang.IllegalArgumentException: Amount must have max 2 non-zero decimal places\n\tat action.surefire.report.calc.CalcUtilsTest.scale(CalcUtilsTest.kt:31)\n\tat action.surefire.report.calc.CalcUtilsTest.access$scale(CalcUtilsTest.kt:9)\n\tat action.surefire.report.calc.CalcUtilsTest.test error handling(CalcUtilsTest.kt:27)'
- },
- {
- path: 'test_results/tests/utils/src/test/java/action/surefire/report/calc/CalcUtilsTest.kt',
- start_line: 15,
- end_line: 15,
- start_column: 0,
- end_column: 0,
- annotation_level: 'failure',
- title: 'CalcUtilsTest.test scale',
- message: 'Expected: <100.10>\n but: was <100.11>',
- raw_details:
- 'java.lang.AssertionError: \n\nExpected: <100.10>\n but: was <100.11>\n\tat action.surefire.report.calc.CalcUtilsTest.test scale(CalcUtilsTest.kt:15)'
- }
- ]);
- });
-
- it('should skip after reaching annotations_limit', async () => {
- const annotationsLimit = 1
- const { totalCount, skipped, annotations } = await parseFile(
- 'test_results/tests/utils/target/surefire-reports/TEST-action.surefire.report.calc.CalcUtilsTest.xml', undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, annotationsLimit
- );
-
- expect(totalCount).toBe(1);
- expect(skipped).toBe(0);
- expect(annotations).toStrictEqual([
- {
- path: 'test_results/tests/utils/src/test/java/action/surefire/report/calc/CalcUtilsTest.kt',
- start_line: 27,
- end_line: 27,
- start_column: 0,
- end_column: 0,
- annotation_level: 'failure',
- title: 'CalcUtilsTest.test error handling',
- message:
- 'unexpected exception type thrown; expected: but was:',
- raw_details:
- 'java.lang.AssertionError: unexpected exception type thrown; expected: but was:\n\tat action.surefire.report.calc.CalcUtilsTest.test error handling(CalcUtilsTest.kt:27)\nCaused by: java.lang.IllegalArgumentException: Amount must have max 2 non-zero decimal places\n\tat action.surefire.report.calc.CalcUtilsTest.scale(CalcUtilsTest.kt:31)\n\tat action.surefire.report.calc.CalcUtilsTest.access$scale(CalcUtilsTest.kt:9)\n\tat action.surefire.report.calc.CalcUtilsTest.test error handling(CalcUtilsTest.kt:27)'
- }
- ]);
- });
-
-
- it('should parse pytest results', async () => {
- const { totalCount, skipped, annotations } = await parseFile('test_results/python/report.xml');
- const filtered = annotations.filter(annotation => annotation.annotation_level !== 'notice')
-
- expect(totalCount).toBe(3);
- expect(skipped).toBe(0);
- expect(filtered).toStrictEqual([
- {
- path: 'test_results/python/test_sample.py',
- start_line: 10,
- end_line: 10,
- start_column: 0,
- end_column: 0,
- annotation_level: 'failure',
- title: 'test_sample.test_which_fails',
- message: "AssertionError: assert 'test' == 'xyz'\n - xyz\n + test",
- raw_details:
- "def test_which_fails():\n event = { 'attr': 'test'}\n> assert event['attr'] == 'xyz'\nE AssertionError: assert 'test' == 'xyz'\nE - xyz\nE + test\n\npython/test_sample.py:10: AssertionError"
- },
- {
- path: 'test_results/python/test_sample.py',
- start_line: 14,
- end_line: 14,
- start_column: 0,
- end_column: 0,
- annotation_level: 'failure',
- title: 'test_sample.test_with_error',
- message: "AttributeError: 'dict' object has no attribute 'attr'",
- raw_details:
- "def test_with_error():\n event = { 'attr': 'test'}\n> assert event.attr == 'test'\nE AttributeError: 'dict' object has no attribute 'attr'\n\npython/test_sample.py:14: AttributeError"
- }
- ]);
- });
-
- it('should parse pytest results 2', async () => {
- const { totalCount, skipped, annotations } = await parseFile('test_results/python/report.xml', '', false, false, ['/build/', '/__pycache__/'], undefined, 'subproject/');
- const filtered = annotations.filter(annotation => annotation.annotation_level !== 'notice')
-
- expect(totalCount).toBe(3);
- expect(skipped).toBe(0);
- expect(filtered).toStrictEqual([
- {
- path: 'subproject/test_results/python/test_sample.py',
- start_line: 10,
- end_line: 10,
- start_column: 0,
- end_column: 0,
- annotation_level: 'failure',
- title: 'test_sample.test_which_fails',
- message: "AssertionError: assert 'test' == 'xyz'\n - xyz\n + test",
- raw_details:
- "def test_which_fails():\n event = { 'attr': 'test'}\n> assert event['attr'] == 'xyz'\nE AssertionError: assert 'test' == 'xyz'\nE - xyz\nE + test\n\npython/test_sample.py:10: AssertionError"
- },
- {
- path: 'subproject/test_results/python/test_sample.py',
- start_line: 14,
- end_line: 14,
- start_column: 0,
- end_column: 0,
- annotation_level: 'failure',
- title: 'test_sample.test_with_error',
- message: "AttributeError: 'dict' object has no attribute 'attr'",
- raw_details:
- "def test_with_error():\n event = { 'attr': 'test'}\n> assert event.attr == 'test'\nE AttributeError: 'dict' object has no attribute 'attr'\n\npython/test_sample.py:14: AttributeError"
- }
- ]);
- });
-
- it('should parse marathon results', async () => {
- const { totalCount, skipped, annotations } = await parseFile('test_results/marathon_tests/com.mikepenz.DummyTest#test_02_dummy.xml');
- const filtered = annotations.filter(annotation => annotation.annotation_level !== 'notice')
-
- expect(totalCount).toBe(1);
- expect(skipped).toBe(0);
- expect(filtered).toStrictEqual([]);
- });
-
- it('should parse marathon results and retrieve message', async () => {
- const { totalCount, skipped, annotations } = await parseFile('test_results/marathon_tests/com.mikepenz.DummyTest3#test_01.xml');
-
- expect(totalCount).toBe(1);
- expect(skipped).toBe(0);
- expect(annotations).toStrictEqual([
- {
- "annotation_level": "failure",
- "end_column": 0,
- "end_line": 1,
- "message": "test_01",
- "path": "DummyTest3",
- "raw_details": "",
- "start_column": 0,
- "start_line": 1,
- "title": "DummyTest3.test_01",
- }
- ]);
- });
-
- it('should parse and fail marathon results', async () => {
- const { totalCount, skipped, annotations } = await parseFile('test_results/marathon_tests/com.mikepenz.DummyUtilTest#test_01_dummy.xml');
-
- expect(totalCount).toBe(1);
- expect(skipped).toBe(0);
- expect(annotations).toStrictEqual([
- {
- "annotation_level": "failure",
- "end_column": 0,
- "end_line": 1,
- "message": "java.io.FileNotFoundException: No content provider: content://com.xyz/photo.jpg\nat android.content.ContentResolver.openTypedAssetFileDescriptor(ContentResolver.java:1969)",
- "path": "DummyUtilTest",
- "raw_details": "java.io.FileNotFoundException: No content provider: content://com.xyz/photo.jpg\nat android.content.ContentResolver.openTypedAssetFileDescriptor(ContentResolver.java:1969)\nat android.app.Instrumentation$InstrumentationThread.run(Instrumentation.java:2205)",
- "start_column": 0,
- "start_line": 1,
- "title": "DummyUtilTest.test_01_dummy",
- },
- ]);
- });
-
- it('should parse empty cunit results', async () => {
- const { totalCount, skipped, annotations } = await parseFile('test_results/cunit/testEmpty.xml');
-
- expect(totalCount).toBe(0);
- expect(skipped).toBe(0);
- expect(annotations).toStrictEqual([]);
- });
-
- it('should parse failure cunit results', async () => {
- const { totalCount, skipped, annotations } = await parseFile('test_results/cunit/testFailure.xml');
- const filtered = annotations.filter(annotation => annotation.annotation_level !== 'notice')
-
- expect(totalCount).toBe(4);
- expect(skipped).toBe(0);
- expect(filtered).toStrictEqual([
- {
- "annotation_level": "failure",
- "end_column": 0,
- "end_line": 1,
- "message": "false == something.loadXml(xml_string)",
- "path": "loadFromXMLString_When_Should2Test",
- "raw_details": "false == something.loadXml(xml_string)\nFile: /dumm/core/tests/testFailure.cpp\nLine: 77",
- "start_column": 0,
- "start_line": 1,
- "title": "loadFromXMLString_When_Should2Test",
- },
- ]);
- });
-
- it('should parse correctly fileName and line for a Java file with invalid chars', async () => {
- const { fileName, line } = await resolveFileAndLine(
- null,
- null,
- 'action.surefire.report.email.EmailAddressTest++',
- `
+ it('should parse CalcUtils results', async () => {
+ const {totalCount, skipped, annotations} = await parseFile(
+ 'test_results/tests/utils/target/surefire-reports/TEST-action.surefire.report.calc.CalcUtilsTest.xml'
+ )
+
+ expect(totalCount).toBe(2)
+ expect(skipped).toBe(0)
+ expect(annotations).toStrictEqual([
+ {
+ path: 'test_results/tests/utils/src/test/java/action/surefire/report/calc/CalcUtilsTest.kt',
+ start_line: 27,
+ end_line: 27,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'failure',
+ status: 'failure',
+ title: 'CalcUtilsTest.test error handling',
+ message:
+ 'unexpected exception type thrown; expected: but was:',
+ raw_details:
+ 'java.lang.AssertionError: unexpected exception type thrown; expected: but was:\n\tat action.surefire.report.calc.CalcUtilsTest.test error handling(CalcUtilsTest.kt:27)\nCaused by: java.lang.IllegalArgumentException: Amount must have max 2 non-zero decimal places\n\tat action.surefire.report.calc.CalcUtilsTest.scale(CalcUtilsTest.kt:31)\n\tat action.surefire.report.calc.CalcUtilsTest.access$scale(CalcUtilsTest.kt:9)\n\tat action.surefire.report.calc.CalcUtilsTest.test error handling(CalcUtilsTest.kt:27)'
+ },
+ {
+ path: 'test_results/tests/utils/src/test/java/action/surefire/report/calc/CalcUtilsTest.kt',
+ start_line: 15,
+ end_line: 15,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'failure',
+ status: 'failure',
+ title: 'CalcUtilsTest.test scale',
+ message: 'Expected: <100.10>\n but: was <100.11>',
+ raw_details:
+ 'java.lang.AssertionError: \n\nExpected: <100.10>\n but: was <100.11>\n\tat action.surefire.report.calc.CalcUtilsTest.test scale(CalcUtilsTest.kt:15)'
+ }
+ ])
+ })
+
+ it('should skip after reaching annotations_limit', async () => {
+ const annotationsLimit = 1
+ const {totalCount, skipped, annotations} = await parseFile(
+ 'test_results/tests/utils/target/surefire-reports/TEST-action.surefire.report.calc.CalcUtilsTest.xml',
+ undefined,
+ undefined,
+ undefined,
+ undefined,
+ undefined,
+ undefined,
+ undefined,
+ undefined,
+ annotationsLimit
+ )
+
+ expect(totalCount).toBe(1)
+ expect(skipped).toBe(0)
+ expect(annotations).toStrictEqual([
+ {
+ path: 'test_results/tests/utils/src/test/java/action/surefire/report/calc/CalcUtilsTest.kt',
+ start_line: 27,
+ end_line: 27,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'failure',
+ status: 'failure',
+ title: 'CalcUtilsTest.test error handling',
+ message:
+ 'unexpected exception type thrown; expected: but was:',
+ raw_details:
+ 'java.lang.AssertionError: unexpected exception type thrown; expected: but was:\n\tat action.surefire.report.calc.CalcUtilsTest.test error handling(CalcUtilsTest.kt:27)\nCaused by: java.lang.IllegalArgumentException: Amount must have max 2 non-zero decimal places\n\tat action.surefire.report.calc.CalcUtilsTest.scale(CalcUtilsTest.kt:31)\n\tat action.surefire.report.calc.CalcUtilsTest.access$scale(CalcUtilsTest.kt:9)\n\tat action.surefire.report.calc.CalcUtilsTest.test error handling(CalcUtilsTest.kt:27)'
+ }
+ ])
+ })
+
+ it('should parse pytest results', async () => {
+ const {totalCount, skipped, annotations} = await parseFile('test_results/python/report.xml')
+ const filtered = annotations.filter(annotation => annotation.annotation_level !== 'notice')
+
+ expect(totalCount).toBe(3)
+ expect(skipped).toBe(0)
+ expect(filtered).toStrictEqual([
+ {
+ path: 'test_results/python/test_sample.py',
+ start_line: 10,
+ end_line: 10,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'failure',
+ status: 'failure',
+ title: 'test_sample.test_which_fails',
+ message: "AssertionError: assert 'test' == 'xyz'\n - xyz\n + test",
+ raw_details:
+ "def test_which_fails():\n event = { 'attr': 'test'}\n> assert event['attr'] == 'xyz'\nE AssertionError: assert 'test' == 'xyz'\nE - xyz\nE + test\n\npython/test_sample.py:10: AssertionError"
+ },
+ {
+ path: 'test_results/python/test_sample.py',
+ start_line: 14,
+ end_line: 14,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'failure',
+ status: 'failure',
+ title: 'test_sample.test_with_error',
+ message: "AttributeError: 'dict' object has no attribute 'attr'",
+ raw_details:
+ "def test_with_error():\n event = { 'attr': 'test'}\n> assert event.attr == 'test'\nE AttributeError: 'dict' object has no attribute 'attr'\n\npython/test_sample.py:14: AttributeError"
+ }
+ ])
+ })
+
+ it('should parse pytest results 2', async () => {
+ const {totalCount, skipped, annotations} = await parseFile(
+ 'test_results/python/report.xml',
+ '',
+ false,
+ false,
+ ['/build/', '/__pycache__/'],
+ undefined,
+ 'subproject/'
+ )
+ const filtered = annotations.filter(annotation => annotation.annotation_level !== 'notice')
+
+ expect(totalCount).toBe(3)
+ expect(skipped).toBe(0)
+ expect(filtered).toStrictEqual([
+ {
+ path: 'subproject/test_results/python/test_sample.py',
+ start_line: 10,
+ end_line: 10,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'failure',
+ status: 'failure',
+ title: 'test_sample.test_which_fails',
+ message: "AssertionError: assert 'test' == 'xyz'\n - xyz\n + test",
+ raw_details:
+ "def test_which_fails():\n event = { 'attr': 'test'}\n> assert event['attr'] == 'xyz'\nE AssertionError: assert 'test' == 'xyz'\nE - xyz\nE + test\n\npython/test_sample.py:10: AssertionError"
+ },
+ {
+ path: 'subproject/test_results/python/test_sample.py',
+ start_line: 14,
+ end_line: 14,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'failure',
+ status: 'failure',
+ title: 'test_sample.test_with_error',
+ message: "AttributeError: 'dict' object has no attribute 'attr'",
+ raw_details:
+ "def test_with_error():\n event = { 'attr': 'test'}\n> assert event.attr == 'test'\nE AttributeError: 'dict' object has no attribute 'attr'\n\npython/test_sample.py:14: AttributeError"
+ }
+ ])
+ })
+
+ it('should parse marathon results', async () => {
+ const {totalCount, skipped, annotations} = await parseFile(
+ 'test_results/marathon_tests/com.mikepenz.DummyTest#test_02_dummy.xml'
+ )
+ const filtered = annotations.filter(annotation => annotation.annotation_level !== 'notice')
+
+ expect(totalCount).toBe(1)
+ expect(skipped).toBe(0)
+ expect(filtered).toStrictEqual([])
+ })
+
+ it('should parse marathon results and retrieve message', async () => {
+ const {totalCount, skipped, annotations} = await parseFile(
+ 'test_results/marathon_tests/com.mikepenz.DummyTest3#test_01.xml'
+ )
+
+ expect(totalCount).toBe(1)
+ expect(skipped).toBe(0)
+ expect(annotations).toStrictEqual([
+ {
+ annotation_level: 'failure',
+ end_column: 0,
+ end_line: 1,
+ message: 'test_01',
+ path: 'DummyTest3',
+ raw_details: '',
+ start_column: 0,
+ start_line: 1,
+ status: 'failure',
+ title: 'DummyTest3.test_01'
+ }
+ ])
+ })
+
+ it('should parse and fail marathon results', async () => {
+ const {totalCount, skipped, annotations} = await parseFile(
+ 'test_results/marathon_tests/com.mikepenz.DummyUtilTest#test_01_dummy.xml'
+ )
+
+ expect(totalCount).toBe(1)
+ expect(skipped).toBe(0)
+ expect(annotations).toStrictEqual([
+ {
+ annotation_level: 'failure',
+ end_column: 0,
+ end_line: 1,
+ message:
+ 'java.io.FileNotFoundException: No content provider: content://com.xyz/photo.jpg\nat android.content.ContentResolver.openTypedAssetFileDescriptor(ContentResolver.java:1969)',
+ path: 'DummyUtilTest',
+ raw_details:
+ 'java.io.FileNotFoundException: No content provider: content://com.xyz/photo.jpg\nat android.content.ContentResolver.openTypedAssetFileDescriptor(ContentResolver.java:1969)\nat android.app.Instrumentation$InstrumentationThread.run(Instrumentation.java:2205)',
+ start_column: 0,
+ start_line: 1,
+ status: 'failure',
+ title: 'DummyUtilTest.test_01_dummy'
+ }
+ ])
+ })
+
+ it('should parse empty cunit results', async () => {
+ const {totalCount, skipped, annotations} = await parseFile('test_results/cunit/testEmpty.xml')
+
+ expect(totalCount).toBe(0)
+ expect(skipped).toBe(0)
+ expect(annotations).toStrictEqual([])
+ })
+
+ it('should parse failure cunit results', async () => {
+ const {totalCount, skipped, annotations} = await parseFile('test_results/cunit/testFailure.xml')
+ const filtered = annotations.filter(annotation => annotation.annotation_level !== 'notice')
+
+ expect(totalCount).toBe(4)
+ expect(skipped).toBe(0)
+ expect(filtered).toStrictEqual([
+ {
+ annotation_level: 'failure',
+ end_column: 0,
+ end_line: 1,
+ message: 'false == something.loadXml(xml_string)',
+ path: 'loadFromXMLString_When_Should2Test',
+ raw_details: 'false == something.loadXml(xml_string)\nFile: /dumm/core/tests/testFailure.cpp\nLine: 77',
+ start_column: 0,
+ start_line: 1,
+ status: 'failure',
+ title: 'loadFromXMLString_When_Should2Test'
+ }
+ ])
+ })
+
+ it('should parse correctly fileName and line for a Java file with invalid chars', async () => {
+ const {fileName, line} = await resolveFileAndLine(
+ null,
+ null,
+ 'action.surefire.report.email.EmailAddressTest++',
+ `
action.surefire.report.email.InvalidEmailAddressException: Invalid email address 'user@ñandú.com.ar'
at action.surefire.report.email.EmailAddressTest.expectException(EmailAddressTest++.java:74)
at action.surefire.report.email.EmailAddressTest.shouldNotContainInternationalizedHostNames(EmailAddressTest++.java:39)
`
- );
- expect(fileName).toBe('EmailAddressTest++');
- expect(line).toBe(39);
- });
-
- it('should parse correctly nested test suites', async () => {
- const { totalCount, skipped, annotations } = await parseFile('test_results/nested/junit.xml', 'Test*');
- const filtered = annotations.filter(annotation => annotation.annotation_level !== 'notice')
-
- expect(totalCount).toBe(5);
- expect(skipped).toBe(0);
- expect(filtered).toStrictEqual([{
- "path": "A",
- "start_line": 1,
- "end_line": 1,
- "start_column": 0,
- "end_column": 0,
- "annotation_level": "failure",
- "title": "All tests/tests/packet/TestA/A",
- "message": "failure",
- "raw_details": ""
- }, {
- "path": "B",
- "start_line": 1,
- "end_line": 1,
- "start_column": 0,
- "end_column": 0,
- "annotation_level": "failure",
- "title": "All tests/tests/packet/TestB/B",
- "message": "failure",
- "raw_details": ""
- }, {
- "path": "A",
- "start_line": 1,
- "end_line": 1,
- "start_column": 0,
- "end_column": 0,
- "annotation_level": "failure",
- "title": "All tests/tests/packet/A",
- "message": "failure",
- "raw_details": ""
- }]);
- });
-
- it('should parse disabled tests', async () => {
- const { totalCount, skipped, annotations } = await parseFile('test_results/issues/testDisabled.xml', '*');
- const filtered = annotations.filter(annotation => annotation.annotation_level !== 'notice')
-
- expect(totalCount).toBe(22);
- expect(skipped).toBe(10);
- expect(filtered).toStrictEqual([{
- path: "factorial_of_value_from_fixture",
- start_line: 1,
- end_line: 1,
- start_column: 0,
- end_column: 0,
- annotation_level: "failure",
- title: "factorial/factorial_of_value_from_fixture",
- message: "tests/failed/main.cpp:58: error: check_eq(3628800, 3628801)",
- raw_details: "",
- }, {
- path: "factorial_of_value_from_fixture[0]",
- start_line: 1,
- end_line: 1,
- start_column: 0,
- end_column: 0,
- annotation_level: "failure",
- title: "factorial/factorial_of_value_from_fixture[0]",
- message: "tests/failed/main.cpp:97: error: condition was false",
- raw_details: "",
- }, {
- path: "positive_arguments_must_produce_expected_result",
- start_line: 1,
- end_line: 1,
- start_column: 0,
- end_column: 0,
- annotation_level: "failure",
- title: "factorial/positive_arguments_must_produce_expected_result",
- message: "uncaught std::exception: thrown by test",
- raw_details: "",
- }, {
- path: "positive_arguments_must_produce_expected_result[2]",
- start_line: 1,
- end_line: 1,
- start_column: 0,
- end_column: 0,
- annotation_level: "failure",
- title: "factorial/positive_arguments_must_produce_expected_result[2]",
- message: "tests/failed/main.cpp:73: error: condition was false",
- raw_details: "",
- }, {
- path: "test_which_fails_check_eq_with_custom_message",
- start_line: 1,
- end_line: 1,
- start_column: 0,
- end_column: 0,
- annotation_level: "failure",
- title: "factorial/test_which_fails_check_eq_with_custom_message",
- message: "tests/failed/main.cpp:49: error: check_eq(6, 7): hello world!",
- raw_details: "",
- }, {
- path: "test_which_throws_unknown_exception",
- start_line: 1,
- end_line: 1,
- start_column: 0,
- end_column: 0,
- annotation_level: "failure",
- title: "factorial/test_which_throws_unknown_exception",
- message: "uncaught unknown exception",
- raw_details: "",
- }]);
- });
-
- it('parse mocha test case', async () => {
- const { totalCount, skipped, annotations } = await parseFile('test_results/mocha/mocha.xml', '*', true);
-
- expect(totalCount).toBe(1);
- expect(skipped).toBe(0);
- expect(annotations).toStrictEqual([{
- "path": "/path/test/config.js",
- "start_line": 1,
- "end_line": 1,
- "start_column": 0,
- "end_column": 0,
- "annotation_level": "notice",
- "title": "/path/test/config.js.default config/Config files default config projectUTCOffset should be a callable with current UTC offset",
- "message": "Config files default config projectUTCOffset should be a callable with current UTC offset",
- "raw_details": ""
- }]);
- });
-
- it('parse mocha test case, custom title template', async () => {
- const { totalCount, skipped, annotations } = await parseFile('test_results/mocha/mocha.xml', '*', true, false, ['/build/', '/__pycache__/'], '{{TEST_NAME}}');
-
- expect(totalCount).toBe(1);
- expect(skipped).toBe(0);
- expect(annotations).toStrictEqual([{
- "path": "/path/test/config.js",
- "start_line": 1,
- "end_line": 1,
- "start_column": 0,
- "end_column": 0,
- "annotation_level": "notice",
- "title": "Config files default config projectUTCOffset should be a callable with current UTC offset",
- "message": "Config files default config projectUTCOffset should be a callable with current UTC offset",
- "raw_details": ""
- }]);
- });
-
- it('parse mocha test case, test files prefix', async () => {
- const { totalCount, skipped, annotations } = await parseFile('test_results/mocha/mocha.xml', '*', true, false, ['/build/', '/__pycache__/'], '{{TEST_NAME}}', 'subproject');
-
- expect(totalCount).toBe(1);
- expect(skipped).toBe(0);
- expect(annotations).toStrictEqual([{
- "path": "subproject/path/test/config.js",
- "start_line": 1,
- "end_line": 1,
- "start_column": 0,
- "end_column": 0,
- "annotation_level": "notice",
- "title": "Config files default config projectUTCOffset should be a callable with current UTC offset",
- "message": "Config files default config projectUTCOffset should be a callable with current UTC offset",
- "raw_details": ""
- }]);
- });
-
- it('should parse xunit results', async () => {
- const { totalCount, skipped, annotations } = await parseFile('test_results/xunit/report.xml');
- const filtered = annotations.filter(annotation => annotation.annotation_level !== 'notice')
-
- expect(totalCount).toBe(4);
- expect(skipped).toBe(0);
- expect(filtered).toStrictEqual([
- {
- path: "main.c",
- start_line: 38,
- end_line: 38,
- start_column: 0,
- end_column: 0,
- annotation_level: "failure",
- title: "main.c.test_my_sum_fail",
- message: "Expected 2 Was 0",
- raw_details: "",
- }
- ]);
- });
-
- it('should parse xunit results with file and line on failure', async () => {
- const { totalCount, skipped, annotations } = await parseFile('test_results/xunit/report_fl_on_f.xml');
- const filtered = annotations.filter(annotation => annotation.annotation_level !== 'notice')
-
- expect(totalCount).toBe(4);
- expect(skipped).toBe(0);
- expect(filtered).toStrictEqual([
- {
- path: "main.c",
- start_line: 38,
- end_line: 38,
- start_column: 0,
- end_column: 0,
- annotation_level: "failure",
- title: "main.c.test_my_sum_fail",
- message: "Expected 2 Was 0",
- raw_details: "",
- }
- ]);
- });
-
- it('should parse junit web test results', async () => {
- const { totalCount, skipped, annotations } = await parseFile('test_results/junit-web-test/expected.xml');
- const filtered = annotations.filter(annotation => annotation.annotation_level !== 'notice')
-
- expect(totalCount).toBe(6);
- expect(skipped).toBe(1);
- expect(filtered).toStrictEqual([
- {
- path: "packages/test-runner-junit-reporter/test/fixtures/multiple/simple-test.js",
- start_line: 15,
- end_line: 15,
- start_column: 0,
- end_column: 0,
- annotation_level: "failure",
- title: "packages/test-runner-junit-reporter/test/fixtures/multiple/simple-test.js.asserts error",
- message: "expected false to be true",
- raw_details: "AssertionError: expected false to be true\n at o. (packages/test-runner-junit-reporter/test/fixtures/multiple/simple-test.js:15:29)",
- }
- ]);
- });
-
- it('should handle retries', async () => {
- const { totalCount, skipped, annotations } = await parseFile('test_results/junit-web-test/expectedRetries.xml', '', false, true, ['/build/', '/__pycache__/']);
- const filtered = annotations.filter(annotation => annotation.annotation_level !== 'notice')
-
- expect(totalCount).toBe(7);
- expect(skipped).toBe(1);
- expect(filtered).toStrictEqual([
- {
- path: "packages/test-runner-junit-reporter/test/fixtures/multiple/simple-test.js",
- start_line: 15,
- end_line: 15,
- start_column: 0,
- end_column: 0,
- annotation_level: "failure",
- title: "packages/test-runner-junit-reporter/test/fixtures/multiple/simple-test.js.asserts error",
- message: "expected false to be true",
- raw_details: "AssertionError: expected false to be true\n at o. (packages/test-runner-junit-reporter/test/fixtures/multiple/simple-test.js:15:29)",
- }
- ]);
- });
-
- it('there should be two errors if retries are not handled', async () => {
- const { totalCount, skipped, annotations } = await parseFile('test_results/junit-web-test/expectedRetries.xml', '', false);
- const filtered = annotations.filter(annotation => annotation.annotation_level !== 'notice')
-
- expect(totalCount).toBe(8);
- expect(skipped).toBe(1);
- expect(filtered).toStrictEqual([
- {
- path: "packages/test-runner-junit-reporter/test/fixtures/multiple/simple-test.js",
- start_line: 15,
- end_line: 15,
- start_column: 0,
- end_column: 0,
- annotation_level: "failure",
- title: "packages/test-runner-junit-reporter/test/fixtures/multiple/simple-test.js.asserts error",
- message: "expected false to be true",
- raw_details: "AssertionError: expected false to be true\n at o. (packages/test-runner-junit-reporter/test/fixtures/multiple/simple-test.js:15:29)",
- },
- {
- annotation_level: "failure",
- end_column: 0,
- end_line: 15,
- message: "this is flaky, so is retried",
- path: "packages/test-runner-junit-reporter/test/fixtures/multiple/simple-test.js",
- raw_details: "AssertionError: expected false to be true\n at o. (packages/test-runner-junit-reporter/test/fixtures/multiple/simple-test.js:15:29)",
- start_column: 0,
- start_line: 15,
- title: "packages/test-runner-junit-reporter/test/fixtures/multiple/simple-test.js.retried flaky test",
- }
- ]);
- });
-
- it('should parse and transform perl results', async () => {
-
- const transformer: Transformer[] = [
- {
- searchValue: "\\.",
- replaceValue: "/",
- },
- {
- searchValue: "(.+?)_t",
- replaceValue: "$1\.t",
- }
- ]
- const { totalCount, skipped, annotations } = await parseFile('test_results/perl/result.xml', '', true, undefined, undefined, undefined, undefined, transformer);
-
- expect(totalCount).toBe(1);
- expect(skipped).toBe(0);
- expect(annotations).toStrictEqual([
- {
- path: "FileName.t",
- start_line: 1,
- end_line: 1,
- start_column: 0,
- end_column: 0,
- annotation_level: "notice",
- title: "FileName_t.L123: ...",
- message: "L123: ...",
- raw_details: "",
- },
- ]);
- });
-
- it('should parse and transform container-structure results (with no testsuite attributes)', async () => {
-
- const { totalCount, skipped, annotations } = await parseFile('test_results/container-structure/test.xml', '', true, undefined, undefined, undefined, undefined, undefined);
-
- expect(totalCount).toBe(3);
- expect(skipped).toBe(0);
- expect(annotations).toStrictEqual([
- {
- path: "Command Test: apt-get upgrade",
- start_line: 1,
- end_line: 1,
- start_column: 0,
- end_column: 0,
- annotation_level: "notice",
- title: "Command Test: apt-get upgrade",
- message: "Command Test: apt-get upgrade",
- raw_details: "",
- },
- {
- path: "File Existence Test: /home/app/app",
- start_line: 1,
- end_line: 1,
- start_column: 0,
- end_column: 0,
- annotation_level: "notice",
- title: "File Existence Test: /home/app/app",
- message: "File Existence Test: /home/app/app",
- raw_details: "",
- },
- {
- path: "Metadata Test",
- start_line: 1,
- end_line: 1,
- start_column: 0,
- end_column: 0,
- annotation_level: "notice",
- title: "Metadata Test",
- message: "Metadata Test",
- raw_details: "",
- },
- ]);
- });
-
- it('should parse catch2 results with file and line on failure', async () => {
- const { totalCount, skipped, annotations } = await parseFile('test_results/catch2/report.xml');
- const filtered = annotations.filter(annotation => annotation.annotation_level !== 'notice')
-
- expect(totalCount).toBe(1);
- expect(skipped).toBe(0);
- expect(filtered).toStrictEqual([
- {
- path: "test/unit/detail/utility/is_constant_evaluated.cpp",
- start_line: 19,
- end_line: 19,
- start_column: 0,
- end_column: 0,
- annotation_level: "failure",
- title: "test/unit/detail/utility/is_constant_evaluated.cpp.is constant evaluated",
- message: "REQUIRE(v == 1) expands to 0 == 10",
- raw_details: "FAILED:\n REQUIRE( v == 1 )\nwith expansion:\n 0 == 1\n0\nat /__w/futures/futures/test/unit/detail/utility/is_constant_evaluated.cpp:19",
- }
- ]);
- });
-});
+ )
+ expect(fileName).toBe('EmailAddressTest++')
+ expect(line).toBe(39)
+ })
+
+ it('should parse correctly nested test suites', async () => {
+ const {totalCount, skipped, annotations} = await parseFile('test_results/nested/junit.xml', 'Test*')
+ const filtered = annotations.filter(annotation => annotation.annotation_level !== 'notice')
+
+ expect(totalCount).toBe(5)
+ expect(skipped).toBe(0)
+ expect(filtered).toStrictEqual([
+ {
+ path: 'A',
+ start_line: 1,
+ end_line: 1,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'failure',
+ status: 'failure',
+ title: 'All tests/tests/packet/TestA/A',
+ message: 'failure',
+ raw_details: ''
+ },
+ {
+ path: 'B',
+ start_line: 1,
+ end_line: 1,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'failure',
+ status: 'failure',
+ title: 'All tests/tests/packet/TestB/B',
+ message: 'failure',
+ raw_details: ''
+ },
+ {
+ path: 'A',
+ start_line: 1,
+ end_line: 1,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'failure',
+ status: 'failure',
+ title: 'All tests/tests/packet/A',
+ message: 'failure',
+ raw_details: ''
+ }
+ ])
+ })
+
+ it('should parse disabled tests', async () => {
+ const {totalCount, skipped, annotations} = await parseFile('test_results/issues/testDisabled.xml', '*')
+ const filtered = annotations.filter(annotation => annotation.annotation_level !== 'notice')
+ const notice = annotations.filter(annotation => annotation.annotation_level === 'notice')
+
+ expect(totalCount).toBe(22)
+ expect(skipped).toBe(10)
+ expect(filtered).toStrictEqual([
+ {
+ path: 'factorial_of_value_from_fixture',
+ start_line: 1,
+ end_line: 1,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'failure',
+ status: 'failure',
+ title: 'factorial/factorial_of_value_from_fixture',
+ message: 'tests/failed/main.cpp:58: error: check_eq(3628800, 3628801)',
+ raw_details: ''
+ },
+ {
+ path: 'factorial_of_value_from_fixture[0]',
+ start_line: 1,
+ end_line: 1,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'failure',
+ status: 'failure',
+ title: 'factorial/factorial_of_value_from_fixture[0]',
+ message: 'tests/failed/main.cpp:97: error: condition was false',
+ raw_details: ''
+ },
+ {
+ path: 'positive_arguments_must_produce_expected_result',
+ start_line: 1,
+ end_line: 1,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'failure',
+ status: 'failure',
+ title: 'factorial/positive_arguments_must_produce_expected_result',
+ message: 'uncaught std::exception: thrown by test',
+ raw_details: ''
+ },
+ {
+ path: 'positive_arguments_must_produce_expected_result[2]',
+ start_line: 1,
+ end_line: 1,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'failure',
+ status: 'failure',
+ title: 'factorial/positive_arguments_must_produce_expected_result[2]',
+ message: 'tests/failed/main.cpp:73: error: condition was false',
+ raw_details: ''
+ },
+ {
+ path: 'test_which_fails_check_eq_with_custom_message',
+ start_line: 1,
+ end_line: 1,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'failure',
+ status: 'failure',
+ title: 'factorial/test_which_fails_check_eq_with_custom_message',
+ message: 'tests/failed/main.cpp:49: error: check_eq(6, 7): hello world!',
+ raw_details: ''
+ },
+ {
+ path: 'test_which_throws_unknown_exception',
+ start_line: 1,
+ end_line: 1,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'failure',
+ status: 'failure',
+ title: 'factorial/test_which_throws_unknown_exception',
+ message: 'uncaught unknown exception',
+ raw_details: ''
+ }
+ ])
+
+ expect(notice).toStrictEqual([
+ {
+ path: 'disabled_fixture_test',
+ start_line: 1,
+ end_line: 1,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'notice',
+ status: 'skipped',
+ title: 'factorial/disabled_fixture_test',
+ message: 'disabled_fixture_test',
+ raw_details: ''
+ },
+ {
+ path: 'disabled_param_fixture_test[0]',
+ start_line: 1,
+ end_line: 1,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'notice',
+ status: 'skipped',
+ title: 'factorial/disabled_param_fixture_test[0]',
+ message: 'disabled_param_fixture_test[0]',
+ raw_details: ''
+ },
+ {
+ path: 'disabled_param_fixture_test[1]',
+ start_line: 1,
+ end_line: 1,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'notice',
+ status: 'skipped',
+ title: 'factorial/disabled_param_fixture_test[1]',
+ message: 'disabled_param_fixture_test[1]',
+ raw_details: ''
+ },
+ {
+ path: 'disabled_param_fixture_test[2]',
+ start_line: 1,
+ end_line: 1,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'notice',
+ status: 'skipped',
+ title: 'factorial/disabled_param_fixture_test[2]',
+ message: 'disabled_param_fixture_test[2]',
+ raw_details: ''
+ },
+ {
+ path: 'disabled_param_fixture_test[3]',
+ start_line: 1,
+ end_line: 1,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'notice',
+ status: 'skipped',
+ title: 'factorial/disabled_param_fixture_test[3]',
+ message: 'disabled_param_fixture_test[3]',
+ raw_details: ''
+ },
+ {
+ path: 'disabled_param_test[0]',
+ start_line: 1,
+ end_line: 1,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'notice',
+ status: 'skipped',
+ title: 'factorial/disabled_param_test[0]',
+ message: 'disabled_param_test[0]',
+ raw_details: ''
+ },
+ {
+ path: 'disabled_param_test[1]',
+ start_line: 1,
+ end_line: 1,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'notice',
+ status: 'skipped',
+ title: 'factorial/disabled_param_test[1]',
+ message: 'disabled_param_test[1]',
+ raw_details: ''
+ },
+ {
+ path: 'disabled_param_test[2]',
+ start_line: 1,
+ end_line: 1,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'notice',
+ status: 'skipped',
+ title: 'factorial/disabled_param_test[2]',
+ message: 'disabled_param_test[2]',
+ raw_details: ''
+ },
+ {
+ path: 'disabled_param_test[3]',
+ start_line: 1,
+ end_line: 1,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'notice',
+ status: 'skipped',
+ title: 'factorial/disabled_param_test[3]',
+ message: 'disabled_param_test[3]',
+ raw_details: ''
+ },
+ {
+ path: 'disabled_test',
+ start_line: 1,
+ end_line: 1,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'notice',
+ status: 'skipped',
+ title: 'factorial/disabled_test',
+ message: 'disabled_test',
+ raw_details: ''
+ },
+ {
+ path: 'factorial_of_value_from_fixture[1]',
+ start_line: 1,
+ end_line: 1,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'notice',
+ status: 'success',
+ title: 'factorial/factorial_of_value_from_fixture[1]',
+ message: 'factorial_of_value_from_fixture[1]',
+ raw_details: ''
+ },
+ {
+ path: 'factorial_of_value_from_fixture[2]',
+ start_line: 1,
+ end_line: 1,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'notice',
+ status: 'success',
+ title: 'factorial/factorial_of_value_from_fixture[2]',
+ message: 'factorial_of_value_from_fixture[2]',
+ raw_details: ''
+ },
+ {
+ path: 'factorial_of_value_from_fixture[3]',
+ start_line: 1,
+ end_line: 1,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'notice',
+ status: 'success',
+ title: 'factorial/factorial_of_value_from_fixture[3]',
+ message: 'factorial_of_value_from_fixture[3]',
+ raw_details: ''
+ },
+ {
+ path: 'positive_arguments_must_produce_expected_result[0]',
+ start_line: 1,
+ end_line: 1,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'notice',
+ status: 'success',
+ title: 'factorial/positive_arguments_must_produce_expected_result[0]',
+ message: 'positive_arguments_must_produce_expected_result[0]',
+ raw_details: ''
+ },
+ {
+ path: 'positive_arguments_must_produce_expected_result[1]',
+ start_line: 1,
+ end_line: 1,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'notice',
+ status: 'success',
+ title: 'factorial/positive_arguments_must_produce_expected_result[1]',
+ message: 'positive_arguments_must_produce_expected_result[1]',
+ raw_details: ''
+ },
+ {
+ path: 'positive_arguments_must_produce_expected_result[3]',
+ start_line: 1,
+ end_line: 1,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'notice',
+ status: 'success',
+ title: 'factorial/positive_arguments_must_produce_expected_result[3]',
+ message: 'positive_arguments_must_produce_expected_result[3]',
+ raw_details: ''
+ }
+ ])
+ })
+
+ it('should parse disabled tests', async () => {
+ const {checkName, summary, totalCount, skipped, failed, passed, annotations} = await parseTestReports(
+ 'checkName',
+ 'summary',
+ 'test_results/issues/testFailedDisabled.xml',
+ '*',
+ true,
+ true,
+ []
+ )
+
+ expect(checkName).toBe('checkName')
+ expect(summary).toBe('summary')
+ expect(totalCount).toBe(1)
+ expect(skipped).toBe(1)
+ expect(failed).toBe(0)
+ expect(passed).toBe(0)
+ expect(annotations).toStrictEqual([
+ {
+ path: 'MiscTests - OS X',
+ start_line: 1,
+ end_line: 1,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'notice',
+ status: 'skipped',
+ title: 'MiscTests - OS X.random suite/testSmemArithmetic',
+ message: 'Assert: Boolean true check failed.',
+ raw_details: 'Assert: Boolean true check failed.'
+ }
+ ])
+ })
+
+ it('parse mocha test case', async () => {
+ const {totalCount, skipped, annotations} = await parseFile('test_results/mocha/mocha.xml', '*', true)
+
+ expect(totalCount).toBe(1)
+ expect(skipped).toBe(0)
+ expect(annotations).toStrictEqual([
+ {
+ path: '/path/test/config.js',
+ start_line: 1,
+ end_line: 1,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'notice',
+ status: 'success',
+ title:
+ '/path/test/config.js.default config/Config files default config projectUTCOffset should be a callable with current UTC offset',
+ message: 'Config files default config projectUTCOffset should be a callable with current UTC offset',
+ raw_details: ''
+ }
+ ])
+ })
+
+ it('parse mocha test case, custom title template', async () => {
+ const {totalCount, skipped, annotations} = await parseFile(
+ 'test_results/mocha/mocha.xml',
+ '*',
+ true,
+ false,
+ ['/build/', '/__pycache__/'],
+ '{{TEST_NAME}}'
+ )
+
+ expect(totalCount).toBe(1)
+ expect(skipped).toBe(0)
+ expect(annotations).toStrictEqual([
+ {
+ path: '/path/test/config.js',
+ start_line: 1,
+ end_line: 1,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'notice',
+ status: 'success',
+ title: 'Config files default config projectUTCOffset should be a callable with current UTC offset',
+ message: 'Config files default config projectUTCOffset should be a callable with current UTC offset',
+ raw_details: ''
+ }
+ ])
+ })
+
+ it('parse mocha test case, test files prefix', async () => {
+ const {totalCount, skipped, annotations} = await parseFile(
+ 'test_results/mocha/mocha.xml',
+ '*',
+ true,
+ false,
+ ['/build/', '/__pycache__/'],
+ '{{TEST_NAME}}',
+ 'subproject'
+ )
+
+ expect(totalCount).toBe(1)
+ expect(skipped).toBe(0)
+ expect(annotations).toStrictEqual([
+ {
+ path: 'subproject/path/test/config.js',
+ start_line: 1,
+ end_line: 1,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'notice',
+ status: 'success',
+ title: 'Config files default config projectUTCOffset should be a callable with current UTC offset',
+ message: 'Config files default config projectUTCOffset should be a callable with current UTC offset',
+ raw_details: ''
+ }
+ ])
+ })
+
+ it('should parse xunit results', async () => {
+ const {totalCount, skipped, annotations} = await parseFile('test_results/xunit/report.xml')
+ const filtered = annotations.filter(annotation => annotation.annotation_level !== 'notice')
+
+ expect(totalCount).toBe(4)
+ expect(skipped).toBe(0)
+ expect(filtered).toStrictEqual([
+ {
+ path: 'main.c',
+ start_line: 38,
+ end_line: 38,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'failure',
+ status: 'failure',
+ title: 'main.c.test_my_sum_fail',
+ message: 'Expected 2 Was 0',
+ raw_details: ''
+ }
+ ])
+ })
+
+ it('should parse xunit results with file and line on failure', async () => {
+ const {totalCount, skipped, annotations} = await parseFile('test_results/xunit/report_fl_on_f.xml')
+ const filtered = annotations.filter(annotation => annotation.annotation_level !== 'notice')
+
+ expect(totalCount).toBe(4)
+ expect(skipped).toBe(0)
+ expect(filtered).toStrictEqual([
+ {
+ path: 'main.c',
+ start_line: 38,
+ end_line: 38,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'failure',
+ status: 'failure',
+ title: 'main.c.test_my_sum_fail',
+ message: 'Expected 2 Was 0',
+ raw_details: ''
+ }
+ ])
+ })
+
+ it('should parse junit web test results', async () => {
+ const {totalCount, skipped, annotations} = await parseFile('test_results/junit-web-test/expected.xml')
+ const filtered = annotations.filter(annotation => annotation.annotation_level !== 'notice')
+
+ expect(totalCount).toBe(6)
+ expect(skipped).toBe(1)
+ expect(filtered).toStrictEqual([
+ {
+ path: 'packages/test-runner-junit-reporter/test/fixtures/multiple/simple-test.js',
+ start_line: 15,
+ end_line: 15,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'failure',
+ status: 'failure',
+ title: 'packages/test-runner-junit-reporter/test/fixtures/multiple/simple-test.js.asserts error',
+ message: 'expected false to be true',
+ raw_details:
+ 'AssertionError: expected false to be true\n at o. (packages/test-runner-junit-reporter/test/fixtures/multiple/simple-test.js:15:29)'
+ }
+ ])
+ })
+
+ it('should handle retries', async () => {
+ const {totalCount, skipped, annotations} = await parseFile(
+ 'test_results/junit-web-test/expectedRetries.xml',
+ '',
+ false,
+ true,
+ ['/build/', '/__pycache__/']
+ )
+ const filtered = annotations.filter(annotation => annotation.annotation_level !== 'notice')
+
+ expect(totalCount).toBe(7)
+ expect(skipped).toBe(1)
+ expect(filtered).toStrictEqual([
+ {
+ path: 'packages/test-runner-junit-reporter/test/fixtures/multiple/simple-test.js',
+ start_line: 15,
+ end_line: 15,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'failure',
+ status: 'failure',
+ title: 'packages/test-runner-junit-reporter/test/fixtures/multiple/simple-test.js.asserts error',
+ message: 'expected false to be true',
+ raw_details:
+ 'AssertionError: expected false to be true\n at o. (packages/test-runner-junit-reporter/test/fixtures/multiple/simple-test.js:15:29)'
+ }
+ ])
+ })
+
+ it('there should be two errors if retries are not handled', async () => {
+ const {totalCount, skipped, annotations} = await parseFile(
+ 'test_results/junit-web-test/expectedRetries.xml',
+ '',
+ false
+ )
+ const filtered = annotations.filter(annotation => annotation.annotation_level !== 'notice')
+
+ expect(totalCount).toBe(8)
+ expect(skipped).toBe(1)
+ expect(filtered).toStrictEqual([
+ {
+ path: 'packages/test-runner-junit-reporter/test/fixtures/multiple/simple-test.js',
+ start_line: 15,
+ end_line: 15,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'failure',
+ status: 'failure',
+ title: 'packages/test-runner-junit-reporter/test/fixtures/multiple/simple-test.js.asserts error',
+ message: 'expected false to be true',
+ raw_details:
+ 'AssertionError: expected false to be true\n at o. (packages/test-runner-junit-reporter/test/fixtures/multiple/simple-test.js:15:29)'
+ },
+ {
+ annotation_level: 'failure',
+ end_column: 0,
+ end_line: 15,
+ message: 'this is flaky, so is retried',
+ path: 'packages/test-runner-junit-reporter/test/fixtures/multiple/simple-test.js',
+ raw_details:
+ 'AssertionError: expected false to be true\n at o. (packages/test-runner-junit-reporter/test/fixtures/multiple/simple-test.js:15:29)',
+ start_column: 0,
+ start_line: 15,
+ status: 'failure',
+ title: 'packages/test-runner-junit-reporter/test/fixtures/multiple/simple-test.js.retried flaky test'
+ }
+ ])
+ })
+
+ it('should parse and transform perl results', async () => {
+ const transformer: Transformer[] = [
+ {
+ searchValue: '\\.',
+ replaceValue: '/'
+ },
+ {
+ searchValue: '(.+?)_t',
+ replaceValue: '$1.t'
+ }
+ ]
+ const {totalCount, skipped, annotations} = await parseFile(
+ 'test_results/perl/result.xml',
+ '',
+ true,
+ undefined,
+ undefined,
+ undefined,
+ undefined,
+ transformer
+ )
+
+ expect(totalCount).toBe(1)
+ expect(skipped).toBe(0)
+ expect(annotations).toStrictEqual([
+ {
+ path: 'FileName.t',
+ start_line: 1,
+ end_line: 1,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'notice',
+ status: 'success',
+ title: 'FileName_t.L123: ...',
+ message: 'L123: ...',
+ raw_details: ''
+ }
+ ])
+ })
+
+ it('should parse and transform container-structure results (with no testsuite attributes)', async () => {
+ const {totalCount, skipped, annotations} = await parseFile(
+ 'test_results/container-structure/test.xml',
+ '',
+ true,
+ undefined,
+ undefined,
+ undefined,
+ undefined,
+ undefined
+ )
+
+ expect(totalCount).toBe(3)
+ expect(skipped).toBe(0)
+ expect(annotations).toStrictEqual([
+ {
+ path: 'Command Test: apt-get upgrade',
+ start_line: 1,
+ end_line: 1,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'notice',
+ status: 'success',
+ title: 'Command Test: apt-get upgrade',
+ message: 'Command Test: apt-get upgrade',
+ raw_details: ''
+ },
+ {
+ path: 'File Existence Test: /home/app/app',
+ start_line: 1,
+ end_line: 1,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'notice',
+ status: 'success',
+ title: 'File Existence Test: /home/app/app',
+ message: 'File Existence Test: /home/app/app',
+ raw_details: ''
+ },
+ {
+ path: 'Metadata Test',
+ start_line: 1,
+ end_line: 1,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'notice',
+ status: 'success',
+ title: 'Metadata Test',
+ message: 'Metadata Test',
+ raw_details: ''
+ }
+ ])
+ })
+
+ it('should parse catch2 results with file and line on failure', async () => {
+ const {totalCount, skipped, annotations} = await parseFile('test_results/catch2/report.xml')
+ const filtered = annotations.filter(annotation => annotation.annotation_level !== 'notice')
+
+ expect(totalCount).toBe(1)
+ expect(skipped).toBe(0)
+ expect(filtered).toStrictEqual([
+ {
+ path: 'test/unit/detail/utility/is_constant_evaluated.cpp',
+ start_line: 19,
+ end_line: 19,
+ start_column: 0,
+ end_column: 0,
+ annotation_level: 'failure',
+ status: 'failure',
+ title: 'test/unit/detail/utility/is_constant_evaluated.cpp.is constant evaluated',
+ message: 'REQUIRE(v == 1) expands to 0 == 10',
+ raw_details:
+ 'FAILED:\n REQUIRE( v == 1 )\nwith expansion:\n 0 == 1\n0\nat /__w/futures/futures/test/unit/detail/utility/is_constant_evaluated.cpp:19'
+ }
+ ])
+ })
+
+ it('parse corrupt test output', async () => {
+ const result = await parseTestReports(
+ '',
+ '',
+ 'test_results/corrupt-junit/**/target/sf-reports/TEST-*.xml',
+ '',
+ false,
+ false,
+ [],
+ '',
+ '',
+ undefined,
+ false,
+ undefined
+ )
+
+ expect(result).toStrictEqual({
+ checkName: "",
+ summary: "",
+ totalCount: 0,
+ skipped: 0,
+ failed: 0,
+ passed: 0,
+ annotations: [
+ ],
+ })
+ })
+})
diff --git a/__tests__/utils.test.ts b/__tests__/utils.test.ts
index 9cc8ad7d..0def4e43 100644
--- a/__tests__/utils.test.ts
+++ b/__tests__/utils.test.ts
@@ -1,5 +1,5 @@
-import { parseFile, Transformer } from '../src/testParser';
-import { readTransformers } from '../src/utils';
+import {parseFile, Transformer} from '../src/testParser'
+import {readTransformers} from '../src/utils'
/**
* Copyright 2022 Mike Penz
@@ -7,27 +7,29 @@ import { readTransformers } from '../src/utils';
jest.setTimeout(30000)
describe('readTransformers', () => {
- it('should successfully parse default transformer', async () => {
- const transformer = readTransformers('[{"searchValue":"::","replaceValue":"/"}]')
- expect(transformer).toStrictEqual([
- {
- searchValue: "::",
- replaceValue: "/",
- }
- ]);
- })
+ it('should successfully parse default transformer', async () => {
+ const transformer = readTransformers('[{"searchValue":"::","replaceValue":"/"}]')
+ expect(transformer).toStrictEqual([
+ {
+ searchValue: '::',
+ replaceValue: '/'
+ }
+ ])
+ })
- it('should successfully parse custom transformer', async () => {
- const transformer = readTransformers('[{"searchValue":"\\\\.","replaceValue":"/"},{"searchValue":"_t\\\\z","replaceValue":".t"}]')
- expect(transformer).toStrictEqual([
- {
- searchValue: "\\.",
- replaceValue: "/",
- },
- {
- searchValue: "_t\\z",
- replaceValue: ".t",
- },
- ]);
- })
-})
\ No newline at end of file
+ it('should successfully parse custom transformer', async () => {
+ const transformer = readTransformers(
+ '[{"searchValue":"\\\\.","replaceValue":"/"},{"searchValue":"_t\\\\z","replaceValue":".t"}]'
+ )
+ expect(transformer).toStrictEqual([
+ {
+ searchValue: '\\.',
+ replaceValue: '/'
+ },
+ {
+ searchValue: '_t\\z',
+ replaceValue: '.t'
+ }
+ ])
+ })
+})
diff --git a/action.yml b/action.yml
index ad72568c..9eb3739a 100644
--- a/action.yml
+++ b/action.yml
@@ -10,7 +10,7 @@ inputs:
default: ${{ github.token }}
github_token:
description: 'Deprecated syntax to specify github token.'
- required: true
+ required: false
report_paths:
description: 'Xml report paths in glob format'
required: false
@@ -22,7 +22,6 @@ inputs:
exclude_sources:
description: 'Comma seperated list of source folders to ignore for lookup'
required: false
- default: '/build/,/__pycache__/'
suite_regex:
description: 'Regular expression for the named test suites'
required: false
@@ -50,6 +49,10 @@ inputs:
description: 'Fail if no test are found.'
required: false
default: 'false'
+ require_passed_tests:
+ description: 'Fail if no passed tests are found.'
+ required: false
+ default: 'false'
include_passed:
description: 'Include passed tests in the report'
required: false
@@ -94,5 +97,5 @@ inputs:
description: 'Specify the limit for annotations. This will also interrupt parsing all test-suites if the limit is reached.'
required: false
runs:
- using: 'node16'
+ using: 'node20'
main: 'dist/index.js'
diff --git a/dist/index.js b/dist/index.js
index 3d14c295..f586e717 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -51,7 +51,7 @@ function annotateTestResult(testResult, token, headSha, annotateOnly, updateChec
title = `${testResult.totalCount} tests run, ${testResult.passed} passed, ${testResult.skipped} skipped, ${testResult.failed} failed.`;
}
core.info(`ℹ️ - ${testResult.checkName} - ${title}`);
- const conclusion = foundResults && testResult.failed <= 0 ? 'success' : 'failure';
+ const conclusion = testResult.failed <= 0 ? 'success' : 'failure';
for (const annotation of annotations) {
core.info(` 🧪 - ${annotation.path} | ${annotation.message.split('\n', 1)[0]}`);
}
@@ -95,7 +95,9 @@ function annotateTestResult(testResult, token, headSha, annotateOnly, updateChec
}
}
else {
- const createCheckRequest = Object.assign(Object.assign({}, github.context.repo), { name: testResult.checkName, head_sha: headSha, status: 'completed', conclusion, output: {
+ const status = 'completed';
+ const createCheckRequest = Object.assign(Object.assign({}, github.context.repo), { name: testResult.checkName, head_sha: headSha, status,
+ conclusion, output: {
title,
summary: testResult.summary,
annotations: annotations.slice(0, 50)
@@ -115,7 +117,7 @@ function attachSummary(testResults, detailedSummary, includePassed) {
{ data: '', header: true },
{ data: 'Tests', header: true },
{ data: 'Passed ✅', header: true },
- { data: 'Skipped ↪️', header: true },
+ { data: 'Skipped ⏭️', header: true },
{ data: 'Failed ❌', header: true }
]
];
@@ -129,7 +131,7 @@ function attachSummary(testResults, detailedSummary, includePassed) {
for (const testResult of testResults) {
table.push([
`${testResult.checkName}`,
- `${testResult.totalCount} run`,
+ `${testResult.totalCount} ran`,
`${testResult.passed} passed`,
`${testResult.skipped} skipped`,
`${testResult.failed} failed`
@@ -147,7 +149,11 @@ function attachSummary(testResults, detailedSummary, includePassed) {
detailsTable.push([
`${testResult.checkName}`,
`${annotation.title}`,
- `${annotation.annotation_level === 'notice' ? '✅ pass' : `❌ ${annotation.annotation_level}`}`
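+ // derive the result cell from the new per-test status so skipped tests are no longer reported as passed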
+ `${annotation.status === 'success'
+ ? '✅ pass'
+ : annotation.status === 'skipped'
+ ? `⏭️ skipped`
+ : `❌ ${annotation.annotation_level}`}`
]);
}
}
@@ -273,6 +279,7 @@ function run() {
const commit = core.getInput('commit');
const failOnFailure = core.getInput('fail_on_failure') === 'true';
const requireTests = core.getInput('require_tests') === 'true';
+ const requirePassedTests = core.getInput('require_passed_tests') === 'true';
const includePassed = core.getInput('include_passed') === 'true';
const checkRetries = core.getInput('check_retries') === 'true';
const annotateNotice = core.getInput('annotate_notice') === 'true';
@@ -284,11 +291,14 @@ function run() {
const checkName = core.getMultilineInput('check_name');
const testFilesPrefix = core.getMultilineInput('test_files_prefix');
const suiteRegex = core.getMultilineInput('suite_regex');
- const excludeSources = core.getMultilineInput('exclude_sources') ? core.getMultilineInput('exclude_sources') : [];
+ let excludeSources = core.getMultilineInput('exclude_sources') ? core.getMultilineInput('exclude_sources') : [];
const checkTitleTemplate = core.getMultilineInput('check_title_template');
const transformers = (0, utils_1.readTransformers)(core.getInput('transformers', { trimWhitespace: true }));
const followSymlink = core.getBooleanInput('follow_symlink');
const annotationsLimit = Number(core.getInput('annotations_limit') || -1);
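+ // default the exclude list when the exclude_sources input is empty (replaces the default removed from action.yml)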
+ if (excludeSources.length === 0) {
+ excludeSources = ['/build/', '/__pycache__/'];
+ }
core.endGroup();
core.startGroup(`📦 Process test results`);
const reportsCount = reportPaths.length;
@@ -310,13 +320,6 @@ function run() {
mergedResult.failed += testResult.failed;
mergedResult.passed += testResult.passed;
mergedResult.annotations.push(...testResult.annotations);
- const foundResults = testResult.totalCount > 0 || testResult.skipped > 0;
- if (!foundResults) {
- if (requireTests) {
- core.setFailed(`❌ No test results found for ${checkName}`);
- }
- return;
- }
testResults.push(testResult);
}
core.setOutput('total', mergedResult.totalCount);
@@ -324,9 +327,17 @@ function run() {
core.setOutput('skipped', mergedResult.skipped);
core.setOutput('failed', mergedResult.failed);
core.setOutput('failedTests', (0, failedTestsReport_1.generateFailedTestsReport)(mergedResult.annotations));
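+ // after merging all reports, enforce require_tests / require_passed_tests on the combined counts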
+ if (!(mergedResult.totalCount > 0 || mergedResult.skipped > 0) && requireTests) {
+ core.setFailed(`❌ No test results found for ${checkName}`);
+ return; // end if we failed due to no tests, but configured to require tests
+ }
+ else if (!(mergedResult.passed > 0) && requirePassedTests) {
+ core.setFailed(`❌ No passed test results found for ${checkName}`);
+ return; // end if we failed due to no passed tests, but configured to require passed tests
+ }
const pullRequest = github.context.payload.pull_request;
const link = (pullRequest && pullRequest.html_url) || github.context.ref;
- const conclusion = mergedResult.totalCount > 0 && mergedResult.failed <= 0 ? 'success' : 'failure';
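+ // the conclusion now depends only on failures; the "no results" case is handled by the require_tests check above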
+ const conclusion = mergedResult.failed <= 0 ? 'success' : 'failure';
const headSha = commit || (pullRequest && pullRequest.head.sha) || github.context.sha;
core.info(`ℹ️ Posting with conclusion '${conclusion}' to ${link} (sha: ${headSha})`);
core.endGroup();
@@ -492,21 +503,16 @@ function resolvePath(fileName, excludeSources, followSymlink = false) {
});
const searchPath = globber.getSearchPaths() ? globber.getSearchPaths()[0] : '';
try {
- for (var _d = true, _e = __asyncValues(globber.globGenerator()), _f; _f = yield _e.next(), _a = _f.done, !_a;) {
+ for (var _d = true, _e = __asyncValues(globber.globGenerator()), _f; _f = yield _e.next(), _a = _f.done, !_a; _d = true) {
_c = _f.value;
_d = false;
- try {
- const result = _c;
- core.debug(`Matched file: ${result}`);
- const found = excludeSources.find(v => result.includes(v));
- if (!found) {
- const path = result.slice(searchPath.length + 1);
- core.debug(`Resolved path: ${path}`);
- return path;
- }
- }
- finally {
- _d = true;
+ const result = _c;
+ core.debug(`Matched file: ${result}`);
+ const found = excludeSources.find(v => result.includes(v));
+ if (!found) {
+ const path = result.slice(searchPath.length + 1);
+ core.debug(`Resolved path: ${path}`);
+ return path;
}
}
}
@@ -532,7 +538,19 @@ function parseFile(file, suiteRegex = '', annotatePassed = false, checkRetries =
return __awaiter(this, void 0, void 0, function* () {
core.debug(`Parsing file ${file}`);
const data = fs.readFileSync(file, 'utf8');
- const report = JSON.parse(parser.xml2json(data, { compact: true }));
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ let report;
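+ // parse the XML report; if parsing fails (e.g. a corrupt file), log the error and return an empty result instead of throwing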
+ try {
+ report = JSON.parse(parser.xml2json(data, { compact: true }));
+ }
+ catch (error) {
+ core.error(`⚠️ Failed to parse file (${file}) with error ${error}`);
+ return {
+ totalCount: 0,
+ skipped: 0,
+ annotations: []
+ };
+ }
return parseSuite(report, '', suiteRegex, annotatePassed, checkRetries, excludeSources, checkTitleTemplate, testFilesPrefix, transformer, followSymlink, annotationsLimit);
});
}
@@ -619,8 +637,10 @@ suite, parentName, suiteRegex, annotatePassed = false, checkRetries = false, exc
}
for (const testcase of testcases) {
totalCount++;
- const failed = testcase.failure || testcase.error;
- const success = !failed;
+ const testFailure = testcase.failure || testcase.error; // test failed
+ const skip = testcase.skipped || testcase._attributes.status === 'disabled' || testcase._attributes.status === 'ignored';
+ const failed = testFailure && !skip; // test failure, but was skipped -> don't fail if an ignored test failed
+ const success = !testFailure; // not a failure -> thus a success
// in some definitions `failure` may be an array
const failures = testcase.failure
? Array.isArray(testcase.failure)
@@ -629,7 +649,7 @@ suite, parentName, suiteRegex, annotatePassed = false, checkRetries = false, exc
: undefined;
// the action only supports 1 failure per testcase
const failure = failures ? failures[0] : undefined;
- if (testcase.skipped || testcase._attributes.status === 'disabled') {
+ if (skip) {
skipped++;
}
const stackTrace = ((failure && failure._cdata) ||
@@ -679,14 +699,17 @@ suite, parentName, suiteRegex, annotatePassed = false, checkRetries = false, exc
}
// optionally attach the prefix to the path
resolvedPath = testFilesPrefix ? pathHelper.join(testFilesPrefix, resolvedPath) : resolvedPath;
- core.info(`${resolvedPath}:${pos.line} | ${message.replace(/\n/g, ' ')}`);
+ // fish the time-taken out of the test case attributes, if present
+ const testTime = testcase._attributes.time === undefined ? '' : ` (${testcase._attributes.time}s)`;
+ core.info(`${resolvedPath}:${pos.line} | ${message.replace(/\n/g, ' ')}${testTime}`);
annotations.push({
path: resolvedPath,
start_line: pos.line,
end_line: pos.line,
start_column: 0,
end_column: 0,
- annotation_level: success ? 'notice' : 'failure',
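+ // skipped tests stay at the 'notice' annotation level; the new status field distinguishes skipped from passed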
+ annotation_level: success || skip ? 'notice' : 'failure',
+ status: skip ? 'skipped' : success ? 'success' : 'failure',
title: escapeEmoji(title),
message: escapeEmoji(message),
raw_details: escapeEmoji(stackTrace)
@@ -709,7 +732,7 @@ suite, parentName, suiteRegex, annotatePassed = false, checkRetries = false, exc
* Modification Copyright 2022 Mike Penz
* https://github.com/mikepenz/action-junit-report/
*/
-function parseTestReports(checkName, summary, reportPaths, suiteRegex, annotatePassed = false, checkRetries = false, excludeSources, checkTitleTemplate = undefined, testFilesPrefix = '', transformer, followSymlink = false, annotationsLimit) {
+function parseTestReports(checkName, summary, reportPaths, suiteRegex, annotatePassed = false, checkRetries = false, excludeSources, checkTitleTemplate = undefined, testFilesPrefix = '', transformer = [], followSymlink = false, annotationsLimit = -1) {
var _a, e_2, _b, _c;
return __awaiter(this, void 0, void 0, function* () {
core.debug(`Process test report for: ${reportPaths} (${checkName})`);
@@ -718,28 +741,23 @@ function parseTestReports(checkName, summary, reportPaths, suiteRegex, annotateP
let totalCount = 0;
let skipped = 0;
try {
- for (var _d = true, _e = __asyncValues(globber.globGenerator()), _f; _f = yield _e.next(), _a = _f.done, !_a;) {
+ for (var _d = true, _e = __asyncValues(globber.globGenerator()), _f; _f = yield _e.next(), _a = _f.done, !_a; _d = true) {
_c = _f.value;
_d = false;
- try {
- const file = _c;
- core.debug(`Parsing report file: ${file}`);
- const { totalCount: c, skipped: s, annotations: a } = yield parseFile(file, suiteRegex, annotatePassed, checkRetries, excludeSources, checkTitleTemplate, testFilesPrefix, transformer, followSymlink, annotationsLimit);
- if (c === 0)
- continue;
- totalCount += c;
- skipped += s;
- annotations = annotations.concat(a);
- if (annotationsLimit > 0) {
- const count = annotations.filter(an => an.annotation_level === 'failure' || annotatePassed).length;
- if (count >= annotationsLimit) {
- break;
- }
+ const file = _c;
+ core.debug(`Parsing report file: ${file}`);
+ const { totalCount: c, skipped: s, annotations: a } = yield parseFile(file, suiteRegex, annotatePassed, checkRetries, excludeSources, checkTitleTemplate, testFilesPrefix, transformer, followSymlink, annotationsLimit);
+ if (c === 0)
+ continue;
+ totalCount += c;
+ skipped += s;
+ annotations = annotations.concat(a);
+ if (annotationsLimit > 0) {
+ const count = annotations.filter(an => an.annotation_level === 'failure' || annotatePassed).length;
+ if (count >= annotationsLimit) {
+ break;
}
}
- finally {
- _d = true;
- }
}
}
catch (e_2_1) { e_2 = { error: e_2_1 }; }
@@ -1424,7 +1442,7 @@ class OidcClient {
.catch(error => {
throw new Error(`Failed to get ID Token. \n
Error Code : ${error.statusCode}\n
- Error Message: ${error.result.message}`);
+ Error Message: ${error.message}`);
});
const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value;
if (!id_token) {
@@ -1877,8 +1895,8 @@ class Context {
var _a, _b, _c;
this.payload = {};
if (process.env.GITHUB_EVENT_PATH) {
- if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {
- this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));
+ if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) {
+ this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));
}
else {
const path = process.env.GITHUB_EVENT_PATH;
@@ -1896,7 +1914,8 @@ class Context {
this.runId = parseInt(process.env.GITHUB_RUN_ID, 10);
this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`;
this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`;
- this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`;
+ this.graphqlUrl =
+ (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`;
}
get issue() {
const payload = this.payload;
@@ -1928,7 +1947,11 @@ exports.Context = Context;
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
- Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
@@ -1941,7 +1964,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
- if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
@@ -1958,7 +1981,7 @@ exports.context = new Context.Context();
*/
function getOctokit(token, options, ...additionalPlugins) {
const GitHubWithPlugins = utils_1.GitHub.plugin(...additionalPlugins);
- return new GitHubWithPlugins(utils_1.getOctokitOptions(token, options));
+ return new GitHubWithPlugins((0, utils_1.getOctokitOptions)(token, options));
}
exports.getOctokit = getOctokit;
//# sourceMappingURL=github.js.map
@@ -1972,7 +1995,11 @@ exports.getOctokit = getOctokit;
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
- Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
@@ -1985,13 +2012,23 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
- if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0;
+exports.getApiBaseUrl = exports.getProxyFetch = exports.getProxyAgentDispatcher = exports.getProxyAgent = exports.getAuthString = void 0;
const httpClient = __importStar(__nccwpck_require__(6255));
+const undici_1 = __nccwpck_require__(1773);
function getAuthString(token, options) {
if (!token && !options.auth) {
throw new Error('Parameter token or opts.auth is required');
@@ -2007,6 +2044,19 @@ function getProxyAgent(destinationUrl) {
return hc.getAgent(destinationUrl);
}
exports.getProxyAgent = getProxyAgent;
+function getProxyAgentDispatcher(destinationUrl) {
+ const hc = new httpClient.HttpClient();
+ return hc.getAgentDispatcher(destinationUrl);
+}
+exports.getProxyAgentDispatcher = getProxyAgentDispatcher;
+function getProxyFetch(destinationUrl) {
+ const httpDispatcher = getProxyAgentDispatcher(destinationUrl);
+ const proxyFetch = (url, opts) => __awaiter(this, void 0, void 0, function* () {
+ return (0, undici_1.fetch)(url, Object.assign(Object.assign({}, opts), { dispatcher: httpDispatcher }));
+ });
+ return proxyFetch;
+}
+exports.getProxyFetch = getProxyFetch;
function getApiBaseUrl() {
return process.env['GITHUB_API_URL'] || 'https://api.github.com';
}
@@ -2022,7 +2072,11 @@ exports.getApiBaseUrl = getApiBaseUrl;
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
- Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
@@ -2035,7 +2089,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
- if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
@@ -2052,7 +2106,8 @@ const baseUrl = Utils.getApiBaseUrl();
exports.defaults = {
baseUrl,
request: {
- agent: Utils.getProxyAgent(baseUrl)
+ agent: Utils.getProxyAgent(baseUrl),
+ fetch: Utils.getProxyFetch(baseUrl)
}
};
exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(exports.defaults);
@@ -2110,16 +2165,18 @@ exports.create = create;
* Computes the sha256 hash of a glob
*
* @param patterns Patterns separated by newlines
+ * @param currentWorkspace Workspace used when matching files
* @param options Glob options
+ * @param verbose Enables verbose logging
*/
-function hashFiles(patterns, options, verbose = false) {
+function hashFiles(patterns, currentWorkspace = '', options, verbose = false) {
return __awaiter(this, void 0, void 0, function* () {
let followSymbolicLinks = true;
if (options && typeof options.followSymbolicLinks === 'boolean') {
followSymbolicLinks = options.followSymbolicLinks;
}
const globber = yield create(patterns, { followSymbolicLinks });
- return internal_hash_files_1.hashFiles(globber, verbose);
+ return internal_hash_files_1.hashFiles(globber, currentWorkspace, verbose);
});
}
exports.hashFiles = hashFiles;
@@ -2479,13 +2536,15 @@ const fs = __importStar(__nccwpck_require__(7147));
const stream = __importStar(__nccwpck_require__(2781));
const util = __importStar(__nccwpck_require__(3837));
const path = __importStar(__nccwpck_require__(1017));
-function hashFiles(globber, verbose = false) {
+function hashFiles(globber, currentWorkspace, verbose = false) {
var e_1, _a;
var _b;
return __awaiter(this, void 0, void 0, function* () {
const writeDelegate = verbose ? core.info : core.debug;
let hasMatch = false;
- const githubWorkspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd();
+ const githubWorkspace = currentWorkspace
+ ? currentWorkspace
+ : (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd();
const result = crypto.createHash('sha256');
let count = 0;
try {
@@ -3360,7 +3419,11 @@ exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHand
/* eslint-disable @typescript-eslint/no-explicit-any */
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
- Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
@@ -3373,7 +3436,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
- if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
@@ -3392,6 +3455,7 @@ const http = __importStar(__nccwpck_require__(3685));
const https = __importStar(__nccwpck_require__(5687));
const pm = __importStar(__nccwpck_require__(9835));
const tunnel = __importStar(__nccwpck_require__(4294));
+const undici_1 = __nccwpck_require__(1773);
var HttpCodes;
(function (HttpCodes) {
HttpCodes[HttpCodes["OK"] = 200] = "OK";
@@ -3421,16 +3485,16 @@ var HttpCodes;
HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
-})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
+})(HttpCodes || (exports.HttpCodes = HttpCodes = {}));
var Headers;
(function (Headers) {
Headers["Accept"] = "accept";
Headers["ContentType"] = "content-type";
-})(Headers = exports.Headers || (exports.Headers = {}));
+})(Headers || (exports.Headers = Headers = {}));
var MediaTypes;
(function (MediaTypes) {
MediaTypes["ApplicationJson"] = "application/json";
-})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
+})(MediaTypes || (exports.MediaTypes = MediaTypes = {}));
/**
* Returns the proxy URL, depending upon the supplied url and proxy environment variables.
* @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
@@ -3481,6 +3545,19 @@ class HttpClientResponse {
}));
});
}
+ readBodyBuffer() {
+ return __awaiter(this, void 0, void 0, function* () {
+ return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
+ const chunks = [];
+ this.message.on('data', (chunk) => {
+ chunks.push(chunk);
+ });
+ this.message.on('end', () => {
+ resolve(Buffer.concat(chunks));
+ });
+ }));
+ });
+ }
}
exports.HttpClientResponse = HttpClientResponse;
function isHttps(requestUrl) {
@@ -3786,6 +3863,15 @@ class HttpClient {
const parsedUrl = new URL(serverUrl);
return this._getAgent(parsedUrl);
}
+ getAgentDispatcher(serverUrl) {
+ const parsedUrl = new URL(serverUrl);
+ const proxyUrl = pm.getProxyUrl(parsedUrl);
+ const useProxy = proxyUrl && proxyUrl.hostname;
+ if (!useProxy) {
+ return;
+ }
+ return this._getProxyAgentDispatcher(parsedUrl, proxyUrl);
+ }
_prepareRequest(method, requestUrl, headers) {
const info = {};
info.parsedUrl = requestUrl;
@@ -3885,6 +3971,30 @@ class HttpClient {
}
return agent;
}
+ _getProxyAgentDispatcher(parsedUrl, proxyUrl) {
+ let proxyAgent;
+ if (this._keepAlive) {
+ proxyAgent = this._proxyAgentDispatcher;
+ }
+ // if agent is already assigned use that agent.
+ if (proxyAgent) {
+ return proxyAgent;
+ }
+ const usingSsl = parsedUrl.protocol === 'https:';
+ proxyAgent = new undici_1.ProxyAgent(Object.assign({ uri: proxyUrl.href, pipelining: !this._keepAlive ? 0 : 1 }, ((proxyUrl.username || proxyUrl.password) && {
+ token: `${proxyUrl.username}:${proxyUrl.password}`
+ })));
+ this._proxyAgentDispatcher = proxyAgent;
+ if (usingSsl && this._ignoreSslError) {
+ // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
+ // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
+ // we have to cast it to any and change it directly
+ proxyAgent.options = Object.assign(proxyAgent.options.requestTls || {}, {
+ rejectUnauthorized: false
+ });
+ }
+ return proxyAgent;
+ }
_performExponentialBackoff(retryNumber) {
return __awaiter(this, void 0, void 0, function* () {
retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
@@ -3985,7 +4095,13 @@ function getProxyUrl(reqUrl) {
}
})();
if (proxyVar) {
- return new URL(proxyVar);
+ try {
+ return new URL(proxyVar);
+ }
+ catch (_a) {
+ if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
+ return new URL(`http://${proxyVar}`);
+ }
}
else {
return undefined;
@@ -3996,6 +4112,10 @@ function checkBypass(reqUrl) {
if (!reqUrl.hostname) {
return false;
}
+ const reqHost = reqUrl.hostname;
+ if (isLoopbackAddress(reqHost)) {
+ return true;
+ }
const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
if (!noProxy) {
return false;
@@ -4021,2550 +4141,5137 @@ function checkBypass(reqUrl) {
.split(',')
.map(x => x.trim().toUpperCase())
.filter(x => x)) {
- if (upperReqHosts.some(x => x === upperNoProxyItem)) {
+ if (upperNoProxyItem === '*' ||
+ upperReqHosts.some(x => x === upperNoProxyItem ||
+ x.endsWith(`.${upperNoProxyItem}`) ||
+ (upperNoProxyItem.startsWith('.') &&
+ x.endsWith(`${upperNoProxyItem}`)))) {
return true;
}
}
return false;
}
exports.checkBypass = checkBypass;
+function isLoopbackAddress(host) {
+ const hostLower = host.toLowerCase();
+ return (hostLower === 'localhost' ||
+ hostLower.startsWith('127.') ||
+ hostLower.startsWith('[::1]') ||
+ hostLower.startsWith('[0:0:0:0:0:0:0:1]'));
+}
//# sourceMappingURL=proxy.js.map
/***/ }),
-/***/ 334:
-/***/ ((__unused_webpack_module, exports) => {
+/***/ 2856:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
-Object.defineProperty(exports, "__esModule", ({ value: true }));
+const WritableStream = (__nccwpck_require__(4492).Writable)
+const inherits = (__nccwpck_require__(7261).inherits)
-const REGEX_IS_INSTALLATION_LEGACY = /^v1\./;
-const REGEX_IS_INSTALLATION = /^ghs_/;
-const REGEX_IS_USER_TO_SERVER = /^ghu_/;
-async function auth(token) {
- const isApp = token.split(/\./).length === 3;
- const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token);
- const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token);
- const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? "user-to-server" : "oauth";
- return {
- type: "token",
- token: token,
- tokenType
- };
+const StreamSearch = __nccwpck_require__(8534)
+
+const PartStream = __nccwpck_require__(8710)
+const HeaderParser = __nccwpck_require__(333)
+
+const DASH = 45
+const B_ONEDASH = Buffer.from('-')
+const B_CRLF = Buffer.from('\r\n')
+const EMPTY_FN = function () {}
+
+function Dicer (cfg) {
+ if (!(this instanceof Dicer)) { return new Dicer(cfg) }
+ WritableStream.call(this, cfg)
+
+ if (!cfg || (!cfg.headerFirst && typeof cfg.boundary !== 'string')) { throw new TypeError('Boundary required') }
+
+ if (typeof cfg.boundary === 'string') { this.setBoundary(cfg.boundary) } else { this._bparser = undefined }
+
+ this._headerFirst = cfg.headerFirst
+
+ this._dashes = 0
+ this._parts = 0
+ this._finished = false
+ this._realFinish = false
+ this._isPreamble = true
+ this._justMatched = false
+ this._firstWrite = true
+ this._inHeader = true
+ this._part = undefined
+ this._cb = undefined
+ this._ignoreData = false
+ this._partOpts = { highWaterMark: cfg.partHwm }
+ this._pause = false
+
+ const self = this
+ this._hparser = new HeaderParser(cfg)
+ this._hparser.on('header', function (header) {
+ self._inHeader = false
+ self._part.emit('header', header)
+ })
+}
+inherits(Dicer, WritableStream)
+
+Dicer.prototype.emit = function (ev) {
+ if (ev === 'finish' && !this._realFinish) {
+ if (!this._finished) {
+ const self = this
+ process.nextTick(function () {
+ self.emit('error', new Error('Unexpected end of multipart data'))
+ if (self._part && !self._ignoreData) {
+ const type = (self._isPreamble ? 'Preamble' : 'Part')
+ self._part.emit('error', new Error(type + ' terminated early due to unexpected end of multipart data'))
+ self._part.push(null)
+ process.nextTick(function () {
+ self._realFinish = true
+ self.emit('finish')
+ self._realFinish = false
+ })
+ return
+ }
+ self._realFinish = true
+ self.emit('finish')
+ self._realFinish = false
+ })
+ }
+ } else { WritableStream.prototype.emit.apply(this, arguments) }
}
-/**
- * Prefix token for usage in the Authorization header
- *
- * @param token OAuth token or JSON Web Token
- */
-function withAuthorizationPrefix(token) {
- if (token.split(/\./).length === 3) {
- return `bearer ${token}`;
+Dicer.prototype._write = function (data, encoding, cb) {
+ // ignore unexpected data (e.g. extra trailer data after finished)
+ if (!this._hparser && !this._bparser) { return cb() }
+
+ if (this._headerFirst && this._isPreamble) {
+ if (!this._part) {
+ this._part = new PartStream(this._partOpts)
+ if (this._events.preamble) { this.emit('preamble', this._part) } else { this._ignore() }
+ }
+ const r = this._hparser.push(data)
+ if (!this._inHeader && r !== undefined && r < data.length) { data = data.slice(r) } else { return cb() }
}
- return `token ${token}`;
+ // allows for "easier" testing
+ if (this._firstWrite) {
+ this._bparser.push(B_CRLF)
+ this._firstWrite = false
+ }
+
+ this._bparser.push(data)
+
+ if (this._pause) { this._cb = cb } else { cb() }
}
-async function hook(token, request, route, parameters) {
- const endpoint = request.endpoint.merge(route, parameters);
- endpoint.headers.authorization = withAuthorizationPrefix(token);
- return request(endpoint);
+Dicer.prototype.reset = function () {
+ this._part = undefined
+ this._bparser = undefined
+ this._hparser = undefined
}
-const createTokenAuth = function createTokenAuth(token) {
- if (!token) {
- throw new Error("[@octokit/auth-token] No token passed to createTokenAuth");
+Dicer.prototype.setBoundary = function (boundary) {
+ const self = this
+ this._bparser = new StreamSearch('\r\n--' + boundary)
+ this._bparser.on('info', function (isMatch, data, start, end) {
+ self._oninfo(isMatch, data, start, end)
+ })
+}
+
+Dicer.prototype._ignore = function () {
+ if (this._part && !this._ignoreData) {
+ this._ignoreData = true
+ this._part.on('error', EMPTY_FN)
+ // we must perform some kind of read on the stream even though we are
+ // ignoring the data, otherwise node's Readable stream will not emit 'end'
+ // after pushing null to the stream
+ this._part.resume()
}
+}
- if (typeof token !== "string") {
- throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string");
+Dicer.prototype._oninfo = function (isMatch, data, start, end) {
+ let buf; const self = this; let i = 0; let r; let shouldWriteMore = true
+
+ if (!this._part && this._justMatched && data) {
+ while (this._dashes < 2 && (start + i) < end) {
+ if (data[start + i] === DASH) {
+ ++i
+ ++this._dashes
+ } else {
+ if (this._dashes) { buf = B_ONEDASH }
+ this._dashes = 0
+ break
+ }
+ }
+ if (this._dashes === 2) {
+ if ((start + i) < end && this._events.trailer) { this.emit('trailer', data.slice(start + i, end)) }
+ this.reset()
+ this._finished = true
+ // no more parts will be added
+ if (self._parts === 0) {
+ self._realFinish = true
+ self.emit('finish')
+ self._realFinish = false
+ }
+ }
+ if (this._dashes) { return }
+ }
+ if (this._justMatched) { this._justMatched = false }
+ if (!this._part) {
+ this._part = new PartStream(this._partOpts)
+ this._part._read = function (n) {
+ self._unpause()
+ }
+ if (this._isPreamble && this._events.preamble) { this.emit('preamble', this._part) } else if (this._isPreamble !== true && this._events.part) { this.emit('part', this._part) } else { this._ignore() }
+ if (!this._isPreamble) { this._inHeader = true }
}
+ if (data && start < end && !this._ignoreData) {
+ if (this._isPreamble || !this._inHeader) {
+ if (buf) { shouldWriteMore = this._part.push(buf) }
+ shouldWriteMore = this._part.push(data.slice(start, end))
+ if (!shouldWriteMore) { this._pause = true }
+ } else if (!this._isPreamble && this._inHeader) {
+ if (buf) { this._hparser.push(buf) }
+ r = this._hparser.push(data.slice(start, end))
+ if (!this._inHeader && r !== undefined && r < end) { this._oninfo(false, data, start + r, end) }
+ }
+ }
+ if (isMatch) {
+ this._hparser.reset()
+ if (this._isPreamble) { this._isPreamble = false } else {
+ if (start !== end) {
+ ++this._parts
+ this._part.on('end', function () {
+ if (--self._parts === 0) {
+ if (self._finished) {
+ self._realFinish = true
+ self.emit('finish')
+ self._realFinish = false
+ } else {
+ self._unpause()
+ }
+ }
+ })
+ }
+ }
+ this._part.push(null)
+ this._part = undefined
+ this._ignoreData = false
+ this._justMatched = true
+ this._dashes = 0
+ }
+}
- token = token.replace(/^(token|bearer) +/i, "");
- return Object.assign(auth.bind(null, token), {
- hook: hook.bind(null, token)
- });
-};
+Dicer.prototype._unpause = function () {
+ if (!this._pause) { return }
-exports.createTokenAuth = createTokenAuth;
-//# sourceMappingURL=index.js.map
+ this._pause = false
+ if (this._cb) {
+ const cb = this._cb
+ this._cb = undefined
+ cb()
+ }
+}
+
+module.exports = Dicer
/***/ }),
-/***/ 6762:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+/***/ 333:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-
-var universalUserAgent = __nccwpck_require__(5030);
-var beforeAfterHook = __nccwpck_require__(3682);
-var request = __nccwpck_require__(6234);
-var graphql = __nccwpck_require__(8467);
-var authToken = __nccwpck_require__(334);
-
-function _objectWithoutPropertiesLoose(source, excluded) {
- if (source == null) return {};
- var target = {};
- var sourceKeys = Object.keys(source);
- var key, i;
+const EventEmitter = (__nccwpck_require__(5673).EventEmitter)
+const inherits = (__nccwpck_require__(7261).inherits)
+const getLimit = __nccwpck_require__(9692)
+
+const StreamSearch = __nccwpck_require__(8534)
+
+const B_DCRLF = Buffer.from('\r\n\r\n')
+const RE_CRLF = /\r\n/g
+const RE_HDR = /^([^:]+):[ \t]?([\x00-\xFF]+)?$/ // eslint-disable-line no-control-regex
+
+function HeaderParser (cfg) {
+ EventEmitter.call(this)
+
+ cfg = cfg || {}
+ const self = this
+ this.nread = 0
+ this.maxed = false
+ this.npairs = 0
+ this.maxHeaderPairs = getLimit(cfg, 'maxHeaderPairs', 2000)
+ this.maxHeaderSize = getLimit(cfg, 'maxHeaderSize', 80 * 1024)
+ this.buffer = ''
+ this.header = {}
+ this.finished = false
+ this.ss = new StreamSearch(B_DCRLF)
+ this.ss.on('info', function (isMatch, data, start, end) {
+ if (data && !self.maxed) {
+ if (self.nread + end - start >= self.maxHeaderSize) {
+ end = self.maxHeaderSize - self.nread + start
+ self.nread = self.maxHeaderSize
+ self.maxed = true
+ } else { self.nread += (end - start) }
+
+ self.buffer += data.toString('binary', start, end)
+ }
+ if (isMatch) { self._finish() }
+ })
+}
+inherits(HeaderParser, EventEmitter)
- for (i = 0; i < sourceKeys.length; i++) {
- key = sourceKeys[i];
- if (excluded.indexOf(key) >= 0) continue;
- target[key] = source[key];
- }
+HeaderParser.prototype.push = function (data) {
+ const r = this.ss.push(data)
+ if (this.finished) { return r }
+}
- return target;
+HeaderParser.prototype.reset = function () {
+ this.finished = false
+ this.buffer = ''
+ this.header = {}
+ this.ss.reset()
}
-function _objectWithoutProperties(source, excluded) {
- if (source == null) return {};
+HeaderParser.prototype._finish = function () {
+ if (this.buffer) { this._parseHeader() }
+ this.ss.matches = this.ss.maxMatches
+ const header = this.header
+ this.header = {}
+ this.buffer = ''
+ this.finished = true
+ this.nread = this.npairs = 0
+ this.maxed = false
+ this.emit('header', header)
+}
- var target = _objectWithoutPropertiesLoose(source, excluded);
+HeaderParser.prototype._parseHeader = function () {
+ if (this.npairs === this.maxHeaderPairs) { return }
- var key, i;
+ const lines = this.buffer.split(RE_CRLF)
+ const len = lines.length
+ let m, h
- if (Object.getOwnPropertySymbols) {
- var sourceSymbolKeys = Object.getOwnPropertySymbols(source);
+ for (var i = 0; i < len; ++i) { // eslint-disable-line no-var
+ if (lines[i].length === 0) { continue }
+ if (lines[i][0] === '\t' || lines[i][0] === ' ') {
+ // folded header content
+ // RFC2822 says to just remove the CRLF and not the whitespace following
+ // it, so we follow the RFC and include the leading whitespace ...
+ if (h) {
+ this.header[h][this.header[h].length - 1] += lines[i]
+ continue
+ }
+ }
- for (i = 0; i < sourceSymbolKeys.length; i++) {
- key = sourceSymbolKeys[i];
- if (excluded.indexOf(key) >= 0) continue;
- if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue;
- target[key] = source[key];
+ const posColon = lines[i].indexOf(':')
+ if (
+ posColon === -1 ||
+ posColon === 0
+ ) {
+ return
}
+ m = RE_HDR.exec(lines[i])
+ h = m[1].toLowerCase()
+ this.header[h] = this.header[h] || []
+ this.header[h].push((m[2] || ''))
+ if (++this.npairs === this.maxHeaderPairs) { break }
}
-
- return target;
}
-const VERSION = "3.6.0";
+module.exports = HeaderParser
-const _excluded = ["authStrategy"];
-class Octokit {
- constructor(options = {}) {
- const hook = new beforeAfterHook.Collection();
- const requestDefaults = {
- baseUrl: request.request.endpoint.DEFAULTS.baseUrl,
- headers: {},
- request: Object.assign({}, options.request, {
- // @ts-ignore internal usage only, no need to type
- hook: hook.bind(null, "request")
- }),
- mediaType: {
- previews: [],
- format: ""
- }
- }; // prepend default user agent with `options.userAgent` if set
- requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" ");
+/***/ }),
- if (options.baseUrl) {
- requestDefaults.baseUrl = options.baseUrl;
- }
+/***/ 8710:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
- if (options.previews) {
- requestDefaults.mediaType.previews = options.previews;
- }
+"use strict";
- if (options.timeZone) {
- requestDefaults.headers["time-zone"] = options.timeZone;
- }
- this.request = request.request.defaults(requestDefaults);
- this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults);
- this.log = Object.assign({
- debug: () => {},
- info: () => {},
- warn: console.warn.bind(console),
- error: console.error.bind(console)
- }, options.log);
- this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance
- // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.
- // (2) If only `options.auth` is set, use the default token authentication strategy.
- // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.
- // TODO: type `options.auth` based on `options.authStrategy`.
+const inherits = (__nccwpck_require__(7261).inherits)
+const ReadableStream = (__nccwpck_require__(4492).Readable)
- if (!options.authStrategy) {
- if (!options.auth) {
- // (1)
- this.auth = async () => ({
- type: "unauthenticated"
- });
- } else {
- // (2)
- const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯
+function PartStream (opts) {
+ ReadableStream.call(this, opts)
+}
+inherits(PartStream, ReadableStream)
- hook.wrap("request", auth.hook);
- this.auth = auth;
- }
- } else {
- const {
- authStrategy
- } = options,
- otherOptions = _objectWithoutProperties(options, _excluded);
-
- const auth = authStrategy(Object.assign({
- request: this.request,
- log: this.log,
- // we pass the current octokit instance as well as its constructor options
- // to allow for authentication strategies that return a new octokit instance
- // that shares the same internal state as the current one. The original
- // requirement for this was the "event-octokit" authentication strategy
- // of https://github.com/probot/octokit-auth-probot.
- octokit: this,
- octokitOptions: otherOptions
- }, options.auth)); // @ts-ignore ¯\_(ツ)_/¯
+PartStream.prototype._read = function (n) {}
- hook.wrap("request", auth.hook);
- this.auth = auth;
- } // apply plugins
- // https://stackoverflow.com/a/16345172
+module.exports = PartStream
- const classConstructor = this.constructor;
- classConstructor.plugins.forEach(plugin => {
- Object.assign(this, plugin(this, options));
- });
- }
+/***/ }),
- static defaults(defaults) {
- const OctokitWithDefaults = class extends this {
- constructor(...args) {
- const options = args[0] || {};
+/***/ 8534:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
- if (typeof defaults === "function") {
- super(defaults(options));
- return;
- }
+"use strict";
- super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? {
- userAgent: `${options.userAgent} ${defaults.userAgent}`
- } : null));
- }
- };
- return OctokitWithDefaults;
- }
- /**
- * Attach a plugin (or many) to your Octokit instance.
- *
- * @example
- * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)
- */
-
-
- static plugin(...newPlugins) {
- var _a;
+/**
+ * Copyright Brian White. All rights reserved.
+ *
+ * @see https://github.com/mscdex/streamsearch
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+ * IN THE SOFTWARE.
+ *
+ * Based heavily on the Streaming Boyer-Moore-Horspool C++ implementation
+ * by Hongli Lai at: https://github.com/FooBarWidget/boyer-moore-horspool
+ */
+const EventEmitter = (__nccwpck_require__(5673).EventEmitter)
+const inherits = (__nccwpck_require__(7261).inherits)
- const currentPlugins = this.plugins;
- const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a);
- return NewOctokit;
+function SBMH (needle) {
+ if (typeof needle === 'string') {
+ needle = Buffer.from(needle)
}
-}
-Octokit.VERSION = VERSION;
-Octokit.plugins = [];
+ if (!Buffer.isBuffer(needle)) {
+ throw new TypeError('The needle has to be a String or a Buffer.')
+ }
-exports.Octokit = Octokit;
-//# sourceMappingURL=index.js.map
+ const needleLength = needle.length
+ if (needleLength === 0) {
+ throw new Error('The needle cannot be an empty String/Buffer.')
+ }
-/***/ }),
+ if (needleLength > 256) {
+ throw new Error('The needle cannot have a length bigger than 256.')
+ }
-/***/ 9440:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+ this.maxMatches = Infinity
+ this.matches = 0
-"use strict";
+ this._occ = new Array(256)
+ .fill(needleLength) // Initialize occurrence table.
+ this._lookbehind_size = 0
+ this._needle = needle
+ this._bufpos = 0
+ this._lookbehind = Buffer.alloc(needleLength)
-Object.defineProperty(exports, "__esModule", ({ value: true }));
+ // Populate occurrence table with analysis of the needle,
+ // ignoring last letter.
+ for (var i = 0; i < needleLength - 1; ++i) { // eslint-disable-line no-var
+ this._occ[needle[i]] = needleLength - 1 - i
+ }
+}
+inherits(SBMH, EventEmitter)
-var isPlainObject = __nccwpck_require__(3287);
-var universalUserAgent = __nccwpck_require__(5030);
+SBMH.prototype.reset = function () {
+ this._lookbehind_size = 0
+ this.matches = 0
+ this._bufpos = 0
+}
-function lowercaseKeys(object) {
- if (!object) {
- return {};
+SBMH.prototype.push = function (chunk, pos) {
+ if (!Buffer.isBuffer(chunk)) {
+ chunk = Buffer.from(chunk, 'binary')
}
+ const chlen = chunk.length
+ this._bufpos = pos || 0
+ let r
+ while (r !== chlen && this.matches < this.maxMatches) { r = this._sbmh_feed(chunk) }
+ return r
+}
+
+SBMH.prototype._sbmh_feed = function (data) {
+ const len = data.length
+ const needle = this._needle
+ const needleLength = needle.length
+ const lastNeedleChar = needle[needleLength - 1]
+
+ // Positive: points to a position in `data`
+ // pos == 3 points to data[3]
+ // Negative: points to a position in the lookbehind buffer
+ // pos == -2 points to lookbehind[lookbehind_size - 2]
+ let pos = -this._lookbehind_size
+ let ch
+
+ if (pos < 0) {
+ // Lookbehind buffer is not empty. Perform Boyer-Moore-Horspool
+ // search with character lookup code that considers both the
+ // lookbehind buffer and the current round's haystack data.
+ //
+ // Loop until
+ // there is a match.
+ // or until
+ // we've moved past the position that requires the
+ // lookbehind buffer. In this case we switch to the
+ // optimized loop.
+ // or until
+ // the character to look at lies outside the haystack.
+ while (pos < 0 && pos <= len - needleLength) {
+ ch = this._sbmh_lookup_char(data, pos + needleLength - 1)
+
+ if (
+ ch === lastNeedleChar &&
+ this._sbmh_memcmp(data, pos, needleLength - 1)
+ ) {
+ this._lookbehind_size = 0
+ ++this.matches
+ this.emit('info', true)
+
+ return (this._bufpos = pos + needleLength)
+ }
+ pos += this._occ[ch]
+ }
- return Object.keys(object).reduce((newObj, key) => {
- newObj[key.toLowerCase()] = object[key];
- return newObj;
- }, {});
-}
+ // No match.
-function mergeDeep(defaults, options) {
- const result = Object.assign({}, defaults);
- Object.keys(options).forEach(key => {
- if (isPlainObject.isPlainObject(options[key])) {
- if (!(key in defaults)) Object.assign(result, {
- [key]: options[key]
- });else result[key] = mergeDeep(defaults[key], options[key]);
- } else {
- Object.assign(result, {
- [key]: options[key]
- });
+ if (pos < 0) {
+      // There's too little data for Boyer-Moore-Horspool to run,
+ // so let's use a different algorithm to skip as much as
+ // we can.
+ // Forward pos until
+ // the trailing part of lookbehind + data
+ // looks like the beginning of the needle
+ // or until
+ // pos == 0
+ while (pos < 0 && !this._sbmh_memcmp(data, pos, len - pos)) { ++pos }
}
- });
- return result;
-}
-function removeUndefinedProperties(obj) {
- for (const key in obj) {
- if (obj[key] === undefined) {
- delete obj[key];
+ if (pos >= 0) {
+ // Discard lookbehind buffer.
+ this.emit('info', false, this._lookbehind, 0, this._lookbehind_size)
+ this._lookbehind_size = 0
+ } else {
+ // Cut off part of the lookbehind buffer that has
+ // been processed and append the entire haystack
+ // into it.
+ const bytesToCutOff = this._lookbehind_size + pos
+ if (bytesToCutOff > 0) {
+ // The cut off data is guaranteed not to contain the needle.
+ this.emit('info', false, this._lookbehind, 0, bytesToCutOff)
+ }
+
+ this._lookbehind.copy(this._lookbehind, 0, bytesToCutOff,
+ this._lookbehind_size - bytesToCutOff)
+ this._lookbehind_size -= bytesToCutOff
+
+ data.copy(this._lookbehind, this._lookbehind_size)
+ this._lookbehind_size += len
+
+ this._bufpos = len
+ return len
}
}
- return obj;
-}
+ pos += (pos >= 0) * this._bufpos
-function merge(defaults, route, options) {
- if (typeof route === "string") {
- let [method, url] = route.split(" ");
- options = Object.assign(url ? {
- method,
- url
- } : {
- url: method
- }, options);
+ // Lookbehind buffer is now empty. We only need to check if the
+ // needle is in the haystack.
+ if (data.indexOf(needle, pos) !== -1) {
+ pos = data.indexOf(needle, pos)
+ ++this.matches
+ if (pos > 0) { this.emit('info', true, data, this._bufpos, pos) } else { this.emit('info', true) }
+
+ return (this._bufpos = pos + needleLength)
} else {
- options = Object.assign({}, route);
- } // lowercase header names before merging with defaults to avoid duplicates
+ pos = len - needleLength
+ }
+ // There was no match. If there's trailing haystack data that we cannot
+ // match yet using the Boyer-Moore-Horspool algorithm (because the trailing
+ // data is less than the needle size) then match using a modified
+ // algorithm that starts matching from the beginning instead of the end.
+ // Whatever trailing data is left after running this algorithm is added to
+ // the lookbehind buffer.
+ while (
+ pos < len &&
+ (
+ data[pos] !== needle[0] ||
+ (
+ (Buffer.compare(
+ data.subarray(pos, pos + len - pos),
+ needle.subarray(0, len - pos)
+ ) !== 0)
+ )
+ )
+ ) {
+ ++pos
+ }
+ if (pos < len) {
+ data.copy(this._lookbehind, 0, pos, pos + (len - pos))
+ this._lookbehind_size = len - pos
+ }
- options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging
+ // Everything until pos is guaranteed not to contain needle data.
+ if (pos > 0) { this.emit('info', false, data, this._bufpos, pos < len ? pos : len) }
- removeUndefinedProperties(options);
- removeUndefinedProperties(options.headers);
- const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten
+ this._bufpos = len
+ return len
+}
- if (defaults && defaults.mediaType.previews.length) {
- mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews);
- }
+SBMH.prototype._sbmh_lookup_char = function (data, pos) {
+ return (pos < 0)
+ ? this._lookbehind[this._lookbehind_size + pos]
+ : data[pos]
+}
- mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, ""));
- return mergedOptions;
+SBMH.prototype._sbmh_memcmp = function (data, pos, len) {
+ for (var i = 0; i < len; ++i) { // eslint-disable-line no-var
+ if (this._sbmh_lookup_char(data, pos + i) !== this._needle[i]) { return false }
+ }
+ return true
}
-function addQueryParameters(url, parameters) {
- const separator = /\?/.test(url) ? "&" : "?";
- const names = Object.keys(parameters);
+module.exports = SBMH
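
The matcher above is what Dicer uses to locate part boundaries in a byte stream. A minimal sketch of driving it directly, assuming the standalone streamsearch package that this bundle inlines:

const SBMH = require('streamsearch') // assumption: standalone package matching the inlined source

const search = new SBMH('\r\n--boundary')
search.on('info', (isMatch, data, start, end) => {
  // data[start..end) is guaranteed not to contain the needle
  if (data) process.stdout.write(data.slice(start, end))
  if (isMatch) process.stdout.write('\n<boundary found>\n')
})

// Chunks may split the needle anywhere; the lookbehind buffer bridges the gap.
search.push(Buffer.from('first part\r\n--bo'))
search.push(Buffer.from('undarysecond part'))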
- if (names.length === 0) {
- return url;
- }
- return url + separator + names.map(name => {
- if (name === "q") {
- return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+");
- }
+/***/ }),
- return `${name}=${encodeURIComponent(parameters[name])}`;
- }).join("&");
-}
+/***/ 3438:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
-const urlVariableRegex = /\{[^}]+\}/g;
+"use strict";
-function removeNonChars(variableName) {
- return variableName.replace(/^\W+|\W+$/g, "").split(/,/);
-}
-function extractUrlVariableNames(url) {
- const matches = url.match(urlVariableRegex);
+const WritableStream = (__nccwpck_require__(4492).Writable)
+const { inherits } = __nccwpck_require__(7261)
+const Dicer = __nccwpck_require__(2856)
- if (!matches) {
- return [];
+const MultipartParser = __nccwpck_require__(415)
+const UrlencodedParser = __nccwpck_require__(6780)
+const parseParams = __nccwpck_require__(4426)
+
+function Busboy (opts) {
+ if (!(this instanceof Busboy)) { return new Busboy(opts) }
+
+ if (typeof opts !== 'object') {
+ throw new TypeError('Busboy expected an options-Object.')
+ }
+ if (typeof opts.headers !== 'object') {
+ throw new TypeError('Busboy expected an options-Object with headers-attribute.')
+ }
+ if (typeof opts.headers['content-type'] !== 'string') {
+ throw new TypeError('Missing Content-Type-header.')
}
- return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);
-}
+ const {
+ headers,
+ ...streamOptions
+ } = opts
-function omit(object, keysToOmit) {
- return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => {
- obj[key] = object[key];
- return obj;
- }, {});
+ this.opts = {
+ autoDestroy: false,
+ ...streamOptions
+ }
+ WritableStream.call(this, this.opts)
+
+ this._done = false
+ this._parser = this.getParserByHeaders(headers)
+ this._finished = false
}
+inherits(Busboy, WritableStream)
-// Based on https://github.com/bramstein/url-template, licensed under BSD
-// TODO: create separate package.
-//
-// Copyright (c) 2012-2014, Bram Stein
-// All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions
-// are met:
-// 1. Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// 2. Redistributions in binary form must reproduce the above copyright
-// notice, this list of conditions and the following disclaimer in the
-// documentation and/or other materials provided with the distribution.
-// 3. The name of the author may not be used to endorse or promote products
-// derived from this software without specific prior written permission.
-// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED
-// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
-// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
-// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
-// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
-// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
-// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
-// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
-// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-/* istanbul ignore file */
-function encodeReserved(str) {
- return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) {
- if (!/%[0-9A-Fa-f]/.test(part)) {
- part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]");
+Busboy.prototype.emit = function (ev) {
+ if (ev === 'finish') {
+ if (!this._done) {
+ this._parser?.end()
+ return
+ } else if (this._finished) {
+ return
}
-
- return part;
- }).join("");
+ this._finished = true
+ }
+ WritableStream.prototype.emit.apply(this, arguments)
}
-function encodeUnreserved(str) {
- return encodeURIComponent(str).replace(/[!'()*]/g, function (c) {
- return "%" + c.charCodeAt(0).toString(16).toUpperCase();
- });
-}
+Busboy.prototype.getParserByHeaders = function (headers) {
+ const parsed = parseParams(headers['content-type'])
-function encodeValue(operator, value, key) {
- value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value);
+ const cfg = {
+ defCharset: this.opts.defCharset,
+ fileHwm: this.opts.fileHwm,
+ headers,
+ highWaterMark: this.opts.highWaterMark,
+ isPartAFile: this.opts.isPartAFile,
+ limits: this.opts.limits,
+ parsedConType: parsed,
+ preservePath: this.opts.preservePath
+ }
- if (key) {
- return encodeUnreserved(key) + "=" + value;
- } else {
- return value;
+ if (MultipartParser.detect.test(parsed[0])) {
+ return new MultipartParser(this, cfg)
+ }
+ if (UrlencodedParser.detect.test(parsed[0])) {
+ return new UrlencodedParser(this, cfg)
}
+ throw new Error('Unsupported Content-Type.')
}
-function isDefined(value) {
- return value !== undefined && value !== null;
+Busboy.prototype._write = function (chunk, encoding, cb) {
+ this._parser.write(chunk, cb)
}
-function isKeyOperator(operator) {
- return operator === ";" || operator === "&" || operator === "?";
-}
+module.exports = Busboy
+module.exports["default"] = Busboy
+module.exports.Busboy = Busboy
-function getValues(context, operator, key, modifier) {
- var value = context[key],
- result = [];
+module.exports.Dicer = Dicer
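
Roughly, the exported constructor is used like any Writable: the content-type header picks the parser, and parsed data comes back as events. A minimal sketch, assuming the standalone busboy package that this bundle inlines:

const Busboy = require('busboy')

const bb = new Busboy({
  headers: { 'content-type': 'application/x-www-form-urlencoded' }
})
bb.on('field', (name, value) => console.log(`${name} = ${value}`))
bb.on('finish', () => console.log('done parsing'))

bb.write(Buffer.from('user=octocat&active=true'))
bb.end()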
- if (isDefined(value) && value !== "") {
- if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") {
- value = value.toString();
- if (modifier && modifier !== "*") {
- value = value.substring(0, parseInt(modifier, 10));
- }
+/***/ }),
- result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : ""));
- } else {
- if (modifier === "*") {
- if (Array.isArray(value)) {
- value.filter(isDefined).forEach(function (value) {
- result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : ""));
- });
- } else {
- Object.keys(value).forEach(function (k) {
- if (isDefined(value[k])) {
- result.push(encodeValue(operator, value[k], k));
- }
- });
- }
- } else {
- const tmp = [];
+/***/ 415:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
- if (Array.isArray(value)) {
- value.filter(isDefined).forEach(function (value) {
- tmp.push(encodeValue(operator, value));
- });
- } else {
- Object.keys(value).forEach(function (k) {
- if (isDefined(value[k])) {
- tmp.push(encodeUnreserved(k));
- tmp.push(encodeValue(operator, value[k].toString()));
- }
- });
- }
+"use strict";
- if (isKeyOperator(operator)) {
- result.push(encodeUnreserved(key) + "=" + tmp.join(","));
- } else if (tmp.length !== 0) {
- result.push(tmp.join(","));
- }
- }
+
+// TODO:
+// * support 1 nested multipart level
+// (see second multipart example here:
+// http://www.w3.org/TR/html401/interact/forms.html#didx-multipartform-data)
+// * support limits.fieldNameSize
+// -- this will require modifications to utils.parseParams
+
+const { Readable } = __nccwpck_require__(4492)
+const { inherits } = __nccwpck_require__(7261)
+
+const Dicer = __nccwpck_require__(2856)
+
+const parseParams = __nccwpck_require__(4426)
+const decodeText = __nccwpck_require__(9136)
+const basename = __nccwpck_require__(496)
+const getLimit = __nccwpck_require__(9692)
+
+const RE_BOUNDARY = /^boundary$/i
+const RE_FIELD = /^form-data$/i
+const RE_CHARSET = /^charset$/i
+const RE_FILENAME = /^filename$/i
+const RE_NAME = /^name$/i
+
+Multipart.detect = /^multipart\/form-data/i
+function Multipart (boy, cfg) {
+ let i
+ let len
+ const self = this
+ let boundary
+ const limits = cfg.limits
+ const isPartAFile = cfg.isPartAFile || ((fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined))
+ const parsedConType = cfg.parsedConType || []
+ const defCharset = cfg.defCharset || 'utf8'
+ const preservePath = cfg.preservePath
+ const fileOpts = { highWaterMark: cfg.fileHwm }
+
+ for (i = 0, len = parsedConType.length; i < len; ++i) {
+ if (Array.isArray(parsedConType[i]) &&
+ RE_BOUNDARY.test(parsedConType[i][0])) {
+ boundary = parsedConType[i][1]
+ break
}
- } else {
- if (operator === ";") {
- if (isDefined(value)) {
- result.push(encodeUnreserved(key));
- }
- } else if (value === "" && (operator === "&" || operator === "?")) {
- result.push(encodeUnreserved(key) + "=");
- } else if (value === "") {
- result.push("");
+ }
+
+ function checkFinished () {
+ if (nends === 0 && finished && !boy._done) {
+ finished = false
+ self.end()
}
}
- return result;
-}
+ if (typeof boundary !== 'string') { throw new Error('Multipart: Boundary not found') }
+
+ const fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024)
+ const fileSizeLimit = getLimit(limits, 'fileSize', Infinity)
+ const filesLimit = getLimit(limits, 'files', Infinity)
+ const fieldsLimit = getLimit(limits, 'fields', Infinity)
+ const partsLimit = getLimit(limits, 'parts', Infinity)
+ const headerPairsLimit = getLimit(limits, 'headerPairs', 2000)
+ const headerSizeLimit = getLimit(limits, 'headerSize', 80 * 1024)
+
+ let nfiles = 0
+ let nfields = 0
+ let nends = 0
+ let curFile
+ let curField
+ let finished = false
+
+ this._needDrain = false
+ this._pause = false
+ this._cb = undefined
+ this._nparts = 0
+ this._boy = boy
+
+ const parserCfg = {
+ boundary,
+ maxHeaderPairs: headerPairsLimit,
+ maxHeaderSize: headerSizeLimit,
+ partHwm: fileOpts.highWaterMark,
+ highWaterMark: cfg.highWaterMark
+ }
-function parseUrl(template) {
- return {
- expand: expand.bind(null, template)
- };
-}
+ this.parser = new Dicer(parserCfg)
+ this.parser.on('drain', function () {
+ self._needDrain = false
+ if (self._cb && !self._pause) {
+ const cb = self._cb
+ self._cb = undefined
+ cb()
+ }
+ }).on('part', function onPart (part) {
+ if (++self._nparts > partsLimit) {
+ self.parser.removeListener('part', onPart)
+ self.parser.on('part', skipPart)
+ boy.hitPartsLimit = true
+ boy.emit('partsLimit')
+ return skipPart(part)
+ }
+
+    // hack: streams2 never emits 'end' until nextTick, so emit 'end' early
+    // here, since we already know the part has ended once we start seeing
+    // the next part
+ if (curField) {
+ const field = curField
+ field.emit('end')
+ field.removeAllListeners('end')
+ }
+
+ part.on('header', function (header) {
+ let contype
+ let fieldname
+ let parsed
+ let charset
+ let encoding
+ let filename
+ let nsize = 0
+
+ if (header['content-type']) {
+ parsed = parseParams(header['content-type'][0])
+ if (parsed[0]) {
+ contype = parsed[0].toLowerCase()
+ for (i = 0, len = parsed.length; i < len; ++i) {
+ if (RE_CHARSET.test(parsed[i][0])) {
+ charset = parsed[i][1].toLowerCase()
+ break
+ }
+ }
+ }
+ }
-function expand(template, context) {
- var operators = ["+", "#", ".", "/", ";", "?", "&"];
- return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) {
- if (expression) {
- let operator = "";
- const values = [];
+ if (contype === undefined) { contype = 'text/plain' }
+ if (charset === undefined) { charset = defCharset }
+
+ if (header['content-disposition']) {
+ parsed = parseParams(header['content-disposition'][0])
+ if (!RE_FIELD.test(parsed[0])) { return skipPart(part) }
+ for (i = 0, len = parsed.length; i < len; ++i) {
+ if (RE_NAME.test(parsed[i][0])) {
+ fieldname = parsed[i][1]
+ } else if (RE_FILENAME.test(parsed[i][0])) {
+ filename = parsed[i][1]
+ if (!preservePath) { filename = basename(filename) }
+ }
+ }
+ } else { return skipPart(part) }
- if (operators.indexOf(expression.charAt(0)) !== -1) {
- operator = expression.charAt(0);
- expression = expression.substr(1);
- }
+ if (header['content-transfer-encoding']) { encoding = header['content-transfer-encoding'][0].toLowerCase() } else { encoding = '7bit' }
- expression.split(/,/g).forEach(function (variable) {
- var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable);
- values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));
- });
+ let onData,
+ onEnd
+
+ if (isPartAFile(fieldname, contype, filename)) {
+ // file/binary field
+ if (nfiles === filesLimit) {
+ if (!boy.hitFilesLimit) {
+ boy.hitFilesLimit = true
+ boy.emit('filesLimit')
+ }
+ return skipPart(part)
+ }
+
+ ++nfiles
- if (operator && operator !== "+") {
- var separator = ",";
+ if (!boy._events.file) {
+ self.parser._ignore()
+ return
+ }
- if (operator === "?") {
- separator = "&";
- } else if (operator !== "#") {
- separator = operator;
+ ++nends
+ const file = new FileStream(fileOpts)
+ curFile = file
+ file.on('end', function () {
+ --nends
+ self._pause = false
+ checkFinished()
+ if (self._cb && !self._needDrain) {
+ const cb = self._cb
+ self._cb = undefined
+ cb()
+ }
+ })
+ file._read = function (n) {
+ if (!self._pause) { return }
+ self._pause = false
+ if (self._cb && !self._needDrain) {
+ const cb = self._cb
+ self._cb = undefined
+ cb()
+ }
+ }
+ boy.emit('file', fieldname, file, filename, encoding, contype)
+
+ onData = function (data) {
+ if ((nsize += data.length) > fileSizeLimit) {
+ const extralen = fileSizeLimit - nsize + data.length
+ if (extralen > 0) { file.push(data.slice(0, extralen)) }
+ file.truncated = true
+ file.bytesRead = fileSizeLimit
+ part.removeAllListeners('data')
+ file.emit('limit')
+ return
+ } else if (!file.push(data)) { self._pause = true }
+
+ file.bytesRead = nsize
}
- return (values.length !== 0 ? operator : "") + values.join(separator);
+ onEnd = function () {
+ curFile = undefined
+ file.push(null)
+ }
} else {
- return values.join(",");
- }
- } else {
- return encodeReserved(literal);
- }
- });
-}
+ // non-file field
+ if (nfields === fieldsLimit) {
+ if (!boy.hitFieldsLimit) {
+ boy.hitFieldsLimit = true
+ boy.emit('fieldsLimit')
+ }
+ return skipPart(part)
+ }
-function parse(options) {
- // https://fetch.spec.whatwg.org/#methods
- let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible
+ ++nfields
+ ++nends
+ let buffer = ''
+ let truncated = false
+ curField = part
+
+ onData = function (data) {
+ if ((nsize += data.length) > fieldSizeLimit) {
+ const extralen = (fieldSizeLimit - (nsize - data.length))
+ buffer += data.toString('binary', 0, extralen)
+ truncated = true
+ part.removeAllListeners('data')
+ } else { buffer += data.toString('binary') }
+ }
- let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}");
- let headers = Object.assign({}, options.headers);
- let body;
- let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later
+ onEnd = function () {
+ curField = undefined
+ if (buffer.length) { buffer = decodeText(buffer, 'binary', charset) }
+ boy.emit('field', fieldname, buffer, false, truncated, encoding, contype)
+ --nends
+ checkFinished()
+ }
+ }
- const urlVariableNames = extractUrlVariableNames(url);
- url = parseUrl(url).expand(parameters);
+ /* As of node@2efe4ab761666 (v0.10.29+/v0.11.14+), busboy had become
+ broken. Streams2/streams3 is a huge black box of confusion, but
+ somehow overriding the sync state seems to fix things again (and still
+ seems to work for previous node versions).
+ */
+ part._readableState.sync = false
+
+ part.on('data', onData)
+ part.on('end', onEnd)
+ }).on('error', function (err) {
+ if (curFile) { curFile.emit('error', err) }
+ })
+ }).on('error', function (err) {
+ boy.emit('error', err)
+ }).on('finish', function () {
+ finished = true
+ checkFinished()
+ })
+}
- if (!/^http/.test(url)) {
- url = options.baseUrl + url;
+Multipart.prototype.write = function (chunk, cb) {
+ const r = this.parser.write(chunk)
+ if (r && !this._pause) {
+ cb()
+ } else {
+ this._needDrain = !r
+ this._cb = cb
}
+}
- const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl");
- const remainingParameters = omit(parameters, omittedParameters);
- const isBinaryRequest = /application\/octet-stream/i.test(headers.accept);
+Multipart.prototype.end = function () {
+ const self = this
- if (!isBinaryRequest) {
- if (options.mediaType.format) {
- // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw
- headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(",");
- }
+ if (self.parser.writable) {
+ self.parser.end()
+ } else if (!self._boy._done) {
+ process.nextTick(function () {
+ self._boy._done = true
+ self._boy.emit('finish')
+ })
+ }
+}
- if (options.mediaType.previews.length) {
- const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || [];
- headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => {
- const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json";
- return `application/vnd.github.${preview}-preview${format}`;
- }).join(",");
- }
- } // for GET/HEAD requests, set URL query parameters from remaining parameters
- // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters
+function skipPart (part) {
+ part.resume()
+}
+function FileStream (opts) {
+ Readable.call(this, opts)
- if (["GET", "HEAD"].includes(method)) {
- url = addQueryParameters(url, remainingParameters);
- } else {
- if ("data" in remainingParameters) {
- body = remainingParameters.data;
- } else {
- if (Object.keys(remainingParameters).length) {
- body = remainingParameters;
- } else {
- headers["content-length"] = 0;
- }
- }
- } // default content-type for JSON if body is set
+ this.bytesRead = 0
+ this.truncated = false
+}
- if (!headers["content-type"] && typeof body !== "undefined") {
- headers["content-type"] = "application/json; charset=utf-8";
- } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.
- // fetch does not allow to set `content-length` header, but we can set body to an empty string
+inherits(FileStream, Readable)
+FileStream.prototype._read = function (n) {}
- if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") {
- body = "";
- } // Only return body/request keys if present
-
-
- return Object.assign({
- method,
- url,
- headers
- }, typeof body !== "undefined" ? {
- body
- } : null, options.request ? {
- request: options.request
- } : null);
-}
-
-function endpointWithDefaults(defaults, route, options) {
- return parse(merge(defaults, route, options));
-}
-
-function withDefaults(oldDefaults, newDefaults) {
- const DEFAULTS = merge(oldDefaults, newDefaults);
- const endpoint = endpointWithDefaults.bind(null, DEFAULTS);
- return Object.assign(endpoint, {
- DEFAULTS,
- defaults: withDefaults.bind(null, DEFAULTS),
- merge: merge.bind(null, DEFAULTS),
- parse
- });
-}
-
-const VERSION = "6.0.12";
-
-const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url.
-// So we use RequestParameters and add method as additional required property.
-
-const DEFAULTS = {
- method: "GET",
- baseUrl: "https://api.github.com",
- headers: {
- accept: "application/vnd.github.v3+json",
- "user-agent": userAgent
- },
- mediaType: {
- format: "",
- previews: []
- }
-};
-
-const endpoint = withDefaults(null, DEFAULTS);
-
-exports.endpoint = endpoint;
-//# sourceMappingURL=index.js.map
+module.exports = Multipart
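
To see the multipart path end to end, a hand-built body can be written straight into Busboy; the boundary is read from the content-type header and each part surfaces as a 'file' or 'field' event. A sketch under the same standalone-package assumption as above (the file name is purely illustrative):

const Busboy = require('busboy')

const boundary = 'xyz'
const body = [
  `--${boundary}`,
  'Content-Disposition: form-data; name="report"; filename="TEST-example.xml"',
  'Content-Type: application/xml',
  '',
  '<testsuite tests="1"/>',
  `--${boundary}--`,
  ''
].join('\r\n')

const bb = new Busboy({
  headers: { 'content-type': `multipart/form-data; boundary=${boundary}` },
  limits: { fileSize: 1024 * 1024 } // per-file cap; the file stream emits 'limit' when exceeded
})
bb.on('file', (name, stream, filename, encoding, mimeType) => {
  console.log('file:', name, filename, mimeType)
  stream.resume() // always drain the stream, or 'finish' never fires
})
bb.on('field', (name, value) => console.log('field:', name, value))
bb.on('finish', () => console.log('done'))
bb.end(Buffer.from(body))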
/***/ }),
-/***/ 8467:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+/***/ 6780:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-
-var request = __nccwpck_require__(6234);
-var universalUserAgent = __nccwpck_require__(5030);
-
-const VERSION = "4.8.0";
-
-function _buildMessageForResponseErrors(data) {
- return `Request failed due to following response errors:\n` + data.errors.map(e => ` - ${e.message}`).join("\n");
-}
+const Decoder = __nccwpck_require__(9730)
+const decodeText = __nccwpck_require__(9136)
+const getLimit = __nccwpck_require__(9692)
-class GraphqlResponseError extends Error {
- constructor(request, headers, response) {
- super(_buildMessageForResponseErrors(response));
- this.request = request;
- this.headers = headers;
- this.response = response;
- this.name = "GraphqlResponseError"; // Expose the errors and response data in their shorthand properties.
+const RE_CHARSET = /^charset$/i
- this.errors = response.errors;
- this.data = response.data; // Maintains proper stack trace (only available on V8)
+UrlEncoded.detect = /^application\/x-www-form-urlencoded/i
+function UrlEncoded (boy, cfg) {
+ const limits = cfg.limits
+ const parsedConType = cfg.parsedConType
+ this.boy = boy
- /* istanbul ignore next */
+ this.fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024)
+ this.fieldNameSizeLimit = getLimit(limits, 'fieldNameSize', 100)
+ this.fieldsLimit = getLimit(limits, 'fields', Infinity)
- if (Error.captureStackTrace) {
- Error.captureStackTrace(this, this.constructor);
+ let charset
+ for (var i = 0, len = parsedConType.length; i < len; ++i) { // eslint-disable-line no-var
+ if (Array.isArray(parsedConType[i]) &&
+ RE_CHARSET.test(parsedConType[i][0])) {
+ charset = parsedConType[i][1].toLowerCase()
+ break
}
}
-}
-
-const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"];
-const FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"];
-const GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/;
-function graphql(request, query, options) {
- if (options) {
- if (typeof query === "string" && "query" in options) {
- return Promise.reject(new Error(`[@octokit/graphql] "query" cannot be used as variable name`));
- }
-
- for (const key in options) {
- if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) continue;
- return Promise.reject(new Error(`[@octokit/graphql] "${key}" cannot be used as variable name`));
- }
+ if (charset === undefined) { charset = cfg.defCharset || 'utf8' }
+
+ this.decoder = new Decoder()
+ this.charset = charset
+ this._fields = 0
+ this._state = 'key'
+ this._checkingBytes = true
+ this._bytesKey = 0
+ this._bytesVal = 0
+ this._key = ''
+ this._val = ''
+ this._keyTrunc = false
+ this._valTrunc = false
+ this._hitLimit = false
+}
+
+UrlEncoded.prototype.write = function (data, cb) {
+ if (this._fields === this.fieldsLimit) {
+ if (!this.boy.hitFieldsLimit) {
+ this.boy.hitFieldsLimit = true
+ this.boy.emit('fieldsLimit')
+ }
+ return cb()
}
- const parsedOptions = typeof query === "string" ? Object.assign({
- query
- }, options) : query;
- const requestOptions = Object.keys(parsedOptions).reduce((result, key) => {
- if (NON_VARIABLE_OPTIONS.includes(key)) {
- result[key] = parsedOptions[key];
- return result;
- }
-
- if (!result.variables) {
- result.variables = {};
- }
+ let idxeq; let idxamp; let i; let p = 0; const len = data.length
+
+ while (p < len) {
+ if (this._state === 'key') {
+ idxeq = idxamp = undefined
+ for (i = p; i < len; ++i) {
+ if (!this._checkingBytes) { ++p }
+ if (data[i] === 0x3D/* = */) {
+ idxeq = i
+ break
+ } else if (data[i] === 0x26/* & */) {
+ idxamp = i
+ break
+ }
+ if (this._checkingBytes && this._bytesKey === this.fieldNameSizeLimit) {
+ this._hitLimit = true
+ break
+ } else if (this._checkingBytes) { ++this._bytesKey }
+ }
- result.variables[key] = parsedOptions[key];
- return result;
- }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix
- // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451
+ if (idxeq !== undefined) {
+ // key with assignment
+ if (idxeq > p) { this._key += this.decoder.write(data.toString('binary', p, idxeq)) }
+ this._state = 'val'
+
+ this._hitLimit = false
+ this._checkingBytes = true
+ this._val = ''
+ this._bytesVal = 0
+ this._valTrunc = false
+ this.decoder.reset()
+
+ p = idxeq + 1
+ } else if (idxamp !== undefined) {
+ // key with no assignment
+ ++this._fields
+ let key; const keyTrunc = this._keyTrunc
+ if (idxamp > p) { key = (this._key += this.decoder.write(data.toString('binary', p, idxamp))) } else { key = this._key }
+
+ this._hitLimit = false
+ this._checkingBytes = true
+ this._key = ''
+ this._bytesKey = 0
+ this._keyTrunc = false
+ this.decoder.reset()
+
+ if (key.length) {
+ this.boy.emit('field', decodeText(key, 'binary', this.charset),
+ '',
+ keyTrunc,
+ false)
+ }
- const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl;
+ p = idxamp + 1
+ if (this._fields === this.fieldsLimit) { return cb() }
+ } else if (this._hitLimit) {
+ // we may not have hit the actual limit if there are encoded bytes...
+ if (i > p) { this._key += this.decoder.write(data.toString('binary', p, i)) }
+ p = i
+ if ((this._bytesKey = this._key.length) === this.fieldNameSizeLimit) {
+ // yep, we actually did hit the limit
+ this._checkingBytes = false
+ this._keyTrunc = true
+ }
+ } else {
+ if (p < len) { this._key += this.decoder.write(data.toString('binary', p)) }
+ p = len
+ }
+ } else {
+ idxamp = undefined
+ for (i = p; i < len; ++i) {
+ if (!this._checkingBytes) { ++p }
+ if (data[i] === 0x26/* & */) {
+ idxamp = i
+ break
+ }
+ if (this._checkingBytes && this._bytesVal === this.fieldSizeLimit) {
+ this._hitLimit = true
+ break
+ } else if (this._checkingBytes) { ++this._bytesVal }
+ }
- if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {
- requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql");
+ if (idxamp !== undefined) {
+ ++this._fields
+ if (idxamp > p) { this._val += this.decoder.write(data.toString('binary', p, idxamp)) }
+ this.boy.emit('field', decodeText(this._key, 'binary', this.charset),
+ decodeText(this._val, 'binary', this.charset),
+ this._keyTrunc,
+ this._valTrunc)
+ this._state = 'key'
+
+ this._hitLimit = false
+ this._checkingBytes = true
+ this._key = ''
+ this._bytesKey = 0
+ this._keyTrunc = false
+ this.decoder.reset()
+
+ p = idxamp + 1
+ if (this._fields === this.fieldsLimit) { return cb() }
+ } else if (this._hitLimit) {
+ // we may not have hit the actual limit if there are encoded bytes...
+ if (i > p) { this._val += this.decoder.write(data.toString('binary', p, i)) }
+ p = i
+ if ((this._val === '' && this.fieldSizeLimit === 0) ||
+ (this._bytesVal = this._val.length) === this.fieldSizeLimit) {
+ // yep, we actually did hit the limit
+ this._checkingBytes = false
+ this._valTrunc = true
+ }
+ } else {
+ if (p < len) { this._val += this.decoder.write(data.toString('binary', p)) }
+ p = len
+ }
+ }
}
+ cb()
+}
+
+UrlEncoded.prototype.end = function () {
+ if (this.boy._done) { return }
+
+ if (this._state === 'key' && this._key.length > 0) {
+ this.boy.emit('field', decodeText(this._key, 'binary', this.charset),
+ '',
+ this._keyTrunc,
+ false)
+ } else if (this._state === 'val') {
+ this.boy.emit('field', decodeText(this._key, 'binary', this.charset),
+ decodeText(this._val, 'binary', this.charset),
+ this._keyTrunc,
+ this._valTrunc)
+ }
+ this.boy._done = true
+ this.boy.emit('finish')
+}
- return request(requestOptions).then(response => {
- if (response.data.errors) {
- const headers = {};
+module.exports = UrlEncoded
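
The limits wiring above is easiest to see with a value that overruns limits.fieldSize: the value is cut off at the limit and the truncation flag is passed as the fourth 'field' argument. A sketch, again assuming the standalone busboy package:

const Busboy = require('busboy')

const bb = new Busboy({
  headers: { 'content-type': 'application/x-www-form-urlencoded' },
  limits: { fieldSize: 5 }
})
bb.on('field', (name, value, nameTruncated, valueTruncated) => {
  console.log(name, JSON.stringify(value), { valueTruncated })
})
bb.end('comment=this+is+far+too+long')
// expected: comment "this " { valueTruncated: true }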
- for (const key of Object.keys(response.headers)) {
- headers[key] = response.headers[key];
- }
- throw new GraphqlResponseError(requestOptions, headers, response.data);
- }
+/***/ }),
- return response.data.data;
- });
-}
+/***/ 9730:
+/***/ ((module) => {
-function withDefaults(request$1, newDefaults) {
- const newRequest = request$1.defaults(newDefaults);
+"use strict";
- const newApi = (query, options) => {
- return graphql(newRequest, query, options);
- };
- return Object.assign(newApi, {
- defaults: withDefaults.bind(null, newRequest),
- endpoint: request.request.endpoint
- });
+const RE_PLUS = /\+/g
+
+const HEX = [
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0,
+ 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+]
+
+function Decoder () {
+ this.buffer = undefined
+}
+Decoder.prototype.write = function (str) {
+ // Replace '+' with ' ' before decoding
+ str = str.replace(RE_PLUS, ' ')
+ let res = ''
+ let i = 0; let p = 0; const len = str.length
+ for (; i < len; ++i) {
+ if (this.buffer !== undefined) {
+ if (!HEX[str.charCodeAt(i)]) {
+ res += '%' + this.buffer
+ this.buffer = undefined
+ --i // retry character
+ } else {
+ this.buffer += str[i]
+ ++p
+ if (this.buffer.length === 2) {
+ res += String.fromCharCode(parseInt(this.buffer, 16))
+ this.buffer = undefined
+ }
+ }
+ } else if (str[i] === '%') {
+ if (i > p) {
+ res += str.substring(p, i)
+ p = i
+ }
+ this.buffer = ''
+ ++p
+ }
+ }
+ if (p < len && this.buffer === undefined) { res += str.substring(p) }
+ return res
}
-
-const graphql$1 = withDefaults(request.request, {
- headers: {
- "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}`
- },
- method: "POST",
- url: "/graphql"
-});
-function withCustomRequest(customRequest) {
- return withDefaults(customRequest, {
- method: "POST",
- url: "/graphql"
- });
+Decoder.prototype.reset = function () {
+ this.buffer = undefined
}
-exports.GraphqlResponseError = GraphqlResponseError;
-exports.graphql = graphql$1;
-exports.withCustomRequest = withCustomRequest;
-//# sourceMappingURL=index.js.map
+module.exports = Decoder
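
The one-character buffer exists so a percent escape split across write() calls still decodes correctly, which is exactly what happens when urlencoded data arrives in chunks. Illustration only, assuming the Decoder constructor above is in scope:

const d = new Decoder()
d.write('a%2')   // -> 'a'    (incomplete escape is held in d.buffer)
d.write('0b')    // -> ' b'   (completes '%20', then emits 'b')
d.reset()
d.write('1+1')   // -> '1 1'  ('+' is turned into a space before decoding)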
/***/ }),
-/***/ 4193:
-/***/ ((__unused_webpack_module, exports) => {
+/***/ 496:
+/***/ ((module) => {
"use strict";
-Object.defineProperty(exports, "__esModule", ({ value: true }));
+module.exports = function basename (path) {
+ if (typeof path !== 'string') { return '' }
+ for (var i = path.length - 1; i >= 0; --i) { // eslint-disable-line no-var
+ switch (path.charCodeAt(i)) {
+ case 0x2F: // '/'
+ case 0x5C: // '\'
+ path = path.slice(i + 1)
+ return (path === '..' || path === '.' ? '' : path)
+ }
+ }
+ return (path === '..' || path === '.' ? '' : path)
+}
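
Illustration only, assuming the basename function above is in scope; both path separators are handled and bare '.'/'..' collapse to an empty name:

basename('C:\\fakepath\\report.xml')  // -> 'report.xml'
basename('/tmp/upload/.profile')      // -> '.profile'
basename('..')                        // -> ''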
-const VERSION = "2.21.3";
-function ownKeys(object, enumerableOnly) {
- var keys = Object.keys(object);
+/***/ }),
- if (Object.getOwnPropertySymbols) {
- var symbols = Object.getOwnPropertySymbols(object);
- enumerableOnly && (symbols = symbols.filter(function (sym) {
- return Object.getOwnPropertyDescriptor(object, sym).enumerable;
- })), keys.push.apply(keys, symbols);
- }
+/***/ 9136:
+/***/ ((module) => {
- return keys;
-}
+"use strict";
-function _objectSpread2(target) {
- for (var i = 1; i < arguments.length; i++) {
- var source = null != arguments[i] ? arguments[i] : {};
- i % 2 ? ownKeys(Object(source), !0).forEach(function (key) {
- _defineProperty(target, key, source[key]);
- }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) {
- Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));
- });
- }
- return target;
-}
+// Node always has utf-8 support built in
+const utf8Decoder = new TextDecoder('utf-8')
+const textDecoders = new Map([
+ ['utf-8', utf8Decoder],
+ ['utf8', utf8Decoder]
+])
-function _defineProperty(obj, key, value) {
- if (key in obj) {
- Object.defineProperty(obj, key, {
- value: value,
- enumerable: true,
- configurable: true,
- writable: true
- });
- } else {
- obj[key] = value;
+function decodeText (text, textEncoding, destEncoding) {
+ if (text) {
+ if (textDecoders.has(destEncoding)) {
+ try {
+ return textDecoders.get(destEncoding).decode(Buffer.from(text, textEncoding))
+ } catch (e) { }
+ } else {
+ try {
+ textDecoders.set(destEncoding, new TextDecoder(destEncoding))
+ return textDecoders.get(destEncoding).decode(Buffer.from(text, textEncoding))
+ } catch (e) { }
+ }
}
-
- return obj;
+ return text
}
-/**
- * Some “list” response that can be paginated have a different response structure
- *
- * They have a `total_count` key in the response (search also has `incomplete_results`,
- * /installation/repositories also has `repository_selection`), as well as a key with
- * the list of the items which name varies from endpoint to endpoint.
- *
- * Octokit normalizes these responses so that paginated results are always returned following
- * the same structure. One challenge is that if the list response has only one page, no Link
- * header is provided, so this header alone is not sufficient to check wether a response is
- * paginated or not.
- *
- * We check if a "total_count" key is present in the response data, but also make sure that
- * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would
- * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref
- */
-function normalizePaginatedListResponse(response) {
- // endpoints can respond with 204 if repository is empty
- if (!response.data) {
- return _objectSpread2(_objectSpread2({}, response), {}, {
- data: []
- });
- }
-
- const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data);
- if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way
- // to retrieve the same information.
+module.exports = decodeText
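
The helper re-reads text captured with the 'binary' (latin1) encoding as bytes in the destination charset, caching one TextDecoder per charset and falling back to the input when decoding fails. Illustration only, assuming decodeText above is in scope:

decodeText('caf\u00c3\u00a9', 'binary', 'utf-8')  // -> 'café' (bytes C3 A9 read as UTF-8)
decodeText('caf\u00e9', 'binary', 'latin1')       // -> 'café' (single byte E9 in ISO-8859-1)
decodeText('', 'binary', 'utf-8')                 // -> ''     (falsy input is returned unchanged)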
- const incompleteResults = response.data.incomplete_results;
- const repositorySelection = response.data.repository_selection;
- const totalCount = response.data.total_count;
- delete response.data.incomplete_results;
- delete response.data.repository_selection;
- delete response.data.total_count;
- const namespaceKey = Object.keys(response.data)[0];
- const data = response.data[namespaceKey];
- response.data = data;
- if (typeof incompleteResults !== "undefined") {
- response.data.incomplete_results = incompleteResults;
- }
+/***/ }),
- if (typeof repositorySelection !== "undefined") {
- response.data.repository_selection = repositorySelection;
- }
+/***/ 9692:
+/***/ ((module) => {
- response.data.total_count = totalCount;
- return response;
-}
+"use strict";
-function iterator(octokit, route, parameters) {
- const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);
- const requestMethod = typeof route === "function" ? route : octokit.request;
- const method = options.method;
- const headers = options.headers;
- let url = options.url;
- return {
- [Symbol.asyncIterator]: () => ({
- async next() {
- if (!url) return {
- done: true
- };
- try {
- const response = await requestMethod({
- method,
- url,
- headers
- });
- const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format:
- // '; rel="next", ; rel="last"'
- // sets `url` to undefined if "next" URL is not present or `link` header is not set
+module.exports = function getLimit (limits, name, defaultLimit) {
+ if (
+ !limits ||
+ limits[name] === undefined ||
+ limits[name] === null
+ ) { return defaultLimit }
- url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1];
- return {
- value: normalizedResponse
- };
- } catch (error) {
- if (error.status !== 409) throw error;
- url = "";
- return {
- value: {
- status: 200,
- headers: {},
- data: []
- }
- };
- }
- }
+ if (
+ typeof limits[name] !== 'number' ||
+ isNaN(limits[name])
+ ) { throw new TypeError('Limit ' + name + ' is not a valid number') }
- })
- };
+ return limits[name]
}
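
Illustration only, assuming getLimit above is in scope; missing limits fall back to the default, and non-numeric ones are rejected:

getLimit({ fileSize: 1024 * 1024 }, 'fileSize', Infinity)  // -> 1048576
getLimit(undefined, 'files', Infinity)                     // -> Infinity
getLimit({ fields: 'ten' }, 'fields', 10)                  // -> throws TypeError: Limit fields is not a valid number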
-function paginate(octokit, route, parameters, mapFn) {
- if (typeof parameters === "function") {
- mapFn = parameters;
- parameters = undefined;
- }
- return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn);
-}
+/***/ }),
-function gather(octokit, results, iterator, mapFn) {
- return iterator.next().then(result => {
- if (result.done) {
- return results;
- }
+/***/ 4426:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
- let earlyExit = false;
+"use strict";
- function done() {
- earlyExit = true;
- }
- results = results.concat(mapFn ? mapFn(result.value, done) : result.value.data);
+const decodeText = __nccwpck_require__(9136)
- if (earlyExit) {
- return results;
- }
+const RE_ENCODED = /%([a-fA-F0-9]{2})/g
- return gather(octokit, results, iterator, mapFn);
- });
+function encodedReplacer (match, byte) {
+ return String.fromCharCode(parseInt(byte, 16))
}
-const composePaginateRest = Object.assign(paginate, {
- iterator
-});
-
-const paginatingEndpoints = ["GET /app/hook/deliveries", "GET /app/installations", "GET /applications/grants", "GET /authorizations", "GET /enterprises/{enterprise}/actions/permissions/organizations", "GET /enterprises/{enterprise}/actions/runner-groups", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners", "GET /enterprises/{enterprise}/actions/runners", "GET /enterprises/{enterprise}/audit-log", "GET /enterprises/{enterprise}/secret-scanning/alerts", "GET /enterprises/{enterprise}/settings/billing/advanced-security", "GET /events", "GET /gists", "GET /gists/public", "GET /gists/starred", "GET /gists/{gist_id}/comments", "GET /gists/{gist_id}/commits", "GET /gists/{gist_id}/forks", "GET /installation/repositories", "GET /issues", "GET /licenses", "GET /marketplace_listing/plans", "GET /marketplace_listing/plans/{plan_id}/accounts", "GET /marketplace_listing/stubbed/plans", "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", "GET /networks/{owner}/{repo}/events", "GET /notifications", "GET /organizations", "GET /orgs/{org}/actions/cache/usage-by-repository", "GET /orgs/{org}/actions/permissions/repositories", "GET /orgs/{org}/actions/runner-groups", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners", "GET /orgs/{org}/actions/runners", "GET /orgs/{org}/actions/secrets", "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", "GET /orgs/{org}/audit-log", "GET /orgs/{org}/blocks", "GET /orgs/{org}/code-scanning/alerts", "GET /orgs/{org}/codespaces", "GET /orgs/{org}/credential-authorizations", "GET /orgs/{org}/dependabot/secrets", "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", "GET /orgs/{org}/events", "GET /orgs/{org}/external-groups", "GET /orgs/{org}/failed_invitations", "GET /orgs/{org}/hooks", "GET /orgs/{org}/hooks/{hook_id}/deliveries", "GET /orgs/{org}/installations", "GET /orgs/{org}/invitations", "GET /orgs/{org}/invitations/{invitation_id}/teams", "GET /orgs/{org}/issues", "GET /orgs/{org}/members", "GET /orgs/{org}/migrations", "GET /orgs/{org}/migrations/{migration_id}/repositories", "GET /orgs/{org}/outside_collaborators", "GET /orgs/{org}/packages", "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", "GET /orgs/{org}/projects", "GET /orgs/{org}/public_members", "GET /orgs/{org}/repos", "GET /orgs/{org}/secret-scanning/alerts", "GET /orgs/{org}/settings/billing/advanced-security", "GET /orgs/{org}/team-sync/groups", "GET /orgs/{org}/teams", "GET /orgs/{org}/teams/{team_slug}/discussions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/invitations", "GET /orgs/{org}/teams/{team_slug}/members", "GET /orgs/{org}/teams/{team_slug}/projects", "GET /orgs/{org}/teams/{team_slug}/repos", "GET /orgs/{org}/teams/{team_slug}/teams", "GET /projects/columns/{column_id}/cards", "GET /projects/{project_id}/collaborators", "GET /projects/{project_id}/columns", "GET /repos/{owner}/{repo}/actions/artifacts", "GET /repos/{owner}/{repo}/actions/caches", "GET /repos/{owner}/{repo}/actions/runners", "GET /repos/{owner}/{repo}/actions/runs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", "GET 
/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", "GET /repos/{owner}/{repo}/actions/secrets", "GET /repos/{owner}/{repo}/actions/workflows", "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", "GET /repos/{owner}/{repo}/assignees", "GET /repos/{owner}/{repo}/branches", "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", "GET /repos/{owner}/{repo}/code-scanning/alerts", "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", "GET /repos/{owner}/{repo}/code-scanning/analyses", "GET /repos/{owner}/{repo}/codespaces", "GET /repos/{owner}/{repo}/codespaces/devcontainers", "GET /repos/{owner}/{repo}/codespaces/secrets", "GET /repos/{owner}/{repo}/collaborators", "GET /repos/{owner}/{repo}/comments", "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/commits", "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", "GET /repos/{owner}/{repo}/commits/{ref}/status", "GET /repos/{owner}/{repo}/commits/{ref}/statuses", "GET /repos/{owner}/{repo}/contributors", "GET /repos/{owner}/{repo}/dependabot/secrets", "GET /repos/{owner}/{repo}/deployments", "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", "GET /repos/{owner}/{repo}/environments", "GET /repos/{owner}/{repo}/events", "GET /repos/{owner}/{repo}/forks", "GET /repos/{owner}/{repo}/git/matching-refs/{ref}", "GET /repos/{owner}/{repo}/hooks", "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", "GET /repos/{owner}/{repo}/invitations", "GET /repos/{owner}/{repo}/issues", "GET /repos/{owner}/{repo}/issues/comments", "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/issues/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", "GET /repos/{owner}/{repo}/issues/{issue_number}/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", "GET /repos/{owner}/{repo}/keys", "GET /repos/{owner}/{repo}/labels", "GET /repos/{owner}/{repo}/milestones", "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", "GET /repos/{owner}/{repo}/notifications", "GET /repos/{owner}/{repo}/pages/builds", "GET /repos/{owner}/{repo}/projects", "GET /repos/{owner}/{repo}/pulls", "GET /repos/{owner}/{repo}/pulls/comments", "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", "GET /repos/{owner}/{repo}/releases", "GET /repos/{owner}/{repo}/releases/{release_id}/assets", "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", "GET /repos/{owner}/{repo}/secret-scanning/alerts", "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", "GET /repos/{owner}/{repo}/stargazers", "GET /repos/{owner}/{repo}/subscribers", "GET /repos/{owner}/{repo}/tags", "GET 
/repos/{owner}/{repo}/teams", "GET /repos/{owner}/{repo}/topics", "GET /repositories", "GET /repositories/{repository_id}/environments/{environment_name}/secrets", "GET /search/code", "GET /search/commits", "GET /search/issues", "GET /search/labels", "GET /search/repositories", "GET /search/topics", "GET /search/users", "GET /teams/{team_id}/discussions", "GET /teams/{team_id}/discussions/{discussion_number}/comments", "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /teams/{team_id}/discussions/{discussion_number}/reactions", "GET /teams/{team_id}/invitations", "GET /teams/{team_id}/members", "GET /teams/{team_id}/projects", "GET /teams/{team_id}/repos", "GET /teams/{team_id}/teams", "GET /user/blocks", "GET /user/codespaces", "GET /user/codespaces/secrets", "GET /user/emails", "GET /user/followers", "GET /user/following", "GET /user/gpg_keys", "GET /user/installations", "GET /user/installations/{installation_id}/repositories", "GET /user/issues", "GET /user/keys", "GET /user/marketplace_purchases", "GET /user/marketplace_purchases/stubbed", "GET /user/memberships/orgs", "GET /user/migrations", "GET /user/migrations/{migration_id}/repositories", "GET /user/orgs", "GET /user/packages", "GET /user/packages/{package_type}/{package_name}/versions", "GET /user/public_emails", "GET /user/repos", "GET /user/repository_invitations", "GET /user/starred", "GET /user/subscriptions", "GET /user/teams", "GET /users", "GET /users/{username}/events", "GET /users/{username}/events/orgs/{org}", "GET /users/{username}/events/public", "GET /users/{username}/followers", "GET /users/{username}/following", "GET /users/{username}/gists", "GET /users/{username}/gpg_keys", "GET /users/{username}/keys", "GET /users/{username}/orgs", "GET /users/{username}/packages", "GET /users/{username}/projects", "GET /users/{username}/received_events", "GET /users/{username}/received_events/public", "GET /users/{username}/repos", "GET /users/{username}/starred", "GET /users/{username}/subscriptions"];
+function parseParams (str) {
+ const res = []
+ let state = 'key'
+ let charset = ''
+ let inquote = false
+ let escaping = false
+ let p = 0
+ let tmp = ''
-function isPaginatingEndpoint(arg) {
- if (typeof arg === "string") {
- return paginatingEndpoints.includes(arg);
- } else {
- return false;
+ for (var i = 0, len = str.length; i < len; ++i) { // eslint-disable-line no-var
+ const char = str[i]
+ if (char === '\\' && inquote) {
+ if (escaping) { escaping = false } else {
+ escaping = true
+ continue
+ }
+ } else if (char === '"') {
+ if (!escaping) {
+ if (inquote) {
+ inquote = false
+ state = 'key'
+ } else { inquote = true }
+ continue
+ } else { escaping = false }
+ } else {
+ if (escaping && inquote) { tmp += '\\' }
+ escaping = false
+ if ((state === 'charset' || state === 'lang') && char === "'") {
+ if (state === 'charset') {
+ state = 'lang'
+ charset = tmp.substring(1)
+ } else { state = 'value' }
+ tmp = ''
+ continue
+ } else if (state === 'key' &&
+ (char === '*' || char === '=') &&
+ res.length) {
+ if (char === '*') { state = 'charset' } else { state = 'value' }
+ res[p] = [tmp, undefined]
+ tmp = ''
+ continue
+ } else if (!inquote && char === ';') {
+ state = 'key'
+ if (charset) {
+ if (tmp.length) {
+ tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer),
+ 'binary',
+ charset)
+ }
+ charset = ''
+ } else if (tmp.length) {
+ tmp = decodeText(tmp, 'binary', 'utf8')
+ }
+ if (res[p] === undefined) { res[p] = tmp } else { res[p][1] = tmp }
+ tmp = ''
+ ++p
+ continue
+ } else if (!inquote && (char === ' ' || char === '\t')) { continue }
+ }
+ tmp += char
+ }
+ if (charset && tmp.length) {
+ tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer),
+ 'binary',
+ charset)
+ } else if (tmp) {
+ tmp = decodeText(tmp, 'binary', 'utf8')
}
-}
-/**
- * @param octokit Octokit instance
- * @param options Options passed to Octokit constructor
- */
+ if (res[p] === undefined) {
+ if (tmp) { res[p] = tmp }
+ } else { res[p][1] = tmp }
-function paginateRest(octokit) {
- return {
- paginate: Object.assign(paginate.bind(null, octokit), {
- iterator: iterator.bind(null, octokit)
- })
- };
+ return res
}
-paginateRest.VERSION = VERSION;
-exports.composePaginateRest = composePaginateRest;
-exports.isPaginatingEndpoint = isPaginatingEndpoint;
-exports.paginateRest = paginateRest;
-exports.paginatingEndpoints = paginatingEndpoints;
-//# sourceMappingURL=index.js.map
+module.exports = parseParams
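
The return shape is the bare media type first, then one [key, value] pair per parameter, with surrounding quotes stripped. Illustration only, assuming parseParams above is in scope:

parseParams('multipart/form-data; boundary="----x"; charset=utf-8')
// -> [ 'multipart/form-data', [ 'boundary', '----x' ], [ 'charset', 'utf-8' ] ]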
/***/ }),
-/***/ 3044:
-/***/ ((__unused_webpack_module, exports) => {
+/***/ 334:
+/***/ ((module) => {
"use strict";
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-
-function ownKeys(object, enumerableOnly) {
- var keys = Object.keys(object);
-
- if (Object.getOwnPropertySymbols) {
- var symbols = Object.getOwnPropertySymbols(object);
+// pkg/dist-src/index.js
+var dist_src_exports = {};
+__export(dist_src_exports, {
+ createTokenAuth: () => createTokenAuth
+});
+module.exports = __toCommonJS(dist_src_exports);
- if (enumerableOnly) {
- symbols = symbols.filter(function (sym) {
- return Object.getOwnPropertyDescriptor(object, sym).enumerable;
- });
- }
+// pkg/dist-src/auth.js
+var REGEX_IS_INSTALLATION_LEGACY = /^v1\./;
+var REGEX_IS_INSTALLATION = /^ghs_/;
+var REGEX_IS_USER_TO_SERVER = /^ghu_/;
+async function auth(token) {
+ const isApp = token.split(/\./).length === 3;
+ const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token);
+ const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token);
+ const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? "user-to-server" : "oauth";
+ return {
+ type: "token",
+ token,
+ tokenType
+ };
+}
- keys.push.apply(keys, symbols);
+// pkg/dist-src/with-authorization-prefix.js
+function withAuthorizationPrefix(token) {
+ if (token.split(/\./).length === 3) {
+ return `bearer ${token}`;
}
+ return `token ${token}`;
+}
- return keys;
+// pkg/dist-src/hook.js
+async function hook(token, request, route, parameters) {
+ const endpoint = request.endpoint.merge(
+ route,
+ parameters
+ );
+ endpoint.headers.authorization = withAuthorizationPrefix(token);
+ return request(endpoint);
}
-function _objectSpread2(target) {
- for (var i = 1; i < arguments.length; i++) {
- var source = arguments[i] != null ? arguments[i] : {};
+// pkg/dist-src/index.js
+var createTokenAuth = function createTokenAuth2(token) {
+ if (!token) {
+ throw new Error("[@octokit/auth-token] No token passed to createTokenAuth");
+ }
+ if (typeof token !== "string") {
+ throw new Error(
+ "[@octokit/auth-token] Token passed to createTokenAuth is not a string"
+ );
+ }
+ token = token.replace(/^(token|bearer) +/i, "");
+ return Object.assign(auth.bind(null, token), {
+ hook: hook.bind(null, token)
+ });
+};
+// Annotate the CommonJS export names for ESM import in node:
+0 && (0);
+
- if (i % 2) {
- ownKeys(Object(source), true).forEach(function (key) {
- _defineProperty(target, key, source[key]);
- });
- } else if (Object.getOwnPropertyDescriptors) {
- Object.defineProperties(target, Object.getOwnPropertyDescriptors(source));
+/***/ }),
+
+/***/ 6762:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+// pkg/dist-src/index.js
+var dist_src_exports = {};
+__export(dist_src_exports, {
+ Octokit: () => Octokit
+});
+module.exports = __toCommonJS(dist_src_exports);
+var import_universal_user_agent = __nccwpck_require__(5030);
+var import_before_after_hook = __nccwpck_require__(3682);
+var import_request = __nccwpck_require__(6234);
+var import_graphql = __nccwpck_require__(8467);
+var import_auth_token = __nccwpck_require__(334);
+
+// pkg/dist-src/version.js
+var VERSION = "5.0.1";
+
+// pkg/dist-src/index.js
+var Octokit = class {
+ static {
+ this.VERSION = VERSION;
+ }
+ static defaults(defaults) {
+ const OctokitWithDefaults = class extends this {
+ constructor(...args) {
+ const options = args[0] || {};
+ if (typeof defaults === "function") {
+ super(defaults(options));
+ return;
+ }
+ super(
+ Object.assign(
+ {},
+ defaults,
+ options,
+ options.userAgent && defaults.userAgent ? {
+ userAgent: `${options.userAgent} ${defaults.userAgent}`
+ } : null
+ )
+ );
+ }
+ };
+ return OctokitWithDefaults;
+ }
+ static {
+ this.plugins = [];
+ }
+ /**
+ * Attach a plugin (or many) to your Octokit instance.
+ *
+ * @example
+ * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)
+ */
+ static plugin(...newPlugins) {
+ const currentPlugins = this.plugins;
+ const NewOctokit = class extends this {
+ static {
+ this.plugins = currentPlugins.concat(
+ newPlugins.filter((plugin) => !currentPlugins.includes(plugin))
+ );
+ }
+ };
+ return NewOctokit;
+ }
+ constructor(options = {}) {
+ const hook = new import_before_after_hook.Collection();
+ const requestDefaults = {
+ baseUrl: import_request.request.endpoint.DEFAULTS.baseUrl,
+ headers: {},
+ request: Object.assign({}, options.request, {
+ // @ts-ignore internal usage only, no need to type
+ hook: hook.bind(null, "request")
+ }),
+ mediaType: {
+ previews: [],
+ format: ""
+ }
+ };
+ requestDefaults.headers["user-agent"] = [
+ options.userAgent,
+ `octokit-core.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`
+ ].filter(Boolean).join(" ");
+ if (options.baseUrl) {
+ requestDefaults.baseUrl = options.baseUrl;
+ }
+ if (options.previews) {
+ requestDefaults.mediaType.previews = options.previews;
+ }
+ if (options.timeZone) {
+ requestDefaults.headers["time-zone"] = options.timeZone;
+ }
+ this.request = import_request.request.defaults(requestDefaults);
+ this.graphql = (0, import_graphql.withCustomRequest)(this.request).defaults(requestDefaults);
+ this.log = Object.assign(
+ {
+ debug: () => {
+ },
+ info: () => {
+ },
+ warn: console.warn.bind(console),
+ error: console.error.bind(console)
+ },
+ options.log
+ );
+ this.hook = hook;
+ if (!options.authStrategy) {
+ if (!options.auth) {
+ this.auth = async () => ({
+ type: "unauthenticated"
+ });
+ } else {
+ const auth = (0, import_auth_token.createTokenAuth)(options.auth);
+ hook.wrap("request", auth.hook);
+ this.auth = auth;
+ }
} else {
- ownKeys(Object(source)).forEach(function (key) {
- Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));
- });
+ const { authStrategy, ...otherOptions } = options;
+ const auth = authStrategy(
+ Object.assign(
+ {
+ request: this.request,
+ log: this.log,
+ // we pass the current octokit instance as well as its constructor options
+ // to allow for authentication strategies that return a new octokit instance
+ // that shares the same internal state as the current one. The original
+ // requirement for this was the "event-octokit" authentication strategy
+ // of https://github.com/probot/octokit-auth-probot.
+ octokit: this,
+ octokitOptions: otherOptions
+ },
+ options.auth
+ )
+ );
+ hook.wrap("request", auth.hook);
+ this.auth = auth;
}
+ const classConstructor = this.constructor;
+ classConstructor.plugins.forEach((plugin) => {
+ Object.assign(this, plugin(this, options));
+ });
}
+};
+// Annotate the CommonJS export names for ESM import in node:
+0 && (0);
- return target;
+
+/***/ }),
+
+/***/ 9440:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+// pkg/dist-src/index.js
+var dist_src_exports = {};
+__export(dist_src_exports, {
+ endpoint: () => endpoint
+});
+module.exports = __toCommonJS(dist_src_exports);
+
+// pkg/dist-src/defaults.js
+var import_universal_user_agent = __nccwpck_require__(5030);
+
+// pkg/dist-src/version.js
+var VERSION = "9.0.1";
+
+// pkg/dist-src/defaults.js
+var userAgent = `octokit-endpoint.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`;
+var DEFAULTS = {
+ method: "GET",
+ baseUrl: "https://api.github.com",
+ headers: {
+ accept: "application/vnd.github.v3+json",
+ "user-agent": userAgent
+ },
+ mediaType: {
+ format: ""
+ }
+};
+
+// pkg/dist-src/util/lowercase-keys.js
+function lowercaseKeys(object) {
+ if (!object) {
+ return {};
+ }
+ return Object.keys(object).reduce((newObj, key) => {
+ newObj[key.toLowerCase()] = object[key];
+ return newObj;
+ }, {});
}
-function _defineProperty(obj, key, value) {
- if (key in obj) {
- Object.defineProperty(obj, key, {
- value: value,
- enumerable: true,
- configurable: true,
- writable: true
- });
+// pkg/dist-src/util/merge-deep.js
+var import_is_plain_object = __nccwpck_require__(3287);
+function mergeDeep(defaults, options) {
+ const result = Object.assign({}, defaults);
+ Object.keys(options).forEach((key) => {
+ if ((0, import_is_plain_object.isPlainObject)(options[key])) {
+ if (!(key in defaults))
+ Object.assign(result, { [key]: options[key] });
+ else
+ result[key] = mergeDeep(defaults[key], options[key]);
+ } else {
+ Object.assign(result, { [key]: options[key] });
+ }
+ });
+ return result;
+}
+
+// pkg/dist-src/util/remove-undefined-properties.js
+function removeUndefinedProperties(obj) {
+ for (const key in obj) {
+ if (obj[key] === void 0) {
+ delete obj[key];
+ }
+ }
+ return obj;
+}
+
+// pkg/dist-src/merge.js
+function merge(defaults, route, options) {
+ if (typeof route === "string") {
+ let [method, url] = route.split(" ");
+ options = Object.assign(url ? { method, url } : { url: method }, options);
} else {
- obj[key] = value;
+ options = Object.assign({}, route);
+ }
+ options.headers = lowercaseKeys(options.headers);
+ removeUndefinedProperties(options);
+ removeUndefinedProperties(options.headers);
+ const mergedOptions = mergeDeep(defaults || {}, options);
+ if (options.url === "/graphql") {
+ if (defaults && defaults.mediaType.previews?.length) {
+ mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(
+ (preview) => !mergedOptions.mediaType.previews.includes(preview)
+ ).concat(mergedOptions.mediaType.previews);
+ }
+ mergedOptions.mediaType.previews = (mergedOptions.mediaType.previews || []).map((preview) => preview.replace(/-preview/, ""));
}
+ return mergedOptions;
+}
- return obj;
+// pkg/dist-src/util/add-query-parameters.js
+function addQueryParameters(url, parameters) {
+ const separator = /\?/.test(url) ? "&" : "?";
+ const names = Object.keys(parameters);
+ if (names.length === 0) {
+ return url;
+ }
+ return url + separator + names.map((name) => {
+ if (name === "q") {
+ return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+");
+ }
+ return `${name}=${encodeURIComponent(parameters[name])}`;
+ }).join("&");
}
-const Endpoints = {
- actions: {
- addCustomLabelsToSelfHostedRunnerForOrg: ["POST /orgs/{org}/actions/runners/{runner_id}/labels"],
- addCustomLabelsToSelfHostedRunnerForRepo: ["POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"],
- addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"],
- approveWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve"],
- cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"],
- createOrUpdateEnvironmentSecret: ["PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"],
- createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"],
- createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"],
- createRegistrationTokenForOrg: ["POST /orgs/{org}/actions/runners/registration-token"],
- createRegistrationTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/registration-token"],
- createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"],
- createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"],
- createWorkflowDispatch: ["POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"],
- deleteActionsCacheById: ["DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}"],
- deleteActionsCacheByKey: ["DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}"],
- deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"],
- deleteEnvironmentSecret: ["DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"],
- deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"],
- deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"],
- deleteSelfHostedRunnerFromOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}"],
- deleteSelfHostedRunnerFromRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"],
- deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"],
- deleteWorkflowRunLogs: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"],
- disableSelectedRepositoryGithubActionsOrganization: ["DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}"],
- disableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable"],
- downloadArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"],
- downloadJobLogsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"],
- downloadWorkflowRunAttemptLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs"],
- downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"],
- enableSelectedRepositoryGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories/{repository_id}"],
- enableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable"],
- getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"],
- getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"],
- getActionsCacheUsageByRepoForOrg: ["GET /orgs/{org}/actions/cache/usage-by-repository"],
- getActionsCacheUsageForEnterprise: ["GET /enterprises/{enterprise}/actions/cache/usage"],
- getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"],
- getAllowedActionsOrganization: ["GET /orgs/{org}/actions/permissions/selected-actions"],
- getAllowedActionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/selected-actions"],
- getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"],
- getEnvironmentPublicKey: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key"],
- getEnvironmentSecret: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"],
- getGithubActionsDefaultWorkflowPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/workflow"],
- getGithubActionsDefaultWorkflowPermissionsOrganization: ["GET /orgs/{org}/actions/permissions/workflow"],
- getGithubActionsDefaultWorkflowPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/workflow"],
- getGithubActionsPermissionsOrganization: ["GET /orgs/{org}/actions/permissions"],
- getGithubActionsPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions"],
- getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"],
- getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"],
- getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"],
- getPendingDeploymentsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"],
- getRepoPermissions: ["GET /repos/{owner}/{repo}/actions/permissions", {}, {
- renamed: ["actions", "getGithubActionsPermissionsRepository"]
- }],
- getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"],
- getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"],
- getReviewsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals"],
- getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"],
- getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"],
- getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"],
- getWorkflowAccessToRepository: ["GET /repos/{owner}/{repo}/actions/permissions/access"],
- getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"],
- getWorkflowRunAttempt: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}"],
- getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"],
- getWorkflowUsage: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"],
- listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"],
- listEnvironmentSecrets: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets"],
- listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"],
- listJobsForWorkflowRunAttempt: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs"],
- listLabelsForSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}/labels"],
- listLabelsForSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"],
- listOrgSecrets: ["GET /orgs/{org}/actions/secrets"],
- listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"],
- listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"],
- listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"],
- listRunnerApplicationsForRepo: ["GET /repos/{owner}/{repo}/actions/runners/downloads"],
- listSelectedReposForOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"],
- listSelectedRepositoriesEnabledGithubActionsOrganization: ["GET /orgs/{org}/actions/permissions/repositories"],
- listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"],
- listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"],
- listWorkflowRunArtifacts: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"],
- listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"],
- listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"],
- reRunJobForWorkflowRun: ["POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun"],
- reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"],
- reRunWorkflowFailedJobs: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs"],
- removeAllCustomLabelsFromSelfHostedRunnerForOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}/labels"],
- removeAllCustomLabelsFromSelfHostedRunnerForRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"],
- removeCustomLabelFromSelfHostedRunnerForOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}"],
- removeCustomLabelFromSelfHostedRunnerForRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}"],
- removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"],
- reviewPendingDeploymentsForRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"],
- setAllowedActionsOrganization: ["PUT /orgs/{org}/actions/permissions/selected-actions"],
- setAllowedActionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/selected-actions"],
- setCustomLabelsForSelfHostedRunnerForOrg: ["PUT /orgs/{org}/actions/runners/{runner_id}/labels"],
- setCustomLabelsForSelfHostedRunnerForRepo: ["PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"],
- setGithubActionsDefaultWorkflowPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/workflow"],
- setGithubActionsDefaultWorkflowPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions/workflow"],
- setGithubActionsDefaultWorkflowPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/workflow"],
- setGithubActionsPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions"],
- setGithubActionsPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions"],
- setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"],
- setSelectedRepositoriesEnabledGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories"],
- setWorkflowAccessToRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/access"]
- },
- activity: {
- checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"],
- deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"],
- deleteThreadSubscription: ["DELETE /notifications/threads/{thread_id}/subscription"],
- getFeeds: ["GET /feeds"],
- getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"],
- getThread: ["GET /notifications/threads/{thread_id}"],
- getThreadSubscriptionForAuthenticatedUser: ["GET /notifications/threads/{thread_id}/subscription"],
- listEventsForAuthenticatedUser: ["GET /users/{username}/events"],
- listNotificationsForAuthenticatedUser: ["GET /notifications"],
- listOrgEventsForAuthenticatedUser: ["GET /users/{username}/events/orgs/{org}"],
- listPublicEvents: ["GET /events"],
- listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"],
- listPublicEventsForUser: ["GET /users/{username}/events/public"],
- listPublicOrgEvents: ["GET /orgs/{org}/events"],
- listReceivedEventsForUser: ["GET /users/{username}/received_events"],
- listReceivedPublicEventsForUser: ["GET /users/{username}/received_events/public"],
- listRepoEvents: ["GET /repos/{owner}/{repo}/events"],
- listRepoNotificationsForAuthenticatedUser: ["GET /repos/{owner}/{repo}/notifications"],
- listReposStarredByAuthenticatedUser: ["GET /user/starred"],
- listReposStarredByUser: ["GET /users/{username}/starred"],
- listReposWatchedByUser: ["GET /users/{username}/subscriptions"],
- listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"],
- listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"],
- listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"],
- markNotificationsAsRead: ["PUT /notifications"],
- markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"],
- markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"],
- setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"],
- setThreadSubscription: ["PUT /notifications/threads/{thread_id}/subscription"],
- starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"],
- unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"]
- },
- apps: {
- addRepoToInstallation: ["PUT /user/installations/{installation_id}/repositories/{repository_id}", {}, {
- renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"]
- }],
- addRepoToInstallationForAuthenticatedUser: ["PUT /user/installations/{installation_id}/repositories/{repository_id}"],
- checkToken: ["POST /applications/{client_id}/token"],
- createFromManifest: ["POST /app-manifests/{code}/conversions"],
- createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens"],
- deleteAuthorization: ["DELETE /applications/{client_id}/grant"],
- deleteInstallation: ["DELETE /app/installations/{installation_id}"],
- deleteToken: ["DELETE /applications/{client_id}/token"],
- getAuthenticated: ["GET /app"],
- getBySlug: ["GET /apps/{app_slug}"],
- getInstallation: ["GET /app/installations/{installation_id}"],
- getOrgInstallation: ["GET /orgs/{org}/installation"],
- getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"],
- getSubscriptionPlanForAccount: ["GET /marketplace_listing/accounts/{account_id}"],
- getSubscriptionPlanForAccountStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}"],
- getUserInstallation: ["GET /users/{username}/installation"],
- getWebhookConfigForApp: ["GET /app/hook/config"],
- getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"],
- listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"],
- listAccountsForPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"],
- listInstallationReposForAuthenticatedUser: ["GET /user/installations/{installation_id}/repositories"],
- listInstallations: ["GET /app/installations"],
- listInstallationsForAuthenticatedUser: ["GET /user/installations"],
- listPlans: ["GET /marketplace_listing/plans"],
- listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"],
- listReposAccessibleToInstallation: ["GET /installation/repositories"],
- listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"],
- listSubscriptionsForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed"],
- listWebhookDeliveries: ["GET /app/hook/deliveries"],
- redeliverWebhookDelivery: ["POST /app/hook/deliveries/{delivery_id}/attempts"],
- removeRepoFromInstallation: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}", {}, {
- renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"]
- }],
- removeRepoFromInstallationForAuthenticatedUser: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}"],
- resetToken: ["PATCH /applications/{client_id}/token"],
- revokeInstallationAccessToken: ["DELETE /installation/token"],
- scopeToken: ["POST /applications/{client_id}/token/scoped"],
- suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"],
- unsuspendInstallation: ["DELETE /app/installations/{installation_id}/suspended"],
- updateWebhookConfigForApp: ["PATCH /app/hook/config"]
- },
- billing: {
- getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"],
- getGithubActionsBillingUser: ["GET /users/{username}/settings/billing/actions"],
- getGithubAdvancedSecurityBillingGhe: ["GET /enterprises/{enterprise}/settings/billing/advanced-security"],
- getGithubAdvancedSecurityBillingOrg: ["GET /orgs/{org}/settings/billing/advanced-security"],
- getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"],
- getGithubPackagesBillingUser: ["GET /users/{username}/settings/billing/packages"],
- getSharedStorageBillingOrg: ["GET /orgs/{org}/settings/billing/shared-storage"],
- getSharedStorageBillingUser: ["GET /users/{username}/settings/billing/shared-storage"]
- },
- checks: {
- create: ["POST /repos/{owner}/{repo}/check-runs"],
- createSuite: ["POST /repos/{owner}/{repo}/check-suites"],
- get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"],
- getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"],
- listAnnotations: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations"],
- listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"],
- listForSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs"],
- listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"],
- rerequestRun: ["POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest"],
- rerequestSuite: ["POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest"],
- setSuitesPreferences: ["PATCH /repos/{owner}/{repo}/check-suites/preferences"],
- update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"]
- },
- codeScanning: {
- deleteAnalysis: ["DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}"],
- getAlert: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", {}, {
- renamedParameters: {
- alert_id: "alert_number"
+// pkg/dist-src/util/extract-url-variable-names.js
+var urlVariableRegex = /\{[^}]+\}/g;
+function removeNonChars(variableName) {
+ return variableName.replace(/^\W+|\W+$/g, "").split(/,/);
+}
+function extractUrlVariableNames(url) {
+ const matches = url.match(urlVariableRegex);
+ if (!matches) {
+ return [];
+ }
+ return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);
+}
+
+// pkg/dist-src/util/omit.js
+function omit(object, keysToOmit) {
+ return Object.keys(object).filter((option) => !keysToOmit.includes(option)).reduce((obj, key) => {
+ obj[key] = object[key];
+ return obj;
+ }, {});
+}
+
+// pkg/dist-src/util/url-template.js
+function encodeReserved(str) {
+ return str.split(/(%[0-9A-Fa-f]{2})/g).map(function(part) {
+ if (!/%[0-9A-Fa-f]/.test(part)) {
+ part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]");
+ }
+ return part;
+ }).join("");
+}
+function encodeUnreserved(str) {
+ return encodeURIComponent(str).replace(/[!'()*]/g, function(c) {
+ return "%" + c.charCodeAt(0).toString(16).toUpperCase();
+ });
+}
+function encodeValue(operator, value, key) {
+ value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value);
+ if (key) {
+ return encodeUnreserved(key) + "=" + value;
+ } else {
+ return value;
+ }
+}
+function isDefined(value) {
+ return value !== void 0 && value !== null;
+}
+function isKeyOperator(operator) {
+ return operator === ";" || operator === "&" || operator === "?";
+}
+function getValues(context, operator, key, modifier) {
+ var value = context[key], result = [];
+ if (isDefined(value) && value !== "") {
+ if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") {
+ value = value.toString();
+ if (modifier && modifier !== "*") {
+ value = value.substring(0, parseInt(modifier, 10));
}
- }],
- getAnalysis: ["GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}"],
- getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"],
- listAlertInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"],
- listAlertsForOrg: ["GET /orgs/{org}/code-scanning/alerts"],
- listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"],
- listAlertsInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", {}, {
- renamed: ["codeScanning", "listAlertInstances"]
- }],
- listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"],
- updateAlert: ["PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}"],
- uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"]
- },
- codesOfConduct: {
- getAllCodesOfConduct: ["GET /codes_of_conduct"],
- getConductCode: ["GET /codes_of_conduct/{key}"]
- },
- codespaces: {
- addRepositoryForSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"],
- codespaceMachinesForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}/machines"],
- createForAuthenticatedUser: ["POST /user/codespaces"],
- createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"],
- createOrUpdateSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}"],
- createWithPrForAuthenticatedUser: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces"],
- createWithRepoForAuthenticatedUser: ["POST /repos/{owner}/{repo}/codespaces"],
- deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"],
- deleteFromOrganization: ["DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}"],
- deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"],
- deleteSecretForAuthenticatedUser: ["DELETE /user/codespaces/secrets/{secret_name}"],
- exportForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/exports"],
- getExportDetailsForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}/exports/{export_id}"],
- getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"],
- getPublicKeyForAuthenticatedUser: ["GET /user/codespaces/secrets/public-key"],
- getRepoPublicKey: ["GET /repos/{owner}/{repo}/codespaces/secrets/public-key"],
- getRepoSecret: ["GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"],
- getSecretForAuthenticatedUser: ["GET /user/codespaces/secrets/{secret_name}"],
- listDevcontainersInRepositoryForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces/devcontainers"],
- listForAuthenticatedUser: ["GET /user/codespaces"],
- listInOrganization: ["GET /orgs/{org}/codespaces", {}, {
- renamedParameters: {
- org_id: "org"
+ result.push(
+ encodeValue(operator, value, isKeyOperator(operator) ? key : "")
+ );
+ } else {
+ if (modifier === "*") {
+ if (Array.isArray(value)) {
+ value.filter(isDefined).forEach(function(value2) {
+ result.push(
+ encodeValue(operator, value2, isKeyOperator(operator) ? key : "")
+ );
+ });
+ } else {
+ Object.keys(value).forEach(function(k) {
+ if (isDefined(value[k])) {
+ result.push(encodeValue(operator, value[k], k));
+ }
+ });
+ }
+ } else {
+ const tmp = [];
+ if (Array.isArray(value)) {
+ value.filter(isDefined).forEach(function(value2) {
+ tmp.push(encodeValue(operator, value2));
+ });
+ } else {
+ Object.keys(value).forEach(function(k) {
+ if (isDefined(value[k])) {
+ tmp.push(encodeUnreserved(k));
+ tmp.push(encodeValue(operator, value[k].toString()));
+ }
+ });
+ }
+ if (isKeyOperator(operator)) {
+ result.push(encodeUnreserved(key) + "=" + tmp.join(","));
+ } else if (tmp.length !== 0) {
+ result.push(tmp.join(","));
+ }
}
- }],
- listInRepositoryForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces"],
- listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"],
- listRepositoriesForSecretForAuthenticatedUser: ["GET /user/codespaces/secrets/{secret_name}/repositories"],
- listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"],
- removeRepositoryForSecretForAuthenticatedUser: ["DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"],
- repoMachinesForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces/machines"],
- setRepositoriesForSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}/repositories"],
- startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"],
- stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"],
- stopInOrganization: ["POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop"],
- updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"]
- },
- dependabot: {
- addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"],
- createOrUpdateOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}"],
- createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"],
- deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"],
- deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"],
- getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"],
- getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"],
- getRepoPublicKey: ["GET /repos/{owner}/{repo}/dependabot/secrets/public-key"],
- getRepoSecret: ["GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"],
- listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"],
- listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"],
- listSelectedReposForOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories"],
- removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"],
- setSelectedReposForOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories"]
- },
- dependencyGraph: {
- createRepositorySnapshot: ["POST /repos/{owner}/{repo}/dependency-graph/snapshots"],
- diffRange: ["GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}"]
- },
- emojis: {
- get: ["GET /emojis"]
- },
- enterpriseAdmin: {
- addCustomLabelsToSelfHostedRunnerForEnterprise: ["POST /enterprises/{enterprise}/actions/runners/{runner_id}/labels"],
- disableSelectedOrganizationGithubActionsEnterprise: ["DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"],
- enableSelectedOrganizationGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"],
- getAllowedActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/selected-actions"],
- getGithubActionsPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions"],
- getServerStatistics: ["GET /enterprise-installation/{enterprise_or_org}/server-statistics"],
- listLabelsForSelfHostedRunnerForEnterprise: ["GET /enterprises/{enterprise}/actions/runners/{runner_id}/labels"],
- listSelectedOrganizationsEnabledGithubActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/organizations"],
- removeAllCustomLabelsFromSelfHostedRunnerForEnterprise: ["DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels"],
- removeCustomLabelFromSelfHostedRunnerForEnterprise: ["DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels/{name}"],
- setAllowedActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/selected-actions"],
- setCustomLabelsForSelfHostedRunnerForEnterprise: ["PUT /enterprises/{enterprise}/actions/runners/{runner_id}/labels"],
- setGithubActionsPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions"],
- setSelectedOrganizationsEnabledGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations"]
- },
- gists: {
- checkIsStarred: ["GET /gists/{gist_id}/star"],
- create: ["POST /gists"],
- createComment: ["POST /gists/{gist_id}/comments"],
- delete: ["DELETE /gists/{gist_id}"],
- deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"],
- fork: ["POST /gists/{gist_id}/forks"],
- get: ["GET /gists/{gist_id}"],
- getComment: ["GET /gists/{gist_id}/comments/{comment_id}"],
- getRevision: ["GET /gists/{gist_id}/{sha}"],
- list: ["GET /gists"],
- listComments: ["GET /gists/{gist_id}/comments"],
- listCommits: ["GET /gists/{gist_id}/commits"],
- listForUser: ["GET /users/{username}/gists"],
- listForks: ["GET /gists/{gist_id}/forks"],
- listPublic: ["GET /gists/public"],
- listStarred: ["GET /gists/starred"],
- star: ["PUT /gists/{gist_id}/star"],
- unstar: ["DELETE /gists/{gist_id}/star"],
- update: ["PATCH /gists/{gist_id}"],
- updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"]
- },
- git: {
- createBlob: ["POST /repos/{owner}/{repo}/git/blobs"],
- createCommit: ["POST /repos/{owner}/{repo}/git/commits"],
- createRef: ["POST /repos/{owner}/{repo}/git/refs"],
- createTag: ["POST /repos/{owner}/{repo}/git/tags"],
- createTree: ["POST /repos/{owner}/{repo}/git/trees"],
- deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"],
- getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"],
- getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"],
- getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"],
- getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"],
- getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"],
- listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"],
- updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"]
- },
- gitignore: {
- getAllTemplates: ["GET /gitignore/templates"],
- getTemplate: ["GET /gitignore/templates/{name}"]
- },
- interactions: {
- getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"],
- getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"],
- getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"],
- getRestrictionsForYourPublicRepos: ["GET /user/interaction-limits", {}, {
- renamed: ["interactions", "getRestrictionsForAuthenticatedUser"]
- }],
- removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"],
- removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"],
- removeRestrictionsForRepo: ["DELETE /repos/{owner}/{repo}/interaction-limits"],
- removeRestrictionsForYourPublicRepos: ["DELETE /user/interaction-limits", {}, {
- renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"]
- }],
- setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"],
- setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"],
- setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"],
- setRestrictionsForYourPublicRepos: ["PUT /user/interaction-limits", {}, {
- renamed: ["interactions", "setRestrictionsForAuthenticatedUser"]
- }]
- },
- issues: {
- addAssignees: ["POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"],
- addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"],
- checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"],
- create: ["POST /repos/{owner}/{repo}/issues"],
- createComment: ["POST /repos/{owner}/{repo}/issues/{issue_number}/comments"],
- createLabel: ["POST /repos/{owner}/{repo}/labels"],
- createMilestone: ["POST /repos/{owner}/{repo}/milestones"],
- deleteComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"],
- deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"],
- deleteMilestone: ["DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"],
- get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"],
- getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"],
- getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"],
- getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"],
- getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"],
- list: ["GET /issues"],
- listAssignees: ["GET /repos/{owner}/{repo}/assignees"],
- listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"],
- listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"],
- listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"],
- listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"],
- listEventsForTimeline: ["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline"],
- listForAuthenticatedUser: ["GET /user/issues"],
- listForOrg: ["GET /orgs/{org}/issues"],
- listForRepo: ["GET /repos/{owner}/{repo}/issues"],
- listLabelsForMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"],
- listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"],
- listLabelsOnIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"],
- listMilestones: ["GET /repos/{owner}/{repo}/milestones"],
- lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"],
- removeAllLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"],
- removeAssignees: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"],
- removeLabel: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"],
- setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"],
- unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"],
- update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"],
- updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"],
- updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"],
- updateMilestone: ["PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"]
- },
- licenses: {
- get: ["GET /licenses/{license}"],
- getAllCommonlyUsed: ["GET /licenses"],
- getForRepo: ["GET /repos/{owner}/{repo}/license"]
- },
- markdown: {
- render: ["POST /markdown"],
- renderRaw: ["POST /markdown/raw", {
- headers: {
- "content-type": "text/plain; charset=utf-8"
+ }
+ } else {
+ if (operator === ";") {
+ if (isDefined(value)) {
+ result.push(encodeUnreserved(key));
}
- }]
- },
- meta: {
- get: ["GET /meta"],
- getOctocat: ["GET /octocat"],
- getZen: ["GET /zen"],
- root: ["GET /"]
- },
- migrations: {
- cancelImport: ["DELETE /repos/{owner}/{repo}/import"],
- deleteArchiveForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/archive"],
- deleteArchiveForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/archive"],
- downloadArchiveForOrg: ["GET /orgs/{org}/migrations/{migration_id}/archive"],
- getArchiveForAuthenticatedUser: ["GET /user/migrations/{migration_id}/archive"],
- getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"],
- getImportStatus: ["GET /repos/{owner}/{repo}/import"],
- getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"],
- getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"],
- getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"],
- listForAuthenticatedUser: ["GET /user/migrations"],
- listForOrg: ["GET /orgs/{org}/migrations"],
- listReposForAuthenticatedUser: ["GET /user/migrations/{migration_id}/repositories"],
- listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories"],
- listReposForUser: ["GET /user/migrations/{migration_id}/repositories", {}, {
- renamed: ["migrations", "listReposForAuthenticatedUser"]
- }],
- mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"],
- setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"],
- startForAuthenticatedUser: ["POST /user/migrations"],
- startForOrg: ["POST /orgs/{org}/migrations"],
- startImport: ["PUT /repos/{owner}/{repo}/import"],
- unlockRepoForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock"],
- unlockRepoForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock"],
- updateImport: ["PATCH /repos/{owner}/{repo}/import"]
- },
- orgs: {
- blockUser: ["PUT /orgs/{org}/blocks/{username}"],
- cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"],
- checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"],
- checkMembershipForUser: ["GET /orgs/{org}/members/{username}"],
- checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"],
- convertMemberToOutsideCollaborator: ["PUT /orgs/{org}/outside_collaborators/{username}"],
- createInvitation: ["POST /orgs/{org}/invitations"],
- createWebhook: ["POST /orgs/{org}/hooks"],
- deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"],
- get: ["GET /orgs/{org}"],
- getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"],
- getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"],
- getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"],
- getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"],
- getWebhookDelivery: ["GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}"],
- list: ["GET /organizations"],
- listAppInstallations: ["GET /orgs/{org}/installations"],
- listBlockedUsers: ["GET /orgs/{org}/blocks"],
- listCustomRoles: ["GET /organizations/{organization_id}/custom_roles"],
- listFailedInvitations: ["GET /orgs/{org}/failed_invitations"],
- listForAuthenticatedUser: ["GET /user/orgs"],
- listForUser: ["GET /users/{username}/orgs"],
- listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"],
- listMembers: ["GET /orgs/{org}/members"],
- listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"],
- listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"],
- listPendingInvitations: ["GET /orgs/{org}/invitations"],
- listPublicMembers: ["GET /orgs/{org}/public_members"],
- listWebhookDeliveries: ["GET /orgs/{org}/hooks/{hook_id}/deliveries"],
- listWebhooks: ["GET /orgs/{org}/hooks"],
- pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"],
- redeliverWebhookDelivery: ["POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"],
- removeMember: ["DELETE /orgs/{org}/members/{username}"],
- removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"],
- removeOutsideCollaborator: ["DELETE /orgs/{org}/outside_collaborators/{username}"],
- removePublicMembershipForAuthenticatedUser: ["DELETE /orgs/{org}/public_members/{username}"],
- setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"],
- setPublicMembershipForAuthenticatedUser: ["PUT /orgs/{org}/public_members/{username}"],
- unblockUser: ["DELETE /orgs/{org}/blocks/{username}"],
- update: ["PATCH /orgs/{org}"],
- updateMembershipForAuthenticatedUser: ["PATCH /user/memberships/orgs/{org}"],
- updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"],
- updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"]
- },
- packages: {
- deletePackageForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}"],
- deletePackageForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}"],
- deletePackageForUser: ["DELETE /users/{username}/packages/{package_type}/{package_name}"],
- deletePackageVersionForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}"],
- deletePackageVersionForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"],
- deletePackageVersionForUser: ["DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"],
- getAllPackageVersionsForAPackageOwnedByAnOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions", {}, {
- renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"]
- }],
- getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions", {}, {
- renamed: ["packages", "getAllPackageVersionsForPackageOwnedByAuthenticatedUser"]
- }],
- getAllPackageVersionsForPackageOwnedByAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions"],
- getAllPackageVersionsForPackageOwnedByOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions"],
- getAllPackageVersionsForPackageOwnedByUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions"],
- getPackageForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}"],
- getPackageForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}"],
- getPackageForUser: ["GET /users/{username}/packages/{package_type}/{package_name}"],
- getPackageVersionForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}"],
- getPackageVersionForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"],
- getPackageVersionForUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"],
- listPackagesForAuthenticatedUser: ["GET /user/packages"],
- listPackagesForOrganization: ["GET /orgs/{org}/packages"],
- listPackagesForUser: ["GET /users/{username}/packages"],
- restorePackageForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/restore{?token}"],
- restorePackageForOrg: ["POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}"],
- restorePackageForUser: ["POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}"],
- restorePackageVersionForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"],
- restorePackageVersionForOrg: ["POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"],
- restorePackageVersionForUser: ["POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"]
- },
- projects: {
- addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}"],
- createCard: ["POST /projects/columns/{column_id}/cards"],
- createColumn: ["POST /projects/{project_id}/columns"],
- createForAuthenticatedUser: ["POST /user/projects"],
- createForOrg: ["POST /orgs/{org}/projects"],
- createForRepo: ["POST /repos/{owner}/{repo}/projects"],
- delete: ["DELETE /projects/{project_id}"],
- deleteCard: ["DELETE /projects/columns/cards/{card_id}"],
- deleteColumn: ["DELETE /projects/columns/{column_id}"],
- get: ["GET /projects/{project_id}"],
- getCard: ["GET /projects/columns/cards/{card_id}"],
- getColumn: ["GET /projects/columns/{column_id}"],
- getPermissionForUser: ["GET /projects/{project_id}/collaborators/{username}/permission"],
- listCards: ["GET /projects/columns/{column_id}/cards"],
- listCollaborators: ["GET /projects/{project_id}/collaborators"],
- listColumns: ["GET /projects/{project_id}/columns"],
- listForOrg: ["GET /orgs/{org}/projects"],
- listForRepo: ["GET /repos/{owner}/{repo}/projects"],
- listForUser: ["GET /users/{username}/projects"],
- moveCard: ["POST /projects/columns/cards/{card_id}/moves"],
- moveColumn: ["POST /projects/columns/{column_id}/moves"],
- removeCollaborator: ["DELETE /projects/{project_id}/collaborators/{username}"],
- update: ["PATCH /projects/{project_id}"],
- updateCard: ["PATCH /projects/columns/cards/{card_id}"],
- updateColumn: ["PATCH /projects/columns/{column_id}"]
- },
- pulls: {
- checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"],
- create: ["POST /repos/{owner}/{repo}/pulls"],
- createReplyForReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"],
- createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"],
- createReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"],
- deletePendingReview: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"],
- deleteReviewComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"],
- dismissReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"],
- get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"],
- getReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"],
- getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"],
- list: ["GET /repos/{owner}/{repo}/pulls"],
- listCommentsForReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"],
- listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"],
- listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"],
- listRequestedReviewers: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"],
- listReviewComments: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"],
- listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"],
- listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"],
- merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"],
- removeRequestedReviewers: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"],
- requestReviewers: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"],
- submitReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"],
- update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"],
- updateBranch: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch"],
- updateReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"],
- updateReviewComment: ["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"]
- },
- rateLimit: {
- get: ["GET /rate_limit"]
- },
- reactions: {
- createForCommitComment: ["POST /repos/{owner}/{repo}/comments/{comment_id}/reactions"],
- createForIssue: ["POST /repos/{owner}/{repo}/issues/{issue_number}/reactions"],
- createForIssueComment: ["POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"],
- createForPullRequestReviewComment: ["POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"],
- createForRelease: ["POST /repos/{owner}/{repo}/releases/{release_id}/reactions"],
- createForTeamDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"],
- createForTeamDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"],
- deleteForCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}"],
- deleteForIssue: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}"],
- deleteForIssueComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}"],
- deleteForPullRequestComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}"],
- deleteForRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}"],
- deleteForTeamDiscussion: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}"],
- deleteForTeamDiscussionComment: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}"],
- listForCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions"],
- listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"],
- listForIssueComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"],
- listForPullRequestReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"],
- listForRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}/reactions"],
- listForTeamDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"],
- listForTeamDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"]
- },
- repos: {
- acceptInvitation: ["PATCH /user/repository_invitations/{invitation_id}", {}, {
- renamed: ["repos", "acceptInvitationForAuthenticatedUser"]
- }],
- acceptInvitationForAuthenticatedUser: ["PATCH /user/repository_invitations/{invitation_id}"],
- addAppAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, {
- mapToData: "apps"
- }],
- addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"],
- addStatusCheckContexts: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, {
- mapToData: "contexts"
- }],
- addTeamAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, {
- mapToData: "teams"
- }],
- addUserAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, {
- mapToData: "users"
- }],
- checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"],
- checkVulnerabilityAlerts: ["GET /repos/{owner}/{repo}/vulnerability-alerts"],
- codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"],
- compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"],
- compareCommitsWithBasehead: ["GET /repos/{owner}/{repo}/compare/{basehead}"],
- createAutolink: ["POST /repos/{owner}/{repo}/autolinks"],
- createCommitComment: ["POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"],
- createCommitSignatureProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"],
- createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"],
- createDeployKey: ["POST /repos/{owner}/{repo}/keys"],
- createDeployment: ["POST /repos/{owner}/{repo}/deployments"],
- createDeploymentStatus: ["POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"],
- createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"],
- createForAuthenticatedUser: ["POST /user/repos"],
- createFork: ["POST /repos/{owner}/{repo}/forks"],
- createInOrg: ["POST /orgs/{org}/repos"],
- createOrUpdateEnvironment: ["PUT /repos/{owner}/{repo}/environments/{environment_name}"],
- createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"],
- createPagesSite: ["POST /repos/{owner}/{repo}/pages"],
- createRelease: ["POST /repos/{owner}/{repo}/releases"],
- createTagProtection: ["POST /repos/{owner}/{repo}/tags/protection"],
- createUsingTemplate: ["POST /repos/{template_owner}/{template_repo}/generate"],
- createWebhook: ["POST /repos/{owner}/{repo}/hooks"],
- declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}", {}, {
- renamed: ["repos", "declineInvitationForAuthenticatedUser"]
- }],
- declineInvitationForAuthenticatedUser: ["DELETE /user/repository_invitations/{invitation_id}"],
- delete: ["DELETE /repos/{owner}/{repo}"],
- deleteAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"],
- deleteAdminBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"],
- deleteAnEnvironment: ["DELETE /repos/{owner}/{repo}/environments/{environment_name}"],
- deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"],
- deleteBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection"],
- deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"],
- deleteCommitSignatureProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"],
- deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"],
- deleteDeployment: ["DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"],
- deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"],
- deleteInvitation: ["DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"],
- deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"],
- deletePullRequestReviewProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"],
- deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"],
- deleteReleaseAsset: ["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"],
- deleteTagProtection: ["DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}"],
- deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"],
- disableAutomatedSecurityFixes: ["DELETE /repos/{owner}/{repo}/automated-security-fixes"],
- disableLfsForRepo: ["DELETE /repos/{owner}/{repo}/lfs"],
- disableVulnerabilityAlerts: ["DELETE /repos/{owner}/{repo}/vulnerability-alerts"],
- downloadArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}", {}, {
- renamed: ["repos", "downloadZipballArchive"]
- }],
- downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"],
- downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"],
- enableAutomatedSecurityFixes: ["PUT /repos/{owner}/{repo}/automated-security-fixes"],
- enableLfsForRepo: ["PUT /repos/{owner}/{repo}/lfs"],
- enableVulnerabilityAlerts: ["PUT /repos/{owner}/{repo}/vulnerability-alerts"],
- generateReleaseNotes: ["POST /repos/{owner}/{repo}/releases/generate-notes"],
- get: ["GET /repos/{owner}/{repo}"],
- getAccessRestrictions: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"],
- getAdminBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"],
- getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"],
- getAllStatusCheckContexts: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"],
- getAllTopics: ["GET /repos/{owner}/{repo}/topics"],
- getAppsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"],
- getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"],
- getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"],
- getBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection"],
- getClones: ["GET /repos/{owner}/{repo}/traffic/clones"],
- getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"],
- getCollaboratorPermissionLevel: ["GET /repos/{owner}/{repo}/collaborators/{username}/permission"],
- getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"],
- getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"],
- getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"],
- getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"],
- getCommitSignatureProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"],
- getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"],
- getContent: ["GET /repos/{owner}/{repo}/contents/{path}"],
- getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"],
- getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"],
- getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"],
- getDeploymentStatus: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"],
- getEnvironment: ["GET /repos/{owner}/{repo}/environments/{environment_name}"],
- getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"],
- getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"],
- getPages: ["GET /repos/{owner}/{repo}/pages"],
- getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"],
- getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"],
- getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"],
- getPullRequestReviewProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"],
- getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"],
- getReadme: ["GET /repos/{owner}/{repo}/readme"],
- getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"],
- getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"],
- getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"],
- getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"],
- getStatusChecksProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"],
- getTeamsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"],
- getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"],
- getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"],
- getUsersWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"],
- getViews: ["GET /repos/{owner}/{repo}/traffic/views"],
- getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"],
- getWebhookConfigForRepo: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/config"],
- getWebhookDelivery: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}"],
- listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"],
- listBranches: ["GET /repos/{owner}/{repo}/branches"],
- listBranchesForHeadCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head"],
- listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"],
- listCommentsForCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"],
- listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"],
- listCommitStatusesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/statuses"],
- listCommits: ["GET /repos/{owner}/{repo}/commits"],
- listContributors: ["GET /repos/{owner}/{repo}/contributors"],
- listDeployKeys: ["GET /repos/{owner}/{repo}/keys"],
- listDeploymentStatuses: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"],
- listDeployments: ["GET /repos/{owner}/{repo}/deployments"],
- listForAuthenticatedUser: ["GET /user/repos"],
- listForOrg: ["GET /orgs/{org}/repos"],
- listForUser: ["GET /users/{username}/repos"],
- listForks: ["GET /repos/{owner}/{repo}/forks"],
- listInvitations: ["GET /repos/{owner}/{repo}/invitations"],
- listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"],
- listLanguages: ["GET /repos/{owner}/{repo}/languages"],
- listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"],
- listPublic: ["GET /repositories"],
- listPullRequestsAssociatedWithCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls"],
- listReleaseAssets: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets"],
- listReleases: ["GET /repos/{owner}/{repo}/releases"],
- listTagProtection: ["GET /repos/{owner}/{repo}/tags/protection"],
- listTags: ["GET /repos/{owner}/{repo}/tags"],
- listTeams: ["GET /repos/{owner}/{repo}/teams"],
- listWebhookDeliveries: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries"],
- listWebhooks: ["GET /repos/{owner}/{repo}/hooks"],
- merge: ["POST /repos/{owner}/{repo}/merges"],
- mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"],
- pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"],
- redeliverWebhookDelivery: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"],
- removeAppAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, {
- mapToData: "apps"
- }],
- removeCollaborator: ["DELETE /repos/{owner}/{repo}/collaborators/{username}"],
- removeStatusCheckContexts: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, {
- mapToData: "contexts"
- }],
- removeStatusCheckProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"],
- removeTeamAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, {
- mapToData: "teams"
- }],
- removeUserAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, {
- mapToData: "users"
- }],
- renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"],
- replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"],
- requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"],
- setAdminBranchProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"],
- setAppAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, {
- mapToData: "apps"
- }],
- setStatusCheckContexts: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, {
- mapToData: "contexts"
- }],
- setTeamAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, {
- mapToData: "teams"
- }],
- setUserAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, {
- mapToData: "users"
- }],
- testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"],
- transfer: ["POST /repos/{owner}/{repo}/transfer"],
- update: ["PATCH /repos/{owner}/{repo}"],
- updateBranchProtection: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection"],
- updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"],
- updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"],
- updateInvitation: ["PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"],
- updatePullRequestReviewProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"],
- updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"],
- updateReleaseAsset: ["PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"],
- updateStatusCheckPotection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, {
- renamed: ["repos", "updateStatusCheckProtection"]
- }],
- updateStatusCheckProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"],
- updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"],
- updateWebhookConfigForRepo: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config"],
- uploadReleaseAsset: ["POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", {
- baseUrl: "https://uploads.github.com"
- }]
- },
- search: {
- code: ["GET /search/code"],
- commits: ["GET /search/commits"],
- issuesAndPullRequests: ["GET /search/issues"],
- labels: ["GET /search/labels"],
- repos: ["GET /search/repositories"],
- topics: ["GET /search/topics"],
- users: ["GET /search/users"]
- },
- secretScanning: {
- getAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"],
- listAlertsForEnterprise: ["GET /enterprises/{enterprise}/secret-scanning/alerts"],
- listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"],
- listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"],
- listLocationsForAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations"],
- updateAlert: ["PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"]
- },
- teams: {
- addOrUpdateMembershipForUserInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"],
- addOrUpdateProjectPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}"],
- addOrUpdateRepoPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"],
- checkPermissionsForProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}"],
- checkPermissionsForRepoInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"],
- create: ["POST /orgs/{org}/teams"],
- createDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"],
- createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"],
- deleteDiscussionCommentInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"],
- deleteDiscussionInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"],
- deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"],
- getByName: ["GET /orgs/{org}/teams/{team_slug}"],
- getDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"],
- getDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"],
- getMembershipForUserInOrg: ["GET /orgs/{org}/teams/{team_slug}/memberships/{username}"],
- list: ["GET /orgs/{org}/teams"],
- listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"],
- listDiscussionCommentsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"],
- listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"],
- listForAuthenticatedUser: ["GET /user/teams"],
- listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"],
- listPendingInvitationsInOrg: ["GET /orgs/{org}/teams/{team_slug}/invitations"],
- listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"],
- listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"],
- removeMembershipForUserInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"],
- removeProjectInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"],
- removeRepoInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"],
- updateDiscussionCommentInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"],
- updateDiscussionInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"],
- updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"]
- },
- users: {
- addEmailForAuthenticated: ["POST /user/emails", {}, {
- renamed: ["users", "addEmailForAuthenticatedUser"]
- }],
- addEmailForAuthenticatedUser: ["POST /user/emails"],
- block: ["PUT /user/blocks/{username}"],
- checkBlocked: ["GET /user/blocks/{username}"],
- checkFollowingForUser: ["GET /users/{username}/following/{target_user}"],
- checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"],
- createGpgKeyForAuthenticated: ["POST /user/gpg_keys", {}, {
- renamed: ["users", "createGpgKeyForAuthenticatedUser"]
- }],
- createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"],
- createPublicSshKeyForAuthenticated: ["POST /user/keys", {}, {
- renamed: ["users", "createPublicSshKeyForAuthenticatedUser"]
- }],
- createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"],
- deleteEmailForAuthenticated: ["DELETE /user/emails", {}, {
- renamed: ["users", "deleteEmailForAuthenticatedUser"]
- }],
- deleteEmailForAuthenticatedUser: ["DELETE /user/emails"],
- deleteGpgKeyForAuthenticated: ["DELETE /user/gpg_keys/{gpg_key_id}", {}, {
- renamed: ["users", "deleteGpgKeyForAuthenticatedUser"]
- }],
- deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"],
- deletePublicSshKeyForAuthenticated: ["DELETE /user/keys/{key_id}", {}, {
- renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"]
- }],
- deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"],
- follow: ["PUT /user/following/{username}"],
- getAuthenticated: ["GET /user"],
- getByUsername: ["GET /users/{username}"],
- getContextForUser: ["GET /users/{username}/hovercard"],
- getGpgKeyForAuthenticated: ["GET /user/gpg_keys/{gpg_key_id}", {}, {
- renamed: ["users", "getGpgKeyForAuthenticatedUser"]
- }],
- getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"],
- getPublicSshKeyForAuthenticated: ["GET /user/keys/{key_id}", {}, {
- renamed: ["users", "getPublicSshKeyForAuthenticatedUser"]
- }],
- getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"],
- list: ["GET /users"],
- listBlockedByAuthenticated: ["GET /user/blocks", {}, {
- renamed: ["users", "listBlockedByAuthenticatedUser"]
- }],
- listBlockedByAuthenticatedUser: ["GET /user/blocks"],
- listEmailsForAuthenticated: ["GET /user/emails", {}, {
- renamed: ["users", "listEmailsForAuthenticatedUser"]
- }],
- listEmailsForAuthenticatedUser: ["GET /user/emails"],
- listFollowedByAuthenticated: ["GET /user/following", {}, {
- renamed: ["users", "listFollowedByAuthenticatedUser"]
- }],
- listFollowedByAuthenticatedUser: ["GET /user/following"],
- listFollowersForAuthenticatedUser: ["GET /user/followers"],
- listFollowersForUser: ["GET /users/{username}/followers"],
- listFollowingForUser: ["GET /users/{username}/following"],
- listGpgKeysForAuthenticated: ["GET /user/gpg_keys", {}, {
- renamed: ["users", "listGpgKeysForAuthenticatedUser"]
- }],
- listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"],
- listGpgKeysForUser: ["GET /users/{username}/gpg_keys"],
- listPublicEmailsForAuthenticated: ["GET /user/public_emails", {}, {
- renamed: ["users", "listPublicEmailsForAuthenticatedUser"]
- }],
- listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"],
- listPublicKeysForUser: ["GET /users/{username}/keys"],
- listPublicSshKeysForAuthenticated: ["GET /user/keys", {}, {
- renamed: ["users", "listPublicSshKeysForAuthenticatedUser"]
- }],
- listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"],
- setPrimaryEmailVisibilityForAuthenticated: ["PATCH /user/email/visibility", {}, {
- renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"]
- }],
- setPrimaryEmailVisibilityForAuthenticatedUser: ["PATCH /user/email/visibility"],
- unblock: ["DELETE /user/blocks/{username}"],
- unfollow: ["DELETE /user/following/{username}"],
- updateAuthenticated: ["PATCH /user"]
- }
-};
-
-const VERSION = "5.16.2";
-
-function endpointsToMethods(octokit, endpointsMap) {
- const newMethods = {};
-
- for (const [scope, endpoints] of Object.entries(endpointsMap)) {
- for (const [methodName, endpoint] of Object.entries(endpoints)) {
- const [route, defaults, decorations] = endpoint;
- const [method, url] = route.split(/ /);
- const endpointDefaults = Object.assign({
- method,
- url
- }, defaults);
-
- if (!newMethods[scope]) {
- newMethods[scope] = {};
- }
-
- const scopeMethods = newMethods[scope];
-
- if (decorations) {
- scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations);
- continue;
- }
-
- scopeMethods[methodName] = octokit.request.defaults(endpointDefaults);
+ } else if (value === "" && (operator === "&" || operator === "?")) {
+ result.push(encodeUnreserved(key) + "=");
+ } else if (value === "") {
+ result.push("");
}
}
-
- return newMethods;
+ return result;
}
-
-function decorate(octokit, scope, methodName, defaults, decorations) {
- const requestWithDefaults = octokit.request.defaults(defaults);
- /* istanbul ignore next */
-
- function withDecorations(...args) {
- // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
- let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData`
-
- if (decorations.mapToData) {
- options = Object.assign({}, options, {
- data: options[decorations.mapToData],
- [decorations.mapToData]: undefined
- });
- return requestWithDefaults(options);
- }
-
- if (decorations.renamed) {
- const [newScope, newMethodName] = decorations.renamed;
- octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);
- }
-
- if (decorations.deprecated) {
- octokit.log.warn(decorations.deprecated);
- }
-
- if (decorations.renamedParameters) {
- // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
- const options = requestWithDefaults.endpoint.merge(...args);
-
- for (const [name, alias] of Object.entries(decorations.renamedParameters)) {
- if (name in options) {
- octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`);
-
- if (!(alias in options)) {
- options[alias] = options[name];
+function parseUrl(template) {
+ return {
+ expand: expand.bind(null, template)
+ };
+}
+function expand(template, context) {
+ var operators = ["+", "#", ".", "/", ";", "?", "&"];
+ return template.replace(
+ /\{([^\{\}]+)\}|([^\{\}]+)/g,
+ function(_, expression, literal) {
+ if (expression) {
+ let operator = "";
+ const values = [];
+ if (operators.indexOf(expression.charAt(0)) !== -1) {
+ operator = expression.charAt(0);
+ expression = expression.substr(1);
+ }
+ expression.split(/,/g).forEach(function(variable) {
+ var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable);
+ values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));
+ });
+ if (operator && operator !== "+") {
+ var separator = ",";
+ if (operator === "?") {
+ separator = "&";
+ } else if (operator !== "#") {
+ separator = operator;
}
-
- delete options[name];
+ return (values.length !== 0 ? operator : "") + values.join(separator);
+ } else {
+ return values.join(",");
}
+ } else {
+ return encodeReserved(literal);
}
+ }
+ );
+}
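
A minimal sketch of how the URI-template expansion above behaves, assuming the `parseUrl`/`expand` helpers defined in this bundled module (the template and parameter values are purely illustrative):

// Illustrative only; parseUrl/expand are the in-module helpers defined above.
const expanded = parseUrl("/repos/{owner}/{repo}/issues{?per_page,page}")
  .expand({ owner: "octocat", repo: "hello-world", per_page: 2, page: 1 });
// expanded === "/repos/octocat/hello-world/issues?per_page=2&page=1"
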
- return requestWithDefaults(options);
- } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
-
-
- return requestWithDefaults(...args);
+// pkg/dist-src/parse.js
+function parse(options) {
+ let method = options.method.toUpperCase();
+ let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}");
+ let headers = Object.assign({}, options.headers);
+ let body;
+ let parameters = omit(options, [
+ "method",
+ "baseUrl",
+ "url",
+ "headers",
+ "request",
+ "mediaType"
+ ]);
+ const urlVariableNames = extractUrlVariableNames(url);
+ url = parseUrl(url).expand(parameters);
+ if (!/^http/.test(url)) {
+ url = options.baseUrl + url;
}
-
- return Object.assign(withDecorations, requestWithDefaults);
+ const omittedParameters = Object.keys(options).filter((option) => urlVariableNames.includes(option)).concat("baseUrl");
+ const remainingParameters = omit(parameters, omittedParameters);
+ const isBinaryRequest = /application\/octet-stream/i.test(headers.accept);
+ if (!isBinaryRequest) {
+ if (options.mediaType.format) {
+ headers.accept = headers.accept.split(/,/).map(
+ (format) => format.replace(
+ /application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/,
+ `application/vnd$1$2.${options.mediaType.format}`
+ )
+ ).join(",");
+ }
+ if (url.endsWith("/graphql")) {
+ if (options.mediaType.previews?.length) {
+ const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || [];
+ headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map((preview) => {
+ const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json";
+ return `application/vnd.github.${preview}-preview${format}`;
+ }).join(",");
+ }
+ }
+ }
+ if (["GET", "HEAD"].includes(method)) {
+ url = addQueryParameters(url, remainingParameters);
+ } else {
+ if ("data" in remainingParameters) {
+ body = remainingParameters.data;
+ } else {
+ if (Object.keys(remainingParameters).length) {
+ body = remainingParameters;
+ }
+ }
+ }
+ if (!headers["content-type"] && typeof body !== "undefined") {
+ headers["content-type"] = "application/json; charset=utf-8";
+ }
+ if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") {
+ body = "";
+ }
+ return Object.assign(
+ { method, url, headers },
+ typeof body !== "undefined" ? { body } : null,
+ options.request ? { request: options.request } : null
+ );
}
-function restEndpointMethods(octokit) {
- const api = endpointsToMethods(octokit, Endpoints);
- return {
- rest: api
- };
+// pkg/dist-src/endpoint-with-defaults.js
+function endpointWithDefaults(defaults, route, options) {
+ return parse(merge(defaults, route, options));
}
-restEndpointMethods.VERSION = VERSION;
-function legacyRestEndpointMethods(octokit) {
- const api = endpointsToMethods(octokit, Endpoints);
- return _objectSpread2(_objectSpread2({}, api), {}, {
- rest: api
+
+// pkg/dist-src/with-defaults.js
+function withDefaults(oldDefaults, newDefaults) {
+ const DEFAULTS2 = merge(oldDefaults, newDefaults);
+ const endpoint2 = endpointWithDefaults.bind(null, DEFAULTS2);
+ return Object.assign(endpoint2, {
+ DEFAULTS: DEFAULTS2,
+ defaults: withDefaults.bind(null, DEFAULTS2),
+ merge: merge.bind(null, DEFAULTS2),
+ parse
});
}
-legacyRestEndpointMethods.VERSION = VERSION;
-exports.legacyRestEndpointMethods = legacyRestEndpointMethods;
-exports.restEndpointMethods = restEndpointMethods;
-//# sourceMappingURL=index.js.map
+// pkg/dist-src/index.js
+var endpoint = withDefaults(null, DEFAULTS);
+// Annotate the CommonJS export names for ESM import in node:
+0 && (0);
/***/ }),
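
The module above is the re-bundled @octokit/endpoint output. A rough usage sketch of its exported `endpoint` function as documented for the library (the owner/repo values are illustrative, and the defaults shown reflect the library's documented GitHub API defaults):

const { endpoint } = require("@octokit/endpoint");

// Turns a route string plus parameters into a plain request description.
const options = endpoint("GET /repos/{owner}/{repo}/releases", {
  owner: "octocat",
  repo: "hello-world",
  per_page: 5,
});
// options.method === "GET"
// options.url === "https://api.github.com/repos/octocat/hello-world/releases?per_page=5"
// options.headers.accept === "application/vnd.github.v3+json"
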
-/***/ 537:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+/***/ 8467:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+// pkg/dist-src/index.js
+var dist_src_exports = {};
+__export(dist_src_exports, {
+ GraphqlResponseError: () => GraphqlResponseError,
+ graphql: () => graphql2,
+ withCustomRequest: () => withCustomRequest
+});
+module.exports = __toCommonJS(dist_src_exports);
+var import_request3 = __nccwpck_require__(6234);
+var import_universal_user_agent = __nccwpck_require__(5030);
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-
-function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
-
-var deprecation = __nccwpck_require__(8932);
-var once = _interopDefault(__nccwpck_require__(1223));
-
-const logOnceCode = once(deprecation => console.warn(deprecation));
-const logOnceHeaders = once(deprecation => console.warn(deprecation));
-/**
- * Error with extra properties to help with debugging
- */
+// pkg/dist-src/version.js
+var VERSION = "7.0.2";
-class RequestError extends Error {
- constructor(message, statusCode, options) {
- super(message); // Maintains proper stack trace (only available on V8)
+// pkg/dist-src/with-defaults.js
+var import_request2 = __nccwpck_require__(6234);
- /* istanbul ignore next */
+// pkg/dist-src/graphql.js
+var import_request = __nccwpck_require__(6234);
+// pkg/dist-src/error.js
+function _buildMessageForResponseErrors(data) {
+ return `Request failed due to following response errors:
+` + data.errors.map((e) => ` - ${e.message}`).join("\n");
+}
+var GraphqlResponseError = class extends Error {
+ constructor(request2, headers, response) {
+ super(_buildMessageForResponseErrors(response));
+ this.request = request2;
+ this.headers = headers;
+ this.response = response;
+ this.name = "GraphqlResponseError";
+ this.errors = response.errors;
+ this.data = response.data;
if (Error.captureStackTrace) {
Error.captureStackTrace(this, this.constructor);
}
+ }
+};
- this.name = "HttpError";
- this.status = statusCode;
- let headers;
-
- if ("headers" in options && typeof options.headers !== "undefined") {
- headers = options.headers;
+// pkg/dist-src/graphql.js
+var NON_VARIABLE_OPTIONS = [
+ "method",
+ "baseUrl",
+ "url",
+ "headers",
+ "request",
+ "query",
+ "mediaType"
+];
+var FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"];
+var GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/;
+function graphql(request2, query, options) {
+ if (options) {
+ if (typeof query === "string" && "query" in options) {
+ return Promise.reject(
+ new Error(`[@octokit/graphql] "query" cannot be used as variable name`)
+ );
}
-
- if ("response" in options) {
- this.response = options.response;
- headers = options.response.headers;
- } // redact request credentials without mutating original request options
-
-
- const requestCopy = Object.assign({}, options.request);
-
- if (options.request.headers.authorization) {
- requestCopy.headers = Object.assign({}, options.request.headers, {
- authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]")
- });
+ for (const key in options) {
+ if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key))
+ continue;
+ return Promise.reject(
+ new Error(
+ `[@octokit/graphql] "${key}" cannot be used as variable name`
+ )
+ );
}
-
- requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit
- // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications
- .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended
- // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header
- .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]");
- this.request = requestCopy; // deprecations
-
- Object.defineProperty(this, "code", {
- get() {
- logOnceCode(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`."));
- return statusCode;
- }
-
- });
- Object.defineProperty(this, "headers", {
- get() {
- logOnceHeaders(new deprecation.Deprecation("[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`."));
- return headers || {};
- }
-
- });
}
+ const parsedOptions = typeof query === "string" ? Object.assign({ query }, options) : query;
+ const requestOptions = Object.keys(
+ parsedOptions
+ ).reduce((result, key) => {
+ if (NON_VARIABLE_OPTIONS.includes(key)) {
+ result[key] = parsedOptions[key];
+ return result;
+ }
+ if (!result.variables) {
+ result.variables = {};
+ }
+ result.variables[key] = parsedOptions[key];
+ return result;
+ }, {});
+ const baseUrl = parsedOptions.baseUrl || request2.endpoint.DEFAULTS.baseUrl;
+ if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {
+ requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql");
+ }
+ return request2(requestOptions).then((response) => {
+ if (response.data.errors) {
+ const headers = {};
+ for (const key of Object.keys(response.headers)) {
+ headers[key] = response.headers[key];
+ }
+ throw new GraphqlResponseError(
+ requestOptions,
+ headers,
+ response.data
+ );
+ }
+ return response.data.data;
+ });
+}
+// pkg/dist-src/with-defaults.js
+function withDefaults(request2, newDefaults) {
+ const newRequest = request2.defaults(newDefaults);
+ const newApi = (query, options) => {
+ return graphql(newRequest, query, options);
+ };
+ return Object.assign(newApi, {
+ defaults: withDefaults.bind(null, newRequest),
+ endpoint: newRequest.endpoint
+ });
}
-exports.RequestError = RequestError;
-//# sourceMappingURL=index.js.map
+// pkg/dist-src/index.js
+var graphql2 = withDefaults(import_request3.request, {
+ headers: {
+ "user-agent": `octokit-graphql.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`
+ },
+ method: "POST",
+ url: "/graphql"
+});
+function withCustomRequest(customRequest) {
+ return withDefaults(customRequest, {
+ method: "POST",
+ url: "/graphql"
+ });
+}
+// Annotate the CommonJS export names for ESM import in node:
+0 && (0);
/***/ }),
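
A short usage sketch for the @octokit/graphql bundle added above (the query, repository names, and token handling are illustrative). A 200 response that carries an `errors` array is surfaced as the GraphqlResponseError class defined in the module:

const { graphql, GraphqlResponseError } = require("@octokit/graphql");

async function lastIssueTitles() {
  try {
    const { repository } = await graphql(
      `query ($owner: String!, $repo: String!) {
        repository(owner: $owner, name: $repo) {
          issues(last: 3) { nodes { title } }
        }
      }`,
      {
        owner: "octokit",
        repo: "graphql.js",
        // headers is a request option, not a GraphQL variable.
        headers: { authorization: `token ${process.env.GITHUB_TOKEN}` },
      }
    );
    return repository.issues.nodes.map((node) => node.title);
  } catch (error) {
    // GraphQL-level errors (HTTP 200 with an `errors` array) land here.
    if (error instanceof GraphqlResponseError) console.error(error.message);
    throw error;
  }
}
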
-/***/ 6234:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+/***/ 4193:
+/***/ ((module) => {
"use strict";
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+// pkg/dist-src/index.js
+var dist_src_exports = {};
+__export(dist_src_exports, {
+ composePaginateRest: () => composePaginateRest,
+ isPaginatingEndpoint: () => isPaginatingEndpoint,
+ paginateRest: () => paginateRest,
+ paginatingEndpoints: () => paginatingEndpoints
+});
+module.exports = __toCommonJS(dist_src_exports);
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-
-function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
-
-var endpoint = __nccwpck_require__(9440);
-var universalUserAgent = __nccwpck_require__(5030);
-var isPlainObject = __nccwpck_require__(3287);
-var nodeFetch = _interopDefault(__nccwpck_require__(467));
-var requestError = __nccwpck_require__(537);
-
-const VERSION = "5.6.3";
+// pkg/dist-src/version.js
+var VERSION = "9.0.0";
-function getBufferResponse(response) {
- return response.arrayBuffer();
+// pkg/dist-src/normalize-paginated-list-response.js
+function normalizePaginatedListResponse(response) {
+ if (!response.data) {
+ return {
+ ...response,
+ data: []
+ };
+ }
+ const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data);
+ if (!responseNeedsNormalization)
+ return response;
+ const incompleteResults = response.data.incomplete_results;
+ const repositorySelection = response.data.repository_selection;
+ const totalCount = response.data.total_count;
+ delete response.data.incomplete_results;
+ delete response.data.repository_selection;
+ delete response.data.total_count;
+ const namespaceKey = Object.keys(response.data)[0];
+ const data = response.data[namespaceKey];
+ response.data = data;
+ if (typeof incompleteResults !== "undefined") {
+ response.data.incomplete_results = incompleteResults;
+ }
+ if (typeof repositorySelection !== "undefined") {
+ response.data.repository_selection = repositorySelection;
+ }
+ response.data.total_count = totalCount;
+ return response;
}
-function fetchWrapper(requestOptions) {
- const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console;
+// pkg/dist-src/iterator.js
+function iterator(octokit, route, parameters) {
+ const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);
+ const requestMethod = typeof route === "function" ? route : octokit.request;
+ const method = options.method;
+ const headers = options.headers;
+ let url = options.url;
+ return {
+ [Symbol.asyncIterator]: () => ({
+ async next() {
+ if (!url)
+ return { done: true };
+ try {
+ const response = await requestMethod({ method, url, headers });
+ const normalizedResponse = normalizePaginatedListResponse(response);
+ url = ((normalizedResponse.headers.link || "").match(
+ /<([^>]+)>;\s*rel="next"/
+ ) || [])[1];
+ return { value: normalizedResponse };
+ } catch (error) {
+ if (error.status !== 409)
+ throw error;
+ url = "";
+ return {
+ value: {
+ status: 200,
+ headers: {},
+ data: []
+ }
+ };
+ }
+ }
+ })
+ };
+}
- if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {
- requestOptions.body = JSON.stringify(requestOptions.body);
+// pkg/dist-src/paginate.js
+function paginate(octokit, route, parameters, mapFn) {
+ if (typeof parameters === "function") {
+ mapFn = parameters;
+ parameters = void 0;
}
-
- let headers = {};
- let status;
- let url;
- const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch;
- return fetch(requestOptions.url, Object.assign({
- method: requestOptions.method,
- body: requestOptions.body,
- headers: requestOptions.headers,
- redirect: requestOptions.redirect
- }, // `requestOptions.request.agent` type is incompatible
- // see https://github.com/octokit/types.ts/pull/264
- requestOptions.request)).then(async response => {
- url = response.url;
- status = response.status;
-
- for (const keyAndValue of response.headers) {
- headers[keyAndValue[0]] = keyAndValue[1];
- }
-
- if ("deprecation" in headers) {
- const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/);
- const deprecationLink = matches && matches.pop();
- log.warn(`[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}`);
- }
-
- if (status === 204 || status === 205) {
- return;
- } // GitHub API returns 200 for HEAD requests
-
-
- if (requestOptions.method === "HEAD") {
- if (status < 400) {
- return;
- }
-
- throw new requestError.RequestError(response.statusText, status, {
- response: {
- url,
- status,
- headers,
- data: undefined
- },
- request: requestOptions
- });
+ return gather(
+ octokit,
+ [],
+ iterator(octokit, route, parameters)[Symbol.asyncIterator](),
+ mapFn
+ );
+}
+function gather(octokit, results, iterator2, mapFn) {
+ return iterator2.next().then((result) => {
+ if (result.done) {
+ return results;
}
-
- if (status === 304) {
- throw new requestError.RequestError("Not modified", status, {
- response: {
- url,
- status,
- headers,
- data: await getResponseData(response)
- },
- request: requestOptions
- });
+ let earlyExit = false;
+ function done() {
+ earlyExit = true;
}
-
- if (status >= 400) {
- const data = await getResponseData(response);
- const error = new requestError.RequestError(toErrorMessage(data), status, {
- response: {
- url,
- status,
- headers,
- data
- },
- request: requestOptions
- });
- throw error;
+ results = results.concat(
+ mapFn ? mapFn(result.value, done) : result.value.data
+ );
+ if (earlyExit) {
+ return results;
}
-
- return getResponseData(response);
- }).then(data => {
- return {
- status,
- url,
- headers,
- data
- };
- }).catch(error => {
- if (error instanceof requestError.RequestError) throw error;
- throw new requestError.RequestError(error.message, 500, {
- request: requestOptions
- });
+ return gather(octokit, results, iterator2, mapFn);
});
}
-async function getResponseData(response) {
- const contentType = response.headers.get("content-type");
+// pkg/dist-src/compose-paginate.js
+var composePaginateRest = Object.assign(paginate, {
+ iterator
+});
- if (/application\/json/.test(contentType)) {
- return response.json();
- }
+// pkg/dist-src/generated/paginating-endpoints.js
+var paginatingEndpoints = [
+ "GET /advisories",
+ "GET /app/hook/deliveries",
+ "GET /app/installation-requests",
+ "GET /app/installations",
+ "GET /assignments/{assignment_id}/accepted_assignments",
+ "GET /classrooms",
+ "GET /classrooms/{classroom_id}/assignments",
+ "GET /enterprises/{enterprise}/dependabot/alerts",
+ "GET /enterprises/{enterprise}/secret-scanning/alerts",
+ "GET /events",
+ "GET /gists",
+ "GET /gists/public",
+ "GET /gists/starred",
+ "GET /gists/{gist_id}/comments",
+ "GET /gists/{gist_id}/commits",
+ "GET /gists/{gist_id}/forks",
+ "GET /installation/repositories",
+ "GET /issues",
+ "GET /licenses",
+ "GET /marketplace_listing/plans",
+ "GET /marketplace_listing/plans/{plan_id}/accounts",
+ "GET /marketplace_listing/stubbed/plans",
+ "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts",
+ "GET /networks/{owner}/{repo}/events",
+ "GET /notifications",
+ "GET /organizations",
+ "GET /orgs/{org}/actions/cache/usage-by-repository",
+ "GET /orgs/{org}/actions/permissions/repositories",
+ "GET /orgs/{org}/actions/runners",
+ "GET /orgs/{org}/actions/secrets",
+ "GET /orgs/{org}/actions/secrets/{secret_name}/repositories",
+ "GET /orgs/{org}/actions/variables",
+ "GET /orgs/{org}/actions/variables/{name}/repositories",
+ "GET /orgs/{org}/blocks",
+ "GET /orgs/{org}/code-scanning/alerts",
+ "GET /orgs/{org}/codespaces",
+ "GET /orgs/{org}/codespaces/secrets",
+ "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories",
+ "GET /orgs/{org}/copilot/billing/seats",
+ "GET /orgs/{org}/dependabot/alerts",
+ "GET /orgs/{org}/dependabot/secrets",
+ "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories",
+ "GET /orgs/{org}/events",
+ "GET /orgs/{org}/failed_invitations",
+ "GET /orgs/{org}/hooks",
+ "GET /orgs/{org}/hooks/{hook_id}/deliveries",
+ "GET /orgs/{org}/installations",
+ "GET /orgs/{org}/invitations",
+ "GET /orgs/{org}/invitations/{invitation_id}/teams",
+ "GET /orgs/{org}/issues",
+ "GET /orgs/{org}/members",
+ "GET /orgs/{org}/members/{username}/codespaces",
+ "GET /orgs/{org}/migrations",
+ "GET /orgs/{org}/migrations/{migration_id}/repositories",
+ "GET /orgs/{org}/outside_collaborators",
+ "GET /orgs/{org}/packages",
+ "GET /orgs/{org}/packages/{package_type}/{package_name}/versions",
+ "GET /orgs/{org}/personal-access-token-requests",
+ "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories",
+ "GET /orgs/{org}/personal-access-tokens",
+ "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories",
+ "GET /orgs/{org}/projects",
+ "GET /orgs/{org}/public_members",
+ "GET /orgs/{org}/repos",
+ "GET /orgs/{org}/rulesets",
+ "GET /orgs/{org}/secret-scanning/alerts",
+ "GET /orgs/{org}/security-advisories",
+ "GET /orgs/{org}/teams",
+ "GET /orgs/{org}/teams/{team_slug}/discussions",
+ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments",
+ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions",
+ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions",
+ "GET /orgs/{org}/teams/{team_slug}/invitations",
+ "GET /orgs/{org}/teams/{team_slug}/members",
+ "GET /orgs/{org}/teams/{team_slug}/projects",
+ "GET /orgs/{org}/teams/{team_slug}/repos",
+ "GET /orgs/{org}/teams/{team_slug}/teams",
+ "GET /projects/columns/{column_id}/cards",
+ "GET /projects/{project_id}/collaborators",
+ "GET /projects/{project_id}/columns",
+ "GET /repos/{owner}/{repo}/actions/artifacts",
+ "GET /repos/{owner}/{repo}/actions/caches",
+ "GET /repos/{owner}/{repo}/actions/organization-secrets",
+ "GET /repos/{owner}/{repo}/actions/organization-variables",
+ "GET /repos/{owner}/{repo}/actions/runners",
+ "GET /repos/{owner}/{repo}/actions/runs",
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts",
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs",
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs",
+ "GET /repos/{owner}/{repo}/actions/secrets",
+ "GET /repos/{owner}/{repo}/actions/variables",
+ "GET /repos/{owner}/{repo}/actions/workflows",
+ "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs",
+ "GET /repos/{owner}/{repo}/activity",
+ "GET /repos/{owner}/{repo}/assignees",
+ "GET /repos/{owner}/{repo}/branches",
+ "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations",
+ "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs",
+ "GET /repos/{owner}/{repo}/code-scanning/alerts",
+ "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances",
+ "GET /repos/{owner}/{repo}/code-scanning/analyses",
+ "GET /repos/{owner}/{repo}/codespaces",
+ "GET /repos/{owner}/{repo}/codespaces/devcontainers",
+ "GET /repos/{owner}/{repo}/codespaces/secrets",
+ "GET /repos/{owner}/{repo}/collaborators",
+ "GET /repos/{owner}/{repo}/comments",
+ "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions",
+ "GET /repos/{owner}/{repo}/commits",
+ "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments",
+ "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls",
+ "GET /repos/{owner}/{repo}/commits/{ref}/check-runs",
+ "GET /repos/{owner}/{repo}/commits/{ref}/check-suites",
+ "GET /repos/{owner}/{repo}/commits/{ref}/status",
+ "GET /repos/{owner}/{repo}/commits/{ref}/statuses",
+ "GET /repos/{owner}/{repo}/contributors",
+ "GET /repos/{owner}/{repo}/dependabot/alerts",
+ "GET /repos/{owner}/{repo}/dependabot/secrets",
+ "GET /repos/{owner}/{repo}/deployments",
+ "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses",
+ "GET /repos/{owner}/{repo}/environments",
+ "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies",
+ "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps",
+ "GET /repos/{owner}/{repo}/events",
+ "GET /repos/{owner}/{repo}/forks",
+ "GET /repos/{owner}/{repo}/hooks",
+ "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries",
+ "GET /repos/{owner}/{repo}/invitations",
+ "GET /repos/{owner}/{repo}/issues",
+ "GET /repos/{owner}/{repo}/issues/comments",
+ "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions",
+ "GET /repos/{owner}/{repo}/issues/events",
+ "GET /repos/{owner}/{repo}/issues/{issue_number}/comments",
+ "GET /repos/{owner}/{repo}/issues/{issue_number}/events",
+ "GET /repos/{owner}/{repo}/issues/{issue_number}/labels",
+ "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions",
+ "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline",
+ "GET /repos/{owner}/{repo}/keys",
+ "GET /repos/{owner}/{repo}/labels",
+ "GET /repos/{owner}/{repo}/milestones",
+ "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels",
+ "GET /repos/{owner}/{repo}/notifications",
+ "GET /repos/{owner}/{repo}/pages/builds",
+ "GET /repos/{owner}/{repo}/projects",
+ "GET /repos/{owner}/{repo}/pulls",
+ "GET /repos/{owner}/{repo}/pulls/comments",
+ "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions",
+ "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments",
+ "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits",
+ "GET /repos/{owner}/{repo}/pulls/{pull_number}/files",
+ "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews",
+ "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments",
+ "GET /repos/{owner}/{repo}/releases",
+ "GET /repos/{owner}/{repo}/releases/{release_id}/assets",
+ "GET /repos/{owner}/{repo}/releases/{release_id}/reactions",
+ "GET /repos/{owner}/{repo}/rules/branches/{branch}",
+ "GET /repos/{owner}/{repo}/rulesets",
+ "GET /repos/{owner}/{repo}/secret-scanning/alerts",
+ "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations",
+ "GET /repos/{owner}/{repo}/security-advisories",
+ "GET /repos/{owner}/{repo}/stargazers",
+ "GET /repos/{owner}/{repo}/subscribers",
+ "GET /repos/{owner}/{repo}/tags",
+ "GET /repos/{owner}/{repo}/teams",
+ "GET /repos/{owner}/{repo}/topics",
+ "GET /repositories",
+ "GET /repositories/{repository_id}/environments/{environment_name}/secrets",
+ "GET /repositories/{repository_id}/environments/{environment_name}/variables",
+ "GET /search/code",
+ "GET /search/commits",
+ "GET /search/issues",
+ "GET /search/labels",
+ "GET /search/repositories",
+ "GET /search/topics",
+ "GET /search/users",
+ "GET /teams/{team_id}/discussions",
+ "GET /teams/{team_id}/discussions/{discussion_number}/comments",
+ "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions",
+ "GET /teams/{team_id}/discussions/{discussion_number}/reactions",
+ "GET /teams/{team_id}/invitations",
+ "GET /teams/{team_id}/members",
+ "GET /teams/{team_id}/projects",
+ "GET /teams/{team_id}/repos",
+ "GET /teams/{team_id}/teams",
+ "GET /user/blocks",
+ "GET /user/codespaces",
+ "GET /user/codespaces/secrets",
+ "GET /user/emails",
+ "GET /user/followers",
+ "GET /user/following",
+ "GET /user/gpg_keys",
+ "GET /user/installations",
+ "GET /user/installations/{installation_id}/repositories",
+ "GET /user/issues",
+ "GET /user/keys",
+ "GET /user/marketplace_purchases",
+ "GET /user/marketplace_purchases/stubbed",
+ "GET /user/memberships/orgs",
+ "GET /user/migrations",
+ "GET /user/migrations/{migration_id}/repositories",
+ "GET /user/orgs",
+ "GET /user/packages",
+ "GET /user/packages/{package_type}/{package_name}/versions",
+ "GET /user/public_emails",
+ "GET /user/repos",
+ "GET /user/repository_invitations",
+ "GET /user/social_accounts",
+ "GET /user/ssh_signing_keys",
+ "GET /user/starred",
+ "GET /user/subscriptions",
+ "GET /user/teams",
+ "GET /users",
+ "GET /users/{username}/events",
+ "GET /users/{username}/events/orgs/{org}",
+ "GET /users/{username}/events/public",
+ "GET /users/{username}/followers",
+ "GET /users/{username}/following",
+ "GET /users/{username}/gists",
+ "GET /users/{username}/gpg_keys",
+ "GET /users/{username}/keys",
+ "GET /users/{username}/orgs",
+ "GET /users/{username}/packages",
+ "GET /users/{username}/projects",
+ "GET /users/{username}/received_events",
+ "GET /users/{username}/received_events/public",
+ "GET /users/{username}/repos",
+ "GET /users/{username}/social_accounts",
+ "GET /users/{username}/ssh_signing_keys",
+ "GET /users/{username}/starred",
+ "GET /users/{username}/subscriptions"
+];
- if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) {
- return response.text();
+// pkg/dist-src/paginating-endpoints.js
+function isPaginatingEndpoint(arg) {
+ if (typeof arg === "string") {
+ return paginatingEndpoints.includes(arg);
+ } else {
+ return false;
}
-
- return getBufferResponse(response);
-}
-
-function toErrorMessage(data) {
- if (typeof data === "string") return data; // istanbul ignore else - just in case
-
- if ("message" in data) {
- if (Array.isArray(data.errors)) {
- return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}`;
- }
-
- return data.message;
- } // istanbul ignore next - just in case
-
-
- return `Unknown error: ${JSON.stringify(data)}`;
}
-function withDefaults(oldEndpoint, newDefaults) {
- const endpoint = oldEndpoint.defaults(newDefaults);
-
- const newApi = function (route, parameters) {
- const endpointOptions = endpoint.merge(route, parameters);
-
- if (!endpointOptions.request || !endpointOptions.request.hook) {
- return fetchWrapper(endpoint.parse(endpointOptions));
- }
-
- const request = (route, parameters) => {
- return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));
- };
-
- Object.assign(request, {
- endpoint,
- defaults: withDefaults.bind(null, endpoint)
- });
- return endpointOptions.request.hook(request, endpointOptions);
+// pkg/dist-src/index.js
+function paginateRest(octokit) {
+ return {
+ paginate: Object.assign(paginate.bind(null, octokit), {
+ iterator: iterator.bind(null, octokit)
+ })
};
-
- return Object.assign(newApi, {
- endpoint,
- defaults: withDefaults.bind(null, endpoint)
- });
}
-
-const request = withDefaults(endpoint.endpoint, {
- headers: {
- "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}`
- }
-});
-
-exports.request = request;
-//# sourceMappingURL=index.js.map
+paginateRest.VERSION = VERSION;
+// Annotate the CommonJS export names for ESM import in node:
+0 && (0);
/***/ }),
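
A sketch of how the paginate plugin above is consumed once mixed into an Octokit instance (the `octokit` instance, owner, and repo are assumed for illustration, inside an async context). `paginate` gathers every page by following the `Link: rel="next"` header, while `paginate.iterator` yields one response per page:

// Assumes `octokit` was constructed with the paginateRest plugin applied.
const allIssues = await octokit.paginate("GET /repos/{owner}/{repo}/issues", {
  owner: "octocat",
  repo: "hello-world",
  per_page: 100,
});

// Lazy variant: one response object per page.
for await (const response of octokit.paginate.iterator(
  "GET /repos/{owner}/{repo}/issues",
  { owner: "octocat", repo: "hello-world", per_page: 100 }
)) {
  console.log(`fetched ${response.data.length} issues`);
}
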
-/***/ 9417:
+/***/ 3044:
/***/ ((module) => {
"use strict";
-module.exports = balanced;
-function balanced(a, b, str) {
- if (a instanceof RegExp) a = maybeMatch(a, str);
- if (b instanceof RegExp) b = maybeMatch(b, str);
-
- var r = range(a, b, str);
-
- return r && {
- start: r[0],
- end: r[1],
- pre: str.slice(0, r[0]),
- body: str.slice(r[0] + a.length, r[1]),
- post: str.slice(r[1] + b.length)
- };
-}
-
-function maybeMatch(reg, str) {
- var m = str.match(reg);
- return m ? m[0] : null;
-}
-
-balanced.range = range;
-function range(a, b, str) {
- var begs, beg, left, right, result;
- var ai = str.indexOf(a);
- var bi = str.indexOf(b, ai + 1);
- var i = ai;
-
- if (ai >= 0 && bi > 0) {
- if(a===b) {
- return [ai, bi];
- }
- begs = [];
- left = str.length;
-
- while (i >= 0 && !result) {
- if (i == ai) {
- begs.push(i);
- ai = str.indexOf(a, i + 1);
- } else if (begs.length == 1) {
- result = [ begs.pop(), bi ];
- } else {
- beg = begs.pop();
- if (beg < left) {
- left = beg;
- right = bi;
- }
-
- bi = str.indexOf(b, i + 1);
- }
-
- i = ai < bi && ai >= 0 ? ai : bi;
- }
-
- if (begs.length) {
- result = [ left, right ];
- }
- }
-
- return result;
-}
-
-
-/***/ }),
-
-/***/ 3682:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
-
-var register = __nccwpck_require__(4670)
-var addHook = __nccwpck_require__(5549)
-var removeHook = __nccwpck_require__(6819)
-
-// bind with array of arguments: https://stackoverflow.com/a/21792913
-var bind = Function.bind
-var bindable = bind.bind(bind)
-
-function bindApi (hook, state, name) {
- var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state])
- hook.api = { remove: removeHookRef }
- hook.remove = removeHookRef
-
- ;['before', 'error', 'after', 'wrap'].forEach(function (kind) {
- var args = name ? [state, kind, name] : [state, kind]
- hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args)
- })
-}
-
-function HookSingular () {
- var singularHookName = 'h'
- var singularHookState = {
- registry: {}
- }
- var singularHook = register.bind(null, singularHookState, singularHookName)
- bindApi(singularHook, singularHookState, singularHookName)
- return singularHook
-}
-
-function HookCollection () {
- var state = {
- registry: {}
- }
-
- var hook = register.bind(null, state)
- bindApi(hook, state)
-
- return hook
-}
-
-var collectionHookDeprecationMessageDisplayed = false
-function Hook () {
- if (!collectionHookDeprecationMessageDisplayed) {
- console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4')
- collectionHookDeprecationMessageDisplayed = true
- }
- return HookCollection()
-}
-
-Hook.Singular = HookSingular.bind()
-Hook.Collection = HookCollection.bind()
-
-module.exports = Hook
-// expose constructors as a named property for TypeScript
-module.exports.Hook = Hook
-module.exports.Singular = Hook.Singular
-module.exports.Collection = Hook.Collection
-
-
-/***/ }),
-
-/***/ 5549:
-/***/ ((module) => {
-
-module.exports = addHook;
-
-function addHook(state, kind, name, hook) {
- var orig = hook;
- if (!state.registry[name]) {
- state.registry[name] = [];
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
}
+ return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
- if (kind === "before") {
- hook = function (method, options) {
- return Promise.resolve()
- .then(orig.bind(null, options))
- .then(method.bind(null, options));
- };
- }
+// pkg/dist-src/index.js
+var dist_src_exports = {};
+__export(dist_src_exports, {
+ legacyRestEndpointMethods: () => legacyRestEndpointMethods,
+ restEndpointMethods: () => restEndpointMethods
+});
+module.exports = __toCommonJS(dist_src_exports);
- if (kind === "after") {
- hook = function (method, options) {
- var result;
- return Promise.resolve()
- .then(method.bind(null, options))
- .then(function (result_) {
- result = result_;
- return orig(result, options);
- })
- .then(function () {
- return result;
- });
- };
- }
+// pkg/dist-src/version.js
+var VERSION = "10.0.1";
+
+// pkg/dist-src/generated/endpoints.js
+var Endpoints = {
+ actions: {
+ addCustomLabelsToSelfHostedRunnerForOrg: [
+ "POST /orgs/{org}/actions/runners/{runner_id}/labels"
+ ],
+ addCustomLabelsToSelfHostedRunnerForRepo: [
+ "POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"
+ ],
+ addSelectedRepoToOrgSecret: [
+ "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"
+ ],
+ addSelectedRepoToOrgVariable: [
+ "PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}"
+ ],
+ approveWorkflowRun: [
+ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve"
+ ],
+ cancelWorkflowRun: [
+ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"
+ ],
+ createEnvironmentVariable: [
+ "POST /repositories/{repository_id}/environments/{environment_name}/variables"
+ ],
+ createOrUpdateEnvironmentSecret: [
+ "PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"
+ ],
+ createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"],
+ createOrUpdateRepoSecret: [
+ "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"
+ ],
+ createOrgVariable: ["POST /orgs/{org}/actions/variables"],
+ createRegistrationTokenForOrg: [
+ "POST /orgs/{org}/actions/runners/registration-token"
+ ],
+ createRegistrationTokenForRepo: [
+ "POST /repos/{owner}/{repo}/actions/runners/registration-token"
+ ],
+ createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"],
+ createRemoveTokenForRepo: [
+ "POST /repos/{owner}/{repo}/actions/runners/remove-token"
+ ],
+ createRepoVariable: ["POST /repos/{owner}/{repo}/actions/variables"],
+ createWorkflowDispatch: [
+ "POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"
+ ],
+ deleteActionsCacheById: [
+ "DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}"
+ ],
+ deleteActionsCacheByKey: [
+ "DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}"
+ ],
+ deleteArtifact: [
+ "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"
+ ],
+ deleteEnvironmentSecret: [
+ "DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"
+ ],
+ deleteEnvironmentVariable: [
+ "DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}"
+ ],
+ deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"],
+ deleteOrgVariable: ["DELETE /orgs/{org}/actions/variables/{name}"],
+ deleteRepoSecret: [
+ "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"
+ ],
+ deleteRepoVariable: [
+ "DELETE /repos/{owner}/{repo}/actions/variables/{name}"
+ ],
+ deleteSelfHostedRunnerFromOrg: [
+ "DELETE /orgs/{org}/actions/runners/{runner_id}"
+ ],
+ deleteSelfHostedRunnerFromRepo: [
+ "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"
+ ],
+ deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"],
+ deleteWorkflowRunLogs: [
+ "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"
+ ],
+ disableSelectedRepositoryGithubActionsOrganization: [
+ "DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}"
+ ],
+ disableWorkflow: [
+ "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable"
+ ],
+ downloadArtifact: [
+ "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"
+ ],
+ downloadJobLogsForWorkflowRun: [
+ "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"
+ ],
+ downloadWorkflowRunAttemptLogs: [
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs"
+ ],
+ downloadWorkflowRunLogs: [
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"
+ ],
+ enableSelectedRepositoryGithubActionsOrganization: [
+ "PUT /orgs/{org}/actions/permissions/repositories/{repository_id}"
+ ],
+ enableWorkflow: [
+ "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable"
+ ],
+ generateRunnerJitconfigForOrg: [
+ "POST /orgs/{org}/actions/runners/generate-jitconfig"
+ ],
+ generateRunnerJitconfigForRepo: [
+ "POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig"
+ ],
+ getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"],
+ getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"],
+ getActionsCacheUsageByRepoForOrg: [
+ "GET /orgs/{org}/actions/cache/usage-by-repository"
+ ],
+ getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"],
+ getAllowedActionsOrganization: [
+ "GET /orgs/{org}/actions/permissions/selected-actions"
+ ],
+ getAllowedActionsRepository: [
+ "GET /repos/{owner}/{repo}/actions/permissions/selected-actions"
+ ],
+ getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"],
+ getEnvironmentPublicKey: [
+ "GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key"
+ ],
+ getEnvironmentSecret: [
+ "GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"
+ ],
+ getEnvironmentVariable: [
+ "GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}"
+ ],
+ getGithubActionsDefaultWorkflowPermissionsOrganization: [
+ "GET /orgs/{org}/actions/permissions/workflow"
+ ],
+ getGithubActionsDefaultWorkflowPermissionsRepository: [
+ "GET /repos/{owner}/{repo}/actions/permissions/workflow"
+ ],
+ getGithubActionsPermissionsOrganization: [
+ "GET /orgs/{org}/actions/permissions"
+ ],
+ getGithubActionsPermissionsRepository: [
+ "GET /repos/{owner}/{repo}/actions/permissions"
+ ],
+ getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"],
+ getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"],
+ getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"],
+ getOrgVariable: ["GET /orgs/{org}/actions/variables/{name}"],
+ getPendingDeploymentsForRun: [
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"
+ ],
+ getRepoPermissions: [
+ "GET /repos/{owner}/{repo}/actions/permissions",
+ {},
+ { renamed: ["actions", "getGithubActionsPermissionsRepository"] }
+ ],
+ getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"],
+ getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"],
+ getRepoVariable: ["GET /repos/{owner}/{repo}/actions/variables/{name}"],
+ getReviewsForRun: [
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals"
+ ],
+ getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"],
+ getSelfHostedRunnerForRepo: [
+ "GET /repos/{owner}/{repo}/actions/runners/{runner_id}"
+ ],
+ getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"],
+ getWorkflowAccessToRepository: [
+ "GET /repos/{owner}/{repo}/actions/permissions/access"
+ ],
+ getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"],
+ getWorkflowRunAttempt: [
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}"
+ ],
+ getWorkflowRunUsage: [
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"
+ ],
+ getWorkflowUsage: [
+ "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"
+ ],
+ listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"],
+ listEnvironmentSecrets: [
+ "GET /repositories/{repository_id}/environments/{environment_name}/secrets"
+ ],
+ listEnvironmentVariables: [
+ "GET /repositories/{repository_id}/environments/{environment_name}/variables"
+ ],
+ listJobsForWorkflowRun: [
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"
+ ],
+ listJobsForWorkflowRunAttempt: [
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs"
+ ],
+ listLabelsForSelfHostedRunnerForOrg: [
+ "GET /orgs/{org}/actions/runners/{runner_id}/labels"
+ ],
+ listLabelsForSelfHostedRunnerForRepo: [
+ "GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"
+ ],
+ listOrgSecrets: ["GET /orgs/{org}/actions/secrets"],
+ listOrgVariables: ["GET /orgs/{org}/actions/variables"],
+ listRepoOrganizationSecrets: [
+ "GET /repos/{owner}/{repo}/actions/organization-secrets"
+ ],
+ listRepoOrganizationVariables: [
+ "GET /repos/{owner}/{repo}/actions/organization-variables"
+ ],
+ listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"],
+ listRepoVariables: ["GET /repos/{owner}/{repo}/actions/variables"],
+ listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"],
+ listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"],
+ listRunnerApplicationsForRepo: [
+ "GET /repos/{owner}/{repo}/actions/runners/downloads"
+ ],
+ listSelectedReposForOrgSecret: [
+ "GET /orgs/{org}/actions/secrets/{secret_name}/repositories"
+ ],
+ listSelectedReposForOrgVariable: [
+ "GET /orgs/{org}/actions/variables/{name}/repositories"
+ ],
+ listSelectedRepositoriesEnabledGithubActionsOrganization: [
+ "GET /orgs/{org}/actions/permissions/repositories"
+ ],
+ listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"],
+ listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"],
+ listWorkflowRunArtifacts: [
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"
+ ],
+ listWorkflowRuns: [
+ "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"
+ ],
+ listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"],
+ reRunJobForWorkflowRun: [
+ "POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun"
+ ],
+ reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"],
+ reRunWorkflowFailedJobs: [
+ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs"
+ ],
+ removeAllCustomLabelsFromSelfHostedRunnerForOrg: [
+ "DELETE /orgs/{org}/actions/runners/{runner_id}/labels"
+ ],
+ removeAllCustomLabelsFromSelfHostedRunnerForRepo: [
+ "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"
+ ],
+ removeCustomLabelFromSelfHostedRunnerForOrg: [
+ "DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}"
+ ],
+ removeCustomLabelFromSelfHostedRunnerForRepo: [
+ "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}"
+ ],
+ removeSelectedRepoFromOrgSecret: [
+ "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"
+ ],
+ removeSelectedRepoFromOrgVariable: [
+ "DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}"
+ ],
+ reviewCustomGatesForRun: [
+ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule"
+ ],
+ reviewPendingDeploymentsForRun: [
+ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"
+ ],
+ setAllowedActionsOrganization: [
+ "PUT /orgs/{org}/actions/permissions/selected-actions"
+ ],
+ setAllowedActionsRepository: [
+ "PUT /repos/{owner}/{repo}/actions/permissions/selected-actions"
+ ],
+ setCustomLabelsForSelfHostedRunnerForOrg: [
+ "PUT /orgs/{org}/actions/runners/{runner_id}/labels"
+ ],
+ setCustomLabelsForSelfHostedRunnerForRepo: [
+ "PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"
+ ],
+ setGithubActionsDefaultWorkflowPermissionsOrganization: [
+ "PUT /orgs/{org}/actions/permissions/workflow"
+ ],
+ setGithubActionsDefaultWorkflowPermissionsRepository: [
+ "PUT /repos/{owner}/{repo}/actions/permissions/workflow"
+ ],
+ setGithubActionsPermissionsOrganization: [
+ "PUT /orgs/{org}/actions/permissions"
+ ],
+ setGithubActionsPermissionsRepository: [
+ "PUT /repos/{owner}/{repo}/actions/permissions"
+ ],
+ setSelectedReposForOrgSecret: [
+ "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"
+ ],
+ setSelectedReposForOrgVariable: [
+ "PUT /orgs/{org}/actions/variables/{name}/repositories"
+ ],
+ setSelectedRepositoriesEnabledGithubActionsOrganization: [
+ "PUT /orgs/{org}/actions/permissions/repositories"
+ ],
+ setWorkflowAccessToRepository: [
+ "PUT /repos/{owner}/{repo}/actions/permissions/access"
+ ],
+ updateEnvironmentVariable: [
+ "PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name}"
+ ],
+ updateOrgVariable: ["PATCH /orgs/{org}/actions/variables/{name}"],
+ updateRepoVariable: [
+ "PATCH /repos/{owner}/{repo}/actions/variables/{name}"
+ ]
+ },
+ activity: {
+ checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"],
+ deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"],
+ deleteThreadSubscription: [
+ "DELETE /notifications/threads/{thread_id}/subscription"
+ ],
+ getFeeds: ["GET /feeds"],
+ getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"],
+ getThread: ["GET /notifications/threads/{thread_id}"],
+ getThreadSubscriptionForAuthenticatedUser: [
+ "GET /notifications/threads/{thread_id}/subscription"
+ ],
+ listEventsForAuthenticatedUser: ["GET /users/{username}/events"],
+ listNotificationsForAuthenticatedUser: ["GET /notifications"],
+ listOrgEventsForAuthenticatedUser: [
+ "GET /users/{username}/events/orgs/{org}"
+ ],
+ listPublicEvents: ["GET /events"],
+ listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"],
+ listPublicEventsForUser: ["GET /users/{username}/events/public"],
+ listPublicOrgEvents: ["GET /orgs/{org}/events"],
+ listReceivedEventsForUser: ["GET /users/{username}/received_events"],
+ listReceivedPublicEventsForUser: [
+ "GET /users/{username}/received_events/public"
+ ],
+ listRepoEvents: ["GET /repos/{owner}/{repo}/events"],
+ listRepoNotificationsForAuthenticatedUser: [
+ "GET /repos/{owner}/{repo}/notifications"
+ ],
+ listReposStarredByAuthenticatedUser: ["GET /user/starred"],
+ listReposStarredByUser: ["GET /users/{username}/starred"],
+ listReposWatchedByUser: ["GET /users/{username}/subscriptions"],
+ listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"],
+ listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"],
+ listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"],
+ markNotificationsAsRead: ["PUT /notifications"],
+ markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"],
+ markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"],
+ setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"],
+ setThreadSubscription: [
+ "PUT /notifications/threads/{thread_id}/subscription"
+ ],
+ starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"],
+ unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"]
+ },
+ apps: {
+ addRepoToInstallation: [
+ "PUT /user/installations/{installation_id}/repositories/{repository_id}",
+ {},
+ { renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"] }
+ ],
+ addRepoToInstallationForAuthenticatedUser: [
+ "PUT /user/installations/{installation_id}/repositories/{repository_id}"
+ ],
+ checkToken: ["POST /applications/{client_id}/token"],
+ createFromManifest: ["POST /app-manifests/{code}/conversions"],
+ createInstallationAccessToken: [
+ "POST /app/installations/{installation_id}/access_tokens"
+ ],
+ deleteAuthorization: ["DELETE /applications/{client_id}/grant"],
+ deleteInstallation: ["DELETE /app/installations/{installation_id}"],
+ deleteToken: ["DELETE /applications/{client_id}/token"],
+ getAuthenticated: ["GET /app"],
+ getBySlug: ["GET /apps/{app_slug}"],
+ getInstallation: ["GET /app/installations/{installation_id}"],
+ getOrgInstallation: ["GET /orgs/{org}/installation"],
+ getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"],
+ getSubscriptionPlanForAccount: [
+ "GET /marketplace_listing/accounts/{account_id}"
+ ],
+ getSubscriptionPlanForAccountStubbed: [
+ "GET /marketplace_listing/stubbed/accounts/{account_id}"
+ ],
+ getUserInstallation: ["GET /users/{username}/installation"],
+ getWebhookConfigForApp: ["GET /app/hook/config"],
+ getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"],
+ listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"],
+ listAccountsForPlanStubbed: [
+ "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"
+ ],
+ listInstallationReposForAuthenticatedUser: [
+ "GET /user/installations/{installation_id}/repositories"
+ ],
+ listInstallationRequestsForAuthenticatedApp: [
+ "GET /app/installation-requests"
+ ],
+ listInstallations: ["GET /app/installations"],
+ listInstallationsForAuthenticatedUser: ["GET /user/installations"],
+ listPlans: ["GET /marketplace_listing/plans"],
+ listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"],
+ listReposAccessibleToInstallation: ["GET /installation/repositories"],
+ listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"],
+ listSubscriptionsForAuthenticatedUserStubbed: [
+ "GET /user/marketplace_purchases/stubbed"
+ ],
+ listWebhookDeliveries: ["GET /app/hook/deliveries"],
+ redeliverWebhookDelivery: [
+ "POST /app/hook/deliveries/{delivery_id}/attempts"
+ ],
+ removeRepoFromInstallation: [
+ "DELETE /user/installations/{installation_id}/repositories/{repository_id}",
+ {},
+ { renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] }
+ ],
+ removeRepoFromInstallationForAuthenticatedUser: [
+ "DELETE /user/installations/{installation_id}/repositories/{repository_id}"
+ ],
+ resetToken: ["PATCH /applications/{client_id}/token"],
+ revokeInstallationAccessToken: ["DELETE /installation/token"],
+ scopeToken: ["POST /applications/{client_id}/token/scoped"],
+ suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"],
+ unsuspendInstallation: [
+ "DELETE /app/installations/{installation_id}/suspended"
+ ],
+ updateWebhookConfigForApp: ["PATCH /app/hook/config"]
+ },
+ billing: {
+ getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"],
+ getGithubActionsBillingUser: [
+ "GET /users/{username}/settings/billing/actions"
+ ],
+ getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"],
+ getGithubPackagesBillingUser: [
+ "GET /users/{username}/settings/billing/packages"
+ ],
+ getSharedStorageBillingOrg: [
+ "GET /orgs/{org}/settings/billing/shared-storage"
+ ],
+ getSharedStorageBillingUser: [
+ "GET /users/{username}/settings/billing/shared-storage"
+ ]
+ },
+ checks: {
+ create: ["POST /repos/{owner}/{repo}/check-runs"],
+ createSuite: ["POST /repos/{owner}/{repo}/check-suites"],
+ get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"],
+ getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"],
+ listAnnotations: [
+ "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations"
+ ],
+ listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"],
+ listForSuite: [
+ "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs"
+ ],
+ listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"],
+ rerequestRun: [
+ "POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest"
+ ],
+ rerequestSuite: [
+ "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest"
+ ],
+ setSuitesPreferences: [
+ "PATCH /repos/{owner}/{repo}/check-suites/preferences"
+ ],
+ update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"]
+ },
+ codeScanning: {
+ deleteAnalysis: [
+ "DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}"
+ ],
+ getAlert: [
+ "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}",
+ {},
+ { renamedParameters: { alert_id: "alert_number" } }
+ ],
+ getAnalysis: [
+ "GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}"
+ ],
+ getCodeqlDatabase: [
+ "GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}"
+ ],
+ getDefaultSetup: ["GET /repos/{owner}/{repo}/code-scanning/default-setup"],
+ getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"],
+ listAlertInstances: [
+ "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"
+ ],
+ listAlertsForOrg: ["GET /orgs/{org}/code-scanning/alerts"],
+ listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"],
+ listAlertsInstances: [
+ "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances",
+ {},
+ { renamed: ["codeScanning", "listAlertInstances"] }
+ ],
+ listCodeqlDatabases: [
+ "GET /repos/{owner}/{repo}/code-scanning/codeql/databases"
+ ],
+ listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"],
+ updateAlert: [
+ "PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}"
+ ],
+ updateDefaultSetup: [
+ "PATCH /repos/{owner}/{repo}/code-scanning/default-setup"
+ ],
+ uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"]
+ },
+ codesOfConduct: {
+ getAllCodesOfConduct: ["GET /codes_of_conduct"],
+ getConductCode: ["GET /codes_of_conduct/{key}"]
+ },
+ codespaces: {
+ addRepositoryForSecretForAuthenticatedUser: [
+ "PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"
+ ],
+ addSelectedRepoToOrgSecret: [
+ "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}"
+ ],
+ codespaceMachinesForAuthenticatedUser: [
+ "GET /user/codespaces/{codespace_name}/machines"
+ ],
+ createForAuthenticatedUser: ["POST /user/codespaces"],
+ createOrUpdateOrgSecret: [
+ "PUT /orgs/{org}/codespaces/secrets/{secret_name}"
+ ],
+ createOrUpdateRepoSecret: [
+ "PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"
+ ],
+ createOrUpdateSecretForAuthenticatedUser: [
+ "PUT /user/codespaces/secrets/{secret_name}"
+ ],
+ createWithPrForAuthenticatedUser: [
+ "POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces"
+ ],
+ createWithRepoForAuthenticatedUser: [
+ "POST /repos/{owner}/{repo}/codespaces"
+ ],
+ deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"],
+ deleteFromOrganization: [
+ "DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}"
+ ],
+ deleteOrgSecret: ["DELETE /orgs/{org}/codespaces/secrets/{secret_name}"],
+ deleteRepoSecret: [
+ "DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"
+ ],
+ deleteSecretForAuthenticatedUser: [
+ "DELETE /user/codespaces/secrets/{secret_name}"
+ ],
+ exportForAuthenticatedUser: [
+ "POST /user/codespaces/{codespace_name}/exports"
+ ],
+ getCodespacesForUserInOrg: [
+ "GET /orgs/{org}/members/{username}/codespaces"
+ ],
+ getExportDetailsForAuthenticatedUser: [
+ "GET /user/codespaces/{codespace_name}/exports/{export_id}"
+ ],
+ getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"],
+ getOrgPublicKey: ["GET /orgs/{org}/codespaces/secrets/public-key"],
+ getOrgSecret: ["GET /orgs/{org}/codespaces/secrets/{secret_name}"],
+ getPublicKeyForAuthenticatedUser: [
+ "GET /user/codespaces/secrets/public-key"
+ ],
+ getRepoPublicKey: [
+ "GET /repos/{owner}/{repo}/codespaces/secrets/public-key"
+ ],
+ getRepoSecret: [
+ "GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"
+ ],
+ getSecretForAuthenticatedUser: [
+ "GET /user/codespaces/secrets/{secret_name}"
+ ],
+ listDevcontainersInRepositoryForAuthenticatedUser: [
+ "GET /repos/{owner}/{repo}/codespaces/devcontainers"
+ ],
+ listForAuthenticatedUser: ["GET /user/codespaces"],
+ listInOrganization: [
+ "GET /orgs/{org}/codespaces",
+ {},
+ { renamedParameters: { org_id: "org" } }
+ ],
+ listInRepositoryForAuthenticatedUser: [
+ "GET /repos/{owner}/{repo}/codespaces"
+ ],
+ listOrgSecrets: ["GET /orgs/{org}/codespaces/secrets"],
+ listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"],
+ listRepositoriesForSecretForAuthenticatedUser: [
+ "GET /user/codespaces/secrets/{secret_name}/repositories"
+ ],
+ listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"],
+ listSelectedReposForOrgSecret: [
+ "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories"
+ ],
+ preFlightWithRepoForAuthenticatedUser: [
+ "GET /repos/{owner}/{repo}/codespaces/new"
+ ],
+ publishForAuthenticatedUser: [
+ "POST /user/codespaces/{codespace_name}/publish"
+ ],
+ removeRepositoryForSecretForAuthenticatedUser: [
+ "DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"
+ ],
+ removeSelectedRepoFromOrgSecret: [
+ "DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}"
+ ],
+ repoMachinesForAuthenticatedUser: [
+ "GET /repos/{owner}/{repo}/codespaces/machines"
+ ],
+ setRepositoriesForSecretForAuthenticatedUser: [
+ "PUT /user/codespaces/secrets/{secret_name}/repositories"
+ ],
+ setSelectedReposForOrgSecret: [
+ "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories"
+ ],
+ startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"],
+ stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"],
+ stopInOrganization: [
+ "POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop"
+ ],
+ updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"]
+ },
+ copilot: {
+ addCopilotForBusinessSeatsForTeams: [
+ "POST /orgs/{org}/copilot/billing/selected_teams"
+ ],
+ addCopilotForBusinessSeatsForUsers: [
+ "POST /orgs/{org}/copilot/billing/selected_users"
+ ],
+ cancelCopilotSeatAssignmentForTeams: [
+ "DELETE /orgs/{org}/copilot/billing/selected_teams"
+ ],
+ cancelCopilotSeatAssignmentForUsers: [
+ "DELETE /orgs/{org}/copilot/billing/selected_users"
+ ],
+ getCopilotOrganizationDetails: ["GET /orgs/{org}/copilot/billing"],
+ getCopilotSeatAssignmentDetailsForUser: [
+ "GET /orgs/{org}/members/{username}/copilot"
+ ],
+ listCopilotSeats: ["GET /orgs/{org}/copilot/billing/seats"]
+ },
+ dependabot: {
+ addSelectedRepoToOrgSecret: [
+ "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"
+ ],
+ createOrUpdateOrgSecret: [
+ "PUT /orgs/{org}/dependabot/secrets/{secret_name}"
+ ],
+ createOrUpdateRepoSecret: [
+ "PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"
+ ],
+ deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"],
+ deleteRepoSecret: [
+ "DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"
+ ],
+ getAlert: ["GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}"],
+ getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"],
+ getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"],
+ getRepoPublicKey: [
+ "GET /repos/{owner}/{repo}/dependabot/secrets/public-key"
+ ],
+ getRepoSecret: [
+ "GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"
+ ],
+ listAlertsForEnterprise: [
+ "GET /enterprises/{enterprise}/dependabot/alerts"
+ ],
+ listAlertsForOrg: ["GET /orgs/{org}/dependabot/alerts"],
+ listAlertsForRepo: ["GET /repos/{owner}/{repo}/dependabot/alerts"],
+ listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"],
+ listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"],
+ listSelectedReposForOrgSecret: [
+ "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories"
+ ],
+ removeSelectedRepoFromOrgSecret: [
+ "DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"
+ ],
+ setSelectedReposForOrgSecret: [
+ "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories"
+ ],
+ updateAlert: [
+ "PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}"
+ ]
+ },
+ dependencyGraph: {
+ createRepositorySnapshot: [
+ "POST /repos/{owner}/{repo}/dependency-graph/snapshots"
+ ],
+ diffRange: [
+ "GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}"
+ ],
+ exportSbom: ["GET /repos/{owner}/{repo}/dependency-graph/sbom"]
+ },
+ emojis: { get: ["GET /emojis"] },
+ gists: {
+ checkIsStarred: ["GET /gists/{gist_id}/star"],
+ create: ["POST /gists"],
+ createComment: ["POST /gists/{gist_id}/comments"],
+ delete: ["DELETE /gists/{gist_id}"],
+ deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"],
+ fork: ["POST /gists/{gist_id}/forks"],
+ get: ["GET /gists/{gist_id}"],
+ getComment: ["GET /gists/{gist_id}/comments/{comment_id}"],
+ getRevision: ["GET /gists/{gist_id}/{sha}"],
+ list: ["GET /gists"],
+ listComments: ["GET /gists/{gist_id}/comments"],
+ listCommits: ["GET /gists/{gist_id}/commits"],
+ listForUser: ["GET /users/{username}/gists"],
+ listForks: ["GET /gists/{gist_id}/forks"],
+ listPublic: ["GET /gists/public"],
+ listStarred: ["GET /gists/starred"],
+ star: ["PUT /gists/{gist_id}/star"],
+ unstar: ["DELETE /gists/{gist_id}/star"],
+ update: ["PATCH /gists/{gist_id}"],
+ updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"]
+ },
+ git: {
+ createBlob: ["POST /repos/{owner}/{repo}/git/blobs"],
+ createCommit: ["POST /repos/{owner}/{repo}/git/commits"],
+ createRef: ["POST /repos/{owner}/{repo}/git/refs"],
+ createTag: ["POST /repos/{owner}/{repo}/git/tags"],
+ createTree: ["POST /repos/{owner}/{repo}/git/trees"],
+ deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"],
+ getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"],
+ getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"],
+ getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"],
+ getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"],
+ getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"],
+ listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"],
+ updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"]
+ },
+ gitignore: {
+ getAllTemplates: ["GET /gitignore/templates"],
+ getTemplate: ["GET /gitignore/templates/{name}"]
+ },
+ interactions: {
+ getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"],
+ getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"],
+ getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"],
+ getRestrictionsForYourPublicRepos: [
+ "GET /user/interaction-limits",
+ {},
+ { renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] }
+ ],
+ removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"],
+ removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"],
+ removeRestrictionsForRepo: [
+ "DELETE /repos/{owner}/{repo}/interaction-limits"
+ ],
+ removeRestrictionsForYourPublicRepos: [
+ "DELETE /user/interaction-limits",
+ {},
+ { renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] }
+ ],
+ setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"],
+ setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"],
+ setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"],
+ setRestrictionsForYourPublicRepos: [
+ "PUT /user/interaction-limits",
+ {},
+ { renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] }
+ ]
+ },
+ issues: {
+ addAssignees: [
+ "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"
+ ],
+ addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"],
+ checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"],
+ checkUserCanBeAssignedToIssue: [
+ "GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}"
+ ],
+ create: ["POST /repos/{owner}/{repo}/issues"],
+ createComment: [
+ "POST /repos/{owner}/{repo}/issues/{issue_number}/comments"
+ ],
+ createLabel: ["POST /repos/{owner}/{repo}/labels"],
+ createMilestone: ["POST /repos/{owner}/{repo}/milestones"],
+ deleteComment: [
+ "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"
+ ],
+ deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"],
+ deleteMilestone: [
+ "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"
+ ],
+ get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"],
+ getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"],
+ getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"],
+ getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"],
+ getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"],
+ list: ["GET /issues"],
+ listAssignees: ["GET /repos/{owner}/{repo}/assignees"],
+ listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"],
+ listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"],
+ listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"],
+ listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"],
+ listEventsForTimeline: [
+ "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline"
+ ],
+ listForAuthenticatedUser: ["GET /user/issues"],
+ listForOrg: ["GET /orgs/{org}/issues"],
+ listForRepo: ["GET /repos/{owner}/{repo}/issues"],
+ listLabelsForMilestone: [
+ "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"
+ ],
+ listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"],
+ listLabelsOnIssue: [
+ "GET /repos/{owner}/{repo}/issues/{issue_number}/labels"
+ ],
+ listMilestones: ["GET /repos/{owner}/{repo}/milestones"],
+ lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"],
+ removeAllLabels: [
+ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"
+ ],
+ removeAssignees: [
+ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"
+ ],
+ removeLabel: [
+ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"
+ ],
+ setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"],
+ unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"],
+ update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"],
+ updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"],
+ updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"],
+ updateMilestone: [
+ "PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"
+ ]
+ },
+ licenses: {
+ get: ["GET /licenses/{license}"],
+ getAllCommonlyUsed: ["GET /licenses"],
+ getForRepo: ["GET /repos/{owner}/{repo}/license"]
+ },
+ markdown: {
+ render: ["POST /markdown"],
+ renderRaw: [
+ "POST /markdown/raw",
+ { headers: { "content-type": "text/plain; charset=utf-8" } }
+ ]
+ },
+ meta: {
+ get: ["GET /meta"],
+ getAllVersions: ["GET /versions"],
+ getOctocat: ["GET /octocat"],
+ getZen: ["GET /zen"],
+ root: ["GET /"]
+ },
+ migrations: {
+ cancelImport: ["DELETE /repos/{owner}/{repo}/import"],
+ deleteArchiveForAuthenticatedUser: [
+ "DELETE /user/migrations/{migration_id}/archive"
+ ],
+ deleteArchiveForOrg: [
+ "DELETE /orgs/{org}/migrations/{migration_id}/archive"
+ ],
+ downloadArchiveForOrg: [
+ "GET /orgs/{org}/migrations/{migration_id}/archive"
+ ],
+ getArchiveForAuthenticatedUser: [
+ "GET /user/migrations/{migration_id}/archive"
+ ],
+ getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"],
+ getImportStatus: ["GET /repos/{owner}/{repo}/import"],
+ getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"],
+ getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"],
+ getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"],
+ listForAuthenticatedUser: ["GET /user/migrations"],
+ listForOrg: ["GET /orgs/{org}/migrations"],
+ listReposForAuthenticatedUser: [
+ "GET /user/migrations/{migration_id}/repositories"
+ ],
+ listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories"],
+ listReposForUser: [
+ "GET /user/migrations/{migration_id}/repositories",
+ {},
+ { renamed: ["migrations", "listReposForAuthenticatedUser"] }
+ ],
+ mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"],
+ setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"],
+ startForAuthenticatedUser: ["POST /user/migrations"],
+ startForOrg: ["POST /orgs/{org}/migrations"],
+ startImport: ["PUT /repos/{owner}/{repo}/import"],
+ unlockRepoForAuthenticatedUser: [
+ "DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock"
+ ],
+ unlockRepoForOrg: [
+ "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock"
+ ],
+ updateImport: ["PATCH /repos/{owner}/{repo}/import"]
+ },
+ orgs: {
+ addSecurityManagerTeam: [
+ "PUT /orgs/{org}/security-managers/teams/{team_slug}"
+ ],
+ blockUser: ["PUT /orgs/{org}/blocks/{username}"],
+ cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"],
+ checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"],
+ checkMembershipForUser: ["GET /orgs/{org}/members/{username}"],
+ checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"],
+ convertMemberToOutsideCollaborator: [
+ "PUT /orgs/{org}/outside_collaborators/{username}"
+ ],
+ createInvitation: ["POST /orgs/{org}/invitations"],
+ createWebhook: ["POST /orgs/{org}/hooks"],
+ delete: ["DELETE /orgs/{org}"],
+ deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"],
+ enableOrDisableSecurityProductOnAllOrgRepos: [
+ "POST /orgs/{org}/{security_product}/{enablement}"
+ ],
+ get: ["GET /orgs/{org}"],
+ getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"],
+ getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"],
+ getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"],
+ getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"],
+ getWebhookDelivery: [
+ "GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}"
+ ],
+ list: ["GET /organizations"],
+ listAppInstallations: ["GET /orgs/{org}/installations"],
+ listBlockedUsers: ["GET /orgs/{org}/blocks"],
+ listFailedInvitations: ["GET /orgs/{org}/failed_invitations"],
+ listForAuthenticatedUser: ["GET /user/orgs"],
+ listForUser: ["GET /users/{username}/orgs"],
+ listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"],
+ listMembers: ["GET /orgs/{org}/members"],
+ listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"],
+ listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"],
+ listPatGrantRepositories: [
+ "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories"
+ ],
+ listPatGrantRequestRepositories: [
+ "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories"
+ ],
+ listPatGrantRequests: ["GET /orgs/{org}/personal-access-token-requests"],
+ listPatGrants: ["GET /orgs/{org}/personal-access-tokens"],
+ listPendingInvitations: ["GET /orgs/{org}/invitations"],
+ listPublicMembers: ["GET /orgs/{org}/public_members"],
+ listSecurityManagerTeams: ["GET /orgs/{org}/security-managers"],
+ listWebhookDeliveries: ["GET /orgs/{org}/hooks/{hook_id}/deliveries"],
+ listWebhooks: ["GET /orgs/{org}/hooks"],
+ pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"],
+ redeliverWebhookDelivery: [
+ "POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"
+ ],
+ removeMember: ["DELETE /orgs/{org}/members/{username}"],
+ removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"],
+ removeOutsideCollaborator: [
+ "DELETE /orgs/{org}/outside_collaborators/{username}"
+ ],
+ removePublicMembershipForAuthenticatedUser: [
+ "DELETE /orgs/{org}/public_members/{username}"
+ ],
+ removeSecurityManagerTeam: [
+ "DELETE /orgs/{org}/security-managers/teams/{team_slug}"
+ ],
+ reviewPatGrantRequest: [
+ "POST /orgs/{org}/personal-access-token-requests/{pat_request_id}"
+ ],
+ reviewPatGrantRequestsInBulk: [
+ "POST /orgs/{org}/personal-access-token-requests"
+ ],
+ setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"],
+ setPublicMembershipForAuthenticatedUser: [
+ "PUT /orgs/{org}/public_members/{username}"
+ ],
+ unblockUser: ["DELETE /orgs/{org}/blocks/{username}"],
+ update: ["PATCH /orgs/{org}"],
+ updateMembershipForAuthenticatedUser: [
+ "PATCH /user/memberships/orgs/{org}"
+ ],
+ updatePatAccess: ["POST /orgs/{org}/personal-access-tokens/{pat_id}"],
+ updatePatAccesses: ["POST /orgs/{org}/personal-access-tokens"],
+ updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"],
+ updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"]
+ },
+ packages: {
+ deletePackageForAuthenticatedUser: [
+ "DELETE /user/packages/{package_type}/{package_name}"
+ ],
+ deletePackageForOrg: [
+ "DELETE /orgs/{org}/packages/{package_type}/{package_name}"
+ ],
+ deletePackageForUser: [
+ "DELETE /users/{username}/packages/{package_type}/{package_name}"
+ ],
+ deletePackageVersionForAuthenticatedUser: [
+ "DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}"
+ ],
+ deletePackageVersionForOrg: [
+ "DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"
+ ],
+ deletePackageVersionForUser: [
+ "DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"
+ ],
+ getAllPackageVersionsForAPackageOwnedByAnOrg: [
+ "GET /orgs/{org}/packages/{package_type}/{package_name}/versions",
+ {},
+ { renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] }
+ ],
+ getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [
+ "GET /user/packages/{package_type}/{package_name}/versions",
+ {},
+ {
+ renamed: [
+ "packages",
+ "getAllPackageVersionsForPackageOwnedByAuthenticatedUser"
+ ]
+ }
+ ],
+ getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [
+ "GET /user/packages/{package_type}/{package_name}/versions"
+ ],
+ getAllPackageVersionsForPackageOwnedByOrg: [
+ "GET /orgs/{org}/packages/{package_type}/{package_name}/versions"
+ ],
+ getAllPackageVersionsForPackageOwnedByUser: [
+ "GET /users/{username}/packages/{package_type}/{package_name}/versions"
+ ],
+ getPackageForAuthenticatedUser: [
+ "GET /user/packages/{package_type}/{package_name}"
+ ],
+ getPackageForOrganization: [
+ "GET /orgs/{org}/packages/{package_type}/{package_name}"
+ ],
+ getPackageForUser: [
+ "GET /users/{username}/packages/{package_type}/{package_name}"
+ ],
+ getPackageVersionForAuthenticatedUser: [
+ "GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}"
+ ],
+ getPackageVersionForOrganization: [
+ "GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"
+ ],
+ getPackageVersionForUser: [
+ "GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"
+ ],
+ listDockerMigrationConflictingPackagesForAuthenticatedUser: [
+ "GET /user/docker/conflicts"
+ ],
+ listDockerMigrationConflictingPackagesForOrganization: [
+ "GET /orgs/{org}/docker/conflicts"
+ ],
+ listDockerMigrationConflictingPackagesForUser: [
+ "GET /users/{username}/docker/conflicts"
+ ],
+ listPackagesForAuthenticatedUser: ["GET /user/packages"],
+ listPackagesForOrganization: ["GET /orgs/{org}/packages"],
+ listPackagesForUser: ["GET /users/{username}/packages"],
+ restorePackageForAuthenticatedUser: [
+ "POST /user/packages/{package_type}/{package_name}/restore{?token}"
+ ],
+ restorePackageForOrg: [
+ "POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}"
+ ],
+ restorePackageForUser: [
+ "POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}"
+ ],
+ restorePackageVersionForAuthenticatedUser: [
+ "POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"
+ ],
+ restorePackageVersionForOrg: [
+ "POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"
+ ],
+ restorePackageVersionForUser: [
+ "POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"
+ ]
+ },
+ projects: {
+ addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}"],
+ createCard: ["POST /projects/columns/{column_id}/cards"],
+ createColumn: ["POST /projects/{project_id}/columns"],
+ createForAuthenticatedUser: ["POST /user/projects"],
+ createForOrg: ["POST /orgs/{org}/projects"],
+ createForRepo: ["POST /repos/{owner}/{repo}/projects"],
+ delete: ["DELETE /projects/{project_id}"],
+ deleteCard: ["DELETE /projects/columns/cards/{card_id}"],
+ deleteColumn: ["DELETE /projects/columns/{column_id}"],
+ get: ["GET /projects/{project_id}"],
+ getCard: ["GET /projects/columns/cards/{card_id}"],
+ getColumn: ["GET /projects/columns/{column_id}"],
+ getPermissionForUser: [
+ "GET /projects/{project_id}/collaborators/{username}/permission"
+ ],
+ listCards: ["GET /projects/columns/{column_id}/cards"],
+ listCollaborators: ["GET /projects/{project_id}/collaborators"],
+ listColumns: ["GET /projects/{project_id}/columns"],
+ listForOrg: ["GET /orgs/{org}/projects"],
+ listForRepo: ["GET /repos/{owner}/{repo}/projects"],
+ listForUser: ["GET /users/{username}/projects"],
+ moveCard: ["POST /projects/columns/cards/{card_id}/moves"],
+ moveColumn: ["POST /projects/columns/{column_id}/moves"],
+ removeCollaborator: [
+ "DELETE /projects/{project_id}/collaborators/{username}"
+ ],
+ update: ["PATCH /projects/{project_id}"],
+ updateCard: ["PATCH /projects/columns/cards/{card_id}"],
+ updateColumn: ["PATCH /projects/columns/{column_id}"]
+ },
+ pulls: {
+ checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"],
+ create: ["POST /repos/{owner}/{repo}/pulls"],
+ createReplyForReviewComment: [
+ "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"
+ ],
+ createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"],
+ createReviewComment: [
+ "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"
+ ],
+ deletePendingReview: [
+ "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"
+ ],
+ deleteReviewComment: [
+ "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"
+ ],
+ dismissReview: [
+ "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"
+ ],
+ get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"],
+ getReview: [
+ "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"
+ ],
+ getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"],
+ list: ["GET /repos/{owner}/{repo}/pulls"],
+ listCommentsForReview: [
+ "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"
+ ],
+ listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"],
+ listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"],
+ listRequestedReviewers: [
+ "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"
+ ],
+ listReviewComments: [
+ "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"
+ ],
+ listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"],
+ listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"],
+ merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"],
+ removeRequestedReviewers: [
+ "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"
+ ],
+ requestReviewers: [
+ "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"
+ ],
+ submitReview: [
+ "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"
+ ],
+ update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"],
+ updateBranch: [
+ "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch"
+ ],
+ updateReview: [
+ "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"
+ ],
+ updateReviewComment: [
+ "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"
+ ]
+ },
+ rateLimit: { get: ["GET /rate_limit"] },
+ reactions: {
+ createForCommitComment: [
+ "POST /repos/{owner}/{repo}/comments/{comment_id}/reactions"
+ ],
+ createForIssue: [
+ "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions"
+ ],
+ createForIssueComment: [
+ "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"
+ ],
+ createForPullRequestReviewComment: [
+ "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"
+ ],
+ createForRelease: [
+ "POST /repos/{owner}/{repo}/releases/{release_id}/reactions"
+ ],
+ createForTeamDiscussionCommentInOrg: [
+ "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"
+ ],
+ createForTeamDiscussionInOrg: [
+ "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"
+ ],
+ deleteForCommitComment: [
+ "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}"
+ ],
+ deleteForIssue: [
+ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}"
+ ],
+ deleteForIssueComment: [
+ "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}"
+ ],
+ deleteForPullRequestComment: [
+ "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}"
+ ],
+ deleteForRelease: [
+ "DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}"
+ ],
+ deleteForTeamDiscussion: [
+ "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}"
+ ],
+ deleteForTeamDiscussionComment: [
+ "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}"
+ ],
+ listForCommitComment: [
+ "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions"
+ ],
+ listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"],
+ listForIssueComment: [
+ "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"
+ ],
+ listForPullRequestReviewComment: [
+ "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"
+ ],
+ listForRelease: [
+ "GET /repos/{owner}/{repo}/releases/{release_id}/reactions"
+ ],
+ listForTeamDiscussionCommentInOrg: [
+ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"
+ ],
+ listForTeamDiscussionInOrg: [
+ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"
+ ]
+ },
+ repos: {
+ acceptInvitation: [
+ "PATCH /user/repository_invitations/{invitation_id}",
+ {},
+ { renamed: ["repos", "acceptInvitationForAuthenticatedUser"] }
+ ],
+ acceptInvitationForAuthenticatedUser: [
+ "PATCH /user/repository_invitations/{invitation_id}"
+ ],
+ addAppAccessRestrictions: [
+ "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps",
+ {},
+ { mapToData: "apps" }
+ ],
+ addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"],
+ addStatusCheckContexts: [
+ "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts",
+ {},
+ { mapToData: "contexts" }
+ ],
+ addTeamAccessRestrictions: [
+ "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams",
+ {},
+ { mapToData: "teams" }
+ ],
+ addUserAccessRestrictions: [
+ "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users",
+ {},
+ { mapToData: "users" }
+ ],
+ checkAutomatedSecurityFixes: [
+ "GET /repos/{owner}/{repo}/automated-security-fixes"
+ ],
+ checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"],
+ checkVulnerabilityAlerts: [
+ "GET /repos/{owner}/{repo}/vulnerability-alerts"
+ ],
+ codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"],
+ compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"],
+ compareCommitsWithBasehead: [
+ "GET /repos/{owner}/{repo}/compare/{basehead}"
+ ],
+ createAutolink: ["POST /repos/{owner}/{repo}/autolinks"],
+ createCommitComment: [
+ "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"
+ ],
+ createCommitSignatureProtection: [
+ "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"
+ ],
+ createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"],
+ createDeployKey: ["POST /repos/{owner}/{repo}/keys"],
+ createDeployment: ["POST /repos/{owner}/{repo}/deployments"],
+ createDeploymentBranchPolicy: [
+ "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies"
+ ],
+ createDeploymentProtectionRule: [
+ "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules"
+ ],
+ createDeploymentStatus: [
+ "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"
+ ],
+ createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"],
+ createForAuthenticatedUser: ["POST /user/repos"],
+ createFork: ["POST /repos/{owner}/{repo}/forks"],
+ createInOrg: ["POST /orgs/{org}/repos"],
+ createOrUpdateEnvironment: [
+ "PUT /repos/{owner}/{repo}/environments/{environment_name}"
+ ],
+ createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"],
+ createOrgRuleset: ["POST /orgs/{org}/rulesets"],
+ createPagesDeployment: ["POST /repos/{owner}/{repo}/pages/deployment"],
+ createPagesSite: ["POST /repos/{owner}/{repo}/pages"],
+ createRelease: ["POST /repos/{owner}/{repo}/releases"],
+ createRepoRuleset: ["POST /repos/{owner}/{repo}/rulesets"],
+ createTagProtection: ["POST /repos/{owner}/{repo}/tags/protection"],
+ createUsingTemplate: [
+ "POST /repos/{template_owner}/{template_repo}/generate"
+ ],
+ createWebhook: ["POST /repos/{owner}/{repo}/hooks"],
+ declineInvitation: [
+ "DELETE /user/repository_invitations/{invitation_id}",
+ {},
+ { renamed: ["repos", "declineInvitationForAuthenticatedUser"] }
+ ],
+ declineInvitationForAuthenticatedUser: [
+ "DELETE /user/repository_invitations/{invitation_id}"
+ ],
+ delete: ["DELETE /repos/{owner}/{repo}"],
+ deleteAccessRestrictions: [
+ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"
+ ],
+ deleteAdminBranchProtection: [
+ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"
+ ],
+ deleteAnEnvironment: [
+ "DELETE /repos/{owner}/{repo}/environments/{environment_name}"
+ ],
+ deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"],
+ deleteBranchProtection: [
+ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection"
+ ],
+ deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"],
+ deleteCommitSignatureProtection: [
+ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"
+ ],
+ deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"],
+ deleteDeployment: [
+ "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"
+ ],
+ deleteDeploymentBranchPolicy: [
+ "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}"
+ ],
+ deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"],
+ deleteInvitation: [
+ "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"
+ ],
+ deleteOrgRuleset: ["DELETE /orgs/{org}/rulesets/{ruleset_id}"],
+ deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"],
+ deletePullRequestReviewProtection: [
+ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"
+ ],
+ deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"],
+ deleteReleaseAsset: [
+ "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"
+ ],
+ deleteRepoRuleset: ["DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}"],
+ deleteTagProtection: [
+ "DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}"
+ ],
+ deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"],
+ disableAutomatedSecurityFixes: [
+ "DELETE /repos/{owner}/{repo}/automated-security-fixes"
+ ],
+ disableDeploymentProtectionRule: [
+ "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}"
+ ],
+ disablePrivateVulnerabilityReporting: [
+ "DELETE /repos/{owner}/{repo}/private-vulnerability-reporting"
+ ],
+ disableVulnerabilityAlerts: [
+ "DELETE /repos/{owner}/{repo}/vulnerability-alerts"
+ ],
+ downloadArchive: [
+ "GET /repos/{owner}/{repo}/zipball/{ref}",
+ {},
+ { renamed: ["repos", "downloadZipballArchive"] }
+ ],
+ downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"],
+ downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"],
+ enableAutomatedSecurityFixes: [
+ "PUT /repos/{owner}/{repo}/automated-security-fixes"
+ ],
+ enablePrivateVulnerabilityReporting: [
+ "PUT /repos/{owner}/{repo}/private-vulnerability-reporting"
+ ],
+ enableVulnerabilityAlerts: [
+ "PUT /repos/{owner}/{repo}/vulnerability-alerts"
+ ],
+ generateReleaseNotes: [
+ "POST /repos/{owner}/{repo}/releases/generate-notes"
+ ],
+ get: ["GET /repos/{owner}/{repo}"],
+ getAccessRestrictions: [
+ "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"
+ ],
+ getAdminBranchProtection: [
+ "GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"
+ ],
+ getAllDeploymentProtectionRules: [
+ "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules"
+ ],
+ getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"],
+ getAllStatusCheckContexts: [
+ "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"
+ ],
+ getAllTopics: ["GET /repos/{owner}/{repo}/topics"],
+ getAppsWithAccessToProtectedBranch: [
+ "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"
+ ],
+ getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"],
+ getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"],
+ getBranchProtection: [
+ "GET /repos/{owner}/{repo}/branches/{branch}/protection"
+ ],
+ getBranchRules: ["GET /repos/{owner}/{repo}/rules/branches/{branch}"],
+ getClones: ["GET /repos/{owner}/{repo}/traffic/clones"],
+ getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"],
+ getCollaboratorPermissionLevel: [
+ "GET /repos/{owner}/{repo}/collaborators/{username}/permission"
+ ],
+ getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"],
+ getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"],
+ getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"],
+ getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"],
+ getCommitSignatureProtection: [
+ "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"
+ ],
+ getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"],
+ getContent: ["GET /repos/{owner}/{repo}/contents/{path}"],
+ getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"],
+ getCustomDeploymentProtectionRule: [
+ "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}"
+ ],
+ getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"],
+ getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"],
+ getDeploymentBranchPolicy: [
+ "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}"
+ ],
+ getDeploymentStatus: [
+ "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"
+ ],
+ getEnvironment: [
+ "GET /repos/{owner}/{repo}/environments/{environment_name}"
+ ],
+ getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"],
+ getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"],
+ getOrgRuleset: ["GET /orgs/{org}/rulesets/{ruleset_id}"],
+ getOrgRulesets: ["GET /orgs/{org}/rulesets"],
+ getPages: ["GET /repos/{owner}/{repo}/pages"],
+ getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"],
+ getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"],
+ getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"],
+ getPullRequestReviewProtection: [
+ "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"
+ ],
+ getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"],
+ getReadme: ["GET /repos/{owner}/{repo}/readme"],
+ getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"],
+ getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"],
+ getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"],
+ getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"],
+ getRepoRuleset: ["GET /repos/{owner}/{repo}/rulesets/{ruleset_id}"],
+ getRepoRulesets: ["GET /repos/{owner}/{repo}/rulesets"],
+ getStatusChecksProtection: [
+ "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"
+ ],
+ getTeamsWithAccessToProtectedBranch: [
+ "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"
+ ],
+ getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"],
+ getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"],
+ getUsersWithAccessToProtectedBranch: [
+ "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"
+ ],
+ getViews: ["GET /repos/{owner}/{repo}/traffic/views"],
+ getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"],
+ getWebhookConfigForRepo: [
+ "GET /repos/{owner}/{repo}/hooks/{hook_id}/config"
+ ],
+ getWebhookDelivery: [
+ "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}"
+ ],
+ listActivities: ["GET /repos/{owner}/{repo}/activity"],
+ listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"],
+ listBranches: ["GET /repos/{owner}/{repo}/branches"],
+ listBranchesForHeadCommit: [
+ "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head"
+ ],
+ listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"],
+ listCommentsForCommit: [
+ "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"
+ ],
+ listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"],
+ listCommitStatusesForRef: [
+ "GET /repos/{owner}/{repo}/commits/{ref}/statuses"
+ ],
+ listCommits: ["GET /repos/{owner}/{repo}/commits"],
+ listContributors: ["GET /repos/{owner}/{repo}/contributors"],
+ listCustomDeploymentRuleIntegrations: [
+ "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps"
+ ],
+ listDeployKeys: ["GET /repos/{owner}/{repo}/keys"],
+ listDeploymentBranchPolicies: [
+ "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies"
+ ],
+ listDeploymentStatuses: [
+ "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"
+ ],
+ listDeployments: ["GET /repos/{owner}/{repo}/deployments"],
+ listForAuthenticatedUser: ["GET /user/repos"],
+ listForOrg: ["GET /orgs/{org}/repos"],
+ listForUser: ["GET /users/{username}/repos"],
+ listForks: ["GET /repos/{owner}/{repo}/forks"],
+ listInvitations: ["GET /repos/{owner}/{repo}/invitations"],
+ listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"],
+ listLanguages: ["GET /repos/{owner}/{repo}/languages"],
+ listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"],
+ listPublic: ["GET /repositories"],
+ listPullRequestsAssociatedWithCommit: [
+ "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls"
+ ],
+ listReleaseAssets: [
+ "GET /repos/{owner}/{repo}/releases/{release_id}/assets"
+ ],
+ listReleases: ["GET /repos/{owner}/{repo}/releases"],
+ listTagProtection: ["GET /repos/{owner}/{repo}/tags/protection"],
+ listTags: ["GET /repos/{owner}/{repo}/tags"],
+ listTeams: ["GET /repos/{owner}/{repo}/teams"],
+ listWebhookDeliveries: [
+ "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries"
+ ],
+ listWebhooks: ["GET /repos/{owner}/{repo}/hooks"],
+ merge: ["POST /repos/{owner}/{repo}/merges"],
+ mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"],
+ pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"],
+ redeliverWebhookDelivery: [
+ "POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"
+ ],
+ removeAppAccessRestrictions: [
+ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps",
+ {},
+ { mapToData: "apps" }
+ ],
+ removeCollaborator: [
+ "DELETE /repos/{owner}/{repo}/collaborators/{username}"
+ ],
+ removeStatusCheckContexts: [
+ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts",
+ {},
+ { mapToData: "contexts" }
+ ],
+ removeStatusCheckProtection: [
+ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"
+ ],
+ removeTeamAccessRestrictions: [
+ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams",
+ {},
+ { mapToData: "teams" }
+ ],
+ removeUserAccessRestrictions: [
+ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users",
+ {},
+ { mapToData: "users" }
+ ],
+ renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"],
+ replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"],
+ requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"],
+ setAdminBranchProtection: [
+ "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"
+ ],
+ setAppAccessRestrictions: [
+ "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps",
+ {},
+ { mapToData: "apps" }
+ ],
+ setStatusCheckContexts: [
+ "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts",
+ {},
+ { mapToData: "contexts" }
+ ],
+ setTeamAccessRestrictions: [
+ "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams",
+ {},
+ { mapToData: "teams" }
+ ],
+ setUserAccessRestrictions: [
+ "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users",
+ {},
+ { mapToData: "users" }
+ ],
+ testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"],
+ transfer: ["POST /repos/{owner}/{repo}/transfer"],
+ update: ["PATCH /repos/{owner}/{repo}"],
+ updateBranchProtection: [
+ "PUT /repos/{owner}/{repo}/branches/{branch}/protection"
+ ],
+ updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"],
+ updateDeploymentBranchPolicy: [
+ "PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}"
+ ],
+ updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"],
+ updateInvitation: [
+ "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"
+ ],
+ updateOrgRuleset: ["PUT /orgs/{org}/rulesets/{ruleset_id}"],
+ updatePullRequestReviewProtection: [
+ "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"
+ ],
+ updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"],
+ updateReleaseAsset: [
+ "PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"
+ ],
+ updateRepoRuleset: ["PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}"],
+ updateStatusCheckPotection: [
+ "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks",
+ {},
+ { renamed: ["repos", "updateStatusCheckProtection"] }
+ ],
+ updateStatusCheckProtection: [
+ "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"
+ ],
+ updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"],
+ updateWebhookConfigForRepo: [
+ "PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config"
+ ],
+ uploadReleaseAsset: [
+ "POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}",
+ { baseUrl: "https://uploads.github.com" }
+ ]
+ },
+ search: {
+ code: ["GET /search/code"],
+ commits: ["GET /search/commits"],
+ issuesAndPullRequests: ["GET /search/issues"],
+ labels: ["GET /search/labels"],
+ repos: ["GET /search/repositories"],
+ topics: ["GET /search/topics"],
+ users: ["GET /search/users"]
+ },
+ secretScanning: {
+ getAlert: [
+ "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"
+ ],
+ listAlertsForEnterprise: [
+ "GET /enterprises/{enterprise}/secret-scanning/alerts"
+ ],
+ listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"],
+ listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"],
+ listLocationsForAlert: [
+ "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations"
+ ],
+ updateAlert: [
+ "PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"
+ ]
+ },
+ securityAdvisories: {
+ createPrivateVulnerabilityReport: [
+ "POST /repos/{owner}/{repo}/security-advisories/reports"
+ ],
+ createRepositoryAdvisory: [
+ "POST /repos/{owner}/{repo}/security-advisories"
+ ],
+ createRepositoryAdvisoryCveRequest: [
+ "POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/cve"
+ ],
+ getGlobalAdvisory: ["GET /advisories/{ghsa_id}"],
+ getRepositoryAdvisory: [
+ "GET /repos/{owner}/{repo}/security-advisories/{ghsa_id}"
+ ],
+ listGlobalAdvisories: ["GET /advisories"],
+ listOrgRepositoryAdvisories: ["GET /orgs/{org}/security-advisories"],
+ listRepositoryAdvisories: ["GET /repos/{owner}/{repo}/security-advisories"],
+ updateRepositoryAdvisory: [
+ "PATCH /repos/{owner}/{repo}/security-advisories/{ghsa_id}"
+ ]
+ },
+ teams: {
+ addOrUpdateMembershipForUserInOrg: [
+ "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"
+ ],
+ addOrUpdateProjectPermissionsInOrg: [
+ "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}"
+ ],
+ addOrUpdateRepoPermissionsInOrg: [
+ "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"
+ ],
+ checkPermissionsForProjectInOrg: [
+ "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}"
+ ],
+ checkPermissionsForRepoInOrg: [
+ "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"
+ ],
+ create: ["POST /orgs/{org}/teams"],
+ createDiscussionCommentInOrg: [
+ "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"
+ ],
+ createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"],
+ deleteDiscussionCommentInOrg: [
+ "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"
+ ],
+ deleteDiscussionInOrg: [
+ "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"
+ ],
+ deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"],
+ getByName: ["GET /orgs/{org}/teams/{team_slug}"],
+ getDiscussionCommentInOrg: [
+ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"
+ ],
+ getDiscussionInOrg: [
+ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"
+ ],
+ getMembershipForUserInOrg: [
+ "GET /orgs/{org}/teams/{team_slug}/memberships/{username}"
+ ],
+ list: ["GET /orgs/{org}/teams"],
+ listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"],
+ listDiscussionCommentsInOrg: [
+ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"
+ ],
+ listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"],
+ listForAuthenticatedUser: ["GET /user/teams"],
+ listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"],
+ listPendingInvitationsInOrg: [
+ "GET /orgs/{org}/teams/{team_slug}/invitations"
+ ],
+ listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"],
+ listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"],
+ removeMembershipForUserInOrg: [
+ "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"
+ ],
+ removeProjectInOrg: [
+ "DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"
+ ],
+ removeRepoInOrg: [
+ "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"
+ ],
+ updateDiscussionCommentInOrg: [
+ "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"
+ ],
+ updateDiscussionInOrg: [
+ "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"
+ ],
+ updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"]
+ },
+ users: {
+ addEmailForAuthenticated: [
+ "POST /user/emails",
+ {},
+ { renamed: ["users", "addEmailForAuthenticatedUser"] }
+ ],
+ addEmailForAuthenticatedUser: ["POST /user/emails"],
+ addSocialAccountForAuthenticatedUser: ["POST /user/social_accounts"],
+ block: ["PUT /user/blocks/{username}"],
+ checkBlocked: ["GET /user/blocks/{username}"],
+ checkFollowingForUser: ["GET /users/{username}/following/{target_user}"],
+ checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"],
+ createGpgKeyForAuthenticated: [
+ "POST /user/gpg_keys",
+ {},
+ { renamed: ["users", "createGpgKeyForAuthenticatedUser"] }
+ ],
+ createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"],
+ createPublicSshKeyForAuthenticated: [
+ "POST /user/keys",
+ {},
+ { renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] }
+ ],
+ createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"],
+ createSshSigningKeyForAuthenticatedUser: ["POST /user/ssh_signing_keys"],
+ deleteEmailForAuthenticated: [
+ "DELETE /user/emails",
+ {},
+ { renamed: ["users", "deleteEmailForAuthenticatedUser"] }
+ ],
+ deleteEmailForAuthenticatedUser: ["DELETE /user/emails"],
+ deleteGpgKeyForAuthenticated: [
+ "DELETE /user/gpg_keys/{gpg_key_id}",
+ {},
+ { renamed: ["users", "deleteGpgKeyForAuthenticatedUser"] }
+ ],
+ deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"],
+ deletePublicSshKeyForAuthenticated: [
+ "DELETE /user/keys/{key_id}",
+ {},
+ { renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] }
+ ],
+ deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"],
+ deleteSocialAccountForAuthenticatedUser: ["DELETE /user/social_accounts"],
+ deleteSshSigningKeyForAuthenticatedUser: [
+ "DELETE /user/ssh_signing_keys/{ssh_signing_key_id}"
+ ],
+ follow: ["PUT /user/following/{username}"],
+ getAuthenticated: ["GET /user"],
+ getByUsername: ["GET /users/{username}"],
+ getContextForUser: ["GET /users/{username}/hovercard"],
+ getGpgKeyForAuthenticated: [
+ "GET /user/gpg_keys/{gpg_key_id}",
+ {},
+ { renamed: ["users", "getGpgKeyForAuthenticatedUser"] }
+ ],
+ getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"],
+ getPublicSshKeyForAuthenticated: [
+ "GET /user/keys/{key_id}",
+ {},
+ { renamed: ["users", "getPublicSshKeyForAuthenticatedUser"] }
+ ],
+ getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"],
+ getSshSigningKeyForAuthenticatedUser: [
+ "GET /user/ssh_signing_keys/{ssh_signing_key_id}"
+ ],
+ list: ["GET /users"],
+ listBlockedByAuthenticated: [
+ "GET /user/blocks",
+ {},
+ { renamed: ["users", "listBlockedByAuthenticatedUser"] }
+ ],
+ listBlockedByAuthenticatedUser: ["GET /user/blocks"],
+ listEmailsForAuthenticated: [
+ "GET /user/emails",
+ {},
+ { renamed: ["users", "listEmailsForAuthenticatedUser"] }
+ ],
+ listEmailsForAuthenticatedUser: ["GET /user/emails"],
+ listFollowedByAuthenticated: [
+ "GET /user/following",
+ {},
+ { renamed: ["users", "listFollowedByAuthenticatedUser"] }
+ ],
+ listFollowedByAuthenticatedUser: ["GET /user/following"],
+ listFollowersForAuthenticatedUser: ["GET /user/followers"],
+ listFollowersForUser: ["GET /users/{username}/followers"],
+ listFollowingForUser: ["GET /users/{username}/following"],
+ listGpgKeysForAuthenticated: [
+ "GET /user/gpg_keys",
+ {},
+ { renamed: ["users", "listGpgKeysForAuthenticatedUser"] }
+ ],
+ listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"],
+ listGpgKeysForUser: ["GET /users/{username}/gpg_keys"],
+ listPublicEmailsForAuthenticated: [
+ "GET /user/public_emails",
+ {},
+ { renamed: ["users", "listPublicEmailsForAuthenticatedUser"] }
+ ],
+ listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"],
+ listPublicKeysForUser: ["GET /users/{username}/keys"],
+ listPublicSshKeysForAuthenticated: [
+ "GET /user/keys",
+ {},
+ { renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] }
+ ],
+ listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"],
+ listSocialAccountsForAuthenticatedUser: ["GET /user/social_accounts"],
+ listSocialAccountsForUser: ["GET /users/{username}/social_accounts"],
+ listSshSigningKeysForAuthenticatedUser: ["GET /user/ssh_signing_keys"],
+ listSshSigningKeysForUser: ["GET /users/{username}/ssh_signing_keys"],
+ setPrimaryEmailVisibilityForAuthenticated: [
+ "PATCH /user/email/visibility",
+ {},
+ { renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] }
+ ],
+ setPrimaryEmailVisibilityForAuthenticatedUser: [
+ "PATCH /user/email/visibility"
+ ],
+ unblock: ["DELETE /user/blocks/{username}"],
+ unfollow: ["DELETE /user/following/{username}"],
+ updateAuthenticated: ["PATCH /user"]
+ }
+};
+var endpoints_default = Endpoints;
+
+// pkg/dist-src/endpoints-to-methods.js
+var endpointMethodsMap = /* @__PURE__ */ new Map();
+for (const [scope, endpoints] of Object.entries(endpoints_default)) {
+ for (const [methodName, endpoint] of Object.entries(endpoints)) {
+ const [route, defaults, decorations] = endpoint;
+ const [method, url] = route.split(/ /);
+ const endpointDefaults = Object.assign(
+ {
+ method,
+ url
+ },
+ defaults
+ );
+ if (!endpointMethodsMap.has(scope)) {
+ endpointMethodsMap.set(scope, /* @__PURE__ */ new Map());
+ }
+ endpointMethodsMap.get(scope).set(methodName, {
+ scope,
+ methodName,
+ endpointDefaults,
+ decorations
+ });
+ }
+}
+var handler = {
+ has({ scope }, methodName) {
+ return endpointMethodsMap.get(scope).has(methodName);
+ },
+ getOwnPropertyDescriptor(target, methodName) {
+ return {
+ value: this.get(target, methodName),
+ // ensures method is in the cache
+ configurable: true,
+ writable: true,
+ enumerable: true
+ };
+ },
+ defineProperty(target, methodName, descriptor) {
+ Object.defineProperty(target.cache, methodName, descriptor);
+ return true;
+ },
+ deleteProperty(target, methodName) {
+ delete target.cache[methodName];
+ return true;
+ },
+ ownKeys({ scope }) {
+ return [...endpointMethodsMap.get(scope).keys()];
+ },
+ set(target, methodName, value) {
+ return target.cache[methodName] = value;
+ },
+ get({ octokit, scope, cache }, methodName) {
+ if (cache[methodName]) {
+ return cache[methodName];
+ }
+ const method = endpointMethodsMap.get(scope).get(methodName);
+ if (!method) {
+ return void 0;
+ }
+ const { endpointDefaults, decorations } = method;
+ if (decorations) {
+ cache[methodName] = decorate(
+ octokit,
+ scope,
+ methodName,
+ endpointDefaults,
+ decorations
+ );
+ } else {
+ cache[methodName] = octokit.request.defaults(endpointDefaults);
+ }
+ return cache[methodName];
+ }
+};
+function endpointsToMethods(octokit) {
+ const newMethods = {};
+ for (const scope of endpointMethodsMap.keys()) {
+ newMethods[scope] = new Proxy({ octokit, scope, cache: {} }, handler);
+ }
+ return newMethods;
+}
+function decorate(octokit, scope, methodName, defaults, decorations) {
+ const requestWithDefaults = octokit.request.defaults(defaults);
+ function withDecorations(...args) {
+ let options = requestWithDefaults.endpoint.merge(...args);
+ if (decorations.mapToData) {
+ options = Object.assign({}, options, {
+ data: options[decorations.mapToData],
+ [decorations.mapToData]: void 0
+ });
+ return requestWithDefaults(options);
+ }
+ if (decorations.renamed) {
+ const [newScope, newMethodName] = decorations.renamed;
+ octokit.log.warn(
+ `octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`
+ );
+ }
+ if (decorations.deprecated) {
+ octokit.log.warn(decorations.deprecated);
+ }
+ if (decorations.renamedParameters) {
+ const options2 = requestWithDefaults.endpoint.merge(...args);
+ for (const [name, alias] of Object.entries(
+ decorations.renamedParameters
+ )) {
+ if (name in options2) {
+ octokit.log.warn(
+ `"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`
+ );
+ if (!(alias in options2)) {
+ options2[alias] = options2[name];
+ }
+ delete options2[name];
+ }
+ }
+ return requestWithDefaults(options2);
+ }
+ return requestWithDefaults(...args);
+ }
+ return Object.assign(withDecorations, requestWithDefaults);
+}
+
+// pkg/dist-src/index.js
+function restEndpointMethods(octokit) {
+ const api = endpointsToMethods(octokit);
+ return {
+ rest: api
+ };
+}
+restEndpointMethods.VERSION = VERSION;
+function legacyRestEndpointMethods(octokit) {
+ const api = endpointsToMethods(octokit);
+ return {
+ ...api,
+ rest: api
+ };
+}
+legacyRestEndpointMethods.VERSION = VERSION;
+// Annotate the CommonJS export names for ESM import in node:
+0 && (0);
+
+
+/***/ }),
+
+/***/ 537:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+var __create = Object.create;
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+};
+var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+ // If the importer is in node compatibility mode or this is not an ESM
+ // file that has been converted to a CommonJS file using a Babel-
+ // compatible transform (i.e. "__esModule" has not been set), then set
+ // "default" to the CommonJS "module.exports" for node compatibility.
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+ mod
+));
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+// pkg/dist-src/index.js
+var dist_src_exports = {};
+__export(dist_src_exports, {
+ RequestError: () => RequestError
+});
+module.exports = __toCommonJS(dist_src_exports);
+var import_deprecation = __nccwpck_require__(8932);
+var import_once = __toESM(__nccwpck_require__(1223));
+var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation));
+var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation));
+var RequestError = class extends Error {
+ constructor(message, statusCode, options) {
+ super(message);
+ if (Error.captureStackTrace) {
+ Error.captureStackTrace(this, this.constructor);
+ }
+ this.name = "HttpError";
+ this.status = statusCode;
+ let headers;
+ if ("headers" in options && typeof options.headers !== "undefined") {
+ headers = options.headers;
+ }
+ if ("response" in options) {
+ this.response = options.response;
+ headers = options.response.headers;
+ }
+ const requestCopy = Object.assign({}, options.request);
+ if (options.request.headers.authorization) {
+ requestCopy.headers = Object.assign({}, options.request.headers, {
+ authorization: options.request.headers.authorization.replace(
+ / .*$/,
+ " [REDACTED]"
+ )
+ });
+ }
+ requestCopy.url = requestCopy.url.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]").replace(/\baccess_token=\w+/g, "access_token=[REDACTED]");
+ this.request = requestCopy;
+ Object.defineProperty(this, "code", {
+ get() {
+ logOnceCode(
+ new import_deprecation.Deprecation(
+ "[@octokit/request-error] `error.code` is deprecated, use `error.status`."
+ )
+ );
+ return statusCode;
+ }
+ });
+ Object.defineProperty(this, "headers", {
+ get() {
+ logOnceHeaders(
+ new import_deprecation.Deprecation(
+ "[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`."
+ )
+ );
+ return headers || {};
+ }
+ });
+ }
+};
+// Annotate the CommonJS export names for ESM import in node:
+0 && (0);
+
+
+/***/ }),
+
+/***/ 6234:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+// pkg/dist-src/index.js
+var dist_src_exports = {};
+__export(dist_src_exports, {
+ request: () => request
+});
+module.exports = __toCommonJS(dist_src_exports);
+var import_endpoint = __nccwpck_require__(9440);
+var import_universal_user_agent = __nccwpck_require__(5030);
+
+// pkg/dist-src/version.js
+var VERSION = "8.1.4";
+
+// pkg/dist-src/fetch-wrapper.js
+var import_is_plain_object = __nccwpck_require__(3287);
+var import_request_error = __nccwpck_require__(537);
+
+// pkg/dist-src/get-buffer-response.js
+function getBufferResponse(response) {
+ return response.arrayBuffer();
+}
+
+// pkg/dist-src/fetch-wrapper.js
+function fetchWrapper(requestOptions) {
+ var _a, _b, _c;
+ const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console;
+ const parseSuccessResponseBody = ((_a = requestOptions.request) == null ? void 0 : _a.parseSuccessResponseBody) !== false;
+ if ((0, import_is_plain_object.isPlainObject)(requestOptions.body) || Array.isArray(requestOptions.body)) {
+ requestOptions.body = JSON.stringify(requestOptions.body);
+ }
+ let headers = {};
+ let status;
+ let url;
+ let { fetch } = globalThis;
+ if ((_b = requestOptions.request) == null ? void 0 : _b.fetch) {
+ fetch = requestOptions.request.fetch;
+ }
+ if (!fetch) {
+ throw new Error(
+ "fetch is not set. Please pass a fetch implementation as new Octokit({ request: { fetch }}). Learn more at https://github.com/octokit/octokit.js/#fetch-missing"
+ );
+ }
+ return fetch(requestOptions.url, {
+ method: requestOptions.method,
+ body: requestOptions.body,
+ headers: requestOptions.headers,
+ signal: (_c = requestOptions.request) == null ? void 0 : _c.signal,
+ // duplex must be set if request.body is ReadableStream or Async Iterables.
+ // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex.
+ ...requestOptions.body && { duplex: "half" }
+ }).then(async (response) => {
+ url = response.url;
+ status = response.status;
+ for (const keyAndValue of response.headers) {
+ headers[keyAndValue[0]] = keyAndValue[1];
+ }
+ if ("deprecation" in headers) {
+ const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/);
+ const deprecationLink = matches && matches.pop();
+ log.warn(
+ `[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}`
+ );
+ }
+ if (status === 204 || status === 205) {
+ return;
+ }
+ if (requestOptions.method === "HEAD") {
+ if (status < 400) {
+ return;
+ }
+ throw new import_request_error.RequestError(response.statusText, status, {
+ response: {
+ url,
+ status,
+ headers,
+ data: void 0
+ },
+ request: requestOptions
+ });
+ }
+ if (status === 304) {
+ throw new import_request_error.RequestError("Not modified", status, {
+ response: {
+ url,
+ status,
+ headers,
+ data: await getResponseData(response)
+ },
+ request: requestOptions
+ });
+ }
+ if (status >= 400) {
+ const data = await getResponseData(response);
+ const error = new import_request_error.RequestError(toErrorMessage(data), status, {
+ response: {
+ url,
+ status,
+ headers,
+ data
+ },
+ request: requestOptions
+ });
+ throw error;
+ }
+ return parseSuccessResponseBody ? await getResponseData(response) : response.body;
+ }).then((data) => {
+ return {
+ status,
+ url,
+ headers,
+ data
+ };
+ }).catch((error) => {
+ if (error instanceof import_request_error.RequestError)
+ throw error;
+ else if (error.name === "AbortError")
+ throw error;
+ let message = error.message;
+ if (error.name === "TypeError" && "cause" in error) {
+ if (error.cause instanceof Error) {
+ message = error.cause.message;
+ } else if (typeof error.cause === "string") {
+ message = error.cause;
+ }
+ }
+ throw new import_request_error.RequestError(message, 500, {
+ request: requestOptions
+ });
+ });
+}
+async function getResponseData(response) {
+ const contentType = response.headers.get("content-type");
+ if (/application\/json/.test(contentType)) {
+ return response.json();
+ }
+ if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) {
+ return response.text();
+ }
+ return getBufferResponse(response);
+}
+function toErrorMessage(data) {
+ if (typeof data === "string")
+ return data;
+ if ("message" in data) {
+ if (Array.isArray(data.errors)) {
+ return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}`;
+ }
+ return data.message;
+ }
+ return `Unknown error: ${JSON.stringify(data)}`;
+}
+
+// pkg/dist-src/with-defaults.js
+function withDefaults(oldEndpoint, newDefaults) {
+ const endpoint2 = oldEndpoint.defaults(newDefaults);
+ const newApi = function(route, parameters) {
+ const endpointOptions = endpoint2.merge(route, parameters);
+ if (!endpointOptions.request || !endpointOptions.request.hook) {
+ return fetchWrapper(endpoint2.parse(endpointOptions));
+ }
+ const request2 = (route2, parameters2) => {
+ return fetchWrapper(
+ endpoint2.parse(endpoint2.merge(route2, parameters2))
+ );
+ };
+ Object.assign(request2, {
+ endpoint: endpoint2,
+ defaults: withDefaults.bind(null, endpoint2)
+ });
+ return endpointOptions.request.hook(request2, endpointOptions);
+ };
+ return Object.assign(newApi, {
+ endpoint: endpoint2,
+ defaults: withDefaults.bind(null, endpoint2)
+ });
+}
+
+// pkg/dist-src/index.js
+var request = withDefaults(import_endpoint.endpoint, {
+ headers: {
+ "user-agent": `octokit-request.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`
+ }
+});
+// Annotate the CommonJS export names for ESM import in node:
+0 && (0);
+
+
+/***/ }),
+
+/***/ 9417:
+/***/ ((module) => {
+
+"use strict";
+
+module.exports = balanced;
+function balanced(a, b, str) {
+ if (a instanceof RegExp) a = maybeMatch(a, str);
+ if (b instanceof RegExp) b = maybeMatch(b, str);
+
+ var r = range(a, b, str);
+
+ return r && {
+ start: r[0],
+ end: r[1],
+ pre: str.slice(0, r[0]),
+ body: str.slice(r[0] + a.length, r[1]),
+ post: str.slice(r[1] + b.length)
+ };
+}
+
+function maybeMatch(reg, str) {
+ var m = str.match(reg);
+ return m ? m[0] : null;
+}
+
+balanced.range = range;
+function range(a, b, str) {
+ var begs, beg, left, right, result;
+ var ai = str.indexOf(a);
+ var bi = str.indexOf(b, ai + 1);
+ var i = ai;
+
+ if (ai >= 0 && bi > 0) {
+ if(a===b) {
+ return [ai, bi];
+ }
+ begs = [];
+ left = str.length;
+
+ while (i >= 0 && !result) {
+ if (i == ai) {
+ begs.push(i);
+ ai = str.indexOf(a, i + 1);
+ } else if (begs.length == 1) {
+ result = [ begs.pop(), bi ];
+ } else {
+ beg = begs.pop();
+ if (beg < left) {
+ left = beg;
+ right = bi;
+ }
+
+ bi = str.indexOf(b, i + 1);
+ }
+
+ i = ai < bi && ai >= 0 ? ai : bi;
+ }
+
+ if (begs.length) {
+ result = [ left, right ];
+ }
+ }
+
+ return result;
+}
+
+
+/***/ }),
+
+/***/ 3682:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var register = __nccwpck_require__(4670);
+var addHook = __nccwpck_require__(5549);
+var removeHook = __nccwpck_require__(6819);
+
+// bind with array of arguments: https://stackoverflow.com/a/21792913
+var bind = Function.bind;
+var bindable = bind.bind(bind);
+
+function bindApi(hook, state, name) {
+ var removeHookRef = bindable(removeHook, null).apply(
+ null,
+ name ? [state, name] : [state]
+ );
+ hook.api = { remove: removeHookRef };
+ hook.remove = removeHookRef;
+ ["before", "error", "after", "wrap"].forEach(function (kind) {
+ var args = name ? [state, kind, name] : [state, kind];
+ hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args);
+ });
+}
+
+function HookSingular() {
+ var singularHookName = "h";
+ var singularHookState = {
+ registry: {},
+ };
+ var singularHook = register.bind(null, singularHookState, singularHookName);
+ bindApi(singularHook, singularHookState, singularHookName);
+ return singularHook;
+}
+
+function HookCollection() {
+ var state = {
+ registry: {},
+ };
+
+ var hook = register.bind(null, state);
+ bindApi(hook, state);
+
+ return hook;
+}
+
+var collectionHookDeprecationMessageDisplayed = false;
+function Hook() {
+ if (!collectionHookDeprecationMessageDisplayed) {
+ console.warn(
+ '[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4'
+ );
+ collectionHookDeprecationMessageDisplayed = true;
+ }
+ return HookCollection();
+}
+
+Hook.Singular = HookSingular.bind();
+Hook.Collection = HookCollection.bind();
+
+module.exports = Hook;
+// expose constructors as a named property for TypeScript
+module.exports.Hook = Hook;
+module.exports.Singular = Hook.Singular;
+module.exports.Collection = Hook.Collection;
+
+
+/***/ }),
+
+/***/ 5549:
+/***/ ((module) => {
+
+module.exports = addHook;
+
+function addHook(state, kind, name, hook) {
+ var orig = hook;
+ if (!state.registry[name]) {
+ state.registry[name] = [];
+ }
+
+ if (kind === "before") {
+ hook = function (method, options) {
+ return Promise.resolve()
+ .then(orig.bind(null, options))
+ .then(method.bind(null, options));
+ };
+ }
+
+ if (kind === "after") {
+ hook = function (method, options) {
+ var result;
+ return Promise.resolve()
+ .then(method.bind(null, options))
+ .then(function (result_) {
+ result = result_;
+ return orig(result, options);
+ })
+ .then(function () {
+ return result;
+ });
+ };
+ }
if (kind === "error") {
hook = function (method, options) {
@@ -6576,5607 +9283,24691 @@ function addHook(state, kind, name, hook) {
};
}
- state.registry[name].push({
- hook: hook,
- orig: orig,
- });
+ state.registry[name].push({
+ hook: hook,
+ orig: orig,
+ });
+}
+
+
+/***/ }),
+
+/***/ 4670:
+/***/ ((module) => {
+
+module.exports = register;
+
+function register(state, name, method, options) {
+ if (typeof method !== "function") {
+ throw new Error("method for before hook must be a function");
+ }
+
+ if (!options) {
+ options = {};
+ }
+
+ if (Array.isArray(name)) {
+ return name.reverse().reduce(function (callback, name) {
+ return register.bind(null, state, name, callback, options);
+ }, method)();
+ }
+
+ return Promise.resolve().then(function () {
+ if (!state.registry[name]) {
+ return method(options);
+ }
+
+ return state.registry[name].reduce(function (method, registered) {
+ return registered.hook.bind(null, method, options);
+ }, method)();
+ });
+}
+
+
+/***/ }),
+
+/***/ 6819:
+/***/ ((module) => {
+
+module.exports = removeHook;
+
+function removeHook(state, name, method) {
+ if (!state.registry[name]) {
+ return;
+ }
+
+ var index = state.registry[name]
+ .map(function (registered) {
+ return registered.orig;
+ })
+ .indexOf(method);
+
+ if (index === -1) {
+ return;
+ }
+
+ state.registry[name].splice(index, 1);
+}
+
+
+/***/ }),
+
+/***/ 3717:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var concatMap = __nccwpck_require__(6891);
+var balanced = __nccwpck_require__(9417);
+
+module.exports = expandTop;
+
+var escSlash = '\0SLASH'+Math.random()+'\0';
+var escOpen = '\0OPEN'+Math.random()+'\0';
+var escClose = '\0CLOSE'+Math.random()+'\0';
+var escComma = '\0COMMA'+Math.random()+'\0';
+var escPeriod = '\0PERIOD'+Math.random()+'\0';
+
+function numeric(str) {
+ return parseInt(str, 10) == str
+ ? parseInt(str, 10)
+ : str.charCodeAt(0);
+}
+
+function escapeBraces(str) {
+ return str.split('\\\\').join(escSlash)
+ .split('\\{').join(escOpen)
+ .split('\\}').join(escClose)
+ .split('\\,').join(escComma)
+ .split('\\.').join(escPeriod);
+}
+
+function unescapeBraces(str) {
+ return str.split(escSlash).join('\\')
+ .split(escOpen).join('{')
+ .split(escClose).join('}')
+ .split(escComma).join(',')
+ .split(escPeriod).join('.');
+}
+
+
+// Basically just str.split(","), but handling cases
+// where we have nested braced sections, which should be
+// treated as individual members, like {a,{b,c},d}
+function parseCommaParts(str) {
+ if (!str)
+ return [''];
+
+ var parts = [];
+ var m = balanced('{', '}', str);
+
+ if (!m)
+ return str.split(',');
+
+ var pre = m.pre;
+ var body = m.body;
+ var post = m.post;
+ var p = pre.split(',');
+
+ p[p.length-1] += '{' + body + '}';
+ var postParts = parseCommaParts(post);
+ if (post.length) {
+ p[p.length-1] += postParts.shift();
+ p.push.apply(p, postParts);
+ }
+
+ parts.push.apply(parts, p);
+
+ return parts;
+}
+
+function expandTop(str) {
+ if (!str)
+ return [];
+
+ // I don't know why Bash 4.3 does this, but it does.
+ // Anything starting with {} will have the first two bytes preserved
+ // but *only* at the top level, so {},a}b will not expand to anything,
+ // but a{},b}c will be expanded to [a}c,abc].
+ // One could argue that this is a bug in Bash, but since the goal of
+ // this module is to match Bash's rules, we escape a leading {}
+ if (str.substr(0, 2) === '{}') {
+ str = '\\{\\}' + str.substr(2);
+ }
+
+ return expand(escapeBraces(str), true).map(unescapeBraces);
+}
+
+function identity(e) {
+ return e;
+}
+
+function embrace(str) {
+ return '{' + str + '}';
+}
+function isPadded(el) {
+ return /^-?0\d/.test(el);
+}
+
+function lte(i, y) {
+ return i <= y;
+}
+function gte(i, y) {
+ return i >= y;
+}
+
+function expand(str, isTop) {
+ var expansions = [];
+
+ var m = balanced('{', '}', str);
+ if (!m || /\$$/.test(m.pre)) return [str];
+
+ var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
+ var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
+ var isSequence = isNumericSequence || isAlphaSequence;
+ var isOptions = m.body.indexOf(',') >= 0;
+ if (!isSequence && !isOptions) {
+ // {a},b}
+ if (m.post.match(/,.*\}/)) {
+ str = m.pre + '{' + m.body + escClose + m.post;
+ return expand(str);
+ }
+ return [str];
+ }
+
+ var n;
+ if (isSequence) {
+ n = m.body.split(/\.\./);
+ } else {
+ n = parseCommaParts(m.body);
+ if (n.length === 1) {
+ // x{{a,b}}y ==> x{a}y x{b}y
+ n = expand(n[0], false).map(embrace);
+ if (n.length === 1) {
+ var post = m.post.length
+ ? expand(m.post, false)
+ : [''];
+ return post.map(function(p) {
+ return m.pre + n[0] + p;
+ });
+ }
+ }
+ }
+
+ // at this point, n is the parts, and we know it's not a comma set
+ // with a single entry.
+
+ // no need to expand pre, since it is guaranteed to be free of brace-sets
+ var pre = m.pre;
+ var post = m.post.length
+ ? expand(m.post, false)
+ : [''];
+
+ var N;
+
+ if (isSequence) {
+ var x = numeric(n[0]);
+ var y = numeric(n[1]);
+ var width = Math.max(n[0].length, n[1].length)
+ var incr = n.length == 3
+ ? Math.abs(numeric(n[2]))
+ : 1;
+ var test = lte;
+ var reverse = y < x;
+ if (reverse) {
+ incr *= -1;
+ test = gte;
+ }
+ var pad = n.some(isPadded);
+
+ N = [];
+
+ for (var i = x; test(i, y); i += incr) {
+ var c;
+ if (isAlphaSequence) {
+ c = String.fromCharCode(i);
+ if (c === '\\')
+ c = '';
+ } else {
+ c = String(i);
+ if (pad) {
+ var need = width - c.length;
+ if (need > 0) {
+ var z = new Array(need + 1).join('0');
+ if (i < 0)
+ c = '-' + z + c.slice(1);
+ else
+ c = z + c;
+ }
+ }
+ }
+ N.push(c);
+ }
+ } else {
+ N = concatMap(n, function(el) { return expand(el, false) });
+ }
+
+ for (var j = 0; j < N.length; j++) {
+ for (var k = 0; k < post.length; k++) {
+ var expansion = pre + N[j] + post[k];
+ if (!isTop || isSequence || expansion)
+ expansions.push(expansion);
+ }
+ }
+
+ return expansions;
+}
+
+
+
+/***/ }),
+
+/***/ 6891:
+/***/ ((module) => {
+
+module.exports = function (xs, fn) {
+ var res = [];
+ for (var i = 0; i < xs.length; i++) {
+ var x = fn(xs[i], i);
+ if (isArray(x)) res.push.apply(res, x);
+ else res.push(x);
+ }
+ return res;
+};
+
+var isArray = Array.isArray || function (xs) {
+ return Object.prototype.toString.call(xs) === '[object Array]';
+};
+
+
+/***/ }),
+
+/***/ 8932:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+
+class Deprecation extends Error {
+ constructor(message) {
+ super(message); // Maintains proper stack trace (only available on V8)
+
+ /* istanbul ignore next */
+
+ if (Error.captureStackTrace) {
+ Error.captureStackTrace(this, this.constructor);
+ }
+
+ this.name = 'Deprecation';
+ }
+
+}
+
+exports.Deprecation = Deprecation;
+
+
+/***/ }),
+
+/***/ 3287:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+
+/*!
+ * is-plain-object
+ *
+ * Copyright (c) 2014-2017, Jon Schlinkert.
+ * Released under the MIT License.
+ */
+
+function isObject(o) {
+ return Object.prototype.toString.call(o) === '[object Object]';
+}
+
+function isPlainObject(o) {
+ var ctor,prot;
+
+ if (isObject(o) === false) return false;
+
+ // If has modified constructor
+ ctor = o.constructor;
+ if (ctor === undefined) return true;
+
+ // If has modified prototype
+ prot = ctor.prototype;
+ if (isObject(prot) === false) return false;
+
+ // If constructor does not have an Object-specific method
+ if (prot.hasOwnProperty('isPrototypeOf') === false) {
+ return false;
+ }
+
+ // Most likely a plain Object
+ return true;
+}
+
+exports.isPlainObject = isPlainObject;
+
+
+/***/ }),
+
+/***/ 3973:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+module.exports = minimatch
+minimatch.Minimatch = Minimatch
+
+var path = (function () { try { return __nccwpck_require__(1017) } catch (e) {}}()) || {
+ sep: '/'
+}
+minimatch.sep = path.sep
+
+var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}
+var expand = __nccwpck_require__(3717)
+
+var plTypes = {
+ '!': { open: '(?:(?!(?:', close: '))[^/]*?)'},
+ '?': { open: '(?:', close: ')?' },
+ '+': { open: '(?:', close: ')+' },
+ '*': { open: '(?:', close: ')*' },
+ '@': { open: '(?:', close: ')' }
+}
+
+// any single thing other than /
+// don't need to escape / when using new RegExp()
+var qmark = '[^/]'
+
+// * => any number of characters
+var star = qmark + '*?'
+
+// ** when dots are allowed. Anything goes, except .. and .
+// not (^ or / followed by one or two dots followed by $ or /),
+// followed by anything, any number of times.
+var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?'
+
+// not a ^ or / followed by a dot,
+// followed by anything, any number of times.
+var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?'
+
+// characters that need to be escaped in RegExp.
+var reSpecials = charSet('().*{}+?[]^$\\!')
+
+// "abc" -> { a:true, b:true, c:true }
+function charSet (s) {
+ return s.split('').reduce(function (set, c) {
+ set[c] = true
+ return set
+ }, {})
+}
+
+// normalizes slashes.
+var slashSplit = /\/+/
+
+minimatch.filter = filter
+function filter (pattern, options) {
+ options = options || {}
+ return function (p, i, list) {
+ return minimatch(p, pattern, options)
+ }
+}
+
+function ext (a, b) {
+ b = b || {}
+ var t = {}
+ Object.keys(a).forEach(function (k) {
+ t[k] = a[k]
+ })
+ Object.keys(b).forEach(function (k) {
+ t[k] = b[k]
+ })
+ return t
+}
+
+minimatch.defaults = function (def) {
+ if (!def || typeof def !== 'object' || !Object.keys(def).length) {
+ return minimatch
+ }
+
+ var orig = minimatch
+
+ var m = function minimatch (p, pattern, options) {
+ return orig(p, pattern, ext(def, options))
+ }
+
+ m.Minimatch = function Minimatch (pattern, options) {
+ return new orig.Minimatch(pattern, ext(def, options))
+ }
+ m.Minimatch.defaults = function defaults (options) {
+ return orig.defaults(ext(def, options)).Minimatch
+ }
+
+ m.filter = function filter (pattern, options) {
+ return orig.filter(pattern, ext(def, options))
+ }
+
+ m.defaults = function defaults (options) {
+ return orig.defaults(ext(def, options))
+ }
+
+ m.makeRe = function makeRe (pattern, options) {
+ return orig.makeRe(pattern, ext(def, options))
+ }
+
+ m.braceExpand = function braceExpand (pattern, options) {
+ return orig.braceExpand(pattern, ext(def, options))
+ }
+
+ m.match = function (list, pattern, options) {
+ return orig.match(list, pattern, ext(def, options))
+ }
+
+ return m
+}
+
+Minimatch.defaults = function (def) {
+ return minimatch.defaults(def).Minimatch
+}
+
+function minimatch (p, pattern, options) {
+ assertValidPattern(pattern)
+
+ if (!options) options = {}
+
+ // shortcut: comments match nothing.
+ if (!options.nocomment && pattern.charAt(0) === '#') {
+ return false
+ }
+
+ return new Minimatch(pattern, options).match(p)
+}
+
+function Minimatch (pattern, options) {
+ if (!(this instanceof Minimatch)) {
+ return new Minimatch(pattern, options)
+ }
+
+ assertValidPattern(pattern)
+
+ if (!options) options = {}
+
+ pattern = pattern.trim()
+
+ // windows support: need to use /, not \
+ if (!options.allowWindowsEscape && path.sep !== '/') {
+ pattern = pattern.split(path.sep).join('/')
+ }
+
+ this.options = options
+ this.set = []
+ this.pattern = pattern
+ this.regexp = null
+ this.negate = false
+ this.comment = false
+ this.empty = false
+ this.partial = !!options.partial
+
+ // make the set of regexps etc.
+ this.make()
+}
+
+Minimatch.prototype.debug = function () {}
+
+Minimatch.prototype.make = make
+function make () {
+ var pattern = this.pattern
+ var options = this.options
+
+ // empty patterns and comments match nothing.
+ if (!options.nocomment && pattern.charAt(0) === '#') {
+ this.comment = true
+ return
+ }
+ if (!pattern) {
+ this.empty = true
+ return
+ }
+
+ // step 1: figure out negation, etc.
+ this.parseNegate()
+
+ // step 2: expand braces
+ var set = this.globSet = this.braceExpand()
+
+ if (options.debug) this.debug = function debug() { console.error.apply(console, arguments) }
+
+ this.debug(this.pattern, set)
+
+ // step 3: now we have a set, so turn each one into a series of path-portion
+ // matching patterns.
+ // These will be regexps, except in the case of "**", which is
+ // set to the GLOBSTAR object for globstar behavior,
+ // and will not contain any / characters
+ set = this.globParts = set.map(function (s) {
+ return s.split(slashSplit)
+ })
+
+ this.debug(this.pattern, set)
+
+ // glob --> regexps
+ set = set.map(function (s, si, set) {
+ return s.map(this.parse, this)
+ }, this)
+
+ this.debug(this.pattern, set)
+
+ // filter out everything that didn't compile properly.
+ set = set.filter(function (s) {
+ return s.indexOf(false) === -1
+ })
+
+ this.debug(this.pattern, set)
+
+ this.set = set
+}
+
+Minimatch.prototype.parseNegate = parseNegate
+function parseNegate () {
+ var pattern = this.pattern
+ var negate = false
+ var options = this.options
+ var negateOffset = 0
+
+ if (options.nonegate) return
+
+ for (var i = 0, l = pattern.length
+ ; i < l && pattern.charAt(i) === '!'
+ ; i++) {
+ negate = !negate
+ negateOffset++
+ }
+
+ if (negateOffset) this.pattern = pattern.substr(negateOffset)
+ this.negate = negate
+}
+
+// Brace expansion:
+// a{b,c}d -> abd acd
+// a{b,}c -> abc ac
+// a{0..3}d -> a0d a1d a2d a3d
+// a{b,c{d,e}f}g -> abg acdfg acefg
+// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg
+//
+// Invalid sets are not expanded.
+// a{2..}b -> a{2..}b
+// a{b}c -> a{b}c
+minimatch.braceExpand = function (pattern, options) {
+ return braceExpand(pattern, options)
+}
+
+Minimatch.prototype.braceExpand = braceExpand
+
+function braceExpand (pattern, options) {
+ if (!options) {
+ if (this instanceof Minimatch) {
+ options = this.options
+ } else {
+ options = {}
+ }
+ }
+
+ pattern = typeof pattern === 'undefined'
+ ? this.pattern : pattern
+
+ assertValidPattern(pattern)
+
+ // Thanks to Yeting Li for
+ // improving this regexp to avoid a ReDOS vulnerability.
+ if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
+ // shortcut. no need to expand.
+ return [pattern]
+ }
+
+ return expand(pattern)
+}
+
+var MAX_PATTERN_LENGTH = 1024 * 64
+var assertValidPattern = function (pattern) {
+ if (typeof pattern !== 'string') {
+ throw new TypeError('invalid pattern')
+ }
+
+ if (pattern.length > MAX_PATTERN_LENGTH) {
+ throw new TypeError('pattern is too long')
+ }
+}
+
+// parse a component of the expanded set.
+// At this point, no pattern may contain "/" in it
+// so we're going to return a 2d array, where each entry is the full
+// pattern, split on '/', and then turned into a regular expression.
+// A regexp is made at the end which joins each array with an
+// escaped /, and another full one which joins each regexp with |.
+//
+// Following the lead of Bash 4.1, note that "**" only has special meaning
+// when it is the *only* thing in a path portion. Otherwise, any series
+// of * is equivalent to a single *. Globstar behavior is enabled by
+// default, and can be disabled by setting options.noglobstar.
+Minimatch.prototype.parse = parse
+var SUBPARSE = {}
+function parse (pattern, isSub) {
+ assertValidPattern(pattern)
+
+ var options = this.options
+
+ // shortcuts
+ if (pattern === '**') {
+ if (!options.noglobstar)
+ return GLOBSTAR
+ else
+ pattern = '*'
+ }
+ if (pattern === '') return ''
+
+ var re = ''
+ var hasMagic = !!options.nocase
+ var escaping = false
+ // ? => one single character
+ var patternListStack = []
+ var negativeLists = []
+ var stateChar
+ var inClass = false
+ var reClassStart = -1
+ var classStart = -1
+ // . and .. never match anything that doesn't start with .,
+ // even when options.dot is set.
+ var patternStart = pattern.charAt(0) === '.' ? '' // anything
+ // not (start or / followed by . or .. followed by / or end)
+ : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))'
+ : '(?!\\.)'
+ var self = this
+
+ function clearStateChar () {
+ if (stateChar) {
+ // we had some state-tracking character
+ // that wasn't consumed by this pass.
+ switch (stateChar) {
+ case '*':
+ re += star
+ hasMagic = true
+ break
+ case '?':
+ re += qmark
+ hasMagic = true
+ break
+ default:
+ re += '\\' + stateChar
+ break
+ }
+ self.debug('clearStateChar %j %j', stateChar, re)
+ stateChar = false
+ }
+ }
+
+ for (var i = 0, len = pattern.length, c
+ ; (i < len) && (c = pattern.charAt(i))
+ ; i++) {
+ this.debug('%s\t%s %s %j', pattern, i, re, c)
+
+ // skip over any that are escaped.
+ if (escaping && reSpecials[c]) {
+ re += '\\' + c
+ escaping = false
+ continue
+ }
+
+ switch (c) {
+ /* istanbul ignore next */
+ case '/': {
+ // completely not allowed, even escaped.
+ // Should already be path-split by now.
+ return false
+ }
+
+ case '\\':
+ clearStateChar()
+ escaping = true
+ continue
+
+ // the various stateChar values
+ // for the "extglob" stuff.
+ case '?':
+ case '*':
+ case '+':
+ case '@':
+ case '!':
+ this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c)
+
+ // all of those are literals inside a class, except that
+ // the glob [!a] means [^a] in regexp
+ if (inClass) {
+ this.debug(' in class')
+ if (c === '!' && i === classStart + 1) c = '^'
+ re += c
+ continue
+ }
+
+ // if we already have a stateChar, then it means
+ // that there was something like ** or +? in there.
+ // Handle the stateChar, then proceed with this one.
+ self.debug('call clearStateChar %j', stateChar)
+ clearStateChar()
+ stateChar = c
+ // if extglob is disabled, then +(asdf|foo) isn't a thing.
+ // just clear the statechar *now*, rather than even diving into
+ // the patternList stuff.
+ if (options.noext) clearStateChar()
+ continue
+
+ case '(':
+ if (inClass) {
+ re += '('
+ continue
+ }
+
+ if (!stateChar) {
+ re += '\\('
+ continue
+ }
+
+ patternListStack.push({
+ type: stateChar,
+ start: i - 1,
+ reStart: re.length,
+ open: plTypes[stateChar].open,
+ close: plTypes[stateChar].close
+ })
+ // negation is (?:(?!js)[^/]*)
+ re += stateChar === '!' ? '(?:(?!(?:' : '(?:'
+ this.debug('plType %j %j', stateChar, re)
+ stateChar = false
+ continue
+
+ case ')':
+ if (inClass || !patternListStack.length) {
+ re += '\\)'
+ continue
+ }
+
+ clearStateChar()
+ hasMagic = true
+ var pl = patternListStack.pop()
+ // negation is (?:(?!js)[^/]*)
+ // The others are (?:)
+ re += pl.close
+ if (pl.type === '!') {
+ negativeLists.push(pl)
+ }
+ pl.reEnd = re.length
+ continue
+
+ case '|':
+ if (inClass || !patternListStack.length || escaping) {
+ re += '\\|'
+ escaping = false
+ continue
+ }
+
+ clearStateChar()
+ re += '|'
+ continue
+
+ // these are mostly the same in regexp and glob
+ case '[':
+ // swallow any state-tracking char before the [
+ clearStateChar()
+
+ if (inClass) {
+ re += '\\' + c
+ continue
+ }
+
+ inClass = true
+ classStart = i
+ reClassStart = re.length
+ re += c
+ continue
+
+ case ']':
+ // a right bracket shall lose its special
+ // meaning and represent itself in
+ // a bracket expression if it occurs
+ // first in the list. -- POSIX.2 2.8.3.2
+ if (i === classStart + 1 || !inClass) {
+ re += '\\' + c
+ escaping = false
+ continue
+ }
+
+ // handle the case where we left a class open.
+ // "[z-a]" is valid, equivalent to "\[z-a\]"
+ // split where the last [ was, make sure we don't have
+ // an invalid re. if so, re-walk the contents of the
+ // would-be class to re-translate any characters that
+ // were passed through as-is
+ // TODO: It would probably be faster to determine this
+ // without a try/catch and a new RegExp, but it's tricky
+ // to do safely. For now, this is safe and works.
+ var cs = pattern.substring(classStart + 1, i)
+ try {
+ RegExp('[' + cs + ']')
+ } catch (er) {
+ // not a valid class!
+ var sp = this.parse(cs, SUBPARSE)
+ re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]'
+ hasMagic = hasMagic || sp[1]
+ inClass = false
+ continue
+ }
+
+ // finish up the class.
+ hasMagic = true
+ inClass = false
+ re += c
+ continue
+
+ default:
+ // swallow any state char that wasn't consumed
+ clearStateChar()
+
+ if (escaping) {
+ // no need
+ escaping = false
+ } else if (reSpecials[c]
+ && !(c === '^' && inClass)) {
+ re += '\\'
+ }
+
+ re += c
+
+ } // switch
+ } // for
+
+ // handle the case where we left a class open.
+ // "[abc" is valid, equivalent to "\[abc"
+ if (inClass) {
+ // split where the last [ was, and escape it
+ // this is a huge pita. We now have to re-walk
+ // the contents of the would-be class to re-translate
+ // any characters that were passed through as-is
+ cs = pattern.substr(classStart + 1)
+ sp = this.parse(cs, SUBPARSE)
+ re = re.substr(0, reClassStart) + '\\[' + sp[0]
+ hasMagic = hasMagic || sp[1]
+ }
+
+ // handle the case where we had a +( thing at the *end*
+ // of the pattern.
+ // each pattern list stack adds 3 chars, and we need to go through
+ // and escape any | chars that were passed through as-is for the regexp.
+ // Go through and escape them, taking care not to double-escape any
+ // | chars that were already escaped.
+ for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) {
+ var tail = re.slice(pl.reStart + pl.open.length)
+ this.debug('setting tail', re, pl)
+ // maybe some even number of \, then maybe 1 \, followed by a |
+ tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) {
+ if (!$2) {
+ // the | isn't already escaped, so escape it.
+ $2 = '\\'
+ }
+
+ // need to escape all those slashes *again*, without escaping the
+ // one that we need for escaping the | character. As it works out,
+ // escaping an even number of slashes can be done by simply repeating
+ // it exactly after itself. That's why this trick works.
+ //
+ // I am sorry that you have to see this.
+ return $1 + $1 + $2 + '|'
+ })
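+ // For instance (illustrative note, not from the upstream source): a bare "|"
+ // that slipped through becomes "\|", while an already-escaped "\|" is left
+ // untouched, because $2 then captures the existing escaping backslash.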
+
+ this.debug('tail=%j\n %s', tail, tail, pl, re)
+ var t = pl.type === '*' ? star
+ : pl.type === '?' ? qmark
+ : '\\' + pl.type
+
+ hasMagic = true
+ re = re.slice(0, pl.reStart) + t + '\\(' + tail
+ }
+
+ // handle trailing things that only matter at the very end.
+ clearStateChar()
+ if (escaping) {
+ // trailing \\
+ re += '\\\\'
+ }
+
+ // only need to apply the nodot start if the re starts with
+ // something that could conceivably capture a dot
+ var addPatternStart = false
+ switch (re.charAt(0)) {
+ case '[': case '.': case '(': addPatternStart = true
+ }
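+ // patternStart (set near the top of parse) is the no-leading-dot guard:
+ // typically '(?!\\.)', unless options.dot is set or the pattern itself
+ // begins with a dot, in which case no guard is needed.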
+
+ // Hack to work around lack of negative lookbehind in JS
+ // A pattern like: *.!(x).!(y|z) needs to ensure that a name
+ // like 'a.xyz.yz' doesn't match. So, the first negative
+ // lookahead, has to look ALL the way ahead, to the end of
+ // the pattern.
+ for (var n = negativeLists.length - 1; n > -1; n--) {
+ var nl = negativeLists[n]
+
+ var nlBefore = re.slice(0, nl.reStart)
+ var nlFirst = re.slice(nl.reStart, nl.reEnd - 8)
+ var nlLast = re.slice(nl.reEnd - 8, nl.reEnd)
+ var nlAfter = re.slice(nl.reEnd)
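+ // The 8 above is the length of the '!' group's close string, '))[^/]*)',
+ // which was appended to re when the matching ')' was handled earlier.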
+
+ nlLast += nlAfter
+
+ // Handle nested stuff like *(*.js|!(*.json)), where open parens
+ // mean that we should *not* include the ) in the bit that is considered
+ // "after" the negated section.
+ var openParensBefore = nlBefore.split('(').length - 1
+ var cleanAfter = nlAfter
+ for (i = 0; i < openParensBefore; i++) {
+ cleanAfter = cleanAfter.replace(/\)[+*?]?/, '')
+ }
+ nlAfter = cleanAfter
+
+ var dollar = ''
+ if (nlAfter === '' && isSub !== SUBPARSE) {
+ dollar = '$'
+ }
+ var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast
+ re = newRe
+ }
+
+ // if the re is not "" at this point, then we need to make sure
+ // it doesn't match against an empty path part.
+ // Otherwise a/* will match a/, which it should not.
+ if (re !== '' && hasMagic) {
+ re = '(?=.)' + re
+ }
+
+ if (addPatternStart) {
+ re = patternStart + re
+ }
+
+ // parsing just a piece of a larger pattern.
+ if (isSub === SUBPARSE) {
+ return [re, hasMagic]
+ }
+
+ // skip the regexp for non-magical patterns
+ // unescape anything in it, though, so that it'll be
+ // an exact match against a file etc.
+ if (!hasMagic) {
+ return globUnescape(pattern)
+ }
+
+ var flags = options.nocase ? 'i' : ''
+ try {
+ var regExp = new RegExp('^' + re + '$', flags)
+ } catch (er) /* istanbul ignore next - should be impossible */ {
+ // If it was an invalid regular expression, then it can't match
+ // anything. This trick looks for a character after the end of
+ // the string, which is of course impossible, except in multi-line
+ // mode, but it's not a /m regex.
+ return new RegExp('$.')
+ }
+
+ regExp._glob = pattern
+ regExp._src = re
+
+ return regExp
+}
+
+minimatch.makeRe = function (pattern, options) {
+ return new Minimatch(pattern, options || {}).makeRe()
+}
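+// Illustrative usage (not part of the upstream source): with default options,
+// minimatch.makeRe('*.js').test('index.js') is true, '.hidden.js' is not
+// matched, and false is returned for patterns that yield no regexp (e.g. a
+// comment pattern starting with '#').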
+
+Minimatch.prototype.makeRe = makeRe
+function makeRe () {
+ if (this.regexp || this.regexp === false) return this.regexp
+
+ // at this point, this.set is a 2d array of partial
+ // pattern strings, or "**".
+ //
+ // It's better to use .match(). This function shouldn't
+ // be used, really, but it's pretty convenient sometimes,
+ // when you just want to work with a regex.
+ var set = this.set
+
+ if (!set.length) {
+ this.regexp = false
+ return this.regexp
+ }
+ var options = this.options
+
+ var twoStar = options.noglobstar ? star
+ : options.dot ? twoStarDot
+ : twoStarNoDot
+ var flags = options.nocase ? 'i' : ''
+
+ var re = set.map(function (pattern) {
+ return pattern.map(function (p) {
+ return (p === GLOBSTAR) ? twoStar
+ : (typeof p === 'string') ? regExpEscape(p)
+ : p._src
+ }).join('\\\/')
+ }).join('|')
+
+ // must match entire pattern
+ // ending in a * or ** will make it less strict.
+ re = '^(?:' + re + ')$'
+
+ // can match anything, as long as it's not this.
+ if (this.negate) re = '^(?!' + re + ').*$'
+
+ try {
+ this.regexp = new RegExp(re, flags)
+ } catch (ex) /* istanbul ignore next - should be impossible */ {
+ this.regexp = false
+ }
+ return this.regexp
+}
+
+minimatch.match = function (list, pattern, options) {
+ options = options || {}
+ var mm = new Minimatch(pattern, options)
+ list = list.filter(function (f) {
+ return mm.match(f)
+ })
+ if (mm.options.nonull && !list.length) {
+ list.push(pattern)
+ }
+ return list
+}
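+// Illustrative usage (not part of the upstream source):
+//   minimatch.match(['a.js', 'b.ts'], '*.js')             // ['a.js']
+//   minimatch.match(['b.ts'], '*.js', { nonull: true })   // ['*.js']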
+
+Minimatch.prototype.match = function match (f, partial) {
+ if (typeof partial === 'undefined') partial = this.partial
+ this.debug('match', f, this.pattern)
+ // short-circuit in the case of busted things.
+ // comments, etc.
+ if (this.comment) return false
+ if (this.empty) return f === ''
+
+ if (f === '/' && partial) return true
+
+ var options = this.options
+
+ // windows: need to use /, not \
+ if (path.sep !== '/') {
+ f = f.split(path.sep).join('/')
+ }
+
+ // treat the test path as a set of pathparts.
+ f = f.split(slashSplit)
+ this.debug(this.pattern, 'split', f)
+
+ // just ONE of the pattern sets in this.set needs to match
+ // in order for it to be valid. If negating, then just one
+ // match means that we have failed.
+ // Either way, return on the first hit.
+
+ var set = this.set
+ this.debug(this.pattern, 'set', set)
+
+ // Find the basename of the path by looking for the last non-empty segment
+ var filename
+ var i
+ for (i = f.length - 1; i >= 0; i--) {
+ filename = f[i]
+ if (filename) break
+ }
+
+ for (i = 0; i < set.length; i++) {
+ var pattern = set[i]
+ var file = f
+ if (options.matchBase && pattern.length === 1) {
+ file = [filename]
+ }
+ var hit = this.matchOne(file, pattern, partial)
+ if (hit) {
+ if (options.flipNegate) return true
+ return !this.negate
+ }
+ }
+
+ // didn't get any hits. this is success if it's a negative
+ // pattern, failure otherwise.
+ if (options.flipNegate) return false
+ return this.negate
+}
+
+// set partial to true to test if, for example,
+// "/a/b" matches the start of "/*/b/*/d"
+// Partial means, if you run out of file before you run
+// out of pattern, then that's fine, as long as all
+// the parts match.
+Minimatch.prototype.matchOne = function (file, pattern, partial) {
+ var options = this.options
+
+ this.debug('matchOne',
+ { 'this': this, file: file, pattern: pattern })
+
+ this.debug('matchOne', file.length, pattern.length)
+
+ for (var fi = 0,
+ pi = 0,
+ fl = file.length,
+ pl = pattern.length
+ ; (fi < fl) && (pi < pl)
+ ; fi++, pi++) {
+ this.debug('matchOne loop')
+ var p = pattern[pi]
+ var f = file[fi]
+
+ this.debug(pattern, p, f)
+
+ // should be impossible.
+ // some invalid regexp stuff in the set.
+ /* istanbul ignore if */
+ if (p === false) return false
+
+ if (p === GLOBSTAR) {
+ this.debug('GLOBSTAR', [pattern, p, f])
+
+ // "**"
+ // a/**/b/**/c would match the following:
+ // a/b/x/y/z/c
+ // a/x/y/z/b/c
+ // a/b/x/b/x/c
+ // a/b/c
+ // To do this, take the rest of the pattern after
+ // the **, and see if it would match the file remainder.
+ // If so, return success.
+ // If not, the ** "swallows" a segment, and try again.
+ // This is recursively awful.
+ //
+ // a/**/b/**/c matching a/b/x/y/z/c
+ // - a matches a
+ // - doublestar
+ // - matchOne(b/x/y/z/c, b/**/c)
+ // - b matches b
+ // - doublestar
+ // - matchOne(x/y/z/c, c) -> no
+ // - matchOne(y/z/c, c) -> no
+ // - matchOne(z/c, c) -> no
+ // - matchOne(c, c) yes, hit
+ var fr = fi
+ var pr = pi + 1
+ if (pr === pl) {
+ this.debug('** at the end')
+ // a ** at the end will just swallow the rest.
+ // We have found a match.
+ // however, it will not swallow /.x, unless
+ // options.dot is set.
+ // . and .. are *never* matched by **, for explosively
+ // exponential reasons.
+ for (; fi < fl; fi++) {
+ if (file[fi] === '.' || file[fi] === '..' ||
+ (!options.dot && file[fi].charAt(0) === '.')) return false
+ }
+ return true
+ }
+
+ // ok, let's see if we can swallow whatever we can.
+ while (fr < fl) {
+ var swallowee = file[fr]
+
+ this.debug('\nglobstar while', file, fr, pattern, pr, swallowee)
+
+ // XXX remove this slice. Just pass the start index.
+ if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
+ this.debug('globstar found match!', fr, fl, swallowee)
+ // found a match.
+ return true
+ } else {
+ // can't swallow "." or ".." ever.
+ // can only swallow ".foo" when explicitly asked.
+ if (swallowee === '.' || swallowee === '..' ||
+ (!options.dot && swallowee.charAt(0) === '.')) {
+ this.debug('dot detected!', file, fr, pattern, pr)
+ break
+ }
+
+ // ** swallows a segment, and continue.
+ this.debug('globstar swallow a segment, and continue')
+ fr++
+ }
+ }
+
+ // no match was found.
+ // However, in partial mode, we can't say this is necessarily over.
+ // If there's more *pattern* left, then the match may still
+ // succeed once more of the path is provided.
+ /* istanbul ignore if */
+ if (partial) {
+ // ran out of file
+ this.debug('\n>>> no match, partial?', file, fr, pattern, pr)
+ if (fr === fl) return true
+ }
+ return false
+ }
+
+ // something other than **
+ // non-magic patterns just have to match exactly
+ // patterns with magic have been turned into regexps.
+ var hit
+ if (typeof p === 'string') {
+ hit = f === p
+ this.debug('string match', p, f, hit)
+ } else {
+ hit = f.match(p)
+ this.debug('pattern match', p, f, hit)
+ }
+
+ if (!hit) return false
+ }
+
+ // Note: ending in / means that we'll get a final ""
+ // at the end of the pattern. This can only match a
+ // corresponding "" at the end of the file.
+ // If the file ends in /, then it can only match a
+ // a pattern that ends in /, unless the pattern just
+ // doesn't have any more for it. But, a/b/ should *not*
+ // match "a/b/*", even though "" matches against the
+ // [^/]*? pattern, except in partial mode, where it might
+ // simply not be reached yet.
+ // However, a/b/ should still satisfy a/*
+
+ // now either we fell off the end of the pattern, or we're done.
+ if (fi === fl && pi === pl) {
+ // ran out of pattern and filename at the same time.
+ // an exact hit!
+ return true
+ } else if (fi === fl) {
+ // ran out of file, but still had pattern left.
+ // this is ok if we're doing the match as part of
+ // a glob fs traversal.
+ return partial
+ } else /* istanbul ignore else */ if (pi === pl) {
+ // ran out of pattern, still have file left.
+ // this is only acceptable if we're on the very last
+ // empty segment of a file with a trailing slash.
+ // a/* should match a/b/
+ return (fi === fl - 1) && (file[fi] === '')
+ }
+
+ // should be unreachable.
+ /* istanbul ignore next */
+ throw new Error('wtf?')
+}
+
+// replace stuff like \* with *
+function globUnescape (s) {
+ return s.replace(/\\(.)/g, '$1')
+}
+
+function regExpEscape (s) {
+ return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&')
+}
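+// Illustrative: globUnescape('\\*') -> '*', regExpEscape('a.b') -> 'a\\.b'.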
+
+
+/***/ }),
+
+/***/ 1223:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var wrappy = __nccwpck_require__(2940)
+module.exports = wrappy(once)
+module.exports.strict = wrappy(onceStrict)
+
+once.proto = once(function () {
+ Object.defineProperty(Function.prototype, 'once', {
+ value: function () {
+ return once(this)
+ },
+ configurable: true
+ })
+
+ Object.defineProperty(Function.prototype, 'onceStrict', {
+ value: function () {
+ return onceStrict(this)
+ },
+ configurable: true
+ })
+})
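+// Note: once.proto is opt-in; calling it (at most once) installs the
+// .once()/.onceStrict() helpers on Function.prototype. Nothing is patched
+// merely by loading this module.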
+
+function once (fn) {
+ var f = function () {
+ if (f.called) return f.value
+ f.called = true
+ return f.value = fn.apply(this, arguments)
+ }
+ f.called = false
+ return f
+}
+
+function onceStrict (fn) {
+ var f = function () {
+ if (f.called)
+ throw new Error(f.onceError)
+ f.called = true
+ return f.value = fn.apply(this, arguments)
+ }
+ var name = fn.name || 'Function wrapped with `once`'
+ f.onceError = name + " shouldn't be called more than once"
+ f.called = false
+ return f
+}
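+// Illustrative usage (not part of the upstream source):
+//   var init = once(function () { return 42 })
+//   init()  // 42; the wrapped function runs
+//   init()  // 42 again; the wrapped function is not re-invoked
+// onceStrict behaves the same, except a second call throws an Error.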
+
+
+/***/ }),
+
+/***/ 2043:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+;(function (sax) { // wrapper for non-node envs
+ sax.parser = function (strict, opt) { return new SAXParser(strict, opt) }
+ sax.SAXParser = SAXParser
+ sax.SAXStream = SAXStream
+ sax.createStream = createStream
+
+ // When we pass the MAX_BUFFER_LENGTH position, start checking for buffer overruns.
+ // When we check, schedule the next check for MAX_BUFFER_LENGTH - (max(buffer lengths)),
+ // since that's the earliest that a buffer overrun could occur. This way, checks are
+ // as rare as required, but as often as necessary to ensure never crossing this bound.
+ // Furthermore, buffers are only tested at most once per write(), so passing a very
+ // large string into write() might have undesirable effects, but this is manageable by
+ // the caller, so it is assumed to be safe. Thus, a call to write() may, in the extreme
+ // edge case, result in creating at most one complete copy of the string passed in.
+ // Set to Infinity to have unlimited buffers.
+ sax.MAX_BUFFER_LENGTH = 64 * 1024
+
+ var buffers = [
+ 'comment', 'sgmlDecl', 'textNode', 'tagName', 'doctype',
+ 'procInstName', 'procInstBody', 'entity', 'attribName',
+ 'attribValue', 'cdata', 'script'
+ ]
+
+ sax.EVENTS = [
+ 'text',
+ 'processinginstruction',
+ 'sgmldeclaration',
+ 'doctype',
+ 'comment',
+ 'opentagstart',
+ 'attribute',
+ 'opentag',
+ 'closetag',
+ 'opencdata',
+ 'cdata',
+ 'closecdata',
+ 'error',
+ 'end',
+ 'ready',
+ 'script',
+ 'opennamespace',
+ 'closenamespace'
+ ]
+
+ function SAXParser (strict, opt) {
+ if (!(this instanceof SAXParser)) {
+ return new SAXParser(strict, opt)
+ }
+
+ var parser = this
+ clearBuffers(parser)
+ parser.q = parser.c = ''
+ parser.bufferCheckPosition = sax.MAX_BUFFER_LENGTH
+ parser.opt = opt || {}
+ parser.opt.lowercase = parser.opt.lowercase || parser.opt.lowercasetags
+ parser.looseCase = parser.opt.lowercase ? 'toLowerCase' : 'toUpperCase'
+ parser.tags = []
+ parser.closed = parser.closedRoot = parser.sawRoot = false
+ parser.tag = parser.error = null
+ parser.strict = !!strict
+ parser.noscript = !!(strict || parser.opt.noscript)
+ parser.state = S.BEGIN
+ parser.strictEntities = parser.opt.strictEntities
+ parser.ENTITIES = parser.strictEntities ? Object.create(sax.XML_ENTITIES) : Object.create(sax.ENTITIES)
+ parser.attribList = []
+
+ // namespaces form a prototype chain.
+ // it always points at the current tag,
+ // which protos to its parent tag.
+ if (parser.opt.xmlns) {
+ parser.ns = Object.create(rootNS)
+ }
+
+ // mostly just for error reporting
+ parser.trackPosition = parser.opt.position !== false
+ if (parser.trackPosition) {
+ parser.position = parser.line = parser.column = 0
+ }
+ emit(parser, 'onready')
+ }
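+ // Illustrative usage (not part of the upstream source):
+ //   var p = sax.parser(true)  // strict mode
+ //   p.onopentag = function (node) { /* node.name, node.attributes */ }
+ //   p.write('<a href="x">hi</a>').close()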
+
+ if (!Object.create) {
+ Object.create = function (o) {
+ function F () {}
+ F.prototype = o
+ var newf = new F()
+ return newf
+ }
+ }
+
+ if (!Object.keys) {
+ Object.keys = function (o) {
+ var a = []
+ for (var i in o) if (o.hasOwnProperty(i)) a.push(i)
+ return a
+ }
+ }
+
+ function checkBufferLength (parser) {
+ var maxAllowed = Math.max(sax.MAX_BUFFER_LENGTH, 10)
+ var maxActual = 0
+ for (var i = 0, l = buffers.length; i < l; i++) {
+ var len = parser[buffers[i]].length
+ if (len > maxAllowed) {
+ // Text/cdata nodes can get big, and since they're buffered,
+ // we can get here under normal conditions.
+ // Avoid issues by emitting the text node now,
+ // so at least it won't get any bigger.
+ switch (buffers[i]) {
+ case 'textNode':
+ closeText(parser)
+ break
+
+ case 'cdata':
+ emitNode(parser, 'oncdata', parser.cdata)
+ parser.cdata = ''
+ break
+
+ case 'script':
+ emitNode(parser, 'onscript', parser.script)
+ parser.script = ''
+ break
+
+ default:
+ error(parser, 'Max buffer length exceeded: ' + buffers[i])
+ }
+ }
+ maxActual = Math.max(maxActual, len)
+ }
+ // schedule the next check for the earliest possible buffer overrun.
+ var m = sax.MAX_BUFFER_LENGTH - maxActual
+ parser.bufferCheckPosition = m + parser.position
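+ // e.g. with the default 64 * 1024 limit and the largest buffer at 1000
+ // chars, the next check is scheduled 64536 characters further on.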
+ }
+
+ function clearBuffers (parser) {
+ for (var i = 0, l = buffers.length; i < l; i++) {
+ parser[buffers[i]] = ''
+ }
+ }
+
+ function flushBuffers (parser) {
+ closeText(parser)
+ if (parser.cdata !== '') {
+ emitNode(parser, 'oncdata', parser.cdata)
+ parser.cdata = ''
+ }
+ if (parser.script !== '') {
+ emitNode(parser, 'onscript', parser.script)
+ parser.script = ''
+ }
+ }
+
+ SAXParser.prototype = {
+ end: function () { end(this) },
+ write: write,
+ resume: function () { this.error = null; return this },
+ close: function () { return this.write(null) },
+ flush: function () { flushBuffers(this) }
+ }
+
+ var Stream
+ try {
+ Stream = (__nccwpck_require__(2781).Stream)
+ } catch (ex) {
+ Stream = function () {}
+ }
+ if (!Stream) Stream = function () {}
+
+ var streamWraps = sax.EVENTS.filter(function (ev) {
+ return ev !== 'error' && ev !== 'end'
+ })
+
+ function createStream (strict, opt) {
+ return new SAXStream(strict, opt)
+ }
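+ // Illustrative: sax.createStream(true) can be piped to, e.g.
+ //   fs.createReadStream('file.xml').pipe(sax.createStream(true))
+ // (assumes the usual Node fs module; not part of the upstream source.)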
+
+ function SAXStream (strict, opt) {
+ if (!(this instanceof SAXStream)) {
+ return new SAXStream(strict, opt)
+ }
+
+ Stream.apply(this)
+
+ this._parser = new SAXParser(strict, opt)
+ this.writable = true
+ this.readable = true
+
+ var me = this
+
+ this._parser.onend = function () {
+ me.emit('end')
+ }
+
+ this._parser.onerror = function (er) {
+ me.emit('error', er)
+
+ // if the handler didn't throw, the error was handled.
+ // go ahead and clear error, so we can write again.
+ me._parser.error = null
+ }
+
+ this._decoder = null
+
+ streamWraps.forEach(function (ev) {
+ Object.defineProperty(me, 'on' + ev, {
+ get: function () {
+ return me._parser['on' + ev]
+ },
+ set: function (h) {
+ if (!h) {
+ me.removeAllListeners(ev)
+ me._parser['on' + ev] = h
+ return h
+ }
+ me.on(ev, h)
+ },
+ enumerable: true,
+ configurable: false
+ })
+ })
+ }
+
+ SAXStream.prototype = Object.create(Stream.prototype, {
+ constructor: {
+ value: SAXStream
+ }
+ })
+
+ SAXStream.prototype.write = function (data) {
+ if (typeof Buffer === 'function' &&
+ typeof Buffer.isBuffer === 'function' &&
+ Buffer.isBuffer(data)) {
+ if (!this._decoder) {
+ var SD = (__nccwpck_require__(1576).StringDecoder)
+ this._decoder = new SD('utf8')
+ }
+ data = this._decoder.write(data)
+ }
+
+ this._parser.write(data.toString())
+ this.emit('data', data)
+ return true
+ }
+
+ SAXStream.prototype.end = function (chunk) {
+ if (chunk && chunk.length) {
+ this.write(chunk)
+ }
+ this._parser.end()
+ return true
+ }
+
+ SAXStream.prototype.on = function (ev, handler) {
+ var me = this
+ if (!me._parser['on' + ev] && streamWraps.indexOf(ev) !== -1) {
+ me._parser['on' + ev] = function () {
+ var args = arguments.length === 1 ? [arguments[0]] : Array.apply(null, arguments)
+ args.splice(0, 0, ev)
+ me.emit.apply(me, args)
+ }
+ }
+
+ return Stream.prototype.on.call(me, ev, handler)
+ }
+
+ // this really needs to be replaced with character classes.
+ // XML allows all manner of ridiculous numbers and digits.
+ var CDATA = '[CDATA['
+ var DOCTYPE = 'DOCTYPE'
+ var XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace'
+ var XMLNS_NAMESPACE = 'http://www.w3.org/2000/xmlns/'
+ var rootNS = { xml: XML_NAMESPACE, xmlns: XMLNS_NAMESPACE }
+
+ // http://www.w3.org/TR/REC-xml/#NT-NameStartChar
+ // This implementation works on strings, a single character at a time
+ // as such, it cannot ever support astral-plane characters (10000-EFFFF)
+ // without a significant breaking change to either this parser, or the
+ // JavaScript language. Implementation of an emoji-capable xml parser
+ // is left as an exercise for the reader.
+ var nameStart = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/
+
+ var nameBody = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/
+
+ var entityStart = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/
+ var entityBody = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/
+
+ function isWhitespace (c) {
+ return c === ' ' || c === '\n' || c === '\r' || c === '\t'
+ }
+
+ function isQuote (c) {
+ return c === '"' || c === '\''
+ }
+
+ function isAttribEnd (c) {
+ return c === '>' || isWhitespace(c)
+ }
+
+ function isMatch (regex, c) {
+ return regex.test(c)
+ }
+
+ function notMatch (regex, c) {
+ return !isMatch(regex, c)
+ }
+
+ var S = 0
+ sax.STATE = {
+ BEGIN: S++, // leading byte order mark or whitespace
+ BEGIN_WHITESPACE: S++, // leading whitespace
+ TEXT: S++, // general stuff
+ TEXT_ENTITY: S++, // & and such.
+ OPEN_WAKA: S++, // <
+ SGML_DECL: S++, // <!BLARG
+ SCRIPT: S++, // <script> ...