diff --git a/.github/linters/.eslintrc.yml b/.github/linters/.eslintrc.yml index f452aba..2aa279d 100644 --- a/.github/linters/.eslintrc.yml +++ b/.github/linters/.eslintrc.yml @@ -41,6 +41,8 @@ rules: 'eslint-comments/no-unused-disable': 'off', 'i18n-text/no-en': 'off', 'import/no-namespace': 'off', + 'import/no-commonjs': 'off', + 'import/first': 'off', 'no-console': 'off', 'no-unused-vars': 'off', 'prettier/prettier': 'error', @@ -63,12 +65,12 @@ rules: '@typescript-eslint/no-misused-new': 'error', '@typescript-eslint/no-namespace': 'error', '@typescript-eslint/no-non-null-assertion': 'warn', - '@typescript-eslint/no-require-imports': 'error', + '@typescript-eslint/no-require-imports': 'off', '@typescript-eslint/no-unnecessary-qualifier': 'error', '@typescript-eslint/no-unnecessary-type-assertion': 'error', '@typescript-eslint/no-unused-vars': 'error', '@typescript-eslint/no-useless-constructor': 'error', - '@typescript-eslint/no-var-requires': 'error', + '@typescript-eslint/no-var-requires': 'off', '@typescript-eslint/prefer-for-of': 'warn', '@typescript-eslint/prefer-function-type': 'warn', '@typescript-eslint/prefer-includes': 'error', diff --git a/.github/workflows/check-dist.yml b/.github/workflows/check-dist.yml index 5c4bd8b..d3b426b 100644 --- a/.github/workflows/check-dist.yml +++ b/.github/workflows/check-dist.yml @@ -17,6 +17,10 @@ on: branches: - main +concurrency: + group: hf-oss-dist-${{ github.ref }} + cancel-in-progress: true + permissions: contents: read diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c6d2861..9133892 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -10,6 +10,13 @@ on: permissions: contents: read +env: + OSS_ACCESS_KEY_ID: ${{ secrets.OSS_ACCESS_KEY_ID }} + OSS_ACCESS_KEY_SECRET: ${{ secrets.OSS_ACCESS_KEY_SECRET }} + +concurrency: + group: hf-oss-ci-${{ github.ref }} + cancel-in-progress: true jobs: test-typescript: @@ -44,21 +51,46 @@ jobs: id: npm-ci-test run: npm run ci-test - test-action: + list: name: GitHub Actions Test runs-on: ubuntu-latest + outputs: + matrix: ${{ steps.list-action.outputs.matrix }} steps: - name: Checkout id: checkout uses: actions/checkout@v4 + - run: git clone https://huggingface.co/J5Tsai/debug-static-files + env: + GIT_LFS_SKIP_SMUDGE: 1 + - name: Test Local Action - id: test-action - uses: ./ + id: list-action + uses: ./actions/list with: - milliseconds: 2000 + repo-dir: debug-static-files - name: Print Output id: output - run: echo "${{ steps.test-action.outputs.time }}" + run: echo "${{ steps.list-action.outputs.matrix }}" + + sync-oss: + name: 'Sync to OSS' + runs-on: ubuntu-latest + needs: list + strategy: + fail-fast: false + max-parallel: 12 + matrix: ${{ fromJson(needs.list.outputs.matrix) }} + steps: + - name: Checkout + id: checkout + uses: actions/checkout@v4 + + - name: Test Local Action + id: list-action + uses: ./actions/sync + with: + urls: ${{ matrix.task.urls }} diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 5f8e060..21e02ad 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -10,6 +10,10 @@ on: schedule: - cron: '31 7 * * 3' +concurrency: + group: hf-code-ci-${{ github.ref }} + cancel-in-progress: true + permissions: actions: read checks: write diff --git a/.github/workflows/linter.yml b/.github/workflows/linter.yml index 7b72f4b..1797328 100644 --- a/.github/workflows/linter.yml +++ b/.github/workflows/linter.yml @@ -13,6 +13,10 @@ permissions: packages: read 
statuses: write +concurrency: + group: hf-oss-linter-${{ github.ref }} + cancel-in-progress: true + jobs: lint: name: Lint Codebase diff --git a/README.md b/README.md index 3e1c006..43b77e4 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,9 @@ +# Development + +```bash +GIT_LFS_SKIP_SMUDGE=1 git clone https://huggingface.co/J5Tsai/debug-static-files +``` + # Create a GitHub Action Using TypeScript [![GitHub Super-Linter](https://github.com/actions/typescript-action/actions/workflows/linter.yml/badge.svg)](https://github.com/super-linter/super-linter) diff --git a/__tests__/main.test.ts b/__tests__/main.test.ts index ce373fb..bbd5dfd 100644 --- a/__tests__/main.test.ts +++ b/__tests__/main.test.ts @@ -12,33 +12,30 @@ import * as main from '../src/main' // Mock the action's main function const runMock = jest.spyOn(main, 'run') -// Other utilities -const timeRegex = /^\d{2}:\d{2}:\d{2}/ - // Mock the GitHub Actions core library -let debugMock: jest.SpiedFunction<typeof core.debug> let errorMock: jest.SpiedFunction<typeof core.error> let getInputMock: jest.SpiedFunction<typeof core.getInput> -let setFailedMock: jest.SpiedFunction<typeof core.setFailed> let setOutputMock: jest.SpiedFunction<typeof core.setOutput> describe('action', () => { beforeEach(() => { jest.clearAllMocks() - debugMock = jest.spyOn(core, 'debug').mockImplementation() errorMock = jest.spyOn(core, 'error').mockImplementation() getInputMock = jest.spyOn(core, 'getInput').mockImplementation() - setFailedMock = jest.spyOn(core, 'setFailed').mockImplementation() setOutputMock = jest.spyOn(core, 'setOutput').mockImplementation() }) - it('sets the time output', async () => { + it('sets the file output', async () => { // Set the action's inputs as return values from core.getInput() getInputMock.mockImplementation(name => { switch (name) { - case 'milliseconds': - return '500' + case 'repo-dir': + return getRepoDir() + case 'repo-ref': + return 'main' + case 'action': + return 'list' default: return '' } @@ -46,44 +43,20 @@ describe('action', () => { await main.run() expect(runMock).toHaveReturned() - - // Verify that all of the core library functions were called correctly - expect(debugMock).toHaveBeenNthCalledWith(1, 'Waiting 500 milliseconds ...') - expect(debugMock).toHaveBeenNthCalledWith( - 2, - expect.stringMatching(timeRegex) - ) - expect(debugMock).toHaveBeenNthCalledWith( - 3, - expect.stringMatching(timeRegex) - ) + expect(setOutputMock).toHaveBeenNthCalledWith(1, 'file', '.gitattributes') + expect(setOutputMock).toHaveBeenNthCalledWith(3, 'lfs', 'sd3demo.jpg') expect(setOutputMock).toHaveBeenNthCalledWith( - 1, - 'time', - expect.stringMatching(timeRegex) - ) - expect(errorMock).not.toHaveBeenCalled() - }) - - it('sets a failed status', async () => { - // Set the action's inputs as return values from core.getInput() - getInputMock.mockImplementation(name => { - switch (name) { - case 'milliseconds': - return 'this is not a number' - default: - return '' - } - }) - - await main.run() - expect(runMock).toHaveReturned() - - // Verify that all of the core library functions were called correctly - expect(setFailedMock).toHaveBeenNthCalledWith( - 1, - 'milliseconds not a number' + 4, + 'lfs_url', + expect.stringMatching( + /https:\/\/huggingface.co\/J5Tsai\/debug-static-files\/resolve\/.+\/sd3demo.jpg/ + ) ) expect(errorMock).not.toHaveBeenCalled() }) }) +function getRepoDir(): string { + const fromEnv = process.env.HF_TO_OSS_DEBUG_REPO_DIR + if (fromEnv) return fromEnv + throw new Error('HF_TO_OSS_DEBUG_REPO_DIR is not set.') +} diff --git a/__tests__/wait.test.ts b/__tests__/wait.test.ts deleted file mode 100644 index 1336aaa..0000000 ---
a/__tests__/wait.test.ts +++ /dev/null @@ -1,25 +0,0 @@ -/** - * Unit tests for src/wait.ts - */ - -import { wait } from '../src/wait' -import { expect } from '@jest/globals' - -describe('wait.ts', () => { - it('throws an invalid number', async () => { - const input = parseInt('foo', 10) - expect(isNaN(input)).toBe(true) - - await expect(wait(input)).rejects.toThrow('milliseconds not a number') - }) - - it('waits with a valid number', async () => { - const start = new Date() - await wait(500) - const end = new Date() - - const delta = Math.abs(end.getTime() - start.getTime()) - - expect(delta).toBeGreaterThan(450) - }) -}) diff --git a/actions/list/action.yml b/actions/list/action.yml new file mode 100644 index 0000000..bc2c667 --- /dev/null +++ b/actions/list/action.yml @@ -0,0 +1,31 @@ +name: 'list git large files' +description: 'List Git LFS files in a Hugging Face repository and emit a sync job matrix' +author: 'Your name or organization here' + +# Add your action's branding here. This will appear on the GitHub Marketplace. +branding: + icon: 'heart' + color: 'red' + +# Define your inputs here. +inputs: + action: + description: 'Operation to run (list)' + required: false + default: 'list' + repo-dir: + description: 'Path to the local clone of the Hugging Face repository' + required: true + repo-ref: + description: 'Git ref used to resolve LFS file URLs' + required: false + default: 'HEAD' + +# Define your outputs here. +outputs: + matrix: + description: 'JSON job matrix describing the large files to sync' + +runs: + using: node20 + main: ../../dist/index.js diff --git a/actions/sync/action.yml b/actions/sync/action.yml new file mode 100644 index 0000000..d72c651 --- /dev/null +++ b/actions/sync/action.yml @@ -0,0 +1,22 @@ +name: 'sync git large files' +description: 'Sync Git LFS files from Hugging Face to Alibaba Cloud OSS' +author: 'Your name or organization here' + +# Add your action's branding here. This will appear on the GitHub Marketplace. +branding: + icon: 'heart' + color: 'red' + +# Define your inputs here.
+inputs: + action: + description: '' + required: false + default: 'sync' + urls: + description: 'Your input description here' + required: true + +runs: + using: node20 + main: ../../dist/index.js diff --git a/badges/coverage.svg b/badges/coverage.svg index 5bb55be..597a74e 100644 --- a/badges/coverage.svg +++ b/badges/coverage.svg @@ -1 +1 @@ -Coverage: 100%Coverage100% \ No newline at end of file +Coverage: 65.15%Coverage65.15% \ No newline at end of file diff --git a/dist/index.js b/dist/index.js index b782fc3..3345c5f 100644 --- a/dist/index.js +++ b/dist/index.js @@ -1,7 +1,7 @@ require('./sourcemap-register.js');/******/ (() => { // webpackBootstrap /******/ var __webpack_modules__ = ({ -/***/ 7351: +/***/ 87351: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -27,7 +27,7 @@ var __importStar = (this && this.__importStar) || function (mod) { }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.issue = exports.issueCommand = void 0; -const os = __importStar(__nccwpck_require__(2037)); +const os = __importStar(__nccwpck_require__(22037)); const utils_1 = __nccwpck_require__(5278); /** * Commands @@ -100,7 +100,7 @@ function escapeProperty(s) { /***/ }), -/***/ 2186: +/***/ 42186: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -135,12 +135,12 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0; -const command_1 = __nccwpck_require__(7351); +const command_1 = __nccwpck_require__(87351); const file_command_1 = __nccwpck_require__(717); const utils_1 = __nccwpck_require__(5278); -const os = __importStar(__nccwpck_require__(2037)); -const path = __importStar(__nccwpck_require__(1017)); -const oidc_utils_1 = __nccwpck_require__(8041); +const os = __importStar(__nccwpck_require__(22037)); +const path = __importStar(__nccwpck_require__(71017)); +const oidc_utils_1 = __nccwpck_require__(98041); /** * The code to exit an action */ @@ -425,12 +425,12 @@ exports.getIDToken = getIDToken; /** * Summary exports */ -var summary_1 = __nccwpck_require__(1327); +var summary_1 = __nccwpck_require__(81327); Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } })); /** * @deprecated use core.summary */ -var summary_2 = __nccwpck_require__(1327); +var summary_2 = __nccwpck_require__(81327); Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } })); /** * Path exports @@ -472,9 +472,9 @@ Object.defineProperty(exports, "__esModule", ({ value: true })); exports.prepareKeyValueMessage = exports.issueFileCommand = void 0; // We use any as a valid input type /* eslint-disable @typescript-eslint/no-explicit-any */ -const fs = __importStar(__nccwpck_require__(7147)); -const os = __importStar(__nccwpck_require__(2037)); -const uuid_1 = __nccwpck_require__(5840); +const fs = __importStar(__nccwpck_require__(57147)); +const os = 
__importStar(__nccwpck_require__(22037)); +const uuid_1 = __nccwpck_require__(75840); const utils_1 = __nccwpck_require__(5278); function issueFileCommand(command, message) { const filePath = process.env[`GITHUB_${command}`]; @@ -508,7 +508,7 @@ exports.prepareKeyValueMessage = prepareKeyValueMessage; /***/ }), -/***/ 8041: +/***/ 98041: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -524,9 +524,9 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.OidcClient = void 0; -const http_client_1 = __nccwpck_require__(6255); -const auth_1 = __nccwpck_require__(5526); -const core_1 = __nccwpck_require__(2186); +const http_client_1 = __nccwpck_require__(96255); +const auth_1 = __nccwpck_require__(35526); +const core_1 = __nccwpck_require__(42186); class OidcClient { static createHttpClient(allowRetry = true, maxRetry = 10) { const requestOptions = { @@ -618,7 +618,7 @@ var __importStar = (this && this.__importStar) || function (mod) { }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0; -const path = __importStar(__nccwpck_require__(1017)); +const path = __importStar(__nccwpck_require__(71017)); /** * toPosixPath converts the given path to the posix form. On Windows, \\ will be * replaced with /. @@ -657,7 +657,7 @@ exports.toPlatformPath = toPlatformPath; /***/ }), -/***/ 1327: +/***/ 81327: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -673,8 +673,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0; -const os_1 = __nccwpck_require__(2037); -const fs_1 = __nccwpck_require__(7147); +const os_1 = __nccwpck_require__(22037); +const fs_1 = __nccwpck_require__(57147); const { access, appendFile, writeFile } = fs_1.promises; exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY'; exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary'; @@ -994,7 +994,7 @@ exports.toCommandProperties = toCommandProperties; /***/ }), -/***/ 5526: +/***/ 35526: /***/ (function(__unused_webpack_module, exports) { "use strict"; @@ -1082,7 +1082,7 @@ exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHand /***/ }), -/***/ 6255: +/***/ 96255: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -1122,11 +1122,11 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; -const http = __importStar(__nccwpck_require__(3685)); -const https = __importStar(__nccwpck_require__(5687)); -const pm = __importStar(__nccwpck_require__(9835)); -const tunnel = __importStar(__nccwpck_require__(4294)); -const undici_1 = __nccwpck_require__(1773); +const http = __importStar(__nccwpck_require__(13685)); +const https = __importStar(__nccwpck_require__(95687)); +const pm = __importStar(__nccwpck_require__(19835)); +const tunnel = 
__importStar(__nccwpck_require__(74294)); +const undici_1 = __nccwpck_require__(41773); var HttpCodes; (function (HttpCodes) { HttpCodes[HttpCodes["OK"] = 200] = "OK"; @@ -1745,7 +1745,7 @@ const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCa /***/ }), -/***/ 9835: +/***/ 19835: /***/ ((__unused_webpack_module, exports) => { "use strict"; @@ -1834,16587 +1834,51915 @@ function isLoopbackAddress(host) { /***/ }), -/***/ 4294: +/***/ 90777: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -module.exports = __nccwpck_require__(4219); - - -/***/ }), - -/***/ 4219: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - "use strict"; -var net = __nccwpck_require__(1808); -var tls = __nccwpck_require__(4404); -var http = __nccwpck_require__(3685); -var https = __nccwpck_require__(5687); -var events = __nccwpck_require__(2361); -var assert = __nccwpck_require__(9491); -var util = __nccwpck_require__(3837); - - -exports.httpOverHttp = httpOverHttp; -exports.httpsOverHttp = httpsOverHttp; -exports.httpOverHttps = httpOverHttps; -exports.httpsOverHttps = httpsOverHttps; +var os = __nccwpck_require__(22037); +var fs = __nccwpck_require__(57147); +var child = __nccwpck_require__(32081); +var DEFAULT_RESOLV_FILE = '/etc/resolv.conf'; -function httpOverHttp(options) { - var agent = new TunnelingAgent(options); - agent.request = http.request; - return agent; +function getInterfaceName() { + var val = 'eth'; + var platform = os.platform(); + if (platform === 'darwin') { + val = 'en'; + } else if (platform === 'win32') { + val = null; + } + return val; } -function httpsOverHttp(options) { - var agent = new TunnelingAgent(options); - agent.request = http.request; - agent.createSocket = createSecureSocket; - agent.defaultPort = 443; - return agent; +function getIfconfigCMD() { + if (os.platform() === 'win32') { + return 'ipconfig/all'; + } + return '/sbin/ifconfig'; } -function httpOverHttps(options) { - var agent = new TunnelingAgent(options); - agent.request = https.request; - return agent; +// typeof os.networkInterfaces family is a number (v18.0.0) +// types: 'IPv4' | 'IPv6' => 4 | 6 +// @see https://github.com/nodejs/node/issues/42861 +function matchName(actualFamily, expectedFamily) { + if (expectedFamily === 'IPv4') { + return actualFamily === 'IPv4' || actualFamily === 4; + } + if (expectedFamily === 'IPv6') { + return actualFamily === 'IPv6' || actualFamily === 6; + } + return actualFamily === expectedFamily; } -function httpsOverHttps(options) { - var agent = new TunnelingAgent(options); - agent.request = https.request; - agent.createSocket = createSecureSocket; - agent.defaultPort = 443; - return agent; +/** + * Get all addresses. + * + * @param {String} [interfaceName] interface name, default is 'eth' on linux, 'en' on mac os. 
+ * @param {Function(err, addr)} callback + * - {Object} addr { + * - {String} ip + * - {String} ipv6 + * - {String} mac + * } + */ +function address(interfaceName, callback) { + if (typeof interfaceName === 'function') { + callback = interfaceName; + interfaceName = null; + } + + var addr = { + ip: address.ip(interfaceName), + ipv6: address.ipv6(interfaceName), + mac: null + }; + address.mac(interfaceName, function (err, mac) { + if (mac) { + addr.mac = mac; + } + callback(err, addr); + }); } - -function TunnelingAgent(options) { - var self = this; - self.options = options || {}; - self.proxyOptions = self.options.proxy || {}; - self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; - self.requests = []; - self.sockets = []; - - self.on('free', function onFree(socket, host, port, localAddress) { - var options = toOptions(host, port, localAddress); - for (var i = 0, len = self.requests.length; i < len; ++i) { - var pending = self.requests[i]; - if (pending.host === options.host && pending.port === options.port) { - // Detect the request to connect same origin server, - // reuse the connection. - self.requests.splice(i, 1); - pending.request.onSocket(socket); - return; +address.interface = function (family, name) { + var interfaces = os.networkInterfaces(); + var noName = !name; + name = name || getInterfaceName(); + family = family || 'IPv4'; + for (var i = -1; i < 8; i++) { + var interfaceName = name + (i >= 0 ? i : ''); // support 'lo' and 'lo0' + var items = interfaces[interfaceName]; + if (items) { + for (var j = 0; j < items.length; j++) { + var item = items[j]; + if (matchName(item.family, family)) { + return item; + } } } - socket.destroy(); - self.removeSocket(socket); - }); -} -util.inherits(TunnelingAgent, events.EventEmitter); - -TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { - var self = this; - var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); - - if (self.sockets.length >= this.maxSockets) { - // We are over limit so we'll add it to the queue. - self.requests.push(options); - return; } - // If we are under maxSockets create a new one. - self.createSocket(options, function(socket) { - socket.on('free', onFree); - socket.on('close', onCloseOrRemove); - socket.on('agentRemove', onCloseOrRemove); - req.onSocket(socket); - - function onFree() { - self.emit('free', socket, options); + if (noName) { + // filter all loopback or local addresses + for (var k in interfaces) { + var items = interfaces[k]; + for (var i = 0; i < items.length; i++) { + var item = items[i]; + // all 127 addresses are local and should be ignored + if (matchName(item.family, family) && !item.address.startsWith('127.')) { + return item; + } + } } + } + return; +}; - function onCloseOrRemove(err) { - self.removeSocket(socket); - socket.removeListener('free', onFree); - socket.removeListener('close', onCloseOrRemove); - socket.removeListener('agentRemove', onCloseOrRemove); - } - }); +/** + * Get current machine IPv4 + * + * @param {String} [interfaceName] interface name, default is 'eth' on linux, 'en' on mac os. 
+ * @return {String} IP address + */ +address.ip = function (interfaceName) { + var item = address.interface('IPv4', interfaceName); + return item && item.address; }; -TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { - var self = this; - var placeholder = {}; - self.sockets.push(placeholder); +/** + * Get current machine IPv6 + * + * @param {String} [interfaceName] interface name, default is 'eth' on linux, 'en' on mac os. + * @return {String} IP address + */ +address.ipv6 = function (interfaceName) { + var item = address.interface('IPv6', interfaceName); + return item && item.address; +}; - var connectOptions = mergeOptions({}, self.proxyOptions, { - method: 'CONNECT', - path: options.host + ':' + options.port, - agent: false, - headers: { - host: options.host + ':' + options.port +// osx start line 'en0: flags=8863 mtu 1500' +// linux start line 'eth0 Link encap:Ethernet HWaddr 00:16:3E:00:0A:29 ' +var MAC_OSX_START_LINE = /^(\w+)\:\s+flags=/; +var MAC_LINUX_START_LINE = /^(\w+)\s{2,}link encap:\w+/i; + +// ether 78:ca:39:b0:e6:7d +// HWaddr 00:16:3E:00:0A:29 +var MAC_RE = address.MAC_RE = /(?:ether|HWaddr)\s+((?:[a-z0-9]{2}\:){5}[a-z0-9]{2})/i; + +// osx: inet 192.168.2.104 netmask 0xffffff00 broadcast 192.168.2.255 +// linux: inet addr:10.125.5.202 Bcast:10.125.15.255 Mask:255.255.240.0 +var MAC_IP_RE = address.MAC_IP_RE = /inet\s(?:addr\:)?(\d+\.\d+\.\d+\.\d+)/; + +function getMAC(content, interfaceName, matchIP) { + var lines = content.split('\n'); + for (var i = 0; i < lines.length; i++) { + var line = lines[i].trimRight(); + var m = MAC_OSX_START_LINE.exec(line) || MAC_LINUX_START_LINE.exec(line); + if (!m) { + continue; } - }); - if (options.localAddress) { - connectOptions.localAddress = options.localAddress; - } - if (connectOptions.proxyAuth) { - connectOptions.headers = connectOptions.headers || {}; - connectOptions.headers['Proxy-Authorization'] = 'Basic ' + - new Buffer(connectOptions.proxyAuth).toString('base64'); - } - debug('making CONNECT request'); - var connectReq = self.request(connectOptions); - connectReq.useChunkedEncodingByDefault = false; // for v0.6 - connectReq.once('response', onResponse); // for v0.6 - connectReq.once('upgrade', onUpgrade); // for v0.6 - connectReq.once('connect', onConnect); // for v0.7 or later - connectReq.once('error', onError); - connectReq.end(); + // check interface name + var name = m[1]; + if (name.indexOf(interfaceName) !== 0) { + continue; + } - function onResponse(res) { - // Very hacky. This is necessary to avoid http-parser leaks. - res.upgrade = true; - } + var ip = null; + var mac = null; + var match = MAC_RE.exec(line); + if (match) { + mac = match[1]; + } - function onUpgrade(res, socket, head) { - // Hacky. 
- process.nextTick(function() { - onConnect(res, socket, head); - }); - } + i++; + while (true) { + line = lines[i]; + if (!line || MAC_OSX_START_LINE.exec(line) || MAC_LINUX_START_LINE.exec(line)) { + i--; + break; // hit next interface, handle next interface + } + if (!mac) { + match = MAC_RE.exec(line); + if (match) { + mac = match[1]; + } + } - function onConnect(res, socket, head) { - connectReq.removeAllListeners(); - socket.removeAllListeners(); + if (!ip) { + match = MAC_IP_RE.exec(line); + if (match) { + ip = match[1]; + } + } - if (res.statusCode !== 200) { - debug('tunneling socket could not be established, statusCode=%d', - res.statusCode); - socket.destroy(); - var error = new Error('tunneling socket could not be established, ' + - 'statusCode=' + res.statusCode); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - return; + i++; } - if (head.length > 0) { - debug('got illegal response body from proxy'); - socket.destroy(); - var error = new Error('got illegal response body from proxy'); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - return; + + if (ip === matchIP) { + return mac; } - debug('tunneling connection has established'); - self.sockets[self.sockets.indexOf(placeholder)] = socket; - return cb(socket); } +} - function onError(cause) { - connectReq.removeAllListeners(); - - debug('tunneling socket could not be established, cause=%s\n', - cause.message, cause.stack); - var error = new Error('tunneling socket could not be established, ' + - 'cause=' + cause.message); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); +/** + * Get current machine MAC address + * + * @param {String} [interfaceName] interface name, default is 'eth' on linux, 'en' on mac os. + * @param {Function(err, address)} callback + */ +address.mac = function (interfaceName, callback) { + if (typeof interfaceName === 'function') { + callback = interfaceName; + interfaceName = null; + } + interfaceName = interfaceName || getInterfaceName(); + var item = address.interface('IPv4', interfaceName); + if (!item) { + return callback(); } -}; -TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { - var pos = this.sockets.indexOf(socket) - if (pos === -1) { - return; + // https://github.com/nodejs/node/issues/13581 + // bug in node 7.x and <= 8.4.0 + if (!process.env.CI && (item.mac === 'ff:00:00:00:00:00' || item.mac === '00:00:00:00:00:00')) { + // wrong address, ignore it + item.mac = ''; } - this.sockets.splice(pos, 1); - var pending = this.requests.shift(); - if (pending) { - // If we have pending requests and a socket gets closed a new one - // needs to be created to take over in the pool for the one that closed. - this.createSocket(pending, function(socket) { - pending.request.onSocket(socket); - }); + if (item.mac) { + return callback(null, item.mac); } -}; -function createSecureSocket(options, cb) { - var self = this; - TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { - var hostHeader = options.request.getHeader('host'); - var tlsOptions = mergeOptions({}, self.options, { - socket: socket, - servername: hostHeader ? 
hostHeader.replace(/:.*$/, '') : options.host - }); + child.exec(getIfconfigCMD(), {timeout: 5000}, function (err, stdout, stderr) { + if (err || !stdout) { + return callback(err); + } - // 0 is dummy port for v0.6 - var secureSocket = tls.connect(0, tlsOptions); - self.sockets[self.sockets.indexOf(socket)] = secureSocket; - cb(secureSocket); + var mac = getMAC(stdout || '', interfaceName, item.address); + callback(null, mac); }); -} +}; +// nameserver 172.24.102.254 +var DNS_SERVER_RE = /^nameserver\s+(\d+\.\d+\.\d+\.\d+)$/i; -function toOptions(host, port, localAddress) { - if (typeof host === 'string') { // since v0.10 - return { - host: host, - port: port, - localAddress: localAddress - }; +/** + * Get DNS servers. + * + * @param {String} [filepath] resolv config file path. default is '/etc/resolv.conf'. + * @param {Function(err, servers)} callback + */ +address.dns = function (filepath, callback) { + if (typeof filepath === 'function') { + callback = filepath; + filepath = null; } - return host; // for v0.11 or later -} - -function mergeOptions(target) { - for (var i = 1, len = arguments.length; i < len; ++i) { - var overrides = arguments[i]; - if (typeof overrides === 'object') { - var keys = Object.keys(overrides); - for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { - var k = keys[j]; - if (overrides[k] !== undefined) { - target[k] = overrides[k]; - } + filepath = filepath || DEFAULT_RESOLV_FILE; + fs.readFile(filepath, 'utf8', function (err, content) { + if (err) { + return callback(err); + } + var servers = []; + content = content || ''; + var lines = content.split('\n'); + for (var i = 0; i < lines.length; i++) { + var line = lines[i].trim(); + var m = DNS_SERVER_RE.exec(line); + if (m) { + servers.push(m[1]); } } - } - return target; -} + callback(null, servers); + }); +}; -var debug; -if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { - debug = function() { - var args = Array.prototype.slice.call(arguments); - if (typeof args[0] === 'string') { - args[0] = 'TUNNEL: ' + args[0]; - } else { - args.unshift('TUNNEL:'); - } - console.error.apply(console, args); - } -} else { - debug = function() {}; -} -exports.debug = debug; // for test +module.exports = address; /***/ }), -/***/ 1773: +/***/ 34623: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -const Client = __nccwpck_require__(3598) -const Dispatcher = __nccwpck_require__(412) -const errors = __nccwpck_require__(8045) -const Pool = __nccwpck_require__(4634) -const BalancedPool = __nccwpck_require__(7931) -const Agent = __nccwpck_require__(7890) -const util = __nccwpck_require__(3983) -const { InvalidArgumentError } = errors -const api = __nccwpck_require__(4059) -const buildConnector = __nccwpck_require__(2067) -const MockClient = __nccwpck_require__(8687) -const MockAgent = __nccwpck_require__(6771) -const MockPool = __nccwpck_require__(6193) -const mockErrors = __nccwpck_require__(888) -const ProxyAgent = __nccwpck_require__(7858) -const RetryHandler = __nccwpck_require__(2286) -const { getGlobalDispatcher, setGlobalDispatcher } = __nccwpck_require__(1892) -const DecoratorHandler = __nccwpck_require__(6930) -const RedirectHandler = __nccwpck_require__(2860) -const createRedirectInterceptor = __nccwpck_require__(8861) +module.exports = __nccwpck_require__(65006); +module.exports.HttpsAgent = __nccwpck_require__(55500); -let hasCrypto -try { - __nccwpck_require__(6113) - hasCrypto = true -} catch { - hasCrypto = false -} -Object.assign(Dispatcher.prototype, api) 
+/***/ }), -module.exports.Dispatcher = Dispatcher -module.exports.Client = Client -module.exports.Pool = Pool -module.exports.BalancedPool = BalancedPool -module.exports.Agent = Agent -module.exports.ProxyAgent = ProxyAgent -module.exports.RetryHandler = RetryHandler +/***/ 61256: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -module.exports.DecoratorHandler = DecoratorHandler -module.exports.RedirectHandler = RedirectHandler -module.exports.createRedirectInterceptor = createRedirectInterceptor +"use strict"; +var __webpack_unused_export__; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. -module.exports.buildConnector = buildConnector -module.exports.errors = errors +// patch from https://github.com/nodejs/node/blob/v7.2.1/lib/_http_agent.js -function makeDispatcher (fn) { - return (url, opts, handler) => { - if (typeof opts === 'function') { - handler = opts - opts = null - } - if (!url || (typeof url !== 'string' && typeof url !== 'object' && !(url instanceof URL))) { - throw new InvalidArgumentError('invalid url') - } - if (opts != null && typeof opts !== 'object') { - throw new InvalidArgumentError('invalid opts') - } +const net = __nccwpck_require__(41808); +const util = __nccwpck_require__(73837); +const EventEmitter = __nccwpck_require__(82361); +const debug = util.debuglog('http'); - if (opts && opts.path != null) { - if (typeof opts.path !== 'string') { - throw new InvalidArgumentError('invalid opts.path') - } +// New Agent code. - let path = opts.path - if (!opts.path.startsWith('/')) { - path = `/${path}` - } +// The largest departure from the previous implementation is that +// an Agent instance holds connections for a variable number of host:ports. +// Surprisingly, this is still API compatible as far as third parties are +// concerned. The only code that really notices the difference is the +// request object. - url = new URL(util.parseOrigin(url).origin + path) +// Another departure is that all code related to HTTP parsing is in +// ClientRequest.onSocket(). The Agent is now *strictly* +// concerned with managing a connection pool. 
+ +function Agent(options) { + if (!(this instanceof Agent)) + return new Agent(options); + + EventEmitter.call(this); + + var self = this; + + self.defaultPort = 80; + self.protocol = 'http:'; + + self.options = Object.assign({}, options); + + // don't confuse net and make it think that we're connecting to a pipe + self.options.path = null; + self.requests = {}; + self.sockets = {}; + self.freeSockets = {}; + self.keepAliveMsecs = self.options.keepAliveMsecs || 1000; + self.keepAlive = self.options.keepAlive || false; + self.maxSockets = self.options.maxSockets || Agent.defaultMaxSockets; + self.maxFreeSockets = self.options.maxFreeSockets || 256; + + // [patch start] + // free keep-alive socket timeout. By default free socket do not have a timeout. + self.freeSocketKeepAliveTimeout = self.options.freeSocketKeepAliveTimeout || 0; + // working socket timeout. By default working socket do not have a timeout. + self.timeout = self.options.timeout || 0; + // the socket active time to live, even if it's in use + this.socketActiveTTL = this.options.socketActiveTTL || null; + // [patch end] + + self.on('free', function(socket, options) { + var name = self.getName(options); + debug('agent.on(free)', name); + + if (socket.writable && + self.requests[name] && self.requests[name].length) { + // [patch start] + debug('continue handle next request'); + // [patch end] + self.requests[name].shift().onSocket(socket); + if (self.requests[name].length === 0) { + // don't leak + delete self.requests[name]; + } } else { - if (!opts) { - opts = typeof url === 'object' ? url : {} + // If there are no pending requests, then put it in + // the freeSockets pool, but only if we're allowed to do so. + var req = socket._httpMessage; + if (req && + req.shouldKeepAlive && + socket.writable && + self.keepAlive) { + var freeSockets = self.freeSockets[name]; + var freeLen = freeSockets ? freeSockets.length : 0; + var count = freeLen; + if (self.sockets[name]) + count += self.sockets[name].length; + + if (count > self.maxSockets || freeLen >= self.maxFreeSockets) { + socket.destroy(); + } else { + freeSockets = freeSockets || []; + self.freeSockets[name] = freeSockets; + socket.setKeepAlive(true, self.keepAliveMsecs); + socket.unref(); + socket._httpMessage = null; + self.removeSocket(socket, options); + freeSockets.push(socket); + + // [patch start] + // Add a default error handler to avoid Unhandled 'error' event throw on idle socket + // https://github.com/node-modules/agentkeepalive/issues/25 + // https://github.com/nodejs/node/pull/4482 (fixed in >= 4.4.0 and >= 5.4.0) + if (socket.listeners('error').length === 0) { + socket.once('error', freeSocketErrorListener); + } + // set free keepalive timer + // try to use socket custom freeSocketKeepAliveTimeout first + const freeSocketKeepAliveTimeout = socket.freeSocketKeepAliveTimeout || self.freeSocketKeepAliveTimeout; + socket.setTimeout(freeSocketKeepAliveTimeout); + debug(`push to free socket queue and wait for ${freeSocketKeepAliveTimeout}ms`); + // [patch end] + } + } else { + socket.destroy(); } - - url = util.parseURL(url) } + }); +} - const { agent, dispatcher = getGlobalDispatcher() } = opts +util.inherits(Agent, EventEmitter); +exports.P = Agent; - if (agent) { - throw new InvalidArgumentError('unsupported opts.agent. 
Did you mean opts.client?') - } +// [patch start] +function freeSocketErrorListener(err) { + var socket = this; + debug('SOCKET ERROR on FREE socket:', err.message, err.stack); + socket.destroy(); + socket.emit('agentRemove'); +} +// [patch end] - return fn.call(dispatcher, { - ...opts, - origin: url.origin, - path: url.search ? `${url.pathname}${url.search}` : url.pathname, - method: opts.method || (opts.body ? 'PUT' : 'GET') - }, handler) +Agent.defaultMaxSockets = Infinity; + +Agent.prototype.createConnection = net.createConnection; + +// Get the key for a given set of request options +Agent.prototype.getName = function getName(options) { + var name = options.host || 'localhost'; + + name += ':'; + if (options.port) + name += options.port; + + name += ':'; + if (options.localAddress) + name += options.localAddress; + + // Pacify parallel/test-http-agent-getname by only appending + // the ':' when options.family is set. + if (options.family === 4 || options.family === 6) + name += ':' + options.family; + + return name; +}; + +// [patch start] +function handleSocketCreation(req) { + return function(err, newSocket) { + if (err) { + process.nextTick(function() { + req.emit('error', err); + }); + return; + } + req.onSocket(newSocket); } } +// [patch end] -module.exports.setGlobalDispatcher = setGlobalDispatcher -module.exports.getGlobalDispatcher = getGlobalDispatcher +Agent.prototype.addRequest = function addRequest(req, options, port/*legacy*/, + localAddress/*legacy*/) { + // Legacy API: addRequest(req, host, port, localAddress) + if (typeof options === 'string') { + options = { + host: options, + port, + localAddress + }; + } -if (util.nodeMajor > 16 || (util.nodeMajor === 16 && util.nodeMinor >= 8)) { - let fetchImpl = null - module.exports.fetch = async function fetch (resource) { - if (!fetchImpl) { - fetchImpl = (__nccwpck_require__(4881).fetch) + options = Object.assign({}, options); + options = Object.assign(options, this.options); + + if (!options.servername) + options.servername = calculateServerName(options, req); + + var name = this.getName(options); + if (!this.sockets[name]) { + this.sockets[name] = []; + } + + var freeLen = this.freeSockets[name] ? this.freeSockets[name].length : 0; + var sockLen = freeLen + this.sockets[name].length; + + if (freeLen) { + // we have a free socket, so use that. + var socket = this.freeSockets[name].shift(); + debug('have free socket'); + + // [patch start] + // remove free socket error event handler + socket.removeListener('error', freeSocketErrorListener); + // restart the default timer + socket.setTimeout(this.timeout); + + if (this.socketActiveTTL && Date.now() - socket.createdTime > this.socketActiveTTL) { + debug(`socket ${socket.createdTime} expired`); + socket.destroy(); + return this.createSocket(req, options, handleSocketCreation(req)); } + // [patch end] - try { - return await fetchImpl(...arguments) - } catch (err) { - if (typeof err === 'object') { - Error.captureStackTrace(err, this) - } + // don't leak + if (!this.freeSockets[name].length) + delete this.freeSockets[name]; - throw err + socket.ref(); + req.onSocket(socket); + this.sockets[name].push(socket); + } else if (sockLen < this.maxSockets) { + debug('call onSocket', sockLen, freeLen); + // If we are under maxSockets create a new one. + // [patch start] + this.createSocket(req, options, handleSocketCreation(req)); + // [patch end] + } else { + debug('wait for socket'); + // We are over limit so we'll add it to the queue. 
+ if (!this.requests[name]) { + this.requests[name] = []; } + this.requests[name].push(req); } - module.exports.Headers = __nccwpck_require__(554).Headers - module.exports.Response = __nccwpck_require__(7823).Response - module.exports.Request = __nccwpck_require__(8359).Request - module.exports.FormData = __nccwpck_require__(2015).FormData - module.exports.File = __nccwpck_require__(8511).File - module.exports.FileReader = __nccwpck_require__(1446).FileReader - - const { setGlobalOrigin, getGlobalOrigin } = __nccwpck_require__(1246) +}; - module.exports.setGlobalOrigin = setGlobalOrigin - module.exports.getGlobalOrigin = getGlobalOrigin +Agent.prototype.createSocket = function createSocket(req, options, cb) { + var self = this; + options = Object.assign({}, options); + options = Object.assign(options, self.options); + + if (!options.servername) + options.servername = calculateServerName(options, req); + + var name = self.getName(options); + options._agentKey = name; + + debug('createConnection', name, options); + options.encoding = null; + var called = false; + const newSocket = self.createConnection(options, oncreate); + // [patch start] + if (newSocket) { + oncreate(null, Object.assign(newSocket, { createdTime: Date.now() })); + } + // [patch end] + function oncreate(err, s) { + if (called) + return; + called = true; + if (err) + return cb(err); + if (!self.sockets[name]) { + self.sockets[name] = []; + } + self.sockets[name].push(s); + debug('sockets', name, self.sockets[name].length); - const { CacheStorage } = __nccwpck_require__(7907) - const { kConstruct } = __nccwpck_require__(9174) + function onFree() { + self.emit('free', s, options); + } + s.on('free', onFree); + + function onClose(err) { + debug('CLIENT socket onClose'); + // This is the only place where sockets get removed from the Agent. + // If you want to remove a socket from the pool, just close it. + // All socket errors end in a close event anyway. + self.removeSocket(s, options); + + // [patch start] + self.emit('close'); + // [patch end] + } + s.on('close', onClose); + + // [patch start] + // start socket timeout handler + function onTimeout() { + debug('CLIENT socket onTimeout'); + s.destroy(); + // Remove it from freeSockets immediately to prevent new requests from being sent through this socket. + self.removeSocket(s, options); + self.emit('timeout'); + } + s.on('timeout', onTimeout); + // set the default timer + s.setTimeout(self.timeout); + // [patch end] + + function onRemove() { + // We need this function for cases like HTTP 'upgrade' + // (defined by WebSockets) where we need to remove a socket from the + // pool because it'll be locked up indefinitely + debug('CLIENT socket onRemove'); + self.removeSocket(s, options); + s.removeListener('close', onClose); + s.removeListener('free', onFree); + s.removeListener('agentRemove', onRemove); + + // [patch start] + // remove socket timeout handler + s.setTimeout(0, onTimeout); + // [patch end] + } + s.on('agentRemove', onRemove); + cb(null, s); + } +}; - // Cache & CacheStorage are tightly coupled with fetch. Even if it may run - // in an older version of Node, it doesn't have any use without fetch. 
- module.exports.caches = new CacheStorage(kConstruct) +function calculateServerName(options, req) { + let servername = options.host; + const hostHeader = req.getHeader('host'); + if (hostHeader) { + // abc => abc + // abc:123 => abc + // [::1] => ::1 + // [::1]:123 => ::1 + if (hostHeader.startsWith('[')) { + const index = hostHeader.indexOf(']'); + if (index === -1) { + // Leading '[', but no ']'. Need to do something... + servername = hostHeader; + } else { + servername = hostHeader.substr(1, index - 1); + } + } else { + servername = hostHeader.split(':', 1)[0]; + } + } + return servername; } -if (util.nodeMajor >= 16) { - const { deleteCookie, getCookies, getSetCookies, setCookie } = __nccwpck_require__(1724) +Agent.prototype.removeSocket = function removeSocket(s, options) { + var name = this.getName(options); + debug('removeSocket', name, 'writable:', s.writable); + var sets = [this.sockets]; - module.exports.deleteCookie = deleteCookie - module.exports.getCookies = getCookies - module.exports.getSetCookies = getSetCookies - module.exports.setCookie = setCookie + // If the socket was destroyed, remove it from the free buffers too. + if (!s.writable) + sets.push(this.freeSockets); - const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(685) + for (var sk = 0; sk < sets.length; sk++) { + var sockets = sets[sk]; - module.exports.parseMIMEType = parseMIMEType - module.exports.serializeAMimeType = serializeAMimeType -} + if (sockets[name]) { + var index = sockets[name].indexOf(s); + if (index !== -1) { + sockets[name].splice(index, 1); + // Don't leak + if (sockets[name].length === 0) + delete sockets[name]; + } + } + } -if (util.nodeMajor >= 18 && hasCrypto) { - const { WebSocket } = __nccwpck_require__(4284) + // [patch start] + var freeLen = this.freeSockets[name] ? this.freeSockets[name].length : 0; + var sockLen = freeLen + (this.sockets[name] ? 
this.sockets[name].length : 0); + // [patch end] - module.exports.WebSocket = WebSocket -} + if (this.requests[name] && this.requests[name].length && sockLen < this.maxSockets) { + debug('removeSocket, have a request, make a socket'); + var req = this.requests[name][0]; + // If we have pending requests and a socket gets closed make a new one + this.createSocket(req, options, function(err, newSocket) { + if (err) { + process.nextTick(function() { + req.emit('error', err); + }); + return; + } + newSocket.emit('free'); + }); + } +}; -module.exports.request = makeDispatcher(api.request) -module.exports.stream = makeDispatcher(api.stream) -module.exports.pipeline = makeDispatcher(api.pipeline) -module.exports.connect = makeDispatcher(api.connect) -module.exports.upgrade = makeDispatcher(api.upgrade) +Agent.prototype.destroy = function destroy() { + var sets = [this.freeSockets, this.sockets]; + for (var s = 0; s < sets.length; s++) { + var set = sets[s]; + var keys = Object.keys(set); + for (var v = 0; v < keys.length; v++) { + var setName = set[keys[v]]; + for (var n = 0; n < setName.length; n++) { + setName[n].destroy(); + } + } + } +}; -module.exports.MockClient = MockClient -module.exports.MockPool = MockPool -module.exports.MockAgent = MockAgent -module.exports.mockErrors = mockErrors +__webpack_unused_export__ = new Agent(); /***/ }), -/***/ 7890: +/***/ 65006: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; +/** + * refer: + * * @atimb "Real keep-alive HTTP agent": https://gist.github.com/2963672 + * * https://github.com/joyent/node/blob/master/lib/http.js + * * https://github.com/joyent/node/blob/master/lib/https.js + * * https://github.com/joyent/node/blob/master/lib/_http_agent.js + */ -const { InvalidArgumentError } = __nccwpck_require__(8045) -const { kClients, kRunning, kClose, kDestroy, kDispatch, kInterceptors } = __nccwpck_require__(2785) -const DispatcherBase = __nccwpck_require__(4839) -const Pool = __nccwpck_require__(4634) -const Client = __nccwpck_require__(3598) -const util = __nccwpck_require__(3983) -const createRedirectInterceptor = __nccwpck_require__(8861) -const { WeakRef, FinalizationRegistry } = __nccwpck_require__(6436)() - -const kOnConnect = Symbol('onConnect') -const kOnDisconnect = Symbol('onDisconnect') -const kOnConnectionError = Symbol('onConnectionError') -const kMaxRedirections = Symbol('maxRedirections') -const kOnDrain = Symbol('onDrain') -const kFactory = Symbol('factory') -const kFinalizer = Symbol('finalizer') -const kOptions = Symbol('options') - -function defaultFactory (origin, opts) { - return opts && opts.connections === 1 - ? 
new Client(origin, opts) - : new Pool(origin, opts) -} -class Agent extends DispatcherBase { - constructor ({ factory = defaultFactory, maxRedirections = 0, connect, ...options } = {}) { - super() +const OriginalAgent = (__nccwpck_require__(61256)/* .Agent */ .P); +const ms = __nccwpck_require__(10845); - if (typeof factory !== 'function') { - throw new InvalidArgumentError('factory must be a function.') +class Agent extends OriginalAgent { + constructor(options) { + options = options || {}; + options.keepAlive = options.keepAlive !== false; + // default is keep-alive and 15s free socket timeout + if (options.freeSocketKeepAliveTimeout === undefined) { + options.freeSocketKeepAliveTimeout = 15000; } - - if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') { - throw new InvalidArgumentError('connect must be a function or an object') + // Legacy API: keepAliveTimeout should be rename to `freeSocketKeepAliveTimeout` + if (options.keepAliveTimeout) { + options.freeSocketKeepAliveTimeout = options.keepAliveTimeout; } + options.freeSocketKeepAliveTimeout = ms(options.freeSocketKeepAliveTimeout); - if (!Number.isInteger(maxRedirections) || maxRedirections < 0) { - throw new InvalidArgumentError('maxRedirections must be a positive number') + // Sets the socket to timeout after timeout milliseconds of inactivity on the socket. + // By default is double free socket keepalive timeout. + if (options.timeout === undefined) { + options.timeout = options.freeSocketKeepAliveTimeout * 2; + // make sure socket default inactivity timeout >= 30s + if (options.timeout < 30000) { + options.timeout = 30000; + } } + options.timeout = ms(options.timeout); - if (connect && typeof connect !== 'function') { - connect = { ...connect } - } + super(options); - this[kInterceptors] = options.interceptors && options.interceptors.Agent && Array.isArray(options.interceptors.Agent) - ? options.interceptors.Agent - : [createRedirectInterceptor({ maxRedirections })] + this.createSocketCount = 0; + this.createSocketCountLastCheck = 0; - this[kOptions] = { ...util.deepClone(options), connect } - this[kOptions].interceptors = options.interceptors - ? 
{ ...options.interceptors } - : undefined - this[kMaxRedirections] = maxRedirections - this[kFactory] = factory - this[kClients] = new Map() - this[kFinalizer] = new FinalizationRegistry(/* istanbul ignore next: gc is undeterministic */ key => { - const ref = this[kClients].get(key) - if (ref !== undefined && ref.deref() === undefined) { - this[kClients].delete(key) - } - }) + this.createSocketErrorCount = 0; + this.createSocketErrorCountLastCheck = 0; - const agent = this + this.closeSocketCount = 0; + this.closeSocketCountLastCheck = 0; - this[kOnDrain] = (origin, targets) => { - agent.emit('drain', origin, [agent, ...targets]) - } + // socket error event count + this.errorSocketCount = 0; + this.errorSocketCountLastCheck = 0; - this[kOnConnect] = (origin, targets) => { - agent.emit('connect', origin, [agent, ...targets]) - } + this.requestCount = 0; + this.requestCountLastCheck = 0; - this[kOnDisconnect] = (origin, targets, err) => { - agent.emit('disconnect', origin, [agent, ...targets], err) - } + this.timeoutSocketCount = 0; + this.timeoutSocketCountLastCheck = 0; - this[kOnConnectionError] = (origin, targets, err) => { - agent.emit('connectionError', origin, [agent, ...targets], err) - } + this.on('free', s => { + this.requestCount++; + // last enter free queue timestamp + s.lastFreeTime = Date.now(); + }); + this.on('timeout', () => { + this.timeoutSocketCount++; + }); + this.on('close', () => { + this.closeSocketCount++; + }); + this.on('error', () => { + this.errorSocketCount++; + }); } - get [kRunning] () { - let ret = 0 - for (const ref of this[kClients].values()) { - const client = ref.deref() - /* istanbul ignore next: gc is undeterministic */ - if (client) { - ret += client[kRunning] + createSocket(req, options, cb) { + super.createSocket(req, options, (err, socket) => { + if (err) { + this.createSocketErrorCount++; + return cb(err); } - } - return ret + if (this.keepAlive) { + // Disable Nagle's algorithm: http://blog.caustik.com/2012/04/08/scaling-node-js-to-100k-concurrent-connections/ + // https://fengmk2.com/benchmark/nagle-algorithm-delayed-ack-mock.html + socket.setNoDelay(true); + } + this.createSocketCount++; + cb(null, socket); + }); } - [kDispatch] (opts, handler) { - let key - if (opts.origin && (typeof opts.origin === 'string' || opts.origin instanceof URL)) { - key = String(opts.origin) - } else { - throw new InvalidArgumentError('opts.origin must be a non-empty string or URL.') + get statusChanged() { + const changed = this.createSocketCount !== this.createSocketCountLastCheck || + this.createSocketErrorCount !== this.createSocketErrorCountLastCheck || + this.closeSocketCount !== this.closeSocketCountLastCheck || + this.errorSocketCount !== this.errorSocketCountLastCheck || + this.timeoutSocketCount !== this.timeoutSocketCountLastCheck || + this.requestCount !== this.requestCountLastCheck; + if (changed) { + this.createSocketCountLastCheck = this.createSocketCount; + this.createSocketErrorCountLastCheck = this.createSocketErrorCount; + this.closeSocketCountLastCheck = this.closeSocketCount; + this.errorSocketCountLastCheck = this.errorSocketCount; + this.timeoutSocketCountLastCheck = this.timeoutSocketCount; + this.requestCountLastCheck = this.requestCount; } - - const ref = this[kClients].get(key) - - let dispatcher = ref ? 
ref.deref() : null - if (!dispatcher) { - dispatcher = this[kFactory](opts.origin, this[kOptions]) - .on('drain', this[kOnDrain]) - .on('connect', this[kOnConnect]) - .on('disconnect', this[kOnDisconnect]) - .on('connectionError', this[kOnConnectionError]) - - this[kClients].set(key, new WeakRef(dispatcher)) - this[kFinalizer].register(dispatcher, key) - } - - return dispatcher.dispatch(opts, handler) + return changed; } - async [kClose] () { - const closePromises = [] - for (const ref of this[kClients].values()) { - const client = ref.deref() - /* istanbul ignore else: gc is undeterministic */ - if (client) { - closePromises.push(client.close()) - } - } - - await Promise.all(closePromises) + getCurrentStatus() { + return { + createSocketCount: this.createSocketCount, + createSocketErrorCount: this.createSocketErrorCount, + closeSocketCount: this.closeSocketCount, + errorSocketCount: this.errorSocketCount, + timeoutSocketCount: this.timeoutSocketCount, + requestCount: this.requestCount, + freeSockets: inspect(this.freeSockets), + sockets: inspect(this.sockets), + requests: inspect(this.requests), + }; } +} - async [kDestroy] (err) { - const destroyPromises = [] - for (const ref of this[kClients].values()) { - const client = ref.deref() - /* istanbul ignore else: gc is undeterministic */ - if (client) { - destroyPromises.push(client.destroy(err)) - } - } +module.exports = Agent; - await Promise.all(destroyPromises) +function inspect(obj) { + const res = {}; + for (const key in obj) { + res[key] = obj[key].length; } + return res; } -module.exports = Agent - /***/ }), -/***/ 7032: +/***/ 55500: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -const { addAbortListener } = __nccwpck_require__(3983) -const { RequestAbortedError } = __nccwpck_require__(8045) +"use strict"; +/** + * Https Agent base on custom http agent + */ -const kListener = Symbol('kListener') -const kSignal = Symbol('kSignal') -function abort (self) { - if (self.abort) { - self.abort() - } else { - self.onError(new RequestAbortedError()) - } -} -function addSignal (self, signal) { - self[kSignal] = null - self[kListener] = null +const https = __nccwpck_require__(95687); +const HttpAgent = __nccwpck_require__(65006); +const OriginalHttpsAgent = https.Agent; - if (!signal) { - return - } +class HttpsAgent extends HttpAgent { + constructor(options) { + super(options); - if (signal.aborted) { - abort(self) - return - } + this.defaultPort = 443; + this.protocol = 'https:'; + this.maxCachedSessions = this.options.maxCachedSessions; + if (this.maxCachedSessions === undefined) { + this.maxCachedSessions = 100; + } - self[kSignal] = signal - self[kListener] = () => { - abort(self) + this._sessionCache = { + map: {}, + list: [], + }; } - - addAbortListener(self[kSignal], self[kListener]) } -function removeSignal (self) { - if (!self[kSignal]) { - return +[ + 'createConnection', + 'getName', + '_getSession', + '_cacheSession', + // https://github.com/nodejs/node/pull/4982 + '_evictSession', +].forEach(function(method) { + if (typeof OriginalHttpsAgent.prototype[method] === 'function') { + HttpsAgent.prototype[method] = OriginalHttpsAgent.prototype[method]; } +}); - if ('removeEventListener' in self[kSignal]) { - self[kSignal].removeEventListener('abort', self[kListener]) - } else { - self[kSignal].removeListener('abort', self[kListener]) - } +module.exports = HttpsAgent; - self[kSignal] = null - self[kListener] = null -} -module.exports = { - addSignal, - removeSignal -} +/***/ }), +/***/ 93775: +/***/ 
((__unused_webpack_module, exports, __nccwpck_require__) => { -/***/ }), +const assert = __nccwpck_require__(39491); +const { isArray } = __nccwpck_require__(71954); +const { checkBucketName: _checkBucketName } = __nccwpck_require__(30922); +const { formatTag } = __nccwpck_require__(97115); -/***/ 9744: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +const proto = exports; -"use strict"; +function toArray(obj) { + if (!obj) return []; + if (isArray(obj)) return obj; + return [obj]; +} +/** + * Bucket opertaions + */ -const { AsyncResource } = __nccwpck_require__(852) -const { InvalidArgumentError, RequestAbortedError, SocketError } = __nccwpck_require__(8045) -const util = __nccwpck_require__(3983) -const { addSignal, removeSignal } = __nccwpck_require__(7032) +proto.listBuckets = async function listBuckets(query = {}, options = {}) { + // prefix, marker, max-keys -class ConnectHandler extends AsyncResource { - constructor (opts, callback) { - if (!opts || typeof opts !== 'object') { - throw new InvalidArgumentError('invalid opts') + const { subres = {} } = query; + const rest = {}; + for (const key in query) { + if (key !== 'subres') { + rest[key] = query[key]; } + } + const params = this._bucketRequestParams('GET', '', Object.assign(subres, options.subres), options); - if (typeof callback !== 'function') { - throw new InvalidArgumentError('invalid callback') - } + params.query = rest; - const { signal, opaque, responseHeaders } = opts + const result = await this.request(params); - if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { - throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') + if (result.status === 200) { + const data = await this.parseXML(result.data); + let buckets = data.Buckets || null; + if (buckets) { + if (buckets.Bucket) { + buckets = buckets.Bucket; + } + if (!isArray(buckets)) { + buckets = [buckets]; + } + buckets = buckets.map(item => ({ + name: item.Name, + region: item.Location, + creationDate: item.CreationDate, + storageClass: item.StorageClass, + StorageClass: item.StorageClass, + tag: formatTag(item) + })); } + return { + buckets, + owner: { + id: data.Owner.ID, + displayName: data.Owner.DisplayName + }, + isTruncated: data.IsTruncated === 'true', + nextMarker: data.NextMarker || null, + res: result.res + }; + } - super('UNDICI_CONNECT') + throw await this.requestError(result); +}; - this.opaque = opaque || null - this.responseHeaders = responseHeaders || null - this.callback = callback - this.abort = null +proto.useBucket = function useBucket(name) { + _checkBucketName(name); + return this.setBucket(name); +}; - addSignal(this, signal) - } +proto.setBucket = function useBucket(name) { + _checkBucketName(name); + this.options.bucket = name; + return this; +}; - onConnect (abort, context) { - if (!this.callback) { - throw new RequestAbortedError() - } +proto.getBucket = function getBucket() { + return this.options.bucket; +}; - this.abort = abort - this.context = context +proto.getBucketLocation = async function getBucketLocation(name, options) { + _checkBucketName(name); + name = name || this.getBucket(); + const params = this._bucketRequestParams('GET', name, 'location', options); + params.successStatuses = [200]; + params.xmlResponse = true; + const result = await this.request(params); + return { + location: result.data, + res: result.res + }; +}; + +proto.getBucketInfo = async function getBucketInfo(name, options) { + _checkBucketName(name); + name = name || 
this.getBucket(); + const params = this._bucketRequestParams('GET', name, 'bucketInfo', options); + params.successStatuses = [200]; + params.xmlResponse = true; + const result = await this.request(params); + return { + bucket: result.data.Bucket, + res: result.res + }; +}; + +proto.deleteBucket = async function deleteBucket(name, options) { + _checkBucketName(name); + const params = this._bucketRequestParams('DELETE', name, '', options); + const result = await this.request(params); + if (result.status === 200 || result.status === 204) { + return { + res: result.res + }; } + throw await this.requestError(result); +}; - onHeaders () { - throw new SocketError('bad connect', null) +// acl + +proto.putBucketACL = async function putBucketACL(name, acl, options) { + _checkBucketName(name); + const params = this._bucketRequestParams('PUT', name, 'acl', options); + params.headers = { + 'x-oss-acl': acl + }; + params.successStatuses = [200]; + const result = await this.request(params); + return { + bucket: (result.headers.location && result.headers.location.substring(1)) || null, + res: result.res + }; +}; + +proto.getBucketACL = async function getBucketACL(name, options) { + _checkBucketName(name); + const params = this._bucketRequestParams('GET', name, 'acl', options); + params.successStatuses = [200]; + params.xmlResponse = true; + const result = await this.request(params); + return { + acl: result.data.AccessControlList.Grant, + owner: { + id: result.data.Owner.ID, + displayName: result.data.Owner.DisplayName + }, + res: result.res + }; +}; + +// logging + +proto.putBucketLogging = async function putBucketLogging(name, prefix, options) { + _checkBucketName(name); + const params = this._bucketRequestParams('PUT', name, 'logging', options); + let xml = `${'\n\n\n'}${name}\n`; + if (prefix) { + xml += `${prefix}\n`; } + xml += '\n'; + params.content = xml; + params.mime = 'xml'; + params.successStatuses = [200]; + const result = await this.request(params); + return { + res: result.res + }; +}; - onUpgrade (statusCode, rawHeaders, socket) { - const { callback, opaque, context } = this +proto.getBucketLogging = async function getBucketLogging(name, options) { + _checkBucketName(name); + const params = this._bucketRequestParams('GET', name, 'logging', options); + params.successStatuses = [200]; + params.xmlResponse = true; + const result = await this.request(params); + const enable = result.data.LoggingEnabled; + return { + enable: !!enable, + prefix: (enable && enable.TargetPrefix) || null, + res: result.res + }; +}; - removeSignal(this) +proto.deleteBucketLogging = async function deleteBucketLogging(name, options) { + _checkBucketName(name); + const params = this._bucketRequestParams('DELETE', name, 'logging', options); + params.successStatuses = [204, 200]; + const result = await this.request(params); + return { + res: result.res + }; +}; - this.callback = null +proto.putBucketCORS = async function putBucketCORS(name, rules, options) { + _checkBucketName(name); + rules = rules || []; + assert(rules.length, 'rules is required'); + rules.forEach(rule => { + assert(rule.allowedOrigin, 'allowedOrigin is required'); + assert(rule.allowedMethod, 'allowedMethod is required'); + }); - let headers = rawHeaders - // Indicates is an HTTP2Session - if (headers != null) { - headers = this.responseHeaders === 'raw' ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) - } + const params = this._bucketRequestParams('PUT', name, 'cors', options); + let xml = '\n'; + const parseOrigin = val => { + xml += `${val}`; + }; + const parseMethod = val => { + xml += `${val}`; + }; + const parseHeader = val => { + xml += `${val}`; + }; + const parseExposeHeader = val => { + xml += `${val}`; + }; + for (let i = 0, l = rules.length; i < l; i++) { + const rule = rules[i]; + xml += ''; + + toArray(rule.allowedOrigin).forEach(parseOrigin); + toArray(rule.allowedMethod).forEach(parseMethod); + toArray(rule.allowedHeader).forEach(parseHeader); + toArray(rule.exposeHeader).forEach(parseExposeHeader); + if (rule.maxAgeSeconds) { + xml += `${rule.maxAgeSeconds}`; + } + xml += ''; + } + xml += ''; + params.content = xml; + params.mime = 'xml'; + params.successStatuses = [200]; + const result = await this.request(params); + return { + res: result.res + }; +}; - this.runInAsyncScope(callback, null, null, { - statusCode, - headers, - socket, - opaque, - context - }) +proto.getBucketCORS = async function getBucketCORS(name, options) { + _checkBucketName(name); + const params = this._bucketRequestParams('GET', name, 'cors', options); + params.successStatuses = [200]; + params.xmlResponse = true; + const result = await this.request(params); + const rules = []; + if (result.data && result.data.CORSRule) { + let { CORSRule } = result.data; + if (!isArray(CORSRule)) CORSRule = [CORSRule]; + CORSRule.forEach(rule => { + const r = {}; + Object.keys(rule).forEach(key => { + r[key.slice(0, 1).toLowerCase() + key.slice(1, key.length)] = rule[key]; + }); + rules.push(r); + }); } + return { + rules, + res: result.res + }; +}; - onError (err) { - const { callback, opaque } = this +proto.deleteBucketCORS = async function deleteBucketCORS(name, options) { + _checkBucketName(name); + const params = this._bucketRequestParams('DELETE', name, 'cors', options); + params.successStatuses = [204]; + const result = await this.request(params); + return { + res: result.res + }; +}; - removeSignal(this) +// referer - if (callback) { - this.callback = null - queueMicrotask(() => { - this.runInAsyncScope(callback, null, err, { opaque }) - }) +proto.putBucketReferer = async function putBucketReferer(name, allowEmpty, referers, options) { + _checkBucketName(name); + const params = this._bucketRequestParams('PUT', name, 'referer', options); + let xml = '\n\n'; + xml += ` ${allowEmpty ? 'true' : 'false'}\n`; + if (referers && referers.length > 0) { + xml += ' \n'; + for (let i = 0; i < referers.length; i++) { + xml += ` ${referers[i]}\n`; } + xml += ' \n'; + } else { + xml += ' \n'; } -} + xml += ''; + params.content = xml; + params.mime = 'xml'; + params.successStatuses = [200]; + const result = await this.request(params); + return { + res: result.res + }; +}; -function connect (opts, callback) { - if (callback === undefined) { - return new Promise((resolve, reject) => { - connect.call(this, opts, (err, data) => { - return err ? 
reject(err) : resolve(data) - }) - }) +proto.getBucketReferer = async function getBucketReferer(name, options) { + _checkBucketName(name); + const params = this._bucketRequestParams('GET', name, 'referer', options); + params.successStatuses = [200]; + params.xmlResponse = true; + const result = await this.request(params); + let referers = result.data.RefererList.Referer || null; + if (referers) { + if (!isArray(referers)) { + referers = [referers]; + } } + return { + allowEmpty: result.data.AllowEmptyReferer === 'true', + referers, + res: result.res + }; +}; - try { - const connectHandler = new ConnectHandler(opts, callback) - this.dispatch({ ...opts, method: 'CONNECT' }, connectHandler) - } catch (err) { - if (typeof callback !== 'function') { - throw err +proto.deleteBucketReferer = async function deleteBucketReferer(name, options) { + _checkBucketName(name); + return await this.putBucketReferer(name, true, null, options); +}; + +// private apis + +proto._bucketRequestParams = function _bucketRequestParams(method, bucket, subres, options) { + return { + method, + bucket, + subres, + additionalHeaders: options && options.additionalHeaders, + timeout: options && options.timeout, + ctx: options && options.ctx + }; +}; + + +/***/ }), + +/***/ 92399: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const debug = __nccwpck_require__(38237)('ali-oss'); +const sendToWormhole = __nccwpck_require__(98751); +const xml = __nccwpck_require__(66189); +const AgentKeepalive = __nccwpck_require__(34623); +const HttpsAgentKeepalive = (__nccwpck_require__(34623).HttpsAgent); +const merge = __nccwpck_require__(11149); +const platform = __nccwpck_require__(48208); +const utility = __nccwpck_require__(33877); +const urllib = __nccwpck_require__(64783); +const pkg = __nccwpck_require__(82818); +const bowser = __nccwpck_require__(33114); +const signUtils = __nccwpck_require__(5150); +const _initOptions = __nccwpck_require__(99221); +const { createRequest } = __nccwpck_require__(71828); +const { encoder } = __nccwpck_require__(63240); +const { getReqUrl } = __nccwpck_require__(86783); +const { setSTSToken } = __nccwpck_require__(84622); +const { retry } = __nccwpck_require__(41514); +const { isFunction } = __nccwpck_require__(18818); +const { getStandardRegion } = __nccwpck_require__(98423); + +const globalHttpAgent = new AgentKeepalive(); +const globalHttpsAgent = new HttpsAgentKeepalive(); + +function Client(options, ctx) { + if (!(this instanceof Client)) { + return new Client(options, ctx); + } + + if (options && options.inited) { + this.options = options; + } else { + this.options = Client.initOptions(options); + } + + // support custom agent and urllib client + if (this.options.urllib) { + this.urllib = this.options.urllib; + } else { + this.urllib = urllib; + if (this.options.maxSockets) { + globalHttpAgent.maxSockets = this.options.maxSockets; + globalHttpsAgent.maxSockets = this.options.maxSockets; } - const opaque = opts && opts.opaque - queueMicrotask(() => callback(err, { opaque })) + this.agent = this.options.agent || globalHttpAgent; + this.httpsAgent = this.options.httpsAgent || globalHttpsAgent; } + this.ctx = ctx; + this.userAgent = this._getUserAgent(); + this.stsTokenFreshTime = new Date(); } -module.exports = connect +/** + * Expose `Client` + */ +module.exports = Client; -/***/ }), +Client.initOptions = function initOptions(options) { + return _initOptions(options); +}; -/***/ 8752: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/** + * prototype 
+ */ -"use strict"; +const proto = Client.prototype; +/** + * Object operations + */ +merge(proto, __nccwpck_require__(48417)); +merge(proto, __nccwpck_require__(32557)); +merge(proto, __nccwpck_require__(2341)); +/** + * Bucket operations + */ +merge(proto, __nccwpck_require__(82715)); +merge(proto, __nccwpck_require__(93775)); +// multipart upload +merge(proto, __nccwpck_require__(40107)); +/** + * RTMP operations + */ +merge(proto, __nccwpck_require__(59164)); -const { - Readable, - Duplex, - PassThrough -} = __nccwpck_require__(2781) -const { - InvalidArgumentError, - InvalidReturnValueError, - RequestAbortedError -} = __nccwpck_require__(8045) -const util = __nccwpck_require__(3983) -const { AsyncResource } = __nccwpck_require__(852) -const { addSignal, removeSignal } = __nccwpck_require__(7032) -const assert = __nccwpck_require__(9491) +/** + * common multipart-copy support node and browser + */ +merge(proto, __nccwpck_require__(5944)); +/** + * Common module parallel + */ +merge(proto, __nccwpck_require__(80986)); +/** + * Multipart operations + */ +merge(proto, __nccwpck_require__(65976)); +/** + * ImageClient class + */ +Client.ImageClient = __nccwpck_require__(30628)(Client); +/** + * Cluster Client class + */ +Client.ClusterClient = __nccwpck_require__(25262)(Client); -const kResume = Symbol('resume') +/** + * STS Client class + */ +Client.STS = __nccwpck_require__(34418); -class PipelineRequest extends Readable { - constructor () { - super({ autoDestroy: true }) +/** + * get OSS signature + * @param {String} stringToSign + * @return {String} the signature + */ +proto.signature = function signature(stringToSign) { + debug('authorization stringToSign: %s', stringToSign); - this[kResume] = null - } + return signUtils.computeSignature(this.options.accessKeySecret, stringToSign, this.options.headerEncoding); +}; - _read () { - const { [kResume]: resume } = this +proto._getReqUrl = getReqUrl; - if (resume) { - this[kResume] = null - resume() - } - } +/** + * get author header + * + * "Authorization: OSS " + Access Key Id + ":" + Signature + * + * Signature = base64(hmac-sha1(Access Key Secret + "\n" + * + VERB + "\n" + * + CONTENT-MD5 + "\n" + * + CONTENT-TYPE + "\n" + * + DATE + "\n" + * + CanonicalizedOSSHeaders + * + CanonicalizedResource)) + * + * @param {String} method + * @param {String} resource + * @param {Object} header + * @return {String} + * + * @api private + */ - _destroy (err, callback) { - this._read() +proto.authorization = function authorization(method, resource, subres, headers) { + const stringToSign = signUtils.buildCanonicalString(method.toUpperCase(), resource, { + headers, + parameters: subres + }); - callback(err) - } -} + return signUtils.authorization( + this.options.accessKeyId, + this.options.accessKeySecret, + stringToSign, + this.options.headerEncoding + ); +}; -class PipelineResponse extends Readable { - constructor (resume) { - super({ autoDestroy: true }) - this[kResume] = resume +/** + * get authorization header v4 + * + * @param {string} method + * @param {Object} requestParams + * @param {Object} requestParams.headers + * @param {Object} [requestParams.queries] + * @param {string} [bucketName] + * @param {string} [objectName] + * @param {string[]} [additionalHeaders] + * @return {string} + * + * @api private + */ +proto.authorizationV4 = function authorizationV4(method, requestParams, bucketName, objectName, additionalHeaders) { + return signUtils.authorizationV4( + this.options.accessKeyId, + this.options.accessKeySecret, + 
getStandardRegion(this.options.region), + method, + requestParams, + bucketName, + objectName, + additionalHeaders, + this.options.headerEncoding + ); +}; + +/** + * request oss server + * @param {Object} params + * - {String} object + * - {String} bucket + * - {Object} [headers] + * - {Object} [query] + * - {Buffer} [content] + * - {Stream} [stream] + * - {Stream} [writeStream] + * - {String} [mime] + * - {Boolean} [xmlResponse] + * - {Boolean} [customResponse] + * - {Number} [timeout] + * - {Object} [ctx] request context, default is `this.ctx` + * + * @api private + */ + +proto.request = async function (params) { + if (this.options.retryMax) { + return await retry(request.bind(this), this.options.retryMax, { + errorHandler: err => { + const _errHandle = _err => { + if (params.stream) return false; + const statusErr = [-1, -2].includes(_err.status); + const requestErrorRetryHandle = this.options.requestErrorRetryHandle || (() => true); + return statusErr && requestErrorRetryHandle(_err); + }; + if (_errHandle(err)) return true; + return false; + } + })(params); + } else { + return await request.call(this, params); } +}; - _read () { - this[kResume]() +async function request(params) { + if (this.options.stsToken && isFunction(this.options.refreshSTSToken)) { + await setSTSToken.call(this); + } + const reqParams = createRequest.call(this, params); + let result; + let reqErr; + try { + result = await this.urllib.request(reqParams.url, reqParams.params); + debug('response %s %s, got %s, headers: %j', params.method, reqParams.url, result.status, result.headers); + } catch (err) { + reqErr = err; + } + let err; + if (result && params.successStatuses && params.successStatuses.indexOf(result.status) === -1) { + err = await this.requestError(result); + err.params = params; + } else if (reqErr) { + err = await this.requestError(reqErr); } - _destroy (err, callback) { - if (!err && !this._readableState.endEmitted) { - err = new RequestAbortedError() + if (err) { + if (params.customResponse && result && result.res) { + // consume the response stream + await sendToWormhole(result.res); } - callback(err) + if (err.name === 'ResponseTimeoutError') { + err.message = `${ + err.message.split(',')[0] + }, please increase the timeout, see more details at https://github.com/ali-sdk/ali-oss#responsetimeouterror`; + } + if (err.name === 'ConnectionTimeoutError') { + err.message = `${ + err.message.split(',')[0] + }, please increase the timeout or reduce the partSize, see more details at https://github.com/ali-sdk/ali-oss#connectiontimeouterror`; + } + throw err; + } + + if (params.xmlResponse) { + result.data = await this.parseXML(result.data); } + return result; } -class PipelineHandler extends AsyncResource { - constructor (opts, handler) { - if (!opts || typeof opts !== 'object') { - throw new InvalidArgumentError('invalid opts') - } +proto._getResource = function _getResource(params) { + let resource = '/'; + if (params.bucket) resource += `${params.bucket}/`; + if (params.object) resource += encoder(params.object, this.options.headerEncoding); - if (typeof handler !== 'function') { - throw new InvalidArgumentError('invalid handler') + return resource; +}; + +proto._escape = function _escape(name) { + return utility.encodeURIComponent(name).replace(/%2F/g, '/'); +}; + +/* + * Get User-Agent for browser & node.js + * @example + * aliyun-sdk-nodejs/4.1.2 Node.js 5.3.0 on Darwin 64-bit + * aliyun-sdk-js/4.1.2 Safari 9.0 on Apple iPhone(iOS 9.2.1) + * aliyun-sdk-js/4.1.2 Chrome 43.0.2357.134 32-bit on Windows 
Server 2008 R2 / 7 64-bit + */ + +proto._getUserAgent = function _getUserAgent() { + const agent = process && process.browser ? 'js' : 'nodejs'; + const sdk = `aliyun-sdk-${agent}/${pkg.version}`; + let plat = platform.description; + if (!plat && process) { + plat = `Node.js ${process.version.slice(1)} on ${process.platform} ${process.arch}`; + } + + return this._checkUserAgent(`${sdk} ${plat}`); +}; + +proto._checkUserAgent = function _checkUserAgent(ua) { + const userAgent = ua.replace(/\u03b1/, 'alpha').replace(/\u03b2/, 'beta'); + return userAgent; +}; + +/* + * Check Browser And Version + * @param {String} [name] browser name: like IE, Chrome, Firefox + * @param {String} [version] browser major version: like 10(IE 10.x), 55(Chrome 55.x), 50(Firefox 50.x) + * @return {Bool} true or false + * @api private + */ + +proto.checkBrowserAndVersion = function checkBrowserAndVersion(name, version) { + return bowser.name === name && bowser.version.split('.')[0] === version; +}; + +/** + * thunkify xml.parseString + * @param {String|Buffer} str + * + * @api private + */ + +proto.parseXML = function parseXMLThunk(str) { + return new Promise((resolve, reject) => { + if (Buffer.isBuffer(str)) { + str = str.toString(); } + xml.parseString( + str, + { + explicitRoot: false, + explicitArray: false + }, + (err, result) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); +}; - const { signal, method, opaque, onInfo, responseHeaders } = opts +/** + * generater a request error with request response + * @param {Object} result + * + * @api private + */ - if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { - throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') +proto.requestError = async function requestError(result) { + let err = null; + if (result.name === 'ResponseTimeoutError') { + err = new Error(result.message); + err.name = result.name; + } else if (!result.data || !result.data.length) { + if (result.status === -1 || result.status === -2) { + // -1 is net error , -2 is timeout + err = new Error(result.message); + err.name = result.name; + err.status = result.status; + err.code = result.name; + } else { + // HEAD not exists resource + if (result.status === 404) { + err = new Error('Object not exists'); + err.name = 'NoSuchKeyError'; + err.status = 404; + err.code = 'NoSuchKey'; + } else if (result.status === 412) { + err = new Error('Pre condition failed'); + err.name = 'PreconditionFailedError'; + err.status = 412; + err.code = 'PreconditionFailed'; + } else { + err = new Error(`Unknow error, status: ${result.status}`); + err.name = 'UnknownError'; + err.status = result.status; + } + err.requestId = result.headers['x-oss-request-id']; + err.host = ''; } + } else { + const message = String(result.data); + debug('request response error data: %s', message); - if (method === 'CONNECT') { - throw new InvalidArgumentError('invalid method') + let info; + try { + info = (await this.parseXML(message)) || {}; + } catch (error) { + debug(message); + error.message += `\nraw xml: ${message}`; + error.status = result.status; + error.requestId = result.headers['x-oss-request-id']; + return error; } - if (onInfo && typeof onInfo !== 'function') { - throw new InvalidArgumentError('invalid onInfo callback') + let msg = info.Message || `unknow request error, status: ${result.status}`; + if (info.Condition) { + msg += ` (condition: ${info.Condition})`; } + err = new Error(msg); + err.name = info.Code ? 
`${info.Code}Error` : 'UnknownError'; + err.status = result.status; + err.code = info.Code; + err.requestId = info.RequestId; + err.hostId = info.HostId; + } - super('UNDICI_PIPELINE') + debug('generate error %j', err); + return err; +}; - this.opaque = opaque || null - this.responseHeaders = responseHeaders || null - this.handler = handler - this.abort = null - this.context = null - this.onInfo = onInfo || null +proto.setSLDEnabled = function setSLDEnabled(enable) { + this.options.sldEnable = !!enable; + return this; +}; - this.req = new PipelineRequest().on('error', util.nop) - this.ret = new Duplex({ - readableObjectMode: opts.objectMode, - autoDestroy: true, - read: () => { - const { body } = this +/***/ }), - if (body && body.resume) { - body.resume() - } - }, - write: (chunk, encoding, callback) => { - const { req } = this +/***/ 25262: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (req.push(chunk, encoding) || req._readableState.destroyed) { - callback() - } else { - req[kResume] = callback - } - }, - destroy: (err, callback) => { - const { body, req, res, ret, abort } = this +const Base = __nccwpck_require__(19100); +const util = __nccwpck_require__(73837); +const ready = __nccwpck_require__(95284); +const copy = __nccwpck_require__(30952); +const currentIP = (__nccwpck_require__(90777).ip)(); - if (!err && !ret._readableState.endEmitted) { - err = new RequestAbortedError() - } +const RR = 'roundRobin'; +const MS = 'masterSlave'; - if (abort && err) { - abort() - } +module.exports = function (OssClient) { + function Client(options) { + if (!(this instanceof Client)) { + return new Client(options); + } - util.destroy(body, err) - util.destroy(req, err) - util.destroy(res, err) + if (!options || !Array.isArray(options.cluster)) { + throw new Error('require options.cluster to be an array'); + } - removeSignal(this) + Base.call(this); - callback(err) - } - }).on('prefinish', () => { - const { req } = this + this.clients = []; + this.availables = {}; - // Node < 15 does not call _final in same tick. 
- req.push(null) - }) + for (let i = 0; i < options.cluster.length; i++) { + const opt = options.cluster[i]; + copy(options).pick('timeout', 'agent', 'urllib').to(opt); + this.clients.push(new OssClient(opt)); + this.availables[i] = true; + } - this.res = null + this.schedule = options.schedule || RR; + // only read from master, default is false + this.masterOnly = !!options.masterOnly; + this.index = 0; - addSignal(this, signal) + const heartbeatInterval = options.heartbeatInterval || 10000; + this._checkAvailableLock = false; + this._timerId = this._deferInterval(this._checkAvailable.bind(this, true), heartbeatInterval); + this._ignoreStatusFile = options.ignoreStatusFile || false; + this._init(); } - onConnect (abort, context) { - const { ret, res } = this + util.inherits(Client, Base); + const proto = Client.prototype; + ready.mixin(proto); - assert(!res, 'pipeline cannot be retried') + const GET_METHODS = ['head', 'get', 'getStream', 'list', 'getACL']; - if (ret.destroyed) { - throw new RequestAbortedError() - } + const PUT_METHODS = ['put', 'putStream', 'delete', 'deleteMulti', 'copy', 'putMeta', 'putACL']; - this.abort = abort - this.context = context - } + GET_METHODS.forEach(method => { + proto[method] = async function (...args) { + const client = this.chooseAvailable(); + let lastError; + try { + return await client[method](...args); + } catch (err) { + if (err.status && err.status >= 200 && err.status < 500) { + // 200 ~ 499 belong to normal response, don't try again + throw err; + } + // < 200 || >= 500 need to retry from other cluser node + lastError = err; + } - onHeaders (statusCode, rawHeaders, resume) { - const { opaque, handler, context } = this + for (let i = 0; i < this.clients.length; i++) { + const c = this.clients[i]; + if (c !== client) { + try { + return await c[method].apply(client, args); + } catch (err) { + if (err.status && err.status >= 200 && err.status < 500) { + // 200 ~ 499 belong to normal response, don't try again + throw err; + } + // < 200 || >= 500 need to retry from other cluser node + lastError = err; + } + } + } - if (statusCode < 200) { - if (this.onInfo) { - const headers = this.responseHeaders === 'raw' ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) - this.onInfo({ statusCode, headers }) + lastError.message += ' (all clients are down)'; + throw lastError; + }; + }); + + // must cluster node write success + PUT_METHODS.forEach(method => { + proto[method] = async function (...args) { + const res = await Promise.all(this.clients.map(client => client[method](...args))); + return res[0]; + }; + }); + + proto.signatureUrl = function signatureUrl(/* name */ ...args) { + const client = this.chooseAvailable(); + return client.signatureUrl(...args); + }; + + proto.getObjectUrl = function getObjectUrl(/* name, baseUrl */ ...args) { + const client = this.chooseAvailable(); + return client.getObjectUrl(...args); + }; + + proto._init = function _init() { + const that = this; + (async () => { + await that._checkAvailable(that._ignoreStatusFile); + that.ready(true); + })().catch(err => { + that.emit('error', err); + }); + }; + + proto._checkAvailable = async function _checkAvailable(ignoreStatusFile) { + const name = `._ali-oss/check.status.${currentIP}.txt`; + if (!ignoreStatusFile) { + // only start will try to write the file + await this.put(name, Buffer.from(`check available started at ${Date()}`)); + } + + if (this._checkAvailableLock) { + return; + } + this._checkAvailableLock = true; + const downStatusFiles = []; + for (let i = 0; i < this.clients.length; i++) { + const client = this.clients[i]; + // check 3 times + let available = await this._checkStatus(client, name); + if (!available) { + // check again + available = await this._checkStatus(client, name); } - return + if (!available) { + // check again + /* eslint no-await-in-loop: [0] */ + available = await this._checkStatus(client, name); + if (!available) { + downStatusFiles.push(client._objectUrl(name)); + } + } + this.availables[i] = available; } + this._checkAvailableLock = false; - this.res = new PipelineResponse(resume) + if (downStatusFiles.length > 0) { + const err = new Error( + `${downStatusFiles.length} data node down, please check status file: ${downStatusFiles.join(', ')}` + ); + err.name = 'CheckAvailableError'; + this.emit('error', err); + } + }; - let body + proto._checkStatus = async function _checkStatus(client, name) { + let available = true; try { - this.handler = null - const headers = this.responseHeaders === 'raw' ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) - body = this.runInAsyncScope(handler, null, { - statusCode, - headers, - opaque, - body: this.res, - context - }) + await client.head(name); } catch (err) { - this.res.on('error', util.nop) - throw err + // 404 will be available too + if (!err.status || err.status >= 500 || err.status < 200) { + available = false; + } } + return available; + }; - if (!body || typeof body.on !== 'function') { - throw new InvalidReturnValueError('expected Readable') + proto.chooseAvailable = function chooseAvailable() { + if (this.schedule === MS) { + // only read from master + if (this.masterOnly) { + return this.clients[0]; + } + for (let i = 0; i < this.clients.length; i++) { + if (this.availables[i]) { + return this.clients[i]; + } + } + // all down, try to use this first one + return this.clients[0]; } - body - .on('data', (chunk) => { - const { ret, body } = this + // RR + let n = this.clients.length; + while (n > 0) { + const i = this._nextRRIndex(); + if (this.availables[i]) { + return this.clients[i]; + } + n--; + } + // all down, try to use this first one + return this.clients[0]; + }; + + proto._nextRRIndex = function _nextRRIndex() { + const index = this.index++; + if (this.index >= this.clients.length) { + this.index = 0; + } + return index; + }; + + proto._error = function error(err) { + if (err) throw err; + }; + + proto._createCallback = function _createCallback(ctx, gen, cb) { + return () => { + cb = cb || this._error; + gen.call(ctx).then(() => { + cb(); + }, cb); + }; + }; + proto._deferInterval = function _deferInterval(gen, timeout, cb) { + return setInterval(this._createCallback(this, gen, cb), timeout); + }; - if (!ret.push(chunk) && body.pause) { - body.pause() - } - }) - .on('error', (err) => { - const { ret } = this + proto.close = function close() { + clearInterval(this._timerId); + this._timerId = null; + }; - util.destroy(ret, err) - }) - .on('end', () => { - const { ret } = this + return Client; +}; - ret.push(null) - }) - .on('close', () => { - const { ret } = this - - if (!ret._readableState.ended) { - util.destroy(ret, new RequestAbortedError()) - } - }) - this.body = body - } +/***/ }), - onData (chunk) { - const { res } = this - return res.push(chunk) - } +/***/ 18072: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - onComplete (trailers) { - const { res } = this - res.push(null) - } +"use strict"; - onError (err) { - const { ret } = this - this.handler = null - util.destroy(ret, err) - } +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.abortBucketWorm = void 0; +const checkBucketName_1 = __nccwpck_require__(30922); +async function abortBucketWorm(name, options) { + checkBucketName_1.checkBucketName(name); + const params = this._bucketRequestParams('DELETE', name, 'worm', options); + const result = await this.request(params); + return { + res: result.res, + status: result.status + }; } +exports.abortBucketWorm = abortBucketWorm; -function pipeline (opts, handler) { - try { - const pipelineHandler = new PipelineHandler(opts, handler) - this.dispatch({ ...opts, body: pipelineHandler.req }, pipelineHandler) - return pipelineHandler.ret - } catch (err) { - return new PassThrough().destroy(err) - } + +/***/ }), + +/***/ 46768: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.completeBucketWorm = void 0; +const checkBucketName_1 = 
__nccwpck_require__(30922); +async function completeBucketWorm(name, wormId, options) { + checkBucketName_1.checkBucketName(name); + const params = this._bucketRequestParams('POST', name, { wormId }, options); + const result = await this.request(params); + return { + res: result.res, + status: result.status + }; } +exports.completeBucketWorm = completeBucketWorm; -module.exports = pipeline + +/***/ }), + +/***/ 79017: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +const { checkBucketName: _checkBucketName } = __nccwpck_require__(30922); + +const proto = exports; +// const jstoxml = require('jstoxml'); +/** + * deleteBucketEncryption + * @param {String} bucketName - bucket name + */ + +proto.deleteBucketEncryption = async function deleteBucketEncryption(bucketName) { + _checkBucketName(bucketName); + const params = this._bucketRequestParams('DELETE', bucketName, 'encryption'); + params.successStatuses = [204]; + params.xmlResponse = true; + const result = await this.request(params); + return { + status: result.status, + res: result.res + }; +}; /***/ }), -/***/ 5448: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 96956: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.deleteBucketInventory = void 0; +const checkBucketName_1 = __nccwpck_require__(30922); +/** + * deleteBucketInventory + * @param {String} bucketName - bucket name + * @param {String} inventoryId + * @param {Object} options + */ +async function deleteBucketInventory(bucketName, inventoryId, options = {}) { + const subres = Object.assign({ inventory: '', inventoryId }, options.subres); + checkBucketName_1.checkBucketName(bucketName); + const params = this._bucketRequestParams('DELETE', bucketName, subres, options); + params.successStatuses = [204]; + const result = await this.request(params); + return { + status: result.status, + res: result.res + }; +} +exports.deleteBucketInventory = deleteBucketInventory; -const Readable = __nccwpck_require__(3858) -const { - InvalidArgumentError, - RequestAbortedError -} = __nccwpck_require__(8045) -const util = __nccwpck_require__(3983) -const { getResolveErrorBodyCallback } = __nccwpck_require__(7474) -const { AsyncResource } = __nccwpck_require__(852) -const { addSignal, removeSignal } = __nccwpck_require__(7032) -class RequestHandler extends AsyncResource { - constructor (opts, callback) { - if (!opts || typeof opts !== 'object') { - throw new InvalidArgumentError('invalid opts') - } +/***/ }), - const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError, highWaterMark } = opts +/***/ 11888: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - try { - if (typeof callback !== 'function') { - throw new InvalidArgumentError('invalid callback') - } +const { checkBucketName: _checkBucketName } = __nccwpck_require__(30922); - if (highWaterMark && (typeof highWaterMark !== 'number' || highWaterMark < 0)) { - throw new InvalidArgumentError('invalid highWaterMark') - } +const proto = exports; - if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { - throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') - } +proto.deleteBucketLifecycle = async function deleteBucketLifecycle(name, options) { + _checkBucketName(name); + const params = this._bucketRequestParams('DELETE', name, 'lifecycle', options); + params.successStatuses = [204]; + 
const result = await this.request(params); + return { + res: result.res + }; +}; - if (method === 'CONNECT') { - throw new InvalidArgumentError('invalid method') - } - if (onInfo && typeof onInfo !== 'function') { - throw new InvalidArgumentError('invalid onInfo callback') - } +/***/ }), - super('UNDICI_REQUEST') - } catch (err) { - if (util.isStream(body)) { - util.destroy(body.on('error', util.nop), err) - } - throw err - } +/***/ 8585: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - this.responseHeaders = responseHeaders || null - this.opaque = opaque || null - this.callback = callback - this.res = null - this.abort = null - this.body = body - this.trailers = {} - this.context = null - this.onInfo = onInfo || null - this.throwOnError = throwOnError - this.highWaterMark = highWaterMark +const { checkBucketName: _checkBucketName } = __nccwpck_require__(30922); - if (util.isStream(body)) { - body.on('error', (err) => { - this.onError(err) - }) - } +const proto = exports; +/** + * deleteBucketPolicy + * @param {String} bucketName - bucket name + * @param {Object} options + */ - addSignal(this, signal) - } +proto.deleteBucketPolicy = async function deleteBucketPolicy(bucketName, options = {}) { + _checkBucketName(bucketName); - onConnect (abort, context) { - if (!this.callback) { - throw new RequestAbortedError() - } + const params = this._bucketRequestParams('DELETE', bucketName, 'policy', options); + params.successStatuses = [204]; + const result = await this.request(params); - this.abort = abort - this.context = context - } + return { + status: result.status, + res: result.res + }; +}; - onHeaders (statusCode, rawHeaders, resume, statusMessage) { - const { callback, opaque, abort, context, responseHeaders, highWaterMark } = this - const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) +/***/ }), - if (statusCode < 200) { - if (this.onInfo) { - this.onInfo({ statusCode, headers }) - } - return - } +/***/ 46130: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - const parsedHeaders = responseHeaders === 'raw' ? 
util.parseHeaders(rawHeaders) : headers - const contentType = parsedHeaders['content-type'] - const body = new Readable({ resume, abort, contentType, highWaterMark }) +const { checkBucketName: _checkBucketName } = __nccwpck_require__(30922); - this.callback = null - this.res = body - if (callback !== null) { - if (this.throwOnError && statusCode >= 400) { - this.runInAsyncScope(getResolveErrorBodyCallback, null, - { callback, body, contentType, statusCode, statusMessage, headers } - ) - } else { - this.runInAsyncScope(callback, null, null, { - statusCode, - headers, - trailers: this.trailers, - opaque, - body, - context - }) - } - } - } +const proto = exports; +/** + * deleteBucketTags + * @param {String} name - bucket name + * @param {Object} options + */ - onData (chunk) { - const { res } = this - return res.push(chunk) - } +proto.deleteBucketTags = async function deleteBucketTags(name, options = {}) { + _checkBucketName(name); - onComplete (trailers) { - const { res } = this + const params = this._bucketRequestParams('DELETE', name, 'tagging', options); + params.successStatuses = [204]; + const result = await this.request(params); - removeSignal(this) + return { + status: result.status, + res: result.res + }; +}; - util.parseHeaders(trailers, this.trailers) - res.push(null) - } +/***/ }), - onError (err) { - const { res, callback, body, opaque } = this +/***/ 33069: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - removeSignal(this) +const { checkBucketName: _checkBucketName } = __nccwpck_require__(30922); - if (callback) { - // TODO: Does this need queueMicrotask? - this.callback = null - queueMicrotask(() => { - this.runInAsyncScope(callback, null, err, { opaque }) - }) - } +const proto = exports; - if (res) { - this.res = null - // Ensure all queued handlers are invoked before destroying res. - queueMicrotask(() => { - util.destroy(res, err) - }) - } +proto.deleteBucketWebsite = async function deleteBucketWebsite(name, options) { + _checkBucketName(name); + const params = this._bucketRequestParams('DELETE', name, 'website', options); + params.successStatuses = [204]; + const result = await this.request(params); + return { + res: result.res + }; +}; - if (body) { - this.body = null - util.destroy(body, err) - } - } -} -function request (opts, callback) { - if (callback === undefined) { - return new Promise((resolve, reject) => { - request.call(this, opts, (err, data) => { - return err ? 
reject(err) : resolve(data) - }) - }) - } +/***/ }), - try { - this.dispatch(opts, new RequestHandler(opts, callback)) - } catch (err) { - if (typeof callback !== 'function') { - throw err - } - const opaque = opts && opts.opaque - queueMicrotask(() => callback(err, { opaque })) - } +/***/ 54262: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.extendBucketWorm = void 0; +const checkBucketName_1 = __nccwpck_require__(30922); +const obj2xml_1 = __nccwpck_require__(96846); +async function extendBucketWorm(name, wormId, days, options) { + checkBucketName_1.checkBucketName(name); + const params = this._bucketRequestParams('POST', name, { wormExtend: '', wormId }, options); + const paramlXMLObJ = { + ExtendWormConfiguration: { + RetentionPeriodInDays: days + } + }; + params.mime = 'xml'; + params.content = obj2xml_1.obj2xml(paramlXMLObJ, { headers: true }); + params.successStatuses = [200]; + const result = await this.request(params); + return { + res: result.res, + status: result.status + }; } +exports.extendBucketWorm = extendBucketWorm; -module.exports = request -module.exports.RequestHandler = RequestHandler + +/***/ }), + +/***/ 96639: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +const { checkBucketName: _checkBucketName } = __nccwpck_require__(30922); + +const proto = exports; +/** + * getBucketEncryption + * @param {String} bucketName - bucket name + */ + +proto.getBucketEncryption = async function getBucketEncryption(bucketName) { + _checkBucketName(bucketName); + const params = this._bucketRequestParams('GET', bucketName, 'encryption'); + params.successStatuses = [200]; + params.xmlResponse = true; + const result = await this.request(params); + const encryption = result.data.ApplyServerSideEncryptionByDefault; + return { + encryption, + status: result.status, + res: result.res + }; +}; /***/ }), -/***/ 5395: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 77743: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getBucketInventory = void 0; +const checkBucketName_1 = __nccwpck_require__(30922); +const formatInventoryConfig_1 = __nccwpck_require__(85708); +/** + * getBucketInventory + * @param {String} bucketName - bucket name + * @param {String} inventoryId + * @param {Object} options + */ +async function getBucketInventory(bucketName, inventoryId, options = {}) { + const subres = Object.assign({ inventory: '', inventoryId }, options.subres); + checkBucketName_1.checkBucketName(bucketName); + const params = this._bucketRequestParams('GET', bucketName, subres, options); + params.successStatuses = [200]; + params.xmlResponse = true; + const result = await this.request(params); + return { + status: result.status, + res: result.res, + inventory: formatInventoryConfig_1.formatInventoryConfig(result.data) + }; +} +exports.getBucketInventory = getBucketInventory; -const { finished, PassThrough } = __nccwpck_require__(2781) -const { - InvalidArgumentError, - InvalidReturnValueError, - RequestAbortedError -} = __nccwpck_require__(8045) -const util = __nccwpck_require__(3983) -const { getResolveErrorBodyCallback } = __nccwpck_require__(7474) -const { AsyncResource } = __nccwpck_require__(852) -const { addSignal, removeSignal } = __nccwpck_require__(7032) -class StreamHandler extends AsyncResource { - constructor (opts, 
factory, callback) { - if (!opts || typeof opts !== 'object') { - throw new InvalidArgumentError('invalid opts') - } +/***/ }), - const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError } = opts +/***/ 34331: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - try { - if (typeof callback !== 'function') { - throw new InvalidArgumentError('invalid callback') +const { checkBucketName: _checkBucketName } = __nccwpck_require__(30922); +const { isArray } = __nccwpck_require__(71954); +const { formatObjKey } = __nccwpck_require__(72095); + +const proto = exports; + +proto.getBucketLifecycle = async function getBucketLifecycle(name, options) { + _checkBucketName(name); + const params = this._bucketRequestParams('GET', name, 'lifecycle', options); + params.successStatuses = [200]; + params.xmlResponse = true; + const result = await this.request(params); + let rules = result.data.Rule || null; + if (rules) { + if (!isArray(rules)) { + rules = [rules]; + } + rules = rules.map(_ => { + if (_.ID) { + _.id = _.ID; + delete _.ID; } - - if (typeof factory !== 'function') { - throw new InvalidArgumentError('invalid factory') + if (_.Tag && !isArray(_.Tag)) { + _.Tag = [_.Tag]; } + return formatObjKey(_, 'firstLowerCase'); + }); + } + return { + rules, + res: result.res + }; +}; - if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { - throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') - } - if (method === 'CONNECT') { - throw new InvalidArgumentError('invalid method') - } +/***/ }), - if (onInfo && typeof onInfo !== 'function') { - throw new InvalidArgumentError('invalid onInfo callback') - } +/***/ 78221: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - super('UNDICI_STREAM') - } catch (err) { - if (util.isStream(body)) { - util.destroy(body.on('error', util.nop), err) - } - throw err - } +const { checkBucketName: _checkBucketName } = __nccwpck_require__(30922); - this.responseHeaders = responseHeaders || null - this.opaque = opaque || null - this.factory = factory - this.callback = callback - this.res = null - this.abort = null - this.context = null - this.trailers = null - this.body = body - this.onInfo = onInfo || null - this.throwOnError = throwOnError || false +const proto = exports; +/** + * getBucketPolicy + * @param {String} bucketName - bucket name + * @param {Object} options + */ - if (util.isStream(body)) { - body.on('error', (err) => { - this.onError(err) - }) - } +proto.getBucketPolicy = async function getBucketPolicy(bucketName, options = {}) { + _checkBucketName(bucketName); - addSignal(this, signal) - } + const params = this._bucketRequestParams('GET', bucketName, 'policy', options); - onConnect (abort, context) { - if (!this.callback) { - throw new RequestAbortedError() - } + const result = await this.request(params); + params.successStatuses = [200]; + let policy = null; - this.abort = abort - this.context = context + if (result.res.status === 200) { + policy = JSON.parse(result.res.data.toString()); } - onHeaders (statusCode, rawHeaders, resume, statusMessage) { - const { factory, opaque, context, callback, responseHeaders } = this + return { + policy, + status: result.status, + res: result.res + }; +}; - const headers = responseHeaders === 'raw' ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) - if (statusCode < 200) { - if (this.onInfo) { - this.onInfo({ statusCode, headers }) - } - return - } +/***/ }), - this.factory = null +/***/ 30768: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - let res +const { checkBucketName: _checkBucketName } = __nccwpck_require__(30922); - if (this.throwOnError && statusCode >= 400) { - const parsedHeaders = responseHeaders === 'raw' ? util.parseHeaders(rawHeaders) : headers - const contentType = parsedHeaders['content-type'] - res = new PassThrough() +const proto = exports; +/** + * getBucketRequestPayment + * @param {String} bucketName - bucket name + * @param {Object} options + */ - this.callback = null - this.runInAsyncScope(getResolveErrorBodyCallback, null, - { callback, body: res, contentType, statusCode, statusMessage, headers } - ) - } else { - if (factory === null) { - return - } +proto.getBucketRequestPayment = async function getBucketRequestPayment(bucketName, options) { + options = options || {}; - res = this.runInAsyncScope(factory, null, { - statusCode, - headers, - opaque, - context - }) + _checkBucketName(bucketName); + const params = this._bucketRequestParams('GET', bucketName, 'requestPayment', options); + params.successStatuses = [200]; + params.xmlResponse = true; - if ( - !res || - typeof res.write !== 'function' || - typeof res.end !== 'function' || - typeof res.on !== 'function' - ) { - throw new InvalidReturnValueError('expected Writable') - } + const result = await this.request(params); - // TODO: Avoid finished. It registers an unnecessary amount of listeners. - finished(res, { readable: false }, (err) => { - const { callback, res, opaque, trailers, abort } = this + return { + status: result.status, + res: result.res, + payer: result.data.Payer + }; +}; - this.res = null - if (err || !res.readable) { - util.destroy(res, err) - } - this.callback = null - this.runInAsyncScope(callback, null, err || null, { opaque, trailers }) +/***/ }), - if (err) { - abort() - } - }) - } +/***/ 85629: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - res.on('drain', resume) +"use strict"; - this.res = res +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getBucketStat = void 0; +const checkBucketName_1 = __nccwpck_require__(30922); +async function getBucketStat(name, options) { + name = name || this.options.bucket; + checkBucketName_1.checkBucketName(name); + const params = this._bucketRequestParams('GET', name, 'stat', options); + params.successStatuses = [200]; + params.xmlResponse = true; + const result = await this.request(params); + return { + res: result.res, + stat: result.data + }; +} +exports.getBucketStat = getBucketStat; - const needDrain = res.writableNeedDrain !== undefined - ? res.writableNeedDrain - : res._writableState && res._writableState.needDrain - return needDrain !== true - } +/***/ }), - onData (chunk) { - const { res } = this +/***/ 47103: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - return res ? 
res.write(chunk) : true - } +const proto = exports; +const { checkBucketName: _checkBucketName } = __nccwpck_require__(30922); +const { formatTag } = __nccwpck_require__(97115); +/** + * getBucketTags + * @param {String} name - bucket name + * @param {Object} options + * @return {Object} + */ - onComplete (trailers) { - const { res } = this +proto.getBucketTags = async function getBucketTags(name, options = {}) { + _checkBucketName(name); + const params = this._bucketRequestParams('GET', name, 'tagging', options); + params.successStatuses = [200]; + const result = await this.request(params); + const Tagging = await this.parseXML(result.data); - removeSignal(this) + return { + status: result.status, + res: result.res, + tag: formatTag(Tagging) + }; +}; - if (!res) { - return - } - this.trailers = util.parseHeaders(trailers) +/***/ }), - res.end() - } +/***/ 43607: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - onError (err) { - const { res, callback, opaque, body } = this +const { checkBucketName: _checkBucketName } = __nccwpck_require__(30922); - removeSignal(this) +const proto = exports; +/** + * getBucketVersioning + * @param {String} bucketName - bucket name + */ - this.factory = null +proto.getBucketVersioning = async function getBucketVersioning(bucketName, options) { + _checkBucketName(bucketName); + const params = this._bucketRequestParams('GET', bucketName, 'versioning', options); + params.xmlResponse = true; + params.successStatuses = [200]; + const result = await this.request(params); - if (res) { - this.res = null - util.destroy(res, err) - } else if (callback) { - this.callback = null - queueMicrotask(() => { - this.runInAsyncScope(callback, null, err, { opaque }) - }) - } + const versionStatus = result.data.Status; + return { + status: result.status, + versionStatus, + res: result.res + }; +}; - if (body) { - this.body = null - util.destroy(body, err) - } - } -} -function stream (opts, factory, callback) { - if (callback === undefined) { - return new Promise((resolve, reject) => { - stream.call(this, opts, factory, (err, data) => { - return err ? 
reject(err) : resolve(data) - }) - }) - } +/***/ }), - try { - this.dispatch(opts, new StreamHandler(opts, factory, callback)) - } catch (err) { - if (typeof callback !== 'function') { - throw err +/***/ 13570: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +const { checkBucketName: _checkBucketName } = __nccwpck_require__(30922); +const { isObject } = __nccwpck_require__(81269); + +const proto = exports; + +proto.getBucketWebsite = async function getBucketWebsite(name, options) { + _checkBucketName(name); + const params = this._bucketRequestParams('GET', name, 'website', options); + params.successStatuses = [200]; + params.xmlResponse = true; + const result = await this.request(params); + let routingRules = []; + if (result.data.RoutingRules && result.data.RoutingRules.RoutingRule) { + if (isObject(result.data.RoutingRules.RoutingRule)) { + routingRules = [result.data.RoutingRules.RoutingRule]; + } else { + routingRules = result.data.RoutingRules.RoutingRule; } - const opaque = opts && opts.opaque - queueMicrotask(() => callback(err, { opaque })) } -} - -module.exports = stream + return { + index: (result.data.IndexDocument && result.data.IndexDocument.Suffix) || '', + supportSubDir: (result.data.IndexDocument && result.data.IndexDocument.SupportSubDir) || 'false', + type: result.data.IndexDocument && result.data.IndexDocument.Type, + routingRules, + error: (result.data.ErrorDocument && result.data.ErrorDocument.Key) || null, + res: result.res + }; +}; /***/ }), -/***/ 6923: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 31539: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getBucketWorm = void 0; +const checkBucketName_1 = __nccwpck_require__(30922); +const dataFix_1 = __nccwpck_require__(61615); +async function getBucketWorm(name, options) { + checkBucketName_1.checkBucketName(name); + const params = this._bucketRequestParams('GET', name, 'worm', options); + params.successStatuses = [200]; + params.xmlResponse = true; + const result = await this.request(params); + dataFix_1.dataFix(result.data, { + lowerFirst: true, + rename: { + RetentionPeriodInDays: 'days' + } + }); + return Object.assign(Object.assign({}, result.data), { res: result.res, status: result.status }); +} +exports.getBucketWorm = getBucketWorm; -const { InvalidArgumentError, RequestAbortedError, SocketError } = __nccwpck_require__(8045) -const { AsyncResource } = __nccwpck_require__(852) -const util = __nccwpck_require__(3983) -const { addSignal, removeSignal } = __nccwpck_require__(7032) -const assert = __nccwpck_require__(9491) -class UpgradeHandler extends AsyncResource { - constructor (opts, callback) { - if (!opts || typeof opts !== 'object') { - throw new InvalidArgumentError('invalid opts') - } +/***/ }), - if (typeof callback !== 'function') { - throw new InvalidArgumentError('invalid callback') - } +/***/ 82715: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - const { signal, opaque, responseHeaders } = opts +const merge = __nccwpck_require__(11149); + +const proto = exports; + +merge(proto, __nccwpck_require__(30768)); +merge(proto, __nccwpck_require__(54392)); +merge(proto, __nccwpck_require__(73530)); +merge(proto, __nccwpck_require__(96639)); +merge(proto, __nccwpck_require__(79017)); +merge(proto, __nccwpck_require__(47103)); +merge(proto, __nccwpck_require__(45149)); +merge(proto, __nccwpck_require__(46130)); +merge(proto, 
__nccwpck_require__(60979)); +merge(proto, __nccwpck_require__(13570)); +merge(proto, __nccwpck_require__(24979)); +merge(proto, __nccwpck_require__(33069)); +merge(proto, __nccwpck_require__(34331)); +merge(proto, __nccwpck_require__(57812)); +merge(proto, __nccwpck_require__(11888)); +merge(proto, __nccwpck_require__(78221)); +merge(proto, __nccwpck_require__(59663)); +merge(proto, __nccwpck_require__(8585)); +merge(proto, __nccwpck_require__(43607)); +merge(proto, __nccwpck_require__(33973)); +merge(proto, __nccwpck_require__(77743)); +merge(proto, __nccwpck_require__(96956)); +merge(proto, __nccwpck_require__(57981)); +merge(proto, __nccwpck_require__(50113)); +merge(proto, __nccwpck_require__(18072)); +merge(proto, __nccwpck_require__(46768)); +merge(proto, __nccwpck_require__(54262)); +merge(proto, __nccwpck_require__(31539)); +merge(proto, __nccwpck_require__(93874)); +merge(proto, __nccwpck_require__(85629)); - if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { - throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') - } - super('UNDICI_UPGRADE') +/***/ }), - this.responseHeaders = responseHeaders || null - this.opaque = opaque || null - this.callback = callback - this.abort = null - this.context = null +/***/ 93874: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - addSignal(this, signal) - } +"use strict"; - onConnect (abort, context) { - if (!this.callback) { - throw new RequestAbortedError() - } +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.initiateBucketWorm = void 0; +const obj2xml_1 = __nccwpck_require__(96846); +const checkBucketName_1 = __nccwpck_require__(30922); +async function initiateBucketWorm(name, days, options) { + checkBucketName_1.checkBucketName(name); + const params = this._bucketRequestParams('POST', name, 'worm', options); + const paramlXMLObJ = { + InitiateWormConfiguration: { + RetentionPeriodInDays: days + } + }; + params.mime = 'xml'; + params.content = obj2xml_1.obj2xml(paramlXMLObJ, { headers: true }); + params.successStatuses = [200]; + const result = await this.request(params); + return { + res: result.res, + wormId: result.res.headers['x-oss-worm-id'], + status: result.status + }; +} +exports.initiateBucketWorm = initiateBucketWorm; - this.abort = abort - this.context = null - } - onHeaders () { - throw new SocketError('bad upgrade', null) - } +/***/ }), - onUpgrade (statusCode, rawHeaders, socket) { - const { callback, opaque, context } = this +/***/ 57981: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - assert.strictEqual(statusCode, 101) +"use strict"; - removeSignal(this) +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.listBucketInventory = void 0; +const checkBucketName_1 = __nccwpck_require__(30922); +const formatInventoryConfig_1 = __nccwpck_require__(85708); +/** + * listBucketInventory + * @param {String} bucketName - bucket name + * @param {String} inventoryId + * @param {Object} options + */ +async function listBucketInventory(bucketName, options = {}) { + const { continuationToken } = options; + const subres = Object.assign({ inventory: '' }, continuationToken && { 'continuation-token': continuationToken }, options.subres); + checkBucketName_1.checkBucketName(bucketName); + const params = this._bucketRequestParams('GET', bucketName, subres, options); + params.successStatuses = [200]; + params.xmlResponse = true; + const result = await this.request(params); + const { 
data, res, status } = result; + return { + isTruncated: data.IsTruncated === 'true', + nextContinuationToken: data.NextContinuationToken, + inventoryList: formatInventoryConfig_1.formatInventoryConfig(data.InventoryConfiguration, true), + status, + res + }; +} +exports.listBucketInventory = listBucketInventory; - this.callback = null - const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) - this.runInAsyncScope(callback, null, null, { - headers, - socket, - opaque, - context - }) - } - onError (err) { - const { callback, opaque } = this +/***/ }), - removeSignal(this) +/***/ 60979: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if (callback) { - this.callback = null - queueMicrotask(() => { - this.runInAsyncScope(callback, null, err, { opaque }) - }) - } - } -} +const proto = exports; +const { checkBucketName: _checkBucketName } = __nccwpck_require__(30922); +const { obj2xml } = __nccwpck_require__(96846); + +proto.putBucket = async function putBucket(name, options = {}) { + _checkBucketName(name, true); + const params = this._bucketRequestParams('PUT', name, '', options); + + const CreateBucketConfiguration = {}; + const paramlXMLObJ = { + CreateBucketConfiguration + }; + + const storageClass = options.StorageClass || options.storageClass; + const dataRedundancyType = options.DataRedundancyType || options.dataRedundancyType; + if (storageClass || dataRedundancyType) { + storageClass && (CreateBucketConfiguration.StorageClass = storageClass); + dataRedundancyType && (CreateBucketConfiguration.DataRedundancyType = dataRedundancyType); + params.mime = 'xml'; + params.content = obj2xml(paramlXMLObJ, { headers: true }); + } + const { acl, headers = {} } = options; + acl && (headers['x-oss-acl'] = acl); + params.headers = headers; + params.successStatuses = [200]; + const result = await this.request(params); + return { + bucket: (result.headers.location && result.headers.location.substring(1)) || null, + res: result.res + }; +}; -function upgrade (opts, callback) { - if (callback === undefined) { - return new Promise((resolve, reject) => { - upgrade.call(this, opts, (err, data) => { - return err ? 
reject(err) : resolve(data) - }) - }) - } - try { - const upgradeHandler = new UpgradeHandler(opts, callback) - this.dispatch({ - ...opts, - method: opts.method || 'GET', - upgrade: opts.protocol || 'Websocket' - }, upgradeHandler) - } catch (err) { - if (typeof callback !== 'function') { - throw err +/***/ }), + +/***/ 73530: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +const proto = exports; +// const jstoxml = require('jstoxml'); +const { checkBucketName: _checkBucketName } = __nccwpck_require__(30922); +const { obj2xml } = __nccwpck_require__(96846); +/** + * putBucketEncryption + * @param {String} bucketName - bucket name + * @param {Object} options + */ + +proto.putBucketEncryption = async function putBucketEncryption(bucketName, options) { + options = options || {}; + _checkBucketName(bucketName); + const params = this._bucketRequestParams('PUT', bucketName, 'encryption', options); + params.successStatuses = [200]; + const paramXMLObj = { + ServerSideEncryptionRule: { + ApplyServerSideEncryptionByDefault: { + SSEAlgorithm: options.SSEAlgorithm + } } - const opaque = opts && opts.opaque - queueMicrotask(() => callback(err, { opaque })) + }; + if (options.KMSMasterKeyID !== undefined) { + paramXMLObj.ServerSideEncryptionRule.ApplyServerSideEncryptionByDefault.KMSMasterKeyID = options.KMSMasterKeyID; } -} - -module.exports = upgrade + const paramXML = obj2xml(paramXMLObj, { + headers: true + }); + params.mime = 'xml'; + params.content = paramXML; + const result = await this.request(params); + return { + status: result.status, + res: result.res + }; +}; /***/ }), -/***/ 4059: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 50113: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; - -module.exports.request = __nccwpck_require__(5448) -module.exports.stream = __nccwpck_require__(5395) -module.exports.pipeline = __nccwpck_require__(8752) -module.exports.upgrade = __nccwpck_require__(6923) -module.exports.connect = __nccwpck_require__(9744) +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.putBucketInventory = void 0; +const checkBucketName_1 = __nccwpck_require__(30922); +const obj2xml_1 = __nccwpck_require__(96846); +/** + * putBucketInventory + * @param {String} bucketName - bucket name + * @param {Inventory} inventory + * @param {Object} options + */ +async function putBucketInventory(bucketName, inventory, options = {}) { + const subres = Object.assign({ inventory: '', inventoryId: inventory.id }, options.subres); + checkBucketName_1.checkBucketName(bucketName); + const { OSSBucketDestination, optionalFields, includedObjectVersions } = inventory; + const destinationBucketPrefix = 'acs:oss:::'; + const rolePrefix = `acs:ram::${OSSBucketDestination.accountId}:role/`; + const paramXMLObj = { + InventoryConfiguration: { + Id: inventory.id, + IsEnabled: inventory.isEnabled, + Filter: { + Prefix: inventory.prefix || '' + }, + Destination: { + OSSBucketDestination: { + Format: OSSBucketDestination.format, + AccountId: OSSBucketDestination.accountId, + RoleArn: `${rolePrefix}${OSSBucketDestination.rolename}`, + Bucket: `${destinationBucketPrefix}${OSSBucketDestination.bucket}`, + Prefix: OSSBucketDestination.prefix || '', + Encryption: OSSBucketDestination.encryption || '' + } + }, + Schedule: { + Frequency: inventory.frequency + }, + IncludedObjectVersions: includedObjectVersions, + OptionalFields: { + Field: (optionalFields === null || optionalFields === void 0 ? 
void 0 : optionalFields.field) || [] + } + } + }; + const paramXML = obj2xml_1.obj2xml(paramXMLObj, { + headers: true, + firstUpperCase: true + }); + const params = this._bucketRequestParams('PUT', bucketName, subres, options); + params.successStatuses = [200]; + params.mime = 'xml'; + params.content = paramXML; + const result = await this.request(params); + return { + status: result.status, + res: result.res + }; +} +exports.putBucketInventory = putBucketInventory; /***/ }), -/***/ 3858: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 57812: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -"use strict"; -// Ported from https://github.com/nodejs/undici/pull/907 +/* eslint-disable no-use-before-define */ +const { checkBucketName: _checkBucketName } = __nccwpck_require__(30922); +const { isArray } = __nccwpck_require__(71954); +const { deepCopy } = __nccwpck_require__(97150); +const { isObject } = __nccwpck_require__(81269); +const { obj2xml } = __nccwpck_require__(96846); +const { checkObjectTag } = __nccwpck_require__(40849); +const { getStrBytesCount } = __nccwpck_require__(70365); +const proto = exports; +proto.putBucketLifecycle = async function putBucketLifecycle(name, rules, options) { + _checkBucketName(name); -const assert = __nccwpck_require__(9491) -const { Readable } = __nccwpck_require__(2781) -const { RequestAbortedError, NotSupportedError, InvalidArgumentError } = __nccwpck_require__(8045) -const util = __nccwpck_require__(3983) -const { ReadableStreamFrom, toUSVString } = __nccwpck_require__(3983) + if (!isArray(rules)) { + throw new Error('rules must be Array'); + } -let Blob + const params = this._bucketRequestParams('PUT', name, 'lifecycle', options); + const Rule = []; + const paramXMLObj = { + LifecycleConfiguration: { + Rule + } + }; -const kConsume = Symbol('kConsume') -const kReading = Symbol('kReading') -const kBody = Symbol('kBody') -const kAbort = Symbol('abort') -const kContentType = Symbol('kContentType') + rules.forEach(_ => { + defaultDaysAndDate2Expiration(_); // todo delete, 兼容旧版本 + checkRule(_); + if (_.id) { + _.ID = _.id; + delete _.id; + } + Rule.push(_); + }); -const noop = () => {} + const paramXML = obj2xml(paramXMLObj, { + headers: true, + firstUpperCase: true + }); -module.exports = class BodyReadable extends Readable { - constructor ({ - resume, - abort, - contentType = '', - highWaterMark = 64 * 1024 // Same as nodejs fs streams. 
- }) { - super({ - autoDestroy: true, - read: resume, - highWaterMark - }) + params.content = paramXML; + params.mime = 'xml'; + params.successStatuses = [200]; + const result = await this.request(params); + return { + res: result.res + }; +}; - this._readableState.dataEmitted = false +// todo delete, 兼容旧版本 +function defaultDaysAndDate2Expiration(obj) { + if (obj.days) { + obj.expiration = { + days: obj.days + }; + } + if (obj.date) { + obj.expiration = { + createdBeforeDate: obj.date + }; + } +} - this[kAbort] = abort - this[kConsume] = null - this[kBody] = null - this[kContentType] = contentType +function checkDaysAndDate(obj, key) { + const { days, createdBeforeDate } = obj; + if (!days && !createdBeforeDate) { + throw new Error(`${key} must includes days or createdBeforeDate`); + } else if (days && !/^[1-9][0-9]*$/.test(days)) { + throw new Error('days must be a positive integer'); + } else if (createdBeforeDate && !/\d{4}-\d{2}-\d{2}T00:00:00.000Z/.test(createdBeforeDate)) { + throw new Error('createdBeforeDate must be date and conform to iso8601 format'); + } +} - // Is stream being consumed through Readable API? - // This is an optimization so that we avoid checking - // for 'data' and 'readable' listeners in the hot path - // inside push(). - this[kReading] = false +function handleCheckTag(tag) { + if (!isArray(tag) && !isObject(tag)) { + throw new Error('tag must be Object or Array'); } + tag = isObject(tag) ? [tag] : tag; + const tagObj = {}; + const tagClone = deepCopy(tag); + tagClone.forEach(v => { + tagObj[v.key] = v.value; + }); - destroy (err) { - if (this.destroyed) { - // Node < 16 - return this - } + checkObjectTag(tagObj); +} - if (!err && !this._readableState.endEmitted) { - err = new RequestAbortedError() - } +function checkRule(rule) { + if (rule.id && getStrBytesCount(rule.id) > 255) throw new Error('ID is composed of 255 bytes at most'); - if (err) { - this[kAbort]() - } + if (rule.prefix === undefined) throw new Error('Rule must includes prefix'); - return super.destroy(err) + if (!['Enabled', 'Disabled'].includes(rule.status)) throw new Error('Status must be Enabled or Disabled'); + + if (rule.transition) { + if (!['IA', 'Archive'].includes(rule.transition.storageClass)) + throw new Error('StorageClass must be IA or Archive'); + checkDaysAndDate(rule.transition, 'Transition'); } - emit (ev, ...args) { - if (ev === 'data') { - // Node < 16.7 - this._readableState.dataEmitted = true - } else if (ev === 'error') { - // Node < 16 - this._readableState.errorEmitted = true + if (rule.expiration) { + if (!rule.expiration.expiredObjectDeleteMarker) { + checkDaysAndDate(rule.expiration, 'Expiration'); + } else if (rule.expiration.days || rule.expiration.createdBeforeDate) { + throw new Error('expiredObjectDeleteMarker cannot be used with days or createdBeforeDate'); } - return super.emit(ev, ...args) } - on (ev, ...args) { - if (ev === 'data' || ev === 'readable') { - this[kReading] = true - } - return super.on(ev, ...args) + if (rule.abortMultipartUpload) { + checkDaysAndDate(rule.abortMultipartUpload, 'AbortMultipartUpload'); } - addListener (ev, ...args) { - return this.on(ev, ...args) + if (!rule.expiration && !rule.abortMultipartUpload && !rule.transition && !rule.noncurrentVersionTransition) { + throw new Error( + 'Rule must includes expiration or abortMultipartUpload or transition or noncurrentVersionTransition' + ); } - off (ev, ...args) { - const ret = super.off(ev, ...args) - if (ev === 'data' || ev === 'readable') { - this[kReading] = ( - 
this.listenerCount('data') > 0 || - this.listenerCount('readable') > 0 - ) + if (rule.tag) { + if (rule.abortMultipartUpload) { + throw new Error('Tag cannot be used with abortMultipartUpload'); } - return ret + handleCheckTag(rule.tag); } +} - removeListener (ev, ...args) { - return this.off(ev, ...args) - } - push (chunk) { - if (this[kConsume] && chunk !== null && this.readableLength === 0) { - consumePush(this[kConsume], chunk) - return this[kReading] ? super.push(chunk) : true - } - return super.push(chunk) - } +/***/ }), - // https://fetch.spec.whatwg.org/#dom-body-text - async text () { - return consume(this, 'text') - } +/***/ 59663: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // https://fetch.spec.whatwg.org/#dom-body-json - async json () { - return consume(this, 'json') - } +const { checkBucketName: _checkBucketName } = __nccwpck_require__(30922); +const { policy2Str } = __nccwpck_require__(30061); +const { isObject } = __nccwpck_require__(81269); - // https://fetch.spec.whatwg.org/#dom-body-blob - async blob () { - return consume(this, 'blob') - } +const proto = exports; +/** + * putBucketPolicy + * @param {String} bucketName - bucket name + * @param {Object} policy - bucket policy + * @param {Object} options + */ - // https://fetch.spec.whatwg.org/#dom-body-arraybuffer - async arrayBuffer () { - return consume(this, 'arrayBuffer') - } +proto.putBucketPolicy = async function putBucketPolicy(bucketName, policy, options = {}) { + _checkBucketName(bucketName); - // https://fetch.spec.whatwg.org/#dom-body-formdata - async formData () { - // TODO: Implement. - throw new NotSupportedError() + if (!isObject(policy)) { + throw new Error('policy is not Object'); } + const params = this._bucketRequestParams('PUT', bucketName, 'policy', options); + params.content = policy2Str(policy); + params.successStatuses = [200]; + const result = await this.request(params); + return { + status: result.status, + res: result.res + }; +}; - // https://fetch.spec.whatwg.org/#dom-body-bodyused - get bodyUsed () { - return util.isDisturbed(this) + +/***/ }), + +/***/ 54392: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +const { checkBucketName: _checkBucketName } = __nccwpck_require__(30922); +const { obj2xml } = __nccwpck_require__(96846); + +const proto = exports; +/** + * putBucketRequestPayment + * @param {String} bucketName + * @param {String} payer + * @param {Object} options + */ +const payerAll = ['BucketOwner', 'Requester']; + +proto.putBucketRequestPayment = async function putBucketRequestPayment(bucketName, payer, options) { + options = options || {}; + if (!payer || payerAll.indexOf(payer) < 0) { + throw new Error('payer must be BucketOwner or Requester'); } - // https://fetch.spec.whatwg.org/#dom-body-body - get body () { - if (!this[kBody]) { - this[kBody] = ReadableStreamFrom(this) - if (this[kConsume]) { - // TODO: Is this the best way to force a lock? - this[kBody].getReader() // Ensure stream is locked. - assert(this[kBody].locked) - } + _checkBucketName(bucketName); + const params = this._bucketRequestParams('PUT', bucketName, 'requestPayment', options); + params.successStatuses = [200]; + + const paramXMLObj = { + RequestPaymentConfiguration: { + Payer: payer } - return this[kBody] - } + }; + const paramXML = obj2xml(paramXMLObj, { + headers: true + }); - dump (opts) { - let limit = opts && Number.isFinite(opts.limit) ? 
opts.limit : 262144 - const signal = opts && opts.signal + params.mime = 'xml'; + params.content = paramXML; - if (signal) { - try { - if (typeof signal !== 'object' || !('aborted' in signal)) { - throw new InvalidArgumentError('signal must be an AbortSignal') - } - util.throwIfAborted(signal) - } catch (err) { - return Promise.reject(err) + const result = await this.request(params); + return { + status: result.status, + res: result.res + }; +}; + + +/***/ }), + +/***/ 45149: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +const { checkBucketName: _checkBucketName } = __nccwpck_require__(30922); +const { obj2xml } = __nccwpck_require__(96846); +const { checkBucketTag } = __nccwpck_require__(38694); + +const proto = exports; +/** + * putBucketTags + * @param {String} name - bucket name + * @param {Object} tag - bucket tag, eg: `{a: "1", b: "2"}` + * @param {Object} options + */ + +proto.putBucketTags = async function putBucketTags(name, tag, options = {}) { + _checkBucketName(name); + checkBucketTag(tag); + const params = this._bucketRequestParams('PUT', name, 'tagging', options); + params.successStatuses = [200]; + tag = Object.keys(tag).map(key => ({ + Key: key, + Value: tag[key] + })); + + const paramXMLObj = { + Tagging: { + TagSet: { + Tag: tag } } + }; - if (this.closed) { - return Promise.resolve(null) - } + params.mime = 'xml'; + params.content = obj2xml(paramXMLObj); - return new Promise((resolve, reject) => { - const signalListenerCleanup = signal - ? util.addAbortListener(signal, () => { - this.destroy() - }) - : noop + const result = await this.request(params); + return { + res: result.res, + status: result.status + }; +}; - this - .on('close', function () { - signalListenerCleanup() - if (signal && signal.aborted) { - reject(signal.reason || Object.assign(new Error('The operation was aborted'), { name: 'AbortError' })) - } else { - resolve(null) - } - }) - .on('error', noop) - .on('data', function (chunk) { - limit -= chunk.length - if (limit <= 0) { - this.destroy() - } - }) - .resume() - }) - } -} -// https://streams.spec.whatwg.org/#readablestream-locked -function isLocked (self) { - // Consume is an implicit lock. 
- return (self[kBody] && self[kBody].locked === true) || self[kConsume] -} +/***/ }), -// https://fetch.spec.whatwg.org/#body-unusable -function isUnusable (self) { - return util.isDisturbed(self) || isLocked(self) -} +/***/ 33973: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -async function consume (stream, type) { - if (isUnusable(stream)) { - throw new TypeError('unusable') - } +const { checkBucketName: _checkBucketName } = __nccwpck_require__(30922); +const { obj2xml } = __nccwpck_require__(96846); - assert(!stream[kConsume]) +const proto = exports; +/** + * putBucketVersioning + * @param {String} name - bucket name + * @param {String} status + * @param {Object} options + */ - return new Promise((resolve, reject) => { - stream[kConsume] = { - type, - stream, - resolve, - reject, - length: 0, - body: [] +proto.putBucketVersioning = async function putBucketVersioning(name, status, options = {}) { + _checkBucketName(name); + if (!['Enabled', 'Suspended'].includes(status)) { + throw new Error('status must be Enabled or Suspended'); + } + const params = this._bucketRequestParams('PUT', name, 'versioning', options); + + const paramXMLObj = { + VersioningConfiguration: { + Status: status } + }; - stream - .on('error', function (err) { - consumeFinish(this[kConsume], err) - }) - .on('close', function () { - if (this[kConsume].body !== null) { - consumeFinish(this[kConsume], new RequestAbortedError()) - } - }) + params.mime = 'xml'; + params.content = obj2xml(paramXMLObj, { + headers: true + }); - process.nextTick(consumeStart, stream[kConsume]) - }) -} + const result = await this.request(params); + return { + res: result.res, + status: result.status + }; +}; -function consumeStart (consume) { - if (consume.body === null) { - return - } - const { _readableState: state } = consume.stream +/***/ }), - for (const chunk of state.buffer) { - consumePush(consume, chunk) +/***/ 24979: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +const { checkBucketName: _checkBucketName } = __nccwpck_require__(30922); +const { obj2xml } = __nccwpck_require__(96846); +const { isArray } = __nccwpck_require__(71954); + +const proto = exports; +proto.putBucketWebsite = async function putBucketWebsite(name, config = {}, options) { + _checkBucketName(name); + const params = this._bucketRequestParams('PUT', name, 'website', options); + const IndexDocument = { + Suffix: config.index || 'index.html' + }; + const WebsiteConfiguration = { + IndexDocument + }; + let website = { + WebsiteConfiguration + }; + + if (config.supportSubDir) { + IndexDocument.SupportSubDir = config.supportSubDir; } - if (state.endEmitted) { - consumeEnd(this[kConsume]) - } else { - consume.stream.on('end', function () { - consumeEnd(this[kConsume]) - }) + if (config.type) { + IndexDocument.Type = config.type; } - consume.stream.resume() + if (config.error) { + WebsiteConfiguration.ErrorDocument = { + Key: config.error + }; + } - while (consume.stream.read() != null) { - // Loop + if (config.routingRules !== undefined) { + if (!isArray(config.routingRules)) { + throw new Error('RoutingRules must be Array'); + } + WebsiteConfiguration.RoutingRules = { + RoutingRule: config.routingRules + }; } -} -function consumeEnd (consume) { - const { type, body, resolve, stream, length } = consume + website = obj2xml(website); + params.content = website; + params.mime = 'xml'; + params.successStatuses = [200]; + const result = await this.request(params); + return { + res: result.res + }; +}; - try { - if (type === 
'text') { - resolve(toUSVString(Buffer.concat(body))) - } else if (type === 'json') { - resolve(JSON.parse(Buffer.concat(body))) - } else if (type === 'arrayBuffer') { - const dst = new Uint8Array(length) - let pos = 0 - for (const buf of body) { - dst.set(buf, pos) - pos += buf.byteLength +/***/ }), + +/***/ 26725: +/***/ ((__unused_webpack_module, exports) => { + +exports.encodeCallback = function encodeCallback(reqParams, options) { + reqParams.headers = reqParams.headers || {}; + if (!Object.prototype.hasOwnProperty.call(reqParams.headers, 'x-oss-callback')) { + if (options.callback) { + const json = { + callbackUrl: encodeURI(options.callback.url), + callbackBody: options.callback.body + }; + if (options.callback.host) { + json.callbackHost = options.callback.host; + } + if (options.callback.contentType) { + json.callbackBodyType = options.callback.contentType; } + const callback = Buffer.from(JSON.stringify(json)).toString('base64'); + reqParams.headers['x-oss-callback'] = callback; - resolve(dst.buffer) - } else if (type === 'blob') { - if (!Blob) { - Blob = (__nccwpck_require__(4300).Blob) + if (options.callback.customValue) { + const callbackVar = {}; + Object.keys(options.callback.customValue).forEach(key => { + callbackVar[`x:${key}`] = options.callback.customValue[key].toString(); + }); + reqParams.headers['x-oss-callback-var'] = Buffer.from(JSON.stringify(callbackVar)).toString('base64'); } - resolve(new Blob(body, { type: stream[kContentType] })) } - - consumeFinish(consume) - } catch (err) { - stream.destroy(err) } -} +}; -function consumePush (consume, chunk) { - consume.length += chunk.length - consume.body.push(chunk) -} -function consumeFinish (consume, err) { - if (consume.body === null) { - return - } +/***/ }), - if (err) { - consume.reject(err) - } else { - consume.resolve() - } +/***/ 86783: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - consume.type = null - consume.stream = null - consume.resolve = null - consume.reject = null - consume.length = 0 - consume.body = null +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getReqUrl = void 0; +const copy_to_1 = __importDefault(__nccwpck_require__(30952)); +const url_1 = __importDefault(__nccwpck_require__(57310)); +const merge_descriptors_1 = __importDefault(__nccwpck_require__(11149)); +const is_type_of_1 = __importDefault(__nccwpck_require__(65565)); +const isIP_1 = __nccwpck_require__(42439); +const checkConfigValid_1 = __nccwpck_require__(27324); +function getReqUrl(params) { + const ep = {}; + const isCname = this.options.cname; + checkConfigValid_1.checkConfigValid(this.options.endpoint, 'endpoint'); + copy_to_1.default(this.options.endpoint, false).to(ep); + if (params.bucket && !isCname && !isIP_1.isIP(ep.hostname) && !this.options.sldEnable) { + ep.host = `${params.bucket}.${ep.host}`; + } + let resourcePath = '/'; + if (params.bucket && this.options.sldEnable) { + resourcePath += `${params.bucket}/`; + } + if (params.object) { + // Preserve '/' in result url + resourcePath += this._escape(params.object).replace(/\+/g, '%2B'); + } + ep.pathname = resourcePath; + const query = {}; + if (params.query) { + merge_descriptors_1.default(query, params.query); + } + if (params.subres) { + let subresAsQuery = {}; + if (is_type_of_1.default.string(params.subres)) { + subresAsQuery[params.subres] = ''; + } + else if (is_type_of_1.default.array(params.subres)) { + params.subres.forEach(k => { + subresAsQuery[k] = ''; + }); + } + else { + subresAsQuery = params.subres; + } + merge_descriptors_1.default(query, subresAsQuery); + } + ep.query = query; + return url_1.default.format(ep); } +exports.getReqUrl = getReqUrl; /***/ }), -/***/ 7474: +/***/ 99221: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -const assert = __nccwpck_require__(9491) -const { - ResponseStatusCodeError -} = __nccwpck_require__(8045) -const { toUSVString } = __nccwpck_require__(3983) +const ms = __nccwpck_require__(10845); +const urlutil = __nccwpck_require__(57310); +const { checkBucketName: _checkBucketName } = __nccwpck_require__(30922); +const { setRegion } = __nccwpck_require__(16690); +const { checkConfigValid } = __nccwpck_require__(27324); -async function getResolveErrorBodyCallback ({ callback, body, contentType, statusCode, statusMessage, headers }) { - assert(body) +function setEndpoint(endpoint, secure) { + checkConfigValid(endpoint, 'endpoint'); + let url = urlutil.parse(endpoint); - let chunks = [] - let limit = 0 + if (!url.protocol) { + url = urlutil.parse(`http${secure ? 's' : ''}://${endpoint}`); + } - for await (const chunk of body) { - chunks.push(chunk) - limit += chunk.length - if (limit > 128 * 1024) { - chunks = null - break - } + if (url.protocol !== 'http:' && url.protocol !== 'https:') { + throw new Error('Endpoint protocol must be http or https.'); } - if (statusCode === 204 || !contentType || !chunks) { - process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? 
`: ${statusMessage}` : ''}`, statusCode, headers)) - return + return url; +} + +module.exports = function (options) { + if (!options || !options.accessKeyId || !options.accessKeySecret) { + throw new Error('require accessKeyId, accessKeySecret'); + } + if (options.stsToken && !options.refreshSTSToken && !options.refreshSTSTokenInterval) { + console.warn( + "It's recommended to set 'refreshSTSToken' and 'refreshSTSTokenInterval' to refresh" + + ' stsToken、accessKeyId、accessKeySecret automatically when sts token has expired' + ); } + if (options.bucket) { + _checkBucketName(options.bucket); + } + const opts = Object.assign( + { + region: 'oss-cn-hangzhou', + internal: false, + secure: false, + timeout: 60000, + bucket: null, + endpoint: null, + cname: false, + isRequestPay: false, + sldEnable: false, + headerEncoding: 'utf-8', + refreshSTSToken: null, + refreshSTSTokenInterval: 60000 * 5, + retryMax: 0, + authorizationV4: false // 启用v4签名,默认关闭 + }, + options + ); - try { - if (contentType.startsWith('application/json')) { - const payload = JSON.parse(toUSVString(Buffer.concat(chunks))) - process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload)) - return - } + opts.accessKeyId = opts.accessKeyId.trim(); + opts.accessKeySecret = opts.accessKeySecret.trim(); - if (contentType.startsWith('text/')) { - const payload = toUSVString(Buffer.concat(chunks)) - process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload)) - return - } - } catch (err) { - // Process in a fallback if error + if (opts.timeout) { + opts.timeout = ms(opts.timeout); } - process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? 
`: ${statusMessage}` : ''}`, statusCode, headers)) -} + if (opts.endpoint) { + opts.endpoint = setEndpoint(opts.endpoint, opts.secure); + } else if (opts.region) { + opts.endpoint = setRegion(opts.region, opts.internal, opts.secure); + } else { + throw new Error('require options.endpoint or options.region'); + } -module.exports = { getResolveErrorBodyCallback } + opts.inited = true; + return opts; +}; /***/ }), -/***/ 7931: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 2341: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -"use strict"; +const merge = __nccwpck_require__(11149); +const proto = exports; -const { - BalancedPoolMissingUpstreamError, - InvalidArgumentError -} = __nccwpck_require__(8045) -const { - PoolBase, - kClients, - kNeedDrain, - kAddClient, - kRemoveClient, - kGetDispatcher -} = __nccwpck_require__(3198) -const Pool = __nccwpck_require__(4634) -const { kUrl, kInterceptors } = __nccwpck_require__(2785) -const { parseOrigin } = __nccwpck_require__(3983) -const kFactory = Symbol('factory') +merge(proto, __nccwpck_require__(83280)); -const kOptions = Symbol('options') -const kGreatestCommonDivisor = Symbol('kGreatestCommonDivisor') -const kCurrentWeight = Symbol('kCurrentWeight') -const kIndex = Symbol('kIndex') -const kWeight = Symbol('kWeight') -const kMaxWeightPerServer = Symbol('kMaxWeightPerServer') -const kErrorPenalty = Symbol('kErrorPenalty') -function getGreatestCommonDivisor (a, b) { - if (b === 0) return a - return getGreatestCommonDivisor(b, a % b) -} +/***/ }), -function defaultFactory (origin, opts) { - return new Pool(origin, opts) -} +/***/ 83280: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -class BalancedPool extends PoolBase { - constructor (upstreams = [], { factory = defaultFactory, ...opts } = {}) { - super() +/* eslint-disable no-use-before-define */ +const { checkBucketName: _checkBucketName } = __nccwpck_require__(30922); +const querystring = __nccwpck_require__(63477); +const { + Base64: { encode: str2Base64 } +} = __nccwpck_require__(84139); - this[kOptions] = opts - this[kIndex] = -1 - this[kCurrentWeight] = 0 +const proto = exports; - this[kMaxWeightPerServer] = this[kOptions].maxWeightPerServer || 100 - this[kErrorPenalty] = this[kOptions].errorPenalty || 15 +proto.processObjectSave = async function processObjectSave(sourceObject, targetObject, process, targetBucket) { + checkArgs(sourceObject, 'sourceObject'); + checkArgs(targetObject, 'targetObject'); + checkArgs(process, 'process'); + targetObject = this._objectName(targetObject); + if (targetBucket) { + _checkBucketName(targetBucket); + } - if (!Array.isArray(upstreams)) { - upstreams = [upstreams] - } + const params = this._objectRequestParams('POST', sourceObject, { + subres: 'x-oss-process' + }); - if (typeof factory !== 'function') { - throw new InvalidArgumentError('factory must be a function.') - } + const bucketParam = targetBucket ? `,b_${str2Base64(targetBucket)}` : ''; + targetObject = str2Base64(targetObject); - this[kInterceptors] = opts.interceptors && opts.interceptors.BalancedPool && Array.isArray(opts.interceptors.BalancedPool) - ? 
opts.interceptors.BalancedPool - : [] - this[kFactory] = factory + const content = { + 'x-oss-process': `${process}|sys/saveas,o_${targetObject}${bucketParam}` + }; + params.content = querystring.stringify(content); - for (const upstream of upstreams) { - this.addUpstream(upstream) - } - this._updateBalancedPoolStats() - } + const result = await this.request(params); + return { + res: result.res, + status: result.res.status + }; +}; - addUpstream (upstream) { - const upstreamOrigin = parseOrigin(upstream).origin +function checkArgs(name, key) { + if (!name) { + throw new Error(`${key} is required`); + } + if (typeof name !== 'string') { + throw new Error(`${key} must be String`); + } +} - if (this[kClients].find((pool) => ( - pool[kUrl].origin === upstreamOrigin && - pool.closed !== true && - pool.destroyed !== true - ))) { - return this - } - const pool = this[kFactory](upstreamOrigin, Object.assign({}, this[kOptions])) - this[kAddClient](pool) - pool.on('connect', () => { - pool[kWeight] = Math.min(this[kMaxWeightPerServer], pool[kWeight] + this[kErrorPenalty]) - }) +/***/ }), - pool.on('connectionError', () => { - pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty]) - this._updateBalancedPoolStats() - }) +/***/ 5944: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - pool.on('disconnect', (...args) => { - const err = args[2] - if (err && err.code === 'UND_ERR_SOCKET') { - // decrease the weight of the pool. - pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty]) - this._updateBalancedPoolStats() - } - }) +/* eslint-disable no-async-promise-executor */ - for (const client of this[kClients]) { - client[kWeight] = this[kMaxWeightPerServer] - } +const debug = __nccwpck_require__(38237)('ali-oss:multipart-copy'); +const copy = __nccwpck_require__(30952); - this._updateBalancedPoolStats() +const proto = exports; - return this +/** + * Upload a part copy in a multipart from the source bucket/object + * used with initMultipartUpload and completeMultipartUpload. 
+ * @param {String} name copy object name + * @param {String} uploadId the upload id + * @param {Number} partNo the part number + * @param {String} range like 0-102400 part size need to copy + * @param {Object} sourceData + * {String} sourceData.sourceKey the source object name + * {String} sourceData.sourceBucketName the source bucket name + * @param {Object} options + */ +/* eslint max-len: [0] */ +proto.uploadPartCopy = async function uploadPartCopy(name, uploadId, partNo, range, sourceData, options = {}) { + options.headers = options.headers || {}; + const versionId = options.versionId || (options.subres && options.subres.versionId) || null; + let copySource; + if (versionId) { + copySource = `/${sourceData.sourceBucketName}/${encodeURIComponent(sourceData.sourceKey)}?versionId=${versionId}`; + } else { + copySource = `/${sourceData.sourceBucketName}/${encodeURIComponent(sourceData.sourceKey)}`; } - _updateBalancedPoolStats () { - this[kGreatestCommonDivisor] = this[kClients].map(p => p[kWeight]).reduce(getGreatestCommonDivisor, 0) + options.headers['x-oss-copy-source'] = copySource; + if (range) { + options.headers['x-oss-copy-source-range'] = `bytes=${range}`; } - removeUpstream (upstream) { - const upstreamOrigin = parseOrigin(upstream).origin + options.subres = { + partNumber: partNo, + uploadId + }; + const params = this._objectRequestParams('PUT', name, options); + params.mime = options.mime; + params.successStatuses = [200]; - const pool = this[kClients].find((pool) => ( - pool[kUrl].origin === upstreamOrigin && - pool.closed !== true && - pool.destroyed !== true - )) + const result = await this.request(params); - if (pool) { - this[kRemoveClient](pool) - } + return { + name, + etag: result.res.headers.etag, + res: result.res + }; +}; - return this +/** + * @param {String} name copy object name + * @param {Object} sourceData + * {String} sourceData.sourceKey the source object name + * {String} sourceData.sourceBucketName the source bucket name + * {Number} sourceData.startOffset data copy start byte offset, e.g: 0 + * {Number} sourceData.endOffset data copy end byte offset, e.g: 102400 + * @param {Object} options + * {Number} options.partSize + */ +proto.multipartUploadCopy = async function multipartUploadCopy(name, sourceData, options = {}) { + this.resetCancelFlag(); + const { versionId = null } = options; + const metaOpt = { + versionId + }; + const objectMeta = await this._getObjectMeta(sourceData.sourceBucketName, sourceData.sourceKey, metaOpt); + const fileSize = objectMeta.res.headers['content-length']; + sourceData.startOffset = sourceData.startOffset || 0; + sourceData.endOffset = sourceData.endOffset || fileSize; + + if (options.checkpoint && options.checkpoint.uploadId) { + return await this._resumeMultipartCopy(options.checkpoint, sourceData, options); } - get upstreams () { - return this[kClients] - .filter(dispatcher => dispatcher.closed !== true && dispatcher.destroyed !== true) - .map((p) => p[kUrl].origin) + const minPartSize = 100 * 1024; + + const copySize = sourceData.endOffset - sourceData.startOffset; + if (copySize < minPartSize) { + throw new Error(`copySize must not be smaller than ${minPartSize}`); } - [kGetDispatcher] () { - // We validate that pools is greater than 0, - // otherwise we would have to wait until an upstream - // is added, which might never happen. 
- if (this[kClients].length === 0) { - throw new BalancedPoolMissingUpstreamError() - } + if (options.partSize && options.partSize < minPartSize) { + throw new Error(`partSize must not be smaller than ${minPartSize}`); + } - const dispatcher = this[kClients].find(dispatcher => ( - !dispatcher[kNeedDrain] && - dispatcher.closed !== true && - dispatcher.destroyed !== true - )) + const init = await this.initMultipartUpload(name, options); + const { uploadId } = init; + const partSize = this._getPartSize(copySize, options.partSize); - if (!dispatcher) { - return - } + const checkpoint = { + name, + copySize, + partSize, + uploadId, + doneParts: [] + }; - const allClientsBusy = this[kClients].map(pool => pool[kNeedDrain]).reduce((a, b) => a && b, true) + if (options && options.progress) { + await options.progress(0, checkpoint, init.res); + } - if (allClientsBusy) { - return - } + return await this._resumeMultipartCopy(checkpoint, sourceData, options); +}; - let counter = 0 +/* + * Resume multipart copy from checkpoint. The checkpoint will be + * updated after each successful part copy. + * @param {Object} checkpoint the checkpoint + * @param {Object} options + */ +proto._resumeMultipartCopy = async function _resumeMultipartCopy(checkpoint, sourceData, options) { + if (this.isCancel()) { + throw this._makeCancelEvent(); + } + const { versionId = null } = options; + const metaOpt = { + versionId + }; + const { copySize, partSize, uploadId, doneParts, name } = checkpoint; - let maxWeightIndex = this[kClients].findIndex(pool => !pool[kNeedDrain]) + const partOffs = this._divideMultipartCopyParts(copySize, partSize, sourceData.startOffset); + const numParts = partOffs.length; - while (counter++ < this[kClients].length) { - this[kIndex] = (this[kIndex] + 1) % this[kClients].length - const pool = this[kClients][this[kIndex]] + const uploadPartCopyOptions = { + headers: {} + }; - // find pool index with the largest weight - if (pool[kWeight] > this[kClients][maxWeightIndex][kWeight] && !pool[kNeedDrain]) { - maxWeightIndex = this[kIndex] - } + if (options.copyheaders) { + copy(options.copyheaders).to(uploadPartCopyOptions.headers); + } + if (versionId) { + copy(metaOpt).to(uploadPartCopyOptions); + } - // decrease the current weight every `this[kClients].length`. - if (this[kIndex] === 0) { - // Set the current weight to the next lower weight. 
- this[kCurrentWeight] = this[kCurrentWeight] - this[kGreatestCommonDivisor] + const uploadPartJob = function uploadPartJob(self, partNo, source) { + return new Promise(async (resolve, reject) => { + try { + if (!self.isCancel()) { + const pi = partOffs[partNo - 1]; + const range = `${pi.start}-${pi.end - 1}`; + + let result; + try { + result = await self.uploadPartCopy(name, uploadId, partNo, range, source, uploadPartCopyOptions); + } catch (error) { + if (error.status === 404) { + throw self._makeAbortEvent(); + } + throw error; + } + if (!self.isCancel()) { + debug(`content-range ${result.res.headers['content-range']}`); + doneParts.push({ + number: partNo, + etag: result.res.headers.etag + }); + checkpoint.doneParts = doneParts; - if (this[kCurrentWeight] <= 0) { - this[kCurrentWeight] = this[kMaxWeightPerServer] + if (options && options.progress) { + await options.progress(doneParts.length / numParts, checkpoint, result.res); + } + } } + resolve(); + } catch (err) { + err.partNum = partNo; + reject(err); } - if (pool[kWeight] >= this[kCurrentWeight] && (!pool[kNeedDrain])) { - return pool + }); + }; + + const all = Array.from(new Array(numParts), (x, i) => i + 1); + const done = doneParts.map(p => p.number); + const todo = all.filter(p => done.indexOf(p) < 0); + const defaultParallel = 5; + const parallel = options.parallel || defaultParallel; + + if (this.checkBrowserAndVersion('Internet Explorer', '10') || parallel === 1) { + for (let i = 0; i < todo.length; i++) { + if (this.isCancel()) { + throw this._makeCancelEvent(); } + /* eslint no-await-in-loop: [0] */ + await uploadPartJob(this, todo[i], sourceData); } + } else { + // upload in parallel + const errors = await this._parallelNode(todo, parallel, uploadPartJob, sourceData); - this[kCurrentWeight] = this[kClients][maxWeightIndex][kWeight] - this[kIndex] = maxWeightIndex - return this[kClients][maxWeightIndex] + const abortEvent = errors.find(err => err.name === 'abort'); + if (abortEvent) throw abortEvent; + + if (this.isCancel()) { + throw this._makeCancelEvent(); + } + + // check errors after all jobs are completed + if (errors && errors.length > 0) { + const err = errors[0]; + err.message = `Failed to copy some parts with error: ${err.toString()} part_num: ${err.partNum}`; + throw err; + } } -} -module.exports = BalancedPool + return await this.completeMultipartUpload(name, uploadId, doneParts, options); +}; + +proto._divideMultipartCopyParts = function _divideMultipartCopyParts(fileSize, partSize, startOffset) { + const numParts = Math.ceil(fileSize / partSize); + + const partOffs = []; + for (let i = 0; i < numParts; i++) { + const start = partSize * i + startOffset; + const end = Math.min(start + partSize, fileSize + startOffset); + + partOffs.push({ + start, + end + }); + } + + return partOffs; +}; + +/** + * Get Object Meta + * @param {String} bucket bucket name + * @param {String} name object name + * @param {Object} options + */ +proto._getObjectMeta = async function _getObjectMeta(bucket, name, options) { + const currentBucket = this.getBucket(); + this.setBucket(bucket); + const data = await this.head(name, options); + this.setBucket(currentBucket); + return data; +}; /***/ }), -/***/ 6101: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 65976: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -"use strict"; +const copy = __nccwpck_require__(30952); +const callback = __nccwpck_require__(26725); +const { deepCopyWith } = __nccwpck_require__(97150); +const { isBuffer } = 
__nccwpck_require__(55850); +const { omit } = __nccwpck_require__(72577); +const proto = exports; + +/** + * List the on-going multipart uploads + * https://help.aliyun.com/document_detail/31997.html + * @param {Object} options + * @return {Array} the multipart uploads + */ +proto.listUploads = async function listUploads(query, options) { + options = options || {}; + const opt = {}; + copy(options).to(opt); + opt.subres = 'uploads'; + const params = this._objectRequestParams('GET', '', opt); + params.query = query; + params.xmlResponse = true; + params.successStatuses = [200]; + + const result = await this.request(params); + let uploads = result.data.Upload || []; + if (!Array.isArray(uploads)) { + uploads = [uploads]; + } + uploads = uploads.map(up => ({ + name: up.Key, + uploadId: up.UploadId, + initiated: up.Initiated + })); -const { kConstruct } = __nccwpck_require__(9174) -const { urlEquals, fieldValues: getFieldValues } = __nccwpck_require__(2396) -const { kEnumerableProperty, isDisturbed } = __nccwpck_require__(3983) -const { kHeadersList } = __nccwpck_require__(2785) -const { webidl } = __nccwpck_require__(1744) -const { Response, cloneResponse } = __nccwpck_require__(7823) -const { Request } = __nccwpck_require__(8359) -const { kState, kHeaders, kGuard, kRealm } = __nccwpck_require__(5861) -const { fetching } = __nccwpck_require__(4881) -const { urlIsHttpHttpsScheme, createDeferredPromise, readAllBytes } = __nccwpck_require__(2538) -const assert = __nccwpck_require__(9491) -const { getGlobalDispatcher } = __nccwpck_require__(1892) + return { + res: result.res, + uploads, + bucket: result.data.Bucket, + nextKeyMarker: result.data.NextKeyMarker, + nextUploadIdMarker: result.data.NextUploadIdMarker, + isTruncated: result.data.IsTruncated === 'true' + }; +}; /** - * @see https://w3c.github.io/ServiceWorker/#dfn-cache-batch-operation - * @typedef {Object} CacheBatchOperation - * @property {'delete' | 'put'} type - * @property {any} request - * @property {any} response - * @property {import('../../types/cache').CacheQueryOptions} options + * List the done uploadPart parts + * @param {String} name object name + * @param {String} uploadId multipart upload id + * @param {Object} query + * {Number} query.max-parts The maximum part number in the response of the OSS. Default value: 1000 + * {Number} query.part-number-marker Starting position of a specific list. + * {String} query.encoding-type Specify the encoding of the returned content and the encoding type. 
+ * @param {Object} options + * @return {Object} result */ +proto.listParts = async function listParts(name, uploadId, query, options) { + options = options || {}; + const opt = {}; + copy(options).to(opt); + opt.subres = { + uploadId + }; + const params = this._objectRequestParams('GET', name, opt); + params.query = query; + params.xmlResponse = true; + params.successStatuses = [200]; + + const result = await this.request(params); + + return { + res: result.res, + uploadId: result.data.UploadId, + bucket: result.data.Bucket, + name: result.data.Key, + partNumberMarker: result.data.PartNumberMarker, + nextPartNumberMarker: result.data.NextPartNumberMarker, + maxParts: result.data.MaxParts, + isTruncated: result.data.IsTruncated, + parts: result.data.Part || [] + }; +}; /** - * @see https://w3c.github.io/ServiceWorker/#dfn-request-response-list - * @typedef {[any, any][]} requestResponseList + * Abort a multipart upload transaction + * @param {String} name the object name + * @param {String} uploadId the upload id + * @param {Object} options */ +proto.abortMultipartUpload = async function abortMultipartUpload(name, uploadId, options) { + this._stop(); + options = options || {}; + const opt = {}; + copy(options).to(opt); + opt.subres = { uploadId }; + const params = this._objectRequestParams('DELETE', name, opt); + params.successStatuses = [204]; -class Cache { - /** - * @see https://w3c.github.io/ServiceWorker/#dfn-relevant-request-response-list - * @type {requestResponseList} - */ - #relevantRequestResponseList + const result = await this.request(params); + return { + res: result.res + }; +}; - constructor () { - if (arguments[0] !== kConstruct) { - webidl.illegalConstructor() - } +/** + * Initiate a multipart upload transaction + * @param {String} name the object name + * @param {Object} options + * @return {String} upload id + */ +proto.initMultipartUpload = async function initMultipartUpload(name, options) { + options = options || {}; + const opt = {}; + copy(options).to(opt); + opt.headers = opt.headers || {}; + this._convertMetaToHeaders(options.meta, opt.headers); - this.#relevantRequestResponseList = arguments[1] - } + opt.subres = 'uploads'; + const params = this._objectRequestParams('POST', name, opt); + params.mime = options.mime; + params.xmlResponse = true; + params.successStatuses = [200]; - async match (request, options = {}) { - webidl.brandCheck(this, Cache) - webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.match' }) + const result = await this.request(params); - request = webidl.converters.RequestInfo(request) - options = webidl.converters.CacheQueryOptions(options) + return { + res: result.res, + bucket: result.data.Bucket, + name: result.data.Key, + uploadId: result.data.UploadId + }; +}; - const p = await this.matchAll(request, options) +/** + * Upload a part in a multipart upload transaction + * @param {String} name the object name + * @param {String} uploadId the upload id + * @param {Integer} partNo the part number + * @param {File} file upload File, whole File + * @param {Integer} start part start bytes e.g: 102400 + * @param {Integer} end part end bytes e.g: 204800 + * @param {Object} options + */ +proto.uploadPart = async function uploadPart(name, uploadId, partNo, file, start, end, options) { + const data = { + size: end - start + }; + const isBrowserEnv = process && process.browser; + isBrowserEnv + ? 
(data.content = await this._createBuffer(file, start, end)) + : (data.stream = await this._createStream(file, start, end)); + return await this._uploadPart(name, uploadId, partNo, data, options); +}; - if (p.length === 0) { - return - } +/** + * Complete a multipart upload transaction + * @param {String} name the object name + * @param {String} uploadId the upload id + * @param {Array} parts the uploaded parts, each in the structure: + * {Integer} number partNo + * {String} etag part etag uploadPartCopy result.res.header.etag + * @param {Object} options + * {Object} options.callback The callback parameter is composed of a JSON string encoded in Base64 + * {String} options.callback.url the OSS sends a callback request to this URL + * {String} options.callback.host The host header value for initiating callback requests + * {String} options.callback.body The value of the request body when a callback is initiated + * {String} options.callback.contentType The Content-Type of the callback requests initiatiated + * {Object} options.callback.customValue Custom parameters are a map of key-values, e.g: + * customValue = { + * key1: 'value1', + * key2: 'value2' + * } + */ +proto.completeMultipartUpload = async function completeMultipartUpload(name, uploadId, parts, options) { + const completeParts = parts + .concat() + .sort((a, b) => a.number - b.number) + .filter((item, index, arr) => !index || item.number !== arr[index - 1].number); + let xml = '\n\n'; + for (let i = 0; i < completeParts.length; i++) { + const p = completeParts[i]; + xml += '\n'; + xml += `${p.number}\n`; + xml += `${p.etag}\n`; + xml += '\n'; + } + xml += ''; - return p[0] - } + options = options || {}; + let opt = {}; + opt = deepCopyWith(options, _ => { + if (isBuffer(_)) return null; + }); + opt.subres = { uploadId }; + opt.headers = omit(opt.headers, ['x-oss-server-side-encryption', 'x-oss-storage-class']); - async matchAll (request = undefined, options = {}) { - webidl.brandCheck(this, Cache) + const params = this._objectRequestParams('POST', name, opt); + callback.encodeCallback(params, opt); + params.mime = 'xml'; + params.content = xml; - if (request !== undefined) request = webidl.converters.RequestInfo(request) - options = webidl.converters.CacheQueryOptions(options) + if (!(params.headers && params.headers['x-oss-callback'])) { + params.xmlResponse = true; + } + params.successStatuses = [200]; + const result = await this.request(params); - // 1. - let r = null + if (options.progress) { + await options.progress(1, null, result.res); + } - // 2. - if (request !== undefined) { - if (request instanceof Request) { - // 2.1.1 - r = request[kState] + const ret = { + res: result.res, + bucket: params.bucket, + name, + etag: result.res.headers.etag + }; - // 2.1.2 - if (r.method !== 'GET' && !options.ignoreMethod) { - return [] - } - } else if (typeof request === 'string') { - // 2.2.1 - r = new Request(request)[kState] - } - } + if (params.headers && params.headers['x-oss-callback']) { + ret.data = JSON.parse(result.data.toString()); + } - // 5. 
- // 5.1 - const responses = [] + return ret; +}; - // 5.2 - if (request === undefined) { - // 5.2.1 - for (const requestResponse of this.#relevantRequestResponseList) { - responses.push(requestResponse[1]) - } - } else { // 5.3 - // 5.3.1 - const requestResponses = this.#queryCache(r, options) +/** + * Upload a part in a multipart upload transaction + * @param {String} name the object name + * @param {String} uploadId the upload id + * @param {Integer} partNo the part number + * @param {Object} data the body data + * @param {Object} options + */ +proto._uploadPart = async function _uploadPart(name, uploadId, partNo, data, options) { + options = options || {}; + const opt = {}; + copy(options).to(opt); + opt.headers = opt.headers || {}; + opt.headers['Content-Length'] = data.size; + + // Uploading shards does not require x-oss server side encryption + opt.headers = omit(opt.headers, ['x-oss-server-side-encryption']); + opt.subres = { + partNumber: partNo, + uploadId + }; + + const params = this._objectRequestParams('PUT', name, opt); + params.mime = opt.mime; + const isBrowserEnv = process && process.browser; + isBrowserEnv ? (params.content = data.content) : (params.stream = data.stream); + params.successStatuses = [200]; + params.disabledMD5 = options.disabledMD5; + + const result = await this.request(params); + + if (!result.res.headers.etag) { + throw new Error( + 'Please set the etag of expose-headers in OSS \n https://help.aliyun.com/document_detail/32069.html' + ); + } + if (data.stream) { + data.stream = null; + params.stream = null; + } + return { + name, + etag: result.res.headers.etag, + res: result.res + }; +}; - // 5.3.2 - for (const requestResponse of requestResponses) { - responses.push(requestResponse[1]) - } - } - // 5.4 - // We don't implement CORs so we don't need to loop over the responses, yay! +/***/ }), - // 5.5.1 - const responseList = [] +/***/ 17330: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // 5.5.2 - for (const response of responses) { - // 5.5.2.1 - const responseObject = new Response(response.body?.source ?? null) - const body = responseObject[kState].body - responseObject[kState] = response - responseObject[kState].body = body - responseObject[kHeaders][kHeadersList] = response.headersList - responseObject[kHeaders][kGuard] = 'immutable' +const urlutil = __nccwpck_require__(57310); +const utility = __nccwpck_require__(33877); +const copy = __nccwpck_require__(30952); +const signHelper = __nccwpck_require__(5150); +const { isIP } = __nccwpck_require__(42439); +const { setSTSToken } = __nccwpck_require__(84622); +const { isFunction } = __nccwpck_require__(18818); - responseList.push(responseObject) - } +const proto = exports; - // 6. 
- return Object.freeze(responseList) +/** + * asyncSignatureUrl + * @param {String} name object name + * @param {Object} options options + * @param {boolean} [strictObjectNameValidation=true] the flag of verifying object name strictly + */ +proto.asyncSignatureUrl = async function asyncSignatureUrl(name, options, strictObjectNameValidation = true) { + if (isIP(this.options.endpoint.hostname)) { + throw new Error('can not get the object URL when endpoint is IP'); } - async add (request) { - webidl.brandCheck(this, Cache) - webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.add' }) + if (strictObjectNameValidation && /^\?/.test(name)) { + throw new Error(`Invalid object name ${name}`); + } - request = webidl.converters.RequestInfo(request) + options = options || {}; + name = this._objectName(name); + options.method = options.method || 'GET'; + const expires = utility.timestamp() + (options.expires || 1800); + const params = { + bucket: this.options.bucket, + object: name + }; - // 1. - const requests = [request] + const resource = this._getResource(params); - // 2. - const responseArrayPromise = this.addAll(requests) + if (this.options.stsToken && isFunction(this.options.refreshSTSToken)) { + await setSTSToken.call(this); + } - // 3. - return await responseArrayPromise + if (this.options.stsToken) { + options['security-token'] = this.options.stsToken; } - async addAll (requests) { - webidl.brandCheck(this, Cache) - webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.addAll' }) + const signRes = signHelper._signatureForURL(this.options.accessKeySecret, options, resource, expires); - requests = webidl.converters['sequence'](requests) + const url = urlutil.parse(this._getReqUrl(params)); + url.query = { + OSSAccessKeyId: this.options.accessKeyId, + Expires: expires, + Signature: signRes.Signature + }; - // 1. - const responsePromises = [] + copy(signRes.subResource).to(url.query); - // 2. - const requestList = [] + return url.format(); +}; - // 3. - for (const request of requests) { - if (typeof request === 'string') { - continue - } - // 3.1 - const r = request[kState] +/***/ }), - // 3.2 - if (!urlIsHttpHttpsScheme(r.url) || r.method !== 'GET') { - throw webidl.errors.exception({ - header: 'Cache.addAll', - message: 'Expected http/s scheme when method is not GET.' - }) - } +/***/ 32273: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +const { policy2Str } = __nccwpck_require__(30061); +const signHelper = __nccwpck_require__(5150); +const { isObject } = __nccwpck_require__(81269); + +const proto = exports; + +/** + * @param {Object or JSON} policy specifies the validity of the fields in the request. + * @return {Object} params + * {String} params.OSSAccessKeyId + * {String} params.Signature + * {String} params.policy JSON text encoded with UTF-8 and Base64. + */ +proto.calculatePostSignature = function calculatePostSignature(policy) { + if (!isObject(policy) && typeof policy !== 'string') { + throw new Error('policy must be JSON string or Object'); + } + if (!isObject(policy)) { + try { + JSON.stringify(JSON.parse(policy)); + } catch (error) { + throw new Error('policy must be JSON string or Object'); } + } + policy = Buffer.from(policy2Str(policy), 'utf8').toString('base64'); - // 4. - /** @type {ReturnType[]} */ - const fetchControllers = [] + const Signature = signHelper.computeSignature(this.options.accessKeySecret, policy); - // 5. 
-    for (const request of requests) {
-      // 5.1
-      const r = new Request(request)[kState]
+  const query = {
+    OSSAccessKeyId: this.options.accessKeyId,
+    Signature,
+    policy
+  };
+  return query;
+};
-      // 5.2
-      if (!urlIsHttpHttpsScheme(r.url)) {
-        throw webidl.errors.exception({
-          header: 'Cache.addAll',
-          message: 'Expected http/s scheme.'
-        })
-      }
-      // 5.4
-      r.initiator = 'fetch'
-      r.destination = 'subresource'
+/***/ }),
-      // 5.5
-      requestList.push(r)
+/***/ 45815:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-      // 5.6
-      const responsePromise = createDeferredPromise()
+const { checkBucketName: _checkBucketName } = __nccwpck_require__(30922);
-      // 5.7
-      fetchControllers.push(fetching({
-        request: r,
-        dispatcher: getGlobalDispatcher(),
-        processResponse (response) {
-          // 1.
-          if (response.type === 'error' || response.status === 206 || response.status < 200 || response.status > 299) {
-            responsePromise.reject(webidl.errors.exception({
-              header: 'Cache.addAll',
-              message: 'Received an invalid status code or the request failed.'
-            }))
-          } else if (response.headersList.contains('vary')) { // 2.
-            // 2.1
-            const fieldValues = getFieldValues(response.headersList.get('vary'))
+const proto = exports;
-            // 2.2
-            for (const fieldValue of fieldValues) {
-              // 2.2.1
-              if (fieldValue === '*') {
-                responsePromise.reject(webidl.errors.exception({
-                  header: 'Cache.addAll',
-                  message: 'invalid vary field value'
-                }))
+const REPLACE_HEDERS = [
+  'content-type',
+  'content-encoding',
+  'content-language',
+  'content-disposition',
+  'cache-control',
+  'expires'
+];
-                for (const controller of fetchControllers) {
-                  controller.abort()
-                }
+proto.copy = async function copy(name, sourceName, bucketName, options) {
+  if (typeof bucketName === 'object') {
+    options = bucketName; // backward compatibility: in older versions the third argument was the options object
+  }
+  options = options || {};
+  options.headers = options.headers || {};
-                return
-              }
-            }
-          }
-        },
-        processResponseEndOfBody (response) {
-          // 1.
-          if (response.aborted) {
-            responsePromise.reject(new DOMException('aborted', 'AbortError'))
-            return
-          }
+  Object.keys(options.headers).forEach(key => {
+    options.headers[`x-oss-copy-source-${key.toLowerCase()}`] = options.headers[key];
+  });
+  if (options.meta || Object.keys(options.headers).find(_ => REPLACE_HEDERS.includes(_.toLowerCase()))) {
+    options.headers['x-oss-metadata-directive'] = 'REPLACE';
+  }
+  this._convertMetaToHeaders(options.meta, options.headers);
-          // 2.
-          responsePromise.resolve(response)
-        }
-      }))
+  sourceName = this._getSourceName(sourceName, bucketName);
-      // 5.8
-      responsePromises.push(responsePromise.promise)
-    }
+  if (options.versionId) {
+    sourceName = `${sourceName}?versionId=${options.versionId}`;
+  }
-    // 6.
-    const p = Promise.all(responsePromises)
+  options.headers['x-oss-copy-source'] = sourceName;
-    // 7.
- const responses = await p + const params = this._objectRequestParams('PUT', name, options); + params.xmlResponse = true; + params.successStatuses = [200, 304]; - // 7.1 - const operations = [] + const result = await this.request(params); - // 7.2 - let index = 0 + let { data } = result; + if (data) { + data = { + etag: data.ETag, + lastModified: data.LastModified + }; + } - // 7.3 - for (const response of responses) { - // 7.3.1 - /** @type {CacheBatchOperation} */ - const operation = { - type: 'put', // 7.3.2 - request: requestList[index], // 7.3.3 - response // 7.3.4 - } + return { + data, + res: result.res + }; +}; - operations.push(operation) // 7.3.5 +// todo delete +proto._getSourceName = function _getSourceName(sourceName, bucketName) { + if (typeof bucketName === 'string') { + sourceName = this._objectName(sourceName); + } else if (sourceName[0] !== '/') { + bucketName = this.options.bucket; + } else { + bucketName = sourceName.replace(/\/(.+?)(\/.*)/, '$1'); + sourceName = sourceName.replace(/(\/.+?\/)(.*)/, '$2'); + } - index++ // 7.3.6 - } + _checkBucketName(bucketName); - // 7.5 - const cacheJobPromise = createDeferredPromise() + sourceName = encodeURIComponent(sourceName); - // 7.6.1 - let errorData = null + sourceName = `/${bucketName}/${sourceName}`; + return sourceName; +}; - // 7.6.2 - try { - this.#batchCacheOperations(operations) - } catch (e) { - errorData = e - } - // 7.6.3 - queueMicrotask(() => { - // 7.6.3.1 - if (errorData === null) { - cacheJobPromise.resolve(undefined) - } else { - // 7.6.3.2 - cacheJobPromise.reject(errorData) - } - }) +/***/ }), - // 7.7 - return cacheJobPromise.promise +/***/ 45515: +/***/ ((__unused_webpack_module, exports) => { + +const proto = exports; +/** + * delete + * @param {String} name - object name + * @param {Object} options + * @param {{res}} + */ + +proto.delete = async function _delete(name, options = {}) { + options.subres = Object.assign({}, options.subres); + if (options.versionId) { + options.subres.versionId = options.versionId; } + const params = this._objectRequestParams('DELETE', name, options); + params.successStatuses = [204]; - async put (request, response) { - webidl.brandCheck(this, Cache) - webidl.argumentLengthCheck(arguments, 2, { header: 'Cache.put' }) + const result = await this.request(params); - request = webidl.converters.RequestInfo(request) - response = webidl.converters.Response(response) + return { + res: result.res + }; +}; - // 1. - let innerRequest = null - // 2. - if (request instanceof Request) { - innerRequest = request[kState] - } else { // 3. - innerRequest = new Request(request)[kState] +/***/ }), + +/***/ 53103: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +/* eslint-disable object-curly-newline */ +const utility = __nccwpck_require__(33877); +const { obj2xml } = __nccwpck_require__(96846); + +const proto = exports; + +proto.deleteMulti = async function deleteMulti(names, options = {}) { + const objects = []; + if (!names || !names.length) { + throw new Error('names is required'); + } + for (let i = 0; i < names.length; i++) { + const object = {}; + if (typeof names[i] === 'string') { + object.Key = utility.escape(this._objectName(names[i])); + } else { + const { key, versionId } = names[i]; + object.Key = utility.escape(this._objectName(key)); + object.VersionId = versionId; } + objects.push(object); + } - // 4. 
- if (!urlIsHttpHttpsScheme(innerRequest.url) || innerRequest.method !== 'GET') { - throw webidl.errors.exception({ - header: 'Cache.put', - message: 'Expected an http/s scheme when method is not GET' - }) + const paramXMLObj = { + Delete: { + Quiet: !!options.quiet, + Object: objects } + }; - // 5. - const innerResponse = response[kState] + const paramXML = obj2xml(paramXMLObj, { + headers: true + }); - // 6. - if (innerResponse.status === 206) { - throw webidl.errors.exception({ - header: 'Cache.put', - message: 'Got 206 status' - }) + options.subres = Object.assign({ delete: '' }, options.subres); + if (options.versionId) { + options.subres.versionId = options.versionId; + } + const params = this._objectRequestParams('POST', '', options); + params.mime = 'xml'; + params.content = paramXML; + params.xmlResponse = true; + params.successStatuses = [200]; + const result = await this.request(params); + + const r = result.data; + let deleted = (r && r.Deleted) || null; + if (deleted) { + if (!Array.isArray(deleted)) { + deleted = [deleted]; } + } + return { + res: result.res, + deleted: deleted || [] + }; +}; - // 7. - if (innerResponse.headersList.contains('vary')) { - // 7.1. - const fieldValues = getFieldValues(innerResponse.headersList.get('vary')) - // 7.2. - for (const fieldValue of fieldValues) { - // 7.2.1 - if (fieldValue === '*') { - throw webidl.errors.exception({ - header: 'Cache.put', - message: 'Got * vary field value' - }) - } - } - } +/***/ }), - // 8. - if (innerResponse.body && (isDisturbed(innerResponse.body.stream) || innerResponse.body.stream.locked)) { - throw webidl.errors.exception({ - header: 'Cache.put', - message: 'Response body is locked or disturbed' - }) - } - - // 9. - const clonedResponse = cloneResponse(innerResponse) +/***/ 17031: +/***/ ((__unused_webpack_module, exports) => { - // 10. - const bodyReadPromise = createDeferredPromise() +const proto = exports; +/** + * deleteObjectTagging + * @param {String} name - object name + * @param {Object} options + */ - // 11. - if (innerResponse.body != null) { - // 11.1 - const stream = innerResponse.body.stream +proto.deleteObjectTagging = async function deleteObjectTagging(name, options = {}) { + options.subres = Object.assign({ tagging: '' }, options.subres); + if (options.versionId) { + options.subres.versionId = options.versionId; + } + name = this._objectName(name); + const params = this._objectRequestParams('DELETE', name, options); + params.successStatuses = [204]; + const result = await this.request(params); - // 11.2 - const reader = stream.getReader() + return { + status: result.status, + res: result.res + }; +}; - // 11.3 - readAllBytes(reader).then(bodyReadPromise.resolve, bodyReadPromise.reject) - } else { - bodyReadPromise.resolve(undefined) - } - // 12. - /** @type {CacheBatchOperation[]} */ - const operations = [] +/***/ }), - // 13. - /** @type {CacheBatchOperation} */ - const operation = { - type: 'put', // 14. - request: innerRequest, // 15. - response: clonedResponse // 16. - } +/***/ 42659: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // 17. - operations.push(operation) +const urlutil = __nccwpck_require__(57310); +const { isIP } = __nccwpck_require__(42439); - // 19. 
- const bytes = await bodyReadPromise.promise +const proto = exports; - if (clonedResponse.body != null) { - clonedResponse.body.source = bytes - } +/** + * Get Object url by name + * @param {String} name - object name + * @param {String} [baseUrl] - If provide `baseUrl`, will use `baseUrl` instead the default `endpoint and bucket`. + * @return {String} object url include bucket + */ +proto.generateObjectUrl = function generateObjectUrl(name, baseUrl) { + if (isIP(this.options.endpoint.hostname)) { + throw new Error('can not get the object URL when endpoint is IP'); + } + if (!baseUrl) { + baseUrl = this.options.endpoint.format(); + const copyUrl = urlutil.parse(baseUrl); + const { bucket } = this.options; + + copyUrl.hostname = `${bucket}.${copyUrl.hostname}`; + copyUrl.host = `${bucket}.${copyUrl.host}`; + baseUrl = copyUrl.format(); + } else if (baseUrl[baseUrl.length - 1] !== '/') { + baseUrl += '/'; + } + return baseUrl + this._escape(this._objectName(name)); +}; - // 19.1 - const cacheJobPromise = createDeferredPromise() - // 19.2.1 - let errorData = null +/***/ }), - // 19.2.2 - try { - this.#batchCacheOperations(operations) - } catch (e) { - errorData = e - } +/***/ 21973: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // 19.2.3 - queueMicrotask(() => { - // 19.2.3.1 - if (errorData === null) { - cacheJobPromise.resolve() - } else { // 19.2.3.2 - cacheJobPromise.reject(errorData) - } - }) +const fs = __nccwpck_require__(57147); +const is = __nccwpck_require__(65565); - return cacheJobPromise.promise +const proto = exports; +/** + * get + * @param {String} name - object name + * @param {String | Stream} file + * @param {Object} options + * @param {{res}} + */ +proto.get = async function get(name, file, options = {}) { + let writeStream = null; + let needDestroy = false; + + if (is.writableStream(file)) { + writeStream = file; + } else if (is.string(file)) { + writeStream = fs.createWriteStream(file); + needDestroy = true; + } else { + // get(name, options) + options = file; } - async delete (request, options = {}) { - webidl.brandCheck(this, Cache) - webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.delete' }) - - request = webidl.converters.RequestInfo(request) - options = webidl.converters.CacheQueryOptions(options) + options = options || {}; + const isBrowserEnv = process && process.browser; + const responseCacheControl = options.responseCacheControl === null ? '' : 'no-cache'; + const defaultSubresOptions = + isBrowserEnv && responseCacheControl ? 
{ 'response-cache-control': responseCacheControl } : {}; + options.subres = Object.assign(defaultSubresOptions, options.subres); - /** - * @type {Request} - */ - let r = null + if (options.versionId) { + options.subres.versionId = options.versionId; + } + if (options.process) { + options.subres['x-oss-process'] = options.process; + } - if (request instanceof Request) { - r = request[kState] + let result; + try { + const params = this._objectRequestParams('GET', name, options); + params.writeStream = writeStream; + params.successStatuses = [200, 206, 304]; - if (r.method !== 'GET' && !options.ignoreMethod) { - return false - } - } else { - assert(typeof request === 'string') + result = await this.request(params); - r = new Request(request)[kState] + if (needDestroy) { + writeStream.destroy(); } - - /** @type {CacheBatchOperation[]} */ - const operations = [] - - /** @type {CacheBatchOperation} */ - const operation = { - type: 'delete', - request: r, - options + } catch (err) { + if (needDestroy) { + writeStream.destroy(); + // should delete the exists file before throw error + await this._deleteFileSafe(file); } + throw err; + } - operations.push(operation) + return { + res: result.res, + content: result.data + }; +}; - const cacheJobPromise = createDeferredPromise() - let errorData = null - let requestResponses +/***/ }), - try { - requestResponses = this.#batchCacheOperations(operations) - } catch (e) { - errorData = e - } +/***/ 86599: +/***/ ((__unused_webpack_module, exports) => { - queueMicrotask(() => { - if (errorData === null) { - cacheJobPromise.resolve(!!requestResponses?.length) - } else { - cacheJobPromise.reject(errorData) - } - }) +const proto = exports; - return cacheJobPromise.promise +/* + * Get object's ACL + * @param {String} name the object key + * @param {Object} options + * @return {Object} + */ +proto.getACL = async function getACL(name, options = {}) { + options.subres = Object.assign({ acl: '' }, options.subres); + if (options.versionId) { + options.subres.versionId = options.versionId; } + name = this._objectName(name); - /** - * @see https://w3c.github.io/ServiceWorker/#dom-cache-keys - * @param {any} request - * @param {import('../../types/cache').CacheQueryOptions} options - * @returns {readonly Request[]} - */ - async keys (request = undefined, options = {}) { - webidl.brandCheck(this, Cache) + const params = this._objectRequestParams('GET', name, options); + params.successStatuses = [200]; + params.xmlResponse = true; - if (request !== undefined) request = webidl.converters.RequestInfo(request) - options = webidl.converters.CacheQueryOptions(options) + const result = await this.request(params); - // 1. - let r = null + return { + acl: result.data.AccessControlList.Grant, + owner: { + id: result.data.Owner.ID, + displayName: result.data.Owner.DisplayName + }, + res: result.res + }; +}; - // 2. - if (request !== undefined) { - // 2.1 - if (request instanceof Request) { - // 2.1.1 - r = request[kState] - // 2.1.2 - if (r.method !== 'GET' && !options.ignoreMethod) { - return [] - } - } else if (typeof request === 'string') { // 2.2 - r = new Request(request)[kState] - } - } +/***/ }), - // 4. - const promise = createDeferredPromise() +/***/ 86595: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // 5. 
- // 5.1 - const requests = [] +"use strict"; - // 5.2 - if (request === undefined) { - // 5.2.1 - for (const requestResponse of this.#relevantRequestResponseList) { - // 5.2.1.1 - requests.push(requestResponse[0]) - } - } else { // 5.3 - // 5.3.1 - const requestResponses = this.#queryCache(r, options) +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getAsyncFetch = void 0; +const formatObjKey_1 = __nccwpck_require__(72095); +/* + * getAsyncFetch + * @param {String} asyncFetch taskId + * @param {Object} options + */ +async function getAsyncFetch(taskId, options = {}) { + options.subres = Object.assign({ asyncFetch: '' }, options.subres); + options.headers = options.headers || {}; + const params = this._objectRequestParams('GET', '', options); + params.headers['x-oss-task-id'] = taskId; + params.successStatuses = [200]; + params.xmlResponse = true; + const result = await this.request(params); + const taskInfo = formatObjKey_1.formatObjKey(result.data.TaskInfo, 'firstLowerCase'); + return { + res: result.res, + status: result.status, + state: result.data.State, + taskInfo + }; +} +exports.getAsyncFetch = getAsyncFetch; - // 5.3.2 - for (const requestResponse of requestResponses) { - // 5.3.2.1 - requests.push(requestResponse[0]) - } - } - // 5.4 - queueMicrotask(() => { - // 5.4.1 - const requestList = [] +/***/ }), - // 5.4.2 - for (const request of requests) { - const requestObject = new Request('https://a') - requestObject[kState] = request - requestObject[kHeaders][kHeadersList] = request.headersList - requestObject[kHeaders][kGuard] = 'immutable' - requestObject[kRealm] = request.client +/***/ 27406: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // 5.4.2.1 - requestList.push(requestObject) +/* eslint-disable no-use-before-define */ +const proto = exports; +const { isObject } = __nccwpck_require__(81269); +const { isArray } = __nccwpck_require__(71954); + +proto.getBucketVersions = getBucketVersions; +proto.listObjectVersions = getBucketVersions; + +async function getBucketVersions(query = {}, options = {}) { + // prefix, key-marker, max-keys, delimiter, encoding-type, version-id-marker + if (query.versionIdMarker && query.keyMarker === undefined) { + throw new Error('A version-id marker cannot be specified without a key marker'); + } + + options.subres = Object.assign({ versions: '' }, options.subres); + if (options.versionId) { + options.subres.versionId = options.versionId; + } + const params = this._objectRequestParams('GET', '', options); + params.xmlResponse = true; + params.successStatuses = [200]; + + params.query = formatQuery(query); + + const result = await this.request(params); + let objects = result.data.Version || []; + let deleteMarker = result.data.DeleteMarker || []; + const that = this; + if (objects) { + if (!Array.isArray(objects)) { + objects = [objects]; + } + objects = objects.map(obj => ({ + name: obj.Key, + url: that._objectUrl(obj.Key), + lastModified: obj.LastModified, + isLatest: obj.IsLatest === 'true', + versionId: obj.VersionId, + etag: obj.ETag, + type: obj.Type, + size: Number(obj.Size), + storageClass: obj.StorageClass, + owner: { + id: obj.Owner.ID, + displayName: obj.Owner.DisplayName } - - // 5.4.3 - promise.resolve(Object.freeze(requestList)) - }) - - return promise.promise + })); + } + if (deleteMarker) { + if (!isArray(deleteMarker)) { + deleteMarker = [deleteMarker]; + } + deleteMarker = deleteMarker.map(obj => ({ + name: obj.Key, + lastModified: obj.LastModified, + versionId: obj.VersionId, + 
owner: { + id: obj.Owner.ID, + displayName: obj.Owner.DisplayName + } + })); + } + let prefixes = result.data.CommonPrefixes || null; + if (prefixes) { + if (!isArray(prefixes)) { + prefixes = [prefixes]; + } + prefixes = prefixes.map(item => item.Prefix); + } + return { + res: result.res, + objects, + deleteMarker, + prefixes, + // attirbute of legacy error + nextMarker: result.data.NextKeyMarker || null, + // attirbute of legacy error + NextVersionIdMarker: result.data.NextVersionIdMarker || null, + nextKeyMarker: result.data.NextKeyMarker || null, + nextVersionIdMarker: result.data.NextVersionIdMarker || null, + isTruncated: result.data.IsTruncated === 'true' + }; +} + +function camel2Line(name) { + return name.replace(/([A-Z])/g, '-$1').toLowerCase(); +} + +function formatQuery(query = {}) { + const obj = {}; + if (isObject(query)) { + Object.keys(query).forEach(key => { + obj[camel2Line(key)] = query[key]; + }); } - /** - * @see https://w3c.github.io/ServiceWorker/#batch-cache-operations-algorithm - * @param {CacheBatchOperation[]} operations - * @returns {requestResponseList} - */ - #batchCacheOperations (operations) { - // 1. - const cache = this.#relevantRequestResponseList - - // 2. - const backupCache = [...cache] + return obj; +} - // 3. - const addedItems = [] - // 4.1 - const resultList = [] +/***/ }), - try { - // 4.2 - for (const operation of operations) { - // 4.2.1 - if (operation.type !== 'delete' && operation.type !== 'put') { - throw webidl.errors.exception({ - header: 'Cache.#batchCacheOperations', - message: 'operation type does not match "delete" or "put"' - }) - } +/***/ 14552: +/***/ ((__unused_webpack_module, exports) => { - // 4.2.2 - if (operation.type === 'delete' && operation.response != null) { - throw webidl.errors.exception({ - header: 'Cache.#batchCacheOperations', - message: 'delete operation should not have an associated response' - }) - } +const proto = exports; +/** + * getObjectMeta + * @param {String} name - object name + * @param {Object} options + * @param {{res}} + */ - // 4.2.3 - if (this.#queryCache(operation.request, operation.options, addedItems).length) { - throw new DOMException('???', 'InvalidStateError') - } +proto.getObjectMeta = async function getObjectMeta(name, options) { + options = options || {}; + name = this._objectName(name); + options.subres = Object.assign({ objectMeta: '' }, options.subres); + if (options.versionId) { + options.subres.versionId = options.versionId; + } + const params = this._objectRequestParams('HEAD', name, options); + params.successStatuses = [200]; + const result = await this.request(params); + return { + status: result.status, + res: result.res + }; +}; - // 4.2.4 - let requestResponses - // 4.2.5 - if (operation.type === 'delete') { - // 4.2.5.1 - requestResponses = this.#queryCache(operation.request, operation.options) +/***/ }), - // TODO: the spec is wrong, this is needed to pass WPTs - if (requestResponses.length === 0) { - return [] - } +/***/ 99795: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // 4.2.5.2 - for (const requestResponse of requestResponses) { - const idx = cache.indexOf(requestResponse) - assert(idx !== -1) +const proto = exports; +const { isObject } = __nccwpck_require__(81269); +/** + * getObjectTagging + * @param {String} name - object name + * @param {Object} options + * @return {Object} + */ - // 4.2.5.2.1 - cache.splice(idx, 1) - } - } else if (operation.type === 'put') { // 4.2.6 - // 4.2.6.1 - if (operation.response == null) { - throw 
webidl.errors.exception({ - header: 'Cache.#batchCacheOperations', - message: 'put operation should have an associated response' - }) - } +proto.getObjectTagging = async function getObjectTagging(name, options = {}) { + options.subres = Object.assign({ tagging: '' }, options.subres); + if (options.versionId) { + options.subres.versionId = options.versionId; + } + name = this._objectName(name); + const params = this._objectRequestParams('GET', name, options); + params.successStatuses = [200]; + const result = await this.request(params); + const Tagging = await this.parseXML(result.data); + let { Tag } = Tagging.TagSet; + Tag = Tag && isObject(Tag) ? [Tag] : Tag || []; - // 4.2.6.2 - const r = operation.request + const tag = {}; - // 4.2.6.3 - if (!urlIsHttpHttpsScheme(r.url)) { - throw webidl.errors.exception({ - header: 'Cache.#batchCacheOperations', - message: 'expected http or https scheme' - }) - } + Tag.forEach(item => { + tag[item.Key] = item.Value; + }); - // 4.2.6.4 - if (r.method !== 'GET') { - throw webidl.errors.exception({ - header: 'Cache.#batchCacheOperations', - message: 'not get method' - }) - } + return { + status: result.status, + res: result.res, + tag + }; +}; - // 4.2.6.5 - if (operation.options != null) { - throw webidl.errors.exception({ - header: 'Cache.#batchCacheOperations', - message: 'options must not be defined' - }) - } - // 4.2.6.6 - requestResponses = this.#queryCache(operation.request) +/***/ }), - // 4.2.6.7 - for (const requestResponse of requestResponses) { - const idx = cache.indexOf(requestResponse) - assert(idx !== -1) +/***/ 95633: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // 4.2.6.7.1 - cache.splice(idx, 1) - } +const { isIP } = __nccwpck_require__(42439); - // 4.2.6.8 - cache.push([operation.request, operation.response]) +const proto = exports; +/** + * Get Object url by name + * @param {String} name - object name + * @param {String} [baseUrl] - If provide `baseUrl`, + * will use `baseUrl` instead the default `endpoint`. + * @return {String} object url + */ +proto.getObjectUrl = function getObjectUrl(name, baseUrl) { + if (isIP(this.options.endpoint.hostname)) { + throw new Error('can not get the object URL when endpoint is IP'); + } + if (!baseUrl) { + baseUrl = this.options.endpoint.format(); + } else if (baseUrl[baseUrl.length - 1] !== '/') { + baseUrl += '/'; + } + return baseUrl + this._escape(this._objectName(name)); +}; - // 4.2.6.10 - addedItems.push([operation.request, operation.response]) - } - // 4.2.7 - resultList.push([operation.request, operation.response]) - } +/***/ }), - // 4.3 - return resultList - } catch (e) { // 5. 
- // 5.1 - this.#relevantRequestResponseList.length = 0 +/***/ 99797: +/***/ ((__unused_webpack_module, exports) => { - // 5.2 - this.#relevantRequestResponseList = backupCache +const proto = exports; +/** + * getSymlink + * @param {String} name - object name + * @param {Object} options + * @param {{res}} + */ - // 5.3 - throw e - } +proto.getSymlink = async function getSymlink(name, options = {}) { + options.subres = Object.assign({ symlink: '' }, options.subres); + if (options.versionId) { + options.subres.versionId = options.versionId; } + name = this._objectName(name); + const params = this._objectRequestParams('GET', name, options); + params.successStatuses = [200]; + const result = await this.request(params); + const target = result.res.headers['x-oss-symlink-target']; + return { + targetName: decodeURIComponent(target), + res: result.res + }; +}; - /** - * @see https://w3c.github.io/ServiceWorker/#query-cache - * @param {any} requestQuery - * @param {import('../../types/cache').CacheQueryOptions} options - * @param {requestResponseList} targetStorage - * @returns {requestResponseList} - */ - #queryCache (requestQuery, options, targetStorage) { - /** @type {requestResponseList} */ - const resultList = [] - const storage = targetStorage ?? this.#relevantRequestResponseList +/***/ }), - for (const requestResponse of storage) { - const [cachedRequest, cachedResponse] = requestResponse - if (this.#requestMatchesCachedItem(requestQuery, cachedRequest, cachedResponse, options)) { - resultList.push(requestResponse) - } - } +/***/ 10411: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - return resultList +const { checkEnv } = __nccwpck_require__(97211); +const proto = exports; +/** + * head + * @param {String} name - object name + * @param {Object} options + * @param {{res}} + */ + +proto.head = async function head(name, options = {}) { + checkEnv( + 'Because HeadObject has gzip enabled, head cannot get the file size correctly. 
If you need to get the file size, please use getObjectMeta' + ); + options.subres = Object.assign({}, options.subres); + if (options.versionId) { + options.subres.versionId = options.versionId; + } + const params = this._objectRequestParams('HEAD', name, options); + params.successStatuses = [200, 304]; + + const result = await this.request(params); + + const data = { + meta: null, + res: result.res, + status: result.status + }; + + if (result.status === 200) { + Object.keys(result.headers).forEach(k => { + if (k.indexOf('x-oss-meta-') === 0) { + if (!data.meta) { + data.meta = {}; + } + data.meta[k.substring(11)] = result.headers[k]; + } + }); } + return data; +}; - /** - * @see https://w3c.github.io/ServiceWorker/#request-matches-cached-item-algorithm - * @param {any} requestQuery - * @param {any} request - * @param {any | null} response - * @param {import('../../types/cache').CacheQueryOptions | undefined} options - * @returns {boolean} - */ - #requestMatchesCachedItem (requestQuery, request, response = null, options) { - // if (options?.ignoreMethod === false && request.method === 'GET') { - // return false - // } - const queryURL = new URL(requestQuery.url) +/***/ }), - const cachedURL = new URL(request.url) +/***/ 48417: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if (options?.ignoreSearch) { - cachedURL.search = '' +const merge = __nccwpck_require__(11149); + +const proto = exports; + +merge(proto, __nccwpck_require__(99797)); +merge(proto, __nccwpck_require__(29547)); +merge(proto, __nccwpck_require__(14552)); +merge(proto, __nccwpck_require__(45815)); +merge(proto, __nccwpck_require__(32273)); +merge(proto, __nccwpck_require__(99795)); +merge(proto, __nccwpck_require__(34972)); +merge(proto, __nccwpck_require__(17031)); +merge(proto, __nccwpck_require__(27406)); +merge(proto, __nccwpck_require__(53103)); +merge(proto, __nccwpck_require__(86599)); +merge(proto, __nccwpck_require__(95366)); +merge(proto, __nccwpck_require__(10411)); +merge(proto, __nccwpck_require__(45515)); +merge(proto, __nccwpck_require__(21973)); +merge(proto, __nccwpck_require__(70501)); +merge(proto, __nccwpck_require__(86595)); +merge(proto, __nccwpck_require__(42659)); +merge(proto, __nccwpck_require__(95633)); +merge(proto, __nccwpck_require__(3252)); +merge(proto, __nccwpck_require__(17330)); +merge(proto, __nccwpck_require__(14598)); - queryURL.search = '' - } - if (!urlEquals(queryURL, cachedURL, true)) { - return false - } +/***/ }), - if ( - response == null || - options?.ignoreVary || - !response.headersList.contains('vary') - ) { - return true - } +/***/ 70501: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - const fieldValues = getFieldValues(response.headersList.get('vary')) +"use strict"; - for (const fieldValue of fieldValues) { - if (fieldValue === '*') { - return false - } +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.postAsyncFetch = void 0; +const obj2xml_1 = __nccwpck_require__(96846); +/* + * postAsyncFetch + * @param {String} name the object key + * @param {String} url + * @param {Object} options + * {String} options.host + * {String} options.contentMD5 + * {String} options.callback + * {String} options.storageClass Standard/IA/Archive + * {Boolean} options.ignoreSameKey default value true + */ +async function postAsyncFetch(object, url, options = {}) { + options.subres = Object.assign({ asyncFetch: '' }, options.subres); + options.headers = options.headers || {}; + object = this._objectName(object); + const { 
host = '', contentMD5 = '', callback = '', storageClass = '', ignoreSameKey = true } = options; + const paramXMLObj = { + AsyncFetchTaskConfiguration: { + Url: url, + Object: object, + Host: host, + ContentMD5: contentMD5, + Callback: callback, + StorageClass: storageClass, + IgnoreSameKey: ignoreSameKey + } + }; + const params = this._objectRequestParams('POST', '', options); + params.mime = 'xml'; + params.xmlResponse = true; + params.successStatuses = [200]; + params.content = obj2xml_1.obj2xml(paramXMLObj); + const result = await this.request(params); + return { + res: result.res, + status: result.status, + taskId: result.data.TaskId + }; +} +exports.postAsyncFetch = postAsyncFetch; - const requestValue = request.headersList.get(fieldValue) - const queryValue = requestQuery.headersList.get(fieldValue) - // If one has the header and the other doesn't, or one has - // a different value than the other, return false - if (requestValue !== queryValue) { - return false - } - } +/***/ }), - return true - } -} +/***/ 95366: +/***/ ((__unused_webpack_module, exports) => { -Object.defineProperties(Cache.prototype, { - [Symbol.toStringTag]: { - value: 'Cache', - configurable: true - }, - match: kEnumerableProperty, - matchAll: kEnumerableProperty, - add: kEnumerableProperty, - addAll: kEnumerableProperty, - put: kEnumerableProperty, - delete: kEnumerableProperty, - keys: kEnumerableProperty -}) +const proto = exports; -const cacheQueryOptionConverters = [ - { - key: 'ignoreSearch', - converter: webidl.converters.boolean, - defaultValue: false - }, - { - key: 'ignoreMethod', - converter: webidl.converters.boolean, - defaultValue: false - }, - { - key: 'ignoreVary', - converter: webidl.converters.boolean, - defaultValue: false +/* + * Set object's ACL + * @param {String} name the object key + * @param {String} acl the object ACL + * @param {Object} options + */ +proto.putACL = async function putACL(name, acl, options) { + options = options || {}; + options.subres = Object.assign({ acl: '' }, options.subres); + if (options.versionId) { + options.subres.versionId = options.versionId; } -] + options.headers = options.headers || {}; + options.headers['x-oss-object-acl'] = acl; + name = this._objectName(name); -webidl.converters.CacheQueryOptions = webidl.dictionaryConverter(cacheQueryOptionConverters) + const params = this._objectRequestParams('PUT', name, options); + params.successStatuses = [200]; -webidl.converters.MultiCacheQueryOptions = webidl.dictionaryConverter([ - ...cacheQueryOptionConverters, - { - key: 'cacheName', - converter: webidl.converters.DOMString - } -]) + const result = await this.request(params); -webidl.converters.Response = webidl.interfaceConverter(Response) + return { + res: result.res + }; +}; -webidl.converters['sequence'] = webidl.sequenceConverter( - webidl.converters.RequestInfo -) -module.exports = { - Cache -} +/***/ }), +/***/ 34972: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -/***/ }), +const { obj2xml } = __nccwpck_require__(96846); +const { checkObjectTag } = __nccwpck_require__(40849); -/***/ 7907: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +const proto = exports; +/** + * putObjectTagging + * @param {String} name - object name + * @param {Object} tag - object tag, eg: `{a: "1", b: "2"}` + * @param {Object} options + */ -"use strict"; +proto.putObjectTagging = async function putObjectTagging(name, tag, options = {}) { + checkObjectTag(tag); + + options.subres = Object.assign({ tagging: '' }, options.subres); + if 
(options.versionId) { + options.subres.versionId = options.versionId; + } + name = this._objectName(name); + const params = this._objectRequestParams('PUT', name, options); + params.successStatuses = [200]; + tag = Object.keys(tag).map(key => ({ + Key: key, + Value: tag[key] + })); + + const paramXMLObj = { + Tagging: { + TagSet: { + Tag: tag + } + } + }; + params.mime = 'xml'; + params.content = obj2xml(paramXMLObj); -const { kConstruct } = __nccwpck_require__(9174) -const { Cache } = __nccwpck_require__(6101) -const { webidl } = __nccwpck_require__(1744) -const { kEnumerableProperty } = __nccwpck_require__(3983) + const result = await this.request(params); + return { + res: result.res, + status: result.status + }; +}; -class CacheStorage { - /** - * @see https://w3c.github.io/ServiceWorker/#dfn-relevant-name-to-cache-map - * @type {Map { + +const proto = exports; +/** + * putSymlink + * @param {String} name - object name + * @param {String} targetName - target name + * @param {Object} options + * @param {{res}} + */ + +proto.putSymlink = async function putSymlink(name, targetName, options) { + options = options || {}; + options.headers = options.headers || {}; + targetName = this._escape(this._objectName(targetName)); + this._convertMetaToHeaders(options.meta, options.headers); + options.headers['x-oss-symlink-target'] = targetName; + options.subres = Object.assign({ symlink: '' }, options.subres); + if (options.versionId) { + options.subres.versionId = options.versionId; } - async match (request, options = {}) { - webidl.brandCheck(this, CacheStorage) - webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.match' }) + if (options.storageClass) { + options.headers['x-oss-storage-class'] = options.storageClass; + } - request = webidl.converters.RequestInfo(request) - options = webidl.converters.MultiCacheQueryOptions(options) + name = this._objectName(name); + const params = this._objectRequestParams('PUT', name, options); - // 1. - if (options.cacheName != null) { - // 1.1.1.1 - if (this.#caches.has(options.cacheName)) { - // 1.1.1.1.1 - const cacheList = this.#caches.get(options.cacheName) - const cache = new Cache(kConstruct, cacheList) + params.successStatuses = [200]; + const result = await this.request(params); + return { + res: result.res + }; +}; - return await cache.match(request, options) - } - } else { // 2. 
- // 2.2 - for (const cacheList of this.#caches.values()) { - const cache = new Cache(kConstruct, cacheList) - // 2.2.1.2 - const response = await cache.match(request, options) +/***/ }), - if (response !== undefined) { - return response - } - } - } +/***/ 3252: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +const urlutil = __nccwpck_require__(57310); +const utility = __nccwpck_require__(33877); +const copy = __nccwpck_require__(30952); +const signHelper = __nccwpck_require__(5150); +const { isIP } = __nccwpck_require__(42439); + +const proto = exports; + +/** + * signatureUrl + * @deprecated will be deprecated in 7.x + * @param {String} name object name + * @param {Object} options options + * @param {boolean} [strictObjectNameValidation=true] the flag of verifying object name strictly + */ +proto.signatureUrl = function signatureUrl(name, options, strictObjectNameValidation = true) { + if (isIP(this.options.endpoint.hostname)) { + throw new Error('can not get the object URL when endpoint is IP'); } - /** - * @see https://w3c.github.io/ServiceWorker/#cache-storage-has - * @param {string} cacheName - * @returns {Promise} - */ - async has (cacheName) { - webidl.brandCheck(this, CacheStorage) - webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.has' }) + if (strictObjectNameValidation && /^\?/.test(name)) { + throw new Error(`Invalid object name ${name}`); + } - cacheName = webidl.converters.DOMString(cacheName) + options = options || {}; + name = this._objectName(name); + options.method = options.method || 'GET'; + const expires = utility.timestamp() + (options.expires || 1800); + const params = { + bucket: this.options.bucket, + object: name + }; - // 2.1.1 - // 2.2 - return this.#caches.has(cacheName) + const resource = this._getResource(params); + + if (this.options.stsToken) { + options['security-token'] = this.options.stsToken; } - /** - * @see https://w3c.github.io/ServiceWorker/#dom-cachestorage-open - * @param {string} cacheName - * @returns {Promise} - */ - async open (cacheName) { - webidl.brandCheck(this, CacheStorage) - webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.open' }) + const signRes = signHelper._signatureForURL(this.options.accessKeySecret, options, resource, expires); - cacheName = webidl.converters.DOMString(cacheName) + const url = urlutil.parse(this._getReqUrl(params)); + url.query = { + OSSAccessKeyId: this.options.accessKeyId, + Expires: expires, + Signature: signRes.Signature + }; - // 2.1 - if (this.#caches.has(cacheName)) { - // await caches.open('v1') !== await caches.open('v1') + copy(signRes.subResource).to(url.query); - // 2.1.1 - const cache = this.#caches.get(cacheName) + return url.format(); +}; - // 2.1.1.1 - return new Cache(kConstruct, cache) - } - // 2.2 - const cache = [] +/***/ }), - // 2.3 - this.#caches.set(cacheName, cache) +/***/ 14598: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // 2.4 - return new Cache(kConstruct, cache) - } +const dateFormat = __nccwpck_require__(51512); +const urlUtil = __nccwpck_require__(57310); - /** - * @see https://w3c.github.io/ServiceWorker/#cache-storage-delete - * @param {string} cacheName - * @returns {Promise} - */ - async delete (cacheName) { - webidl.brandCheck(this, CacheStorage) - webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.delete' }) +const signHelper = __nccwpck_require__(5150); +const { setSTSToken } = __nccwpck_require__(84622); +const { isFunction } = __nccwpck_require__(18818); +const { 
getStandardRegion } = __nccwpck_require__(98423); - cacheName = webidl.converters.DOMString(cacheName) +const proto = exports; - return this.#caches.delete(cacheName) - } +/** + * signatureUrlV4 + * + * @param {string} method + * @param {number} expires + * @param {Object} [request] + * @param {Object} [request.headers] + * @param {Object} [request.queries] + * @param {string} [objectName] + * @param {string[]} [additionalHeaders] + */ +proto.signatureUrlV4 = async function signatureUrlV4(method, expires, request, objectName, additionalHeaders) { + const headers = (request && request.headers) || {}; + const queries = { ...((request && request.queries) || {}) }; + const date = new Date(); + const formattedDate = dateFormat(date, "UTC:yyyymmdd'T'HHMMss'Z'"); + const onlyDate = formattedDate.split('T')[0]; + const fixedAdditionalHeaders = signHelper.fixAdditionalHeaders(additionalHeaders); + const region = getStandardRegion(this.options.region); - /** - * @see https://w3c.github.io/ServiceWorker/#cache-storage-keys - * @returns {string[]} - */ - async keys () { - webidl.brandCheck(this, CacheStorage) + if (fixedAdditionalHeaders.length > 0) { + queries['x-oss-additional-headers'] = fixedAdditionalHeaders.join(';'); + } + queries['x-oss-credential'] = `${this.options.accessKeyId}/${onlyDate}/${region}/oss/aliyun_v4_request`; + queries['x-oss-date'] = formattedDate; + queries['x-oss-expires'] = expires; + queries['x-oss-signature-version'] = 'OSS4-HMAC-SHA256'; - // 2.1 - const keys = this.#caches.keys() + if (this.options.stsToken && isFunction(this.options.refreshSTSToken)) { + await setSTSToken.call(this); + } - // 2.2 - return [...keys] + if (this.options.stsToken) { + queries['x-oss-security-token'] = this.options.stsToken; } -} -Object.defineProperties(CacheStorage.prototype, { - [Symbol.toStringTag]: { - value: 'CacheStorage', - configurable: true - }, - match: kEnumerableProperty, - has: kEnumerableProperty, - open: kEnumerableProperty, - delete: kEnumerableProperty, - keys: kEnumerableProperty -}) + const canonicalRequest = signHelper.getCanonicalRequest( + method, + { + headers, + queries + }, + this.options.bucket, + objectName, + fixedAdditionalHeaders + ); + const stringToSign = signHelper.getStringToSign(region, formattedDate, canonicalRequest); + + queries['x-oss-signature'] = signHelper.getSignatureV4(this.options.accessKeySecret, onlyDate, region, stringToSign); + + const signedUrl = urlUtil.parse( + this._getReqUrl({ + bucket: this.options.bucket, + object: objectName + }) + ); + signedUrl.query = { ...queries }; -module.exports = { - CacheStorage -} + return signedUrl.format(); +}; /***/ }), -/***/ 9174: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 80986: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -"use strict"; +const { isArray } = __nccwpck_require__(71954); +const proto = exports; -module.exports = { - kConstruct: (__nccwpck_require__(2785).kConstruct) -} +proto._parallelNode = async function _parallelNode(todo, parallel, fn, sourceData) { + const that = this; + // upload in parallel + const jobErr = []; + let jobs = []; + const tempBatch = todo.length / parallel; + const remainder = todo.length % parallel; + const batch = remainder === 0 ? 
tempBatch : (todo.length - remainder) / parallel + 1; + let taskIndex = 1; + for (let i = 0; i < todo.length; i++) { + if (that.isCancel()) { + break; + } + if (sourceData) { + jobs.push(fn(that, todo[i], sourceData)); + } else { + jobs.push(fn(that, todo[i])); + } -/***/ }), + if (jobs.length === parallel || (taskIndex === batch && i === todo.length - 1)) { + try { + taskIndex += 1; + /* eslint no-await-in-loop: [0] */ + await Promise.all(jobs); + } catch (err) { + jobErr.push(err); + } + jobs = []; + } + } -/***/ 2396: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + return jobErr; +}; -"use strict"; +proto._parallel = function _parallel(todo, parallel, jobPromise) { + const that = this; + return new Promise(resolve => { + const _jobErr = []; + if (parallel <= 0 || !todo) { + resolve(_jobErr); + return; + } + function onlyOnce(fn) { + return function (...args) { + if (fn === null) throw new Error('Callback was already called.'); + const callFn = fn; + fn = null; + callFn.apply(this, args); + }; + } + + function createArrayIterator(coll) { + let i = -1; + const len = coll.length; + return function next() { + return ++i < len && !that.isCancel() ? { value: coll[i], key: i } : null; + }; + } + + const nextElem = createArrayIterator(todo); + let done = false; + let running = 0; + let looping = false; + + function iterateeCallback(err) { + running -= 1; + if (err) { + done = true; + _jobErr.push(err); + resolve(_jobErr); + } else if (done && running <= 0) { + done = true; + resolve(_jobErr); + } else if (!looping) { + /* eslint no-use-before-define: [0] */ + if (that.isCancel()) { + resolve(_jobErr); + } else { + replenish(); + } + } + } -const assert = __nccwpck_require__(9491) -const { URLSerializer } = __nccwpck_require__(685) -const { isValidHeaderName } = __nccwpck_require__(2538) + function iteratee(value, callback) { + jobPromise(value) + .then(result => { + callback(null, result); + }) + .catch(err => { + callback(err); + }); + } + + function replenish() { + looping = true; + while (running < parallel && !done && !that.isCancel()) { + const elem = nextElem(); + if (elem === null || _jobErr.length > 0) { + done = true; + if (running <= 0) { + resolve(_jobErr); + } + return; + } + running += 1; + iteratee(elem.value, onlyOnce(iterateeCallback)); + } + looping = false; + } + + replenish(); + }); +}; /** - * @see https://url.spec.whatwg.org/#concept-url-equals - * @param {URL} A - * @param {URL} B - * @param {boolean | undefined} excludeFragment - * @returns {boolean} + * cancel operation, now can use with multipartUpload + * @param {Object} abort + * {String} anort.name object key + * {String} anort.uploadId upload id + * {String} anort.options timeout */ -function urlEquals (A, B, excludeFragment = false) { - const serializedA = URLSerializer(A, excludeFragment) +proto.cancel = function cancel(abort) { + this.options.cancelFlag = true; + + if (isArray(this.multipartUploadStreams)) { + this.multipartUploadStreams.forEach(_ => { + if (_.destroyed === false) { + const err = { + name: 'cancel', + message: 'cancel' + }; + _.destroy(err); + } + }); + } + this.multipartUploadStreams = []; + if (abort) { + this.abortMultipartUpload(abort.name, abort.uploadId, abort.options); + } +}; - const serializedB = URLSerializer(B, excludeFragment) +proto.isCancel = function isCancel() { + return this.options.cancelFlag; +}; - return serializedA === serializedB -} +proto.resetCancelFlag = function resetCancelFlag() { + this.options.cancelFlag = false; +}; + +proto._stop = function 
_stop() { + this.options.cancelFlag = true; +}; + +// cancel is not error , so create an object +proto._makeCancelEvent = function _makeCancelEvent() { + const cancelEvent = { + status: 0, + name: 'cancel' + }; + return cancelEvent; +}; + +// abort is not error , so create an object +proto._makeAbortEvent = function _makeAbortEvent() { + const abortEvent = { + status: 0, + name: 'abort', + message: 'upload task has been abort' + }; + return abortEvent; +}; + + +/***/ }), + +/***/ 5150: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +const crypto = __nccwpck_require__(6113); +const is = __nccwpck_require__(65565); +const qs = __nccwpck_require__(22760); +const { lowercaseKeyHeader } = __nccwpck_require__(75285); +const { encodeString } = __nccwpck_require__(47563); /** - * @see https://github.com/chromium/chromium/blob/694d20d134cb553d8d89e5500b9148012b1ba299/content/browser/cache_storage/cache_storage_cache.cc#L260-L262 - * @param {string} header + * + * @param {String} resourcePath + * @param {Object} parameters + * @return */ -function fieldValues (header) { - assert(header !== null) - - const values = [] +exports.buildCanonicalizedResource = function buildCanonicalizedResource(resourcePath, parameters) { + let canonicalizedResource = `${resourcePath}`; + let separatorString = '?'; + + if (is.string(parameters) && parameters.trim() !== '') { + canonicalizedResource += separatorString + parameters; + } else if (is.array(parameters)) { + parameters.sort(); + canonicalizedResource += separatorString + parameters.join('&'); + } else if (parameters) { + const compareFunc = (entry1, entry2) => { + if (entry1[0] > entry2[0]) { + return 1; + } else if (entry1[0] < entry2[0]) { + return -1; + } + return 0; + }; + const processFunc = key => { + canonicalizedResource += separatorString + key; + if (parameters[key] || parameters[key] === 0) { + canonicalizedResource += `=${parameters[key]}`; + } + separatorString = '&'; + }; + Object.keys(parameters).sort(compareFunc).forEach(processFunc); + } - for (let value of header.split(',')) { - value = value.trim() + return canonicalizedResource; +}; - if (!value.length) { - continue - } else if (!isValidHeaderName(value)) { - continue +/** + * @param {String} method + * @param {String} resourcePath + * @param {Object} request + * @param {String} expires + * @return {String} canonicalString + */ +exports.buildCanonicalString = function canonicalString(method, resourcePath, request, expires) { + request = request || {}; + const headers = lowercaseKeyHeader(request.headers); + const OSS_PREFIX = 'x-oss-'; + const ossHeaders = []; + const headersToSign = {}; + + let signContent = [ + method.toUpperCase(), + headers['content-md5'] || '', + headers['content-type'], + expires || headers['x-oss-date'] + ]; + + Object.keys(headers).forEach(key => { + const lowerKey = key.toLowerCase(); + if (lowerKey.indexOf(OSS_PREFIX) === 0) { + headersToSign[lowerKey] = String(headers[key]).trim(); } + }); - values.push(value) - } + Object.keys(headersToSign) + .sort() + .forEach(key => { + ossHeaders.push(`${key}:${headersToSign[key]}`); + }); - return values -} + signContent = signContent.concat(ossHeaders); -module.exports = { - urlEquals, - fieldValues -} + signContent.push(this.buildCanonicalizedResource(resourcePath, request.parameters)); + return signContent.join('\n'); +}; -/***/ }), +/** + * @param {String} accessKeySecret + * @param {String} canonicalString + */ +exports.computeSignature = function computeSignature(accessKeySecret, 
canonicalString, headerEncoding = 'utf-8') { + const signature = crypto.createHmac('sha1', accessKeySecret); + return signature.update(Buffer.from(canonicalString, headerEncoding)).digest('base64'); +}; -/***/ 3598: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/** + * @param {String} accessKeyId + * @param {String} accessKeySecret + * @param {String} canonicalString + */ +exports.authorization = function authorization(accessKeyId, accessKeySecret, canonicalString, headerEncoding) { + return `OSS ${accessKeyId}:${this.computeSignature(accessKeySecret, canonicalString, headerEncoding)}`; +}; -"use strict"; -// @ts-check +/** + * @param {string[]} [additionalHeaders] + * @returns {string[]} + */ +exports.fixAdditionalHeaders = additionalHeaders => { + if (!additionalHeaders) { + return []; + } + const OSS_PREFIX = 'x-oss-'; + return [...new Set(additionalHeaders.map(v => v.toLowerCase()))] + .filter(v => { + return v !== 'content-type' && v !== 'content-md5' && !v.startsWith(OSS_PREFIX); + }) + .sort(); +}; -/* global WebAssembly */ +/** + * @param {string} method + * @param {Object} request + * @param {Object} request.headers + * @param {Object} [request.queries] + * @param {string} [bucketName] + * @param {string} [objectName] + * @param {string[]} [additionalHeaders] additional headers after deduplication, lowercase and sorting + * @returns {string} + */ +exports.getCanonicalRequest = function getCanonicalRequest(method, request, bucketName, objectName, additionalHeaders) { + const headers = lowercaseKeyHeader(request.headers); + const queries = request.queries || {}; + const OSS_PREFIX = 'x-oss-'; + + if (objectName && !bucketName) { + throw Error('Please ensure that bucketName is passed into getCanonicalRequest.'); + } + + const signContent = [ + method.toUpperCase(), // HTTP Verb + encodeString(`/${bucketName ? 
`${bucketName}/` : ''}${objectName || ''}`).replace(/%2F/g, '/') // Canonical URI + ]; + + // Canonical Query String + signContent.push( + qs.stringify(queries, { + encoder: encodeString, + sort: (a, b) => a.localeCompare(b), + strictNullHandling: true + }) + ); -const assert = __nccwpck_require__(9491) -const net = __nccwpck_require__(1808) -const http = __nccwpck_require__(3685) -const { pipeline } = __nccwpck_require__(2781) -const util = __nccwpck_require__(3983) -const timers = __nccwpck_require__(9459) -const Request = __nccwpck_require__(2905) -const DispatcherBase = __nccwpck_require__(4839) -const { - RequestContentLengthMismatchError, - ResponseContentLengthMismatchError, - InvalidArgumentError, - RequestAbortedError, - HeadersTimeoutError, - HeadersOverflowError, - SocketError, - InformationalError, - BodyTimeoutError, - HTTPParserError, - ResponseExceededMaxSizeError, - ClientDestroyedError -} = __nccwpck_require__(8045) -const buildConnector = __nccwpck_require__(2067) -const { - kUrl, - kReset, - kServerName, - kClient, - kBusy, - kParser, - kConnect, - kBlocking, - kResuming, - kRunning, - kPending, - kSize, - kWriting, - kQueue, - kConnected, - kConnecting, - kNeedDrain, - kNoRef, - kKeepAliveDefaultTimeout, - kHostHeader, - kPendingIdx, - kRunningIdx, - kError, - kPipelining, - kSocket, - kKeepAliveTimeoutValue, - kMaxHeadersSize, - kKeepAliveMaxTimeout, - kKeepAliveTimeoutThreshold, - kHeadersTimeout, - kBodyTimeout, - kStrictContentLength, - kConnector, - kMaxRedirections, - kMaxRequests, - kCounter, - kClose, - kDestroy, - kDispatch, - kInterceptors, - kLocalAddress, - kMaxResponseSize, - kHTTPConnVersion, - // HTTP2 - kHost, - kHTTP2Session, - kHTTP2SessionState, - kHTTP2BuildRequest, - kHTTP2CopyHeaders, - kHTTP1BuildRequest -} = __nccwpck_require__(2785) + // Canonical Headers + if (additionalHeaders) { + additionalHeaders.forEach(v => { + if (!Object.prototype.hasOwnProperty.call(headers, v)) { + throw Error(`Can't find additional header ${v} in request headers.`); + } + }); + } -/** @type {import('http2')} */ -let http2 -try { - http2 = __nccwpck_require__(5158) -} catch { - // @ts-ignore - http2 = { constants: {} } -} + const tempHeaders = new Set(additionalHeaders); -const { - constants: { - HTTP2_HEADER_AUTHORITY, - HTTP2_HEADER_METHOD, - HTTP2_HEADER_PATH, - HTTP2_HEADER_SCHEME, - HTTP2_HEADER_CONTENT_LENGTH, - HTTP2_HEADER_EXPECT, - HTTP2_HEADER_STATUS + Object.keys(headers).forEach(v => { + if (v === 'content-type' || v === 'content-md5' || v.startsWith(OSS_PREFIX)) { + tempHeaders.add(v); + } + }); + + const canonicalHeaders = `${[...tempHeaders] + .sort() + .map(v => `${v}:${is.string(headers[v]) ? headers[v].trim() : headers[v]}\n`) + .join('')}`; + + signContent.push(canonicalHeaders); + + // Additional Headers + if (additionalHeaders.length > 0) { + signContent.push(additionalHeaders.join(';')); + } else { + signContent.push(''); } -} = http2 -// Experimental -let h2ExperimentalWarned = false + // Hashed Payload + signContent.push(headers['x-oss-content-sha256'] || 'UNSIGNED-PAYLOAD'); -const FastBuffer = Buffer[Symbol.species] + return signContent.join('\n'); +}; -const kClosedResolve = Symbol('kClosedResolve') +/** + * @param {string} region Standard region, e.g. 
cn-hangzhou + * @param {string} date ISO8601 UTC:yyyymmdd'T'HHMMss'Z' + * @param {string} canonicalRequest + * @returns {string} + */ +exports.getStringToSign = function getStringToSign(region, date, canonicalRequest) { + const stringToSign = [ + 'OSS4-HMAC-SHA256', + date, // TimeStamp + `${date.split('T')[0]}/${region}/oss/aliyun_v4_request`, // Scope + crypto.createHash('sha256').update(canonicalRequest).digest('hex') // Hashed Canonical Request + ]; + + return stringToSign.join('\n'); +}; -const channels = {} +/** + * @param {String} accessKeySecret + * @param {string} date yyyymmdd + * @param {string} region Standard region, e.g. cn-hangzhou + * @param {string} stringToSign + * @returns {string} + */ +exports.getSignatureV4 = function getSignatureV4(accessKeySecret, date, region, stringToSign) { + const signingDate = crypto.createHmac('sha256', `aliyun_v4${accessKeySecret}`).update(date).digest(); + const signingRegion = crypto.createHmac('sha256', signingDate).update(region).digest(); + const signingOss = crypto.createHmac('sha256', signingRegion).update('oss').digest(); + const signingKey = crypto.createHmac('sha256', signingOss).update('aliyun_v4_request').digest(); + const signatureValue = crypto.createHmac('sha256', signingKey).update(stringToSign).digest('hex'); + + return signatureValue; +}; -try { - const diagnosticsChannel = __nccwpck_require__(7643) - channels.sendHeaders = diagnosticsChannel.channel('undici:client:sendHeaders') - channels.beforeConnect = diagnosticsChannel.channel('undici:client:beforeConnect') - channels.connectError = diagnosticsChannel.channel('undici:client:connectError') - channels.connected = diagnosticsChannel.channel('undici:client:connected') -} catch { - channels.sendHeaders = { hasSubscribers: false } - channels.beforeConnect = { hasSubscribers: false } - channels.connectError = { hasSubscribers: false } - channels.connected = { hasSubscribers: false } -} +/** + * @param {String} accessKeyId + * @param {String} accessKeySecret + * @param {string} region Standard region, e.g. cn-hangzhou + * @param {string} method + * @param {Object} request + * @param {Object} request.headers + * @param {Object} [request.queries] + * @param {string} [bucketName] + * @param {string} [objectName] + * @param {string[]} [additionalHeaders] + * @param {string} [headerEncoding='utf-8'] + * @returns {string} + */ +exports.authorizationV4 = function authorizationV4( + accessKeyId, + accessKeySecret, + region, + method, + request, + bucketName, + objectName, + additionalHeaders, + headerEncoding = 'utf-8' +) { + const fixedAdditionalHeaders = this.fixAdditionalHeaders(additionalHeaders); + const fixedHeaders = {}; + Object.entries(request.headers).forEach(v => { + fixedHeaders[v[0]] = is.string(v[1]) ? Buffer.from(v[1], headerEncoding).toString() : v[1]; + }); + const date = fixedHeaders['x-oss-date'] || (request.queries && request.queries['x-oss-date']); + const canonicalRequest = this.getCanonicalRequest( + method, + { + headers: fixedHeaders, + queries: request.queries + }, + bucketName, + objectName, + fixedAdditionalHeaders + ); + const stringToSign = this.getStringToSign(region, date, canonicalRequest); + const onlyDate = date.split('T')[0]; + const signatureValue = this.getSignatureV4(accessKeySecret, onlyDate, region, stringToSign); + const additionalHeadersValue = + fixedAdditionalHeaders.length > 0 ? 
`AdditionalHeaders=${fixedAdditionalHeaders.join(';')},` : ''; + + return `OSS4-HMAC-SHA256 Credential=${accessKeyId}/${onlyDate}/${region}/oss/aliyun_v4_request,${additionalHeadersValue}Signature=${signatureValue}`; +}; /** - * @type {import('../types/client').default} + * + * @param {String} accessKeySecret + * @param {Object} options + * @param {String} resource + * @param {Number} expires */ -class Client extends DispatcherBase { - /** - * - * @param {string|URL} url - * @param {import('../types/client').Client.Options} options - */ - constructor (url, { - interceptors, - maxHeaderSize, - headersTimeout, - socketTimeout, - requestTimeout, - connectTimeout, - bodyTimeout, - idleTimeout, - keepAlive, - keepAliveTimeout, - maxKeepAliveTimeout, - keepAliveMaxTimeout, - keepAliveTimeoutThreshold, - socketPath, - pipelining, - tls, - strictContentLength, - maxCachedSessions, - maxRedirections, - connect, - maxRequestsPerClient, - localAddress, - maxResponseSize, - autoSelectFamily, - autoSelectFamilyAttemptTimeout, - // h2 - allowH2, - maxConcurrentStreams - } = {}) { - super() +exports._signatureForURL = function _signatureForURL(accessKeySecret, options = {}, resource, expires, headerEncoding) { + const headers = {}; + const { subResource = {} } = options; - if (keepAlive !== undefined) { - throw new InvalidArgumentError('unsupported keepAlive, use pipelining=0 instead') - } + if (options.process) { + const processKeyword = 'x-oss-process'; + subResource[processKeyword] = options.process; + } - if (socketTimeout !== undefined) { - throw new InvalidArgumentError('unsupported socketTimeout, use headersTimeout & bodyTimeout instead') - } + if (options.trafficLimit) { + const trafficLimitKey = 'x-oss-traffic-limit'; + subResource[trafficLimitKey] = options.trafficLimit; + } - if (requestTimeout !== undefined) { - throw new InvalidArgumentError('unsupported requestTimeout, use headersTimeout & bodyTimeout instead') - } + if (options.response) { + Object.keys(options.response).forEach(k => { + const key = `response-${k.toLowerCase()}`; + subResource[key] = options.response[k]; + }); + } - if (idleTimeout !== undefined) { - throw new InvalidArgumentError('unsupported idleTimeout, use keepAliveTimeout instead') + Object.keys(options).forEach(key => { + const lowerKey = key.toLowerCase(); + const value = options[key]; + if (lowerKey.indexOf('x-oss-') === 0) { + headers[lowerKey] = value; + } else if (lowerKey.indexOf('content-md5') === 0) { + headers[key] = value; + } else if (lowerKey.indexOf('content-type') === 0) { + headers[key] = value; } + }); - if (maxKeepAliveTimeout !== undefined) { - throw new InvalidArgumentError('unsupported maxKeepAliveTimeout, use keepAliveMaxTimeout instead') - } + if (Object.prototype.hasOwnProperty.call(options, 'security-token')) { + subResource['security-token'] = options['security-token']; + } - if (maxHeaderSize != null && !Number.isFinite(maxHeaderSize)) { - throw new InvalidArgumentError('invalid maxHeaderSize') + if (Object.prototype.hasOwnProperty.call(options, 'callback')) { + const json = { + callbackUrl: encodeURI(options.callback.url), + callbackBody: options.callback.body + }; + if (options.callback.host) { + json.callbackHost = options.callback.host; } - - if (socketPath != null && typeof socketPath !== 'string') { - throw new InvalidArgumentError('invalid socketPath') + if (options.callback.contentType) { + json.callbackBodyType = options.callback.contentType; } + subResource.callback = Buffer.from(JSON.stringify(json)).toString('base64'); - if 
(connectTimeout != null && (!Number.isFinite(connectTimeout) || connectTimeout < 0)) { - throw new InvalidArgumentError('invalid connectTimeout') + if (options.callback.customValue) { + const callbackVar = {}; + Object.keys(options.callback.customValue).forEach(key => { + callbackVar[`x:${key}`] = options.callback.customValue[key]; + }); + subResource['callback-var'] = Buffer.from(JSON.stringify(callbackVar)).toString('base64'); } + } - if (keepAliveTimeout != null && (!Number.isFinite(keepAliveTimeout) || keepAliveTimeout <= 0)) { - throw new InvalidArgumentError('invalid keepAliveTimeout') - } + const canonicalString = this.buildCanonicalString( + options.method, + resource, + { + headers, + parameters: subResource + }, + expires.toString() + ); - if (keepAliveMaxTimeout != null && (!Number.isFinite(keepAliveMaxTimeout) || keepAliveMaxTimeout <= 0)) { - throw new InvalidArgumentError('invalid keepAliveMaxTimeout') - } + return { + Signature: this.computeSignature(accessKeySecret, canonicalString, headerEncoding), + subResource + }; +}; - if (keepAliveTimeoutThreshold != null && !Number.isFinite(keepAliveTimeoutThreshold)) { - throw new InvalidArgumentError('invalid keepAliveTimeoutThreshold') - } - if (headersTimeout != null && (!Number.isInteger(headersTimeout) || headersTimeout < 0)) { - throw new InvalidArgumentError('headersTimeout must be a positive integer or zero') - } +/***/ }), - if (bodyTimeout != null && (!Number.isInteger(bodyTimeout) || bodyTimeout < 0)) { - throw new InvalidArgumentError('bodyTimeout must be a positive integer or zero') - } +/***/ 30922: +/***/ ((__unused_webpack_module, exports) => { - if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') { - throw new InvalidArgumentError('connect must be a function or an object') - } +"use strict"; - if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) { - throw new InvalidArgumentError('maxRedirections must be a positive number') +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.checkBucketName = void 0; +exports.checkBucketName = (name, createBucket = false) => { + const bucketRegex = createBucket ? 
/^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$/ : /^[a-z0-9_][a-z0-9-_]{1,61}[a-z0-9_]$/; + if (!bucketRegex.test(name)) { + throw new Error('The bucket must be conform to the specifications'); } +}; - if (maxRequestsPerClient != null && (!Number.isInteger(maxRequestsPerClient) || maxRequestsPerClient < 0)) { - throw new InvalidArgumentError('maxRequestsPerClient must be a positive number') - } - if (localAddress != null && (typeof localAddress !== 'string' || net.isIP(localAddress) === 0)) { - throw new InvalidArgumentError('localAddress must be valid string IP address') - } +/***/ }), - if (maxResponseSize != null && (!Number.isInteger(maxResponseSize) || maxResponseSize < -1)) { - throw new InvalidArgumentError('maxResponseSize must be a positive number') - } +/***/ 38694: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if ( - autoSelectFamilyAttemptTimeout != null && - (!Number.isInteger(autoSelectFamilyAttemptTimeout) || autoSelectFamilyAttemptTimeout < -1) - ) { - throw new InvalidArgumentError('autoSelectFamilyAttemptTimeout must be a positive number') - } +"use strict"; - // h2 - if (allowH2 != null && typeof allowH2 !== 'boolean') { - throw new InvalidArgumentError('allowH2 must be a valid boolean value') +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.checkBucketTag = void 0; +const { checkValid } = __nccwpck_require__(67355); +const { isObject } = __nccwpck_require__(81269); +const commonRules = [ + { + validator: value => { + if (typeof value !== 'string') { + throw new Error('the key and value of the tag must be String'); + } + } } +]; +const rules = { + key: [ + ...commonRules, + { + pattern: /^.{1,64}$/, + msg: 'tag key can be a maximum of 64 bytes in length' + }, + { + pattern: /^(?!https*:\/\/|Aliyun)/, + msg: 'tag key can not startsWith: http://, https://, Aliyun' + } + ], + value: [ + ...commonRules, + { + pattern: /^.{0,128}$/, + msg: 'tag value can be a maximum of 128 bytes in length' + } + ] +}; +exports.checkBucketTag = (tag) => { + if (!isObject(tag)) { + throw new Error('bucket tag must be Object'); + } + const entries = Object.entries(tag); + if (entries.length > 20) { + throw new Error('maximum of 20 tags for a bucket'); + } + const rulesIndexKey = ['key', 'value']; + entries.forEach(keyValue => { + keyValue.forEach((item, index) => { + checkValid(item, rules[rulesIndexKey[index]]); + }); + }); +}; - if (maxConcurrentStreams != null && (typeof maxConcurrentStreams !== 'number' || maxConcurrentStreams < 1)) { - throw new InvalidArgumentError('maxConcurrentStreams must be a possitive integer, greater than 0') + +/***/ }), + +/***/ 27324: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.checkConfigValid = void 0; +const checkConfigMap = { + endpoint: checkEndpoint, + region: /^[a-zA-Z0-9\-_]+$/ +}; +function checkEndpoint(endpoint) { + if (typeof endpoint === 'string') { + return /^[a-zA-Z0-9._:/-]+$/.test(endpoint); + } + else if (endpoint.host) { + return /^[a-zA-Z0-9._:/-]+$/.test(endpoint.host); + } + return false; +} +exports.checkConfigValid = (conf, key) => { + if (checkConfigMap[key]) { + let isConfigValid = true; + if (checkConfigMap[key] instanceof Function) { + isConfigValid = checkConfigMap[key](conf); + } + else { + isConfigValid = checkConfigMap[key].test(conf); + } + if (!isConfigValid) { + throw new Error(`The ${key} must be conform to the specifications`); + } } +}; - if (typeof connect !== 'function') { - connect = 
buildConnector({ - ...tls, - maxCachedSessions, - allowH2, - socketPath, - timeout: connectTimeout, - ...(util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined), - ...connect - }) + +/***/ }), + +/***/ 97211: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.checkEnv = void 0; +function checkEnv(msg) { + if (process.browser) { + console.warn(msg); } +} +exports.checkEnv = checkEnv; - this[kInterceptors] = interceptors && interceptors.Client && Array.isArray(interceptors.Client) - ? interceptors.Client - : [createRedirectInterceptor({ maxRedirections })] - this[kUrl] = util.parseOrigin(url) - this[kConnector] = connect - this[kSocket] = null - this[kPipelining] = pipelining != null ? pipelining : 1 - this[kMaxHeadersSize] = maxHeaderSize || http.maxHeaderSize - this[kKeepAliveDefaultTimeout] = keepAliveTimeout == null ? 4e3 : keepAliveTimeout - this[kKeepAliveMaxTimeout] = keepAliveMaxTimeout == null ? 600e3 : keepAliveMaxTimeout - this[kKeepAliveTimeoutThreshold] = keepAliveTimeoutThreshold == null ? 1e3 : keepAliveTimeoutThreshold - this[kKeepAliveTimeoutValue] = this[kKeepAliveDefaultTimeout] - this[kServerName] = null - this[kLocalAddress] = localAddress != null ? localAddress : null - this[kResuming] = 0 // 0, idle, 1, scheduled, 2 resuming - this[kNeedDrain] = 0 // 0, idle, 1, scheduled, 2 resuming - this[kHostHeader] = `host: ${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}\r\n` - this[kBodyTimeout] = bodyTimeout != null ? bodyTimeout : 300e3 - this[kHeadersTimeout] = headersTimeout != null ? headersTimeout : 300e3 - this[kStrictContentLength] = strictContentLength == null ? true : strictContentLength - this[kMaxRedirections] = maxRedirections - this[kMaxRequests] = maxRequestsPerClient - this[kClosedResolve] = null - this[kMaxResponseSize] = maxResponseSize > -1 ? maxResponseSize : -1 - this[kHTTPConnVersion] = 'h1' - // HTTP/2 - this[kHTTP2Session] = null - this[kHTTP2SessionState] = !allowH2 - ? null - : { - // streams: null, // Fixed queue of streams - For future support of `push` - openStreams: 0, // Keep track of them to decide wether or not unref the session - maxConcurrentStreams: maxConcurrentStreams != null ? maxConcurrentStreams : 100 // Max peerConcurrentStreams for a Node h2 server - } - this[kHost] = `${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}` +/***/ }), - // kQueue is built up of 3 sections separated by - // the kRunningIdx and kPendingIdx indices. - // | complete | running | pending | - // ^ kRunningIdx ^ kPendingIdx ^ kQueue.length - // kRunningIdx points to the first running element. - // kPendingIdx points to the first pending element. - // This implements a fast queue with an amortized - // time of O(1). 
+/***/ 40849: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - this[kQueue] = [] - this[kRunningIdx] = 0 - this[kPendingIdx] = 0 - } +"use strict"; - get pipelining () { - return this[kPipelining] - } +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.checkObjectTag = void 0; +const { checkValid } = __nccwpck_require__(67355); +const { isObject } = __nccwpck_require__(81269); +const commonRules = [ + { + validator: value => { + if (typeof value !== 'string') { + throw new Error('the key and value of the tag must be String'); + } + } + }, + { + pattern: /^[a-zA-Z0-9 +-=._:/]+$/, + msg: 'tag can contain letters, numbers, spaces, and the following symbols: plus sign (+), hyphen (-), equal sign (=), period (.), underscore (_), colon (:), and forward slash (/)' + } +]; +const rules = { + key: [ + ...commonRules, + { + pattern: /^.{1,128}$/, + msg: 'tag key can be a maximum of 128 bytes in length' + } + ], + value: [ + ...commonRules, + { + pattern: /^.{0,256}$/, + msg: 'tag value can be a maximum of 256 bytes in length' + } + ] +}; +function checkObjectTag(tag) { + if (!isObject(tag)) { + throw new Error('tag must be Object'); + } + const entries = Object.entries(tag); + if (entries.length > 10) { + throw new Error('maximum of 10 tags for a object'); + } + const rulesIndexKey = ['key', 'value']; + entries.forEach(keyValue => { + keyValue.forEach((item, index) => { + checkValid(item, rules[rulesIndexKey[index]]); + }); + }); +} +exports.checkObjectTag = checkObjectTag; - set pipelining (value) { - this[kPipelining] = value - resume(this, true) - } - get [kPending] () { - return this[kQueue].length - this[kPendingIdx] - } +/***/ }), - get [kRunning] () { - return this[kPendingIdx] - this[kRunningIdx] - } +/***/ 67355: +/***/ ((__unused_webpack_module, exports) => { - get [kSize] () { - return this[kQueue].length - this[kRunningIdx] - } +"use strict"; - get [kConnected] () { - return !!this[kSocket] && !this[kConnecting] && !this[kSocket].destroyed - } +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.checkValid = void 0; +function checkValid(_value, _rules) { + _rules.forEach(rule => { + if (rule.validator) { + rule.validator(_value); + } + else if (rule.pattern && !rule.pattern.test(_value)) { + throw new Error(rule.msg); + } + }); +} +exports.checkValid = checkValid; - get [kBusy] () { - const socket = this[kSocket] - return ( - (socket && (socket[kReset] || socket[kWriting] || socket[kBlocking])) || - (this[kSize] >= (this[kPipelining] || 1)) || - this[kPending] > 0 - ) - } - /* istanbul ignore: only used for test */ - [kConnect] (cb) { - connect(this) - this.once('connect', cb) - } +/***/ }), - [kDispatch] (opts, handler) { - const origin = opts.origin || this[kUrl].origin +/***/ 71828: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - const request = this[kHTTPConnVersion] === 'h2' - ? Request[kHTTP2BuildRequest](origin, opts, handler) - : Request[kHTTP1BuildRequest](origin, opts, handler) +"use strict"; - this[kQueue].push(request) - if (this[kResuming]) { - // Do nothing. - } else if (util.bodyLength(request.body) == null && util.isIterable(request.body)) { - // Wait a tick in case stream/iterator is ended in the same tick. 
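`checkValid` drives the tag checkers added above: each rule either runs a `validator` callback or tests a regex `pattern`, throwing the rule's `msg` on mismatch. A small self-contained sketch follows, with the object-tag key rules restated; the tag values being checked are hypothetical.

```js
// Rule-driven validation as in checkValid/checkObjectTag; the checked values are hypothetical.
function checkValid(value, rules) {
  rules.forEach(rule => {
    if (rule.validator) {
      rule.validator(value);
    } else if (rule.pattern && !rule.pattern.test(value)) {
      throw new Error(rule.msg);
    }
  });
}

// Object-tag key rules, restated from the module above.
const keyRules = [
  {
    validator: value => {
      if (typeof value !== 'string') throw new Error('the key and value of the tag must be String');
    }
  },
  {
    pattern: /^[a-zA-Z0-9 +-=._:/]+$/,
    msg: 'tag can contain letters, numbers, spaces, and the symbols + - = . _ : /'
  },
  { pattern: /^.{1,128}$/, msg: 'tag key can be a maximum of 128 bytes in length' }
];

checkValid('backup-stage', keyRules); // passes
try {
  checkValid('价格', keyRules);        // throws: characters outside the allowed set
} catch (err) {
  console.log(err.message);
}
```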
- this[kResuming] = 1 - process.nextTick(resume, this) - } else { - resume(this, true) +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createRequest = void 0; +const crypto = __nccwpck_require__(6113); +const debug = __nccwpck_require__(38237)('ali-oss'); +const _isString = __nccwpck_require__(65704); +const _isArray = __nccwpck_require__(44869); +const _isObject = __nccwpck_require__(33334); +const mime = __nccwpck_require__(29994); +const dateFormat = __nccwpck_require__(51512); +const copy = __nccwpck_require__(30952); +const path = __nccwpck_require__(71017); +const { encoder } = __nccwpck_require__(63240); +const { isIP } = __nccwpck_require__(42439); +const { setRegion } = __nccwpck_require__(16690); +const { getReqUrl } = __nccwpck_require__(86783); +const { isDingTalk } = __nccwpck_require__(15605); +function getHeader(headers, name) { + return headers[name] || headers[name.toLowerCase()]; +} +function delHeader(headers, name) { + delete headers[name]; + delete headers[name.toLowerCase()]; +} +function createRequest(params) { + let date = new Date(); + if (this.options.amendTimeSkewed) { + date = +new Date() + this.options.amendTimeSkewed; + } + const headers = { + 'x-oss-date': dateFormat(date, this.options.authorizationV4 ? "UTC:yyyymmdd'T'HHMMss'Z'" : "UTC:ddd, dd mmm yyyy HH:MM:ss 'GMT'") + }; + if (this.options.authorizationV4) { + headers['x-oss-content-sha256'] = 'UNSIGNED-PAYLOAD'; + } + if (typeof window !== 'undefined') { + headers['x-oss-user-agent'] = this.userAgent; + } + if (this.userAgent.includes('nodejs')) { + headers['User-Agent'] = this.userAgent; + } + if (this.options.isRequestPay) { + Object.assign(headers, { 'x-oss-request-payer': 'requester' }); + } + if (this.options.stsToken) { + headers['x-oss-security-token'] = this.options.stsToken; + } + copy(params.headers).to(headers); + if (!getHeader(headers, 'Content-Type')) { + if (params.mime && params.mime.indexOf('/') > 0) { + headers['Content-Type'] = params.mime; + } + else if (isDingTalk()) { + headers['Content-Type'] = 'application/octet-stream'; + } + else { + headers['Content-Type'] = mime.getType(params.mime || path.extname(params.object || '')); + } + } + if (!getHeader(headers, 'Content-Type')) { + delHeader(headers, 'Content-Type'); + } + if (params.content) { + if (!params.disabledMD5) { + if (!params.headers || !params.headers['Content-MD5']) { + headers['Content-MD5'] = crypto.createHash('md5').update(Buffer.from(params.content, 'utf8')).digest('base64'); + } + else { + headers['Content-MD5'] = params.headers['Content-MD5']; + } + } + if (!headers['Content-Length']) { + headers['Content-Length'] = params.content.length; + } + } + const { hasOwnProperty } = Object.prototype; + for (const k in headers) { + if (headers[k] && hasOwnProperty.call(headers, k)) { + headers[k] = encoder(String(headers[k]), this.options.headerEncoding); + } + } + const queries = {}; + if (_isString(params.subres)) { + queries[params.subres] = null; + } + else if (_isArray(params.subres)) { + params.subres.forEach(v => { + queries[v] = null; + }); + } + else if (_isObject(params.subres)) { + Object.entries(params.subres).forEach(v => { + queries[v[0]] = v[1] === '' ? null : v[1]; + }); + } + if (_isObject(params.query)) { + Object.entries(params.query).forEach(v => { + queries[v[0]] = v[1]; + }); + } + headers.authorization = this.options.authorizationV4 + ? 
this.authorizationV4(params.method, { + headers, + queries + }, params.bucket, params.object, params.additionalHeaders) + : this.authorization(params.method, this._getResource(params), params.subres, headers, this.options.headerEncoding); + // const url = this._getReqUrl(params); + if (isIP(this.options.endpoint.hostname)) { + const { region, internal, secure } = this.options; + const hostInfo = setRegion(region, internal, secure); + headers.host = `${params.bucket}.${hostInfo.host}`; + } + const url = getReqUrl.bind(this)(params); + debug('request %s %s, with headers %j, !!stream: %s', params.method, url, headers, !!params.stream); + const timeout = params.timeout || this.options.timeout; + const reqParams = { + method: params.method, + content: params.content, + stream: params.stream, + headers, + timeout, + writeStream: params.writeStream, + customResponse: params.customResponse, + ctx: params.ctx || this.ctx + }; + if (this.agent) { + reqParams.agent = this.agent; + } + if (this.httpsAgent) { + reqParams.httpsAgent = this.httpsAgent; } + reqParams.enableProxy = !!this.options.enableProxy; + reqParams.proxy = this.options.proxy ? this.options.proxy : null; + return { + url, + params: reqParams + }; +} +exports.createRequest = createRequest; - if (this[kResuming] && this[kNeedDrain] !== 2 && this[kBusy]) { - this[kNeedDrain] = 2 + +/***/ }), + +/***/ 61615: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.dataFix = void 0; +const isObject_1 = __nccwpck_require__(81269); +const TRUE = ['true', 'TRUE', '1', 1]; +const FALSE = ['false', 'FALSE', '0', 0]; +function dataFix(o, conf, finalKill) { + if (!isObject_1.isObject(o)) + return; + const { remove = [], rename = {}, camel = [], bool = [], lowerFirst = false } = conf; + // 删除不需要的数据 + remove.forEach(v => delete o[v]); + // 重命名 + Object.entries(rename).forEach(v => { + if (!o[v[0]]) + return; + if (o[v[1]]) + return; + o[v[1]] = o[v[0]]; + delete o[v[0]]; + }); + // 驼峰化 + camel.forEach(v => { + if (!o[v]) + return; + const afterKey = v.replace(/^(.)/, $0 => $0.toLowerCase()).replace(/-(\w)/g, (_, $1) => $1.toUpperCase()); + if (o[afterKey]) + return; + o[afterKey] = o[v]; + // todo 暂时兼容以前数据,不做删除 + // delete o[v]; + }); + // 转换值为布尔值 + bool.forEach(v => { + o[v] = fixBool(o[v]); + }); + // finalKill + if (typeof finalKill === 'function') { + finalKill(o); + } + // 首字母转小写 + fixLowerFirst(o, lowerFirst); + return dataFix; +} +exports.dataFix = dataFix; +function fixBool(value) { + if (!value) + return false; + if (TRUE.includes(value)) + return true; + return FALSE.includes(value) ? false : value; +} +function fixLowerFirst(o, lowerFirst) { + if (lowerFirst) { + Object.keys(o).forEach(key => { + const lowerK = key.replace(/^\w/, match => match.toLowerCase()); + if (typeof o[lowerK] === 'undefined') { + o[lowerK] = o[key]; + delete o[key]; + } + }); } +} - return this[kNeedDrain] < 2 - } - async [kClose] () { - // TODO: for H2 we need to gracefully flush the remaining enqueued - // request and close each stream. 
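`createRequest`, added above, stamps each outgoing request with an `x-oss-date` header (compact ISO8601 UTC when V4 signing is enabled) and, unless MD5 is disabled, a base64 `Content-MD5` of the body. The sketch below shows roughly how those two values come together; the body is hypothetical, and the date string hand-rolls what the bundled `dateformat` call with `"UTC:yyyymmdd'T'HHMMss'Z'"` produces.

```js
// Sketch of the Content-MD5 and x-oss-date values createRequest attaches to a request.
// The request body is hypothetical.
const crypto = require('crypto');

const body = Buffer.from('<Tagging></Tagging>', 'utf8'); // hypothetical request body

const contentMd5 = crypto.createHash('md5').update(body).digest('base64');

const now = new Date();
const pad = n => String(n).padStart(2, '0');
const xOssDate =
  `${now.getUTCFullYear()}${pad(now.getUTCMonth() + 1)}${pad(now.getUTCDate())}` +
  `T${pad(now.getUTCHours())}${pad(now.getUTCMinutes())}${pad(now.getUTCSeconds())}Z`;

console.log({
  'Content-MD5': contentMd5,
  'Content-Length': body.length,
  'x-oss-date': xOssDate,
  'x-oss-content-sha256': 'UNSIGNED-PAYLOAD' // set when authorizationV4 is enabled
});
```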
- return new Promise((resolve) => { - if (!this[kSize]) { - resolve(null) - } else { - this[kClosedResolve] = resolve - } - }) - } +/***/ }), - async [kDestroy] (err) { - return new Promise((resolve) => { - const requests = this[kQueue].splice(this[kPendingIdx]) - for (let i = 0; i < requests.length; i++) { - const request = requests[i] - errorRequest(this, request, err) - } +/***/ 97150: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - const callback = () => { - if (this[kClosedResolve]) { - // TODO (fix): Should we error here with ClientDestroyedError? - this[kClosedResolve]() - this[kClosedResolve] = null +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.deepCopyWith = exports.deepCopy = void 0; +const isBuffer_1 = __nccwpck_require__(55850); +exports.deepCopy = obj => { + if (obj === null || typeof obj !== 'object') { + return obj; + } + if (isBuffer_1.isBuffer(obj)) { + return obj.slice(); + } + const copy = Array.isArray(obj) ? [] : {}; + Object.keys(obj).forEach(key => { + copy[key] = exports.deepCopy(obj[key]); + }); + return copy; +}; +exports.deepCopyWith = (obj, customizer) => { + function deepCopyWithHelper(value, innerKey, innerObject) { + const result = customizer(value, innerKey, innerObject); + if (result !== undefined) + return result; + if (value === null || typeof value !== 'object') { + return value; } - resolve() - } + if (isBuffer_1.isBuffer(value)) { + return value.slice(); + } + const copy = Array.isArray(value) ? [] : {}; + Object.keys(value).forEach(k => { + copy[k] = deepCopyWithHelper(value[k], k, value); + }); + return copy; + } + if (customizer) { + return deepCopyWithHelper(obj, '', null); + } + else { + return exports.deepCopy(obj); + } +}; - if (this[kHTTP2Session] != null) { - util.destroy(this[kHTTP2Session], err) - this[kHTTP2Session] = null - this[kHTTP2SessionState] = null - } - if (!this[kSocket]) { - queueMicrotask(callback) - } else { - util.destroy(this[kSocket].on('close', callback), err) - } +/***/ }), - resume(this) - }) - } +/***/ 47563: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.encodeString = void 0; +const toString_1 = __importDefault(__nccwpck_require__(32931)); +function encodeString(str) { + const tempStr = toString_1.default(str); + return encodeURIComponent(tempStr).replace(/[!'()*]/g, c => `%${c.charCodeAt(0).toString(16).toUpperCase()}`); } +exports.encodeString = encodeString; -function onHttp2SessionError (err) { - assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID') - this[kSocket][kError] = err +/***/ }), - onError(this[kClient], err) -} +/***/ 63240: +/***/ ((__unused_webpack_module, exports) => { -function onHttp2FrameError (type, code, id) { - const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`) +"use strict"; - if (id === 0) { - this[kSocket][kError] = err - onError(this[kClient], err) - } +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.encoder = void 0; +function encoder(str, encoding = 'utf-8') { + if (encoding === 'utf-8') + return str; + return Buffer.from(str).toString('latin1'); } +exports.encoder = encoder; -function onHttp2SessionEnd () { - util.destroy(this, new SocketError('other side closed')) - util.destroy(this[kSocket], new SocketError('other side closed')) -} -function onHTTP2GoAway (code) { - const client = this[kClient] - const err = new InformationalError(`HTTP/2: "GOAWAY" frame received with code ${code}`) - client[kSocket] = null - client[kHTTP2Session] = null +/***/ }), - if (client.destroyed) { - assert(this[kPending] === 0) +/***/ 85708: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // Fail entire queue. - const requests = client[kQueue].splice(client[kRunningIdx]) - for (let i = 0; i < requests.length; i++) { - const request = requests[i] - errorRequest(this, request, err) +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.formatInventoryConfig = void 0; +const dataFix_1 = __nccwpck_require__(61615); +const isObject_1 = __nccwpck_require__(81269); +const isArray_1 = __nccwpck_require__(71954); +const formatObjKey_1 = __nccwpck_require__(72095); +function formatInventoryConfig(inventoryConfig, toArray = false) { + if (toArray && isObject_1.isObject(inventoryConfig)) + inventoryConfig = [inventoryConfig]; + if (isArray_1.isArray(inventoryConfig)) { + inventoryConfig = inventoryConfig.map(formatFn); } - } else if (client[kRunning] > 0) { - // Fail head of pipeline. - const request = client[kQueue][client[kRunningIdx]] - client[kQueue][client[kRunningIdx]++] = null + else { + inventoryConfig = formatFn(inventoryConfig); + } + return inventoryConfig; +} +exports.formatInventoryConfig = formatInventoryConfig; +function formatFn(_) { + dataFix_1.dataFix(_, { bool: ['IsEnabled'] }, conf => { + var _a, _b; + // prefix + conf.prefix = conf.Filter.Prefix; + delete conf.Filter; + // OSSBucketDestination + conf.OSSBucketDestination = conf.Destination.OSSBucketDestination; + // OSSBucketDestination.rolename + conf.OSSBucketDestination.rolename = conf.OSSBucketDestination.RoleArn.replace(/.*\//, ''); + delete conf.OSSBucketDestination.RoleArn; + // OSSBucketDestination.bucket + conf.OSSBucketDestination.bucket = conf.OSSBucketDestination.Bucket.replace(/.*:::/, ''); + delete conf.OSSBucketDestination.Bucket; + delete conf.Destination; + // frequency + conf.frequency = conf.Schedule.Frequency; + delete conf.Schedule.Frequency; + // optionalFields + if (((_a = conf === null || conf === void 0 ? 
void 0 : conf.OptionalFields) === null || _a === void 0 ? void 0 : _a.Field) && !isArray_1.isArray((_b = conf.OptionalFields) === null || _b === void 0 ? void 0 : _b.Field)) + conf.OptionalFields.Field = [conf.OptionalFields.Field]; + }); + // firstLowerCase + _ = formatObjKey_1.formatObjKey(_, 'firstLowerCase', { exclude: ['OSSBucketDestination', 'SSE-OSS', 'SSE-KMS'] }); + return _; +} - errorRequest(client, request, err) - } - client[kPendingIdx] = client[kRunningIdx] +/***/ }), - assert(client[kRunning] === 0) +/***/ 72095: +/***/ ((__unused_webpack_module, exports) => { - client.emit('disconnect', - client[kUrl], - [client], - err - ) +"use strict"; - resume(client) +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.formatObjKey = void 0; +function formatObjKey(obj, type, options) { + if (obj === null || typeof obj !== 'object') { + return obj; + } + let o; + if (Array.isArray(obj)) { + o = []; + for (let i = 0; i < obj.length; i++) { + o.push(formatObjKey(obj[i], type, options)); + } + } + else { + o = {}; + Object.keys(obj).forEach(key => { + o[handelFormat(key, type, options)] = formatObjKey(obj[key], type, options); + }); + } + return o; +} +exports.formatObjKey = formatObjKey; +function handelFormat(key, type, options) { + if (options && options.exclude && options.exclude.includes(key)) + return key; + if (type === 'firstUpperCase') { + key = key.replace(/^./, (_) => _.toUpperCase()); + } + else if (type === 'firstLowerCase') { + key = key.replace(/^./, (_) => _.toLowerCase()); + } + return key; } -const constants = __nccwpck_require__(953) -const createRedirectInterceptor = __nccwpck_require__(8861) -const EMPTY_BUF = Buffer.alloc(0) -async function lazyllhttp () { - const llhttpWasmData = process.env.JEST_WORKER_ID ? __nccwpck_require__(1145) : undefined +/***/ }), - let mod - try { - mod = await WebAssembly.compile(Buffer.from(__nccwpck_require__(5627), 'base64')) - } catch (e) { - /* istanbul ignore next */ +/***/ 97115: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // We could check if the error was caused by the simd option not - // being enabled, but the occurring of this other error - // * https://github.com/emscripten-core/emscripten/issues/11495 - // got me to remove that check to avoid breaking Node 12. - mod = await WebAssembly.compile(Buffer.from(llhttpWasmData || __nccwpck_require__(1145), 'base64')) - } +"use strict"; - return await WebAssembly.instantiate(mod, { - env: { - /* eslint-disable camelcase */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.formatTag = void 0; +const isObject_1 = __nccwpck_require__(81269); +function formatTag(obj) { + if (obj.Tagging !== undefined) { + obj = obj.Tagging.TagSet.Tag; + } + else if (obj.TagSet !== undefined) { + obj = obj.TagSet.Tag; + } + else if (obj.Tag !== undefined) { + obj = obj.Tag; + } + obj = obj && isObject_1.isObject(obj) ? 
[obj] : obj || []; + const tag = {}; + obj.forEach(item => { + tag[item.Key] = item.Value; + }); + return tag; +} +exports.formatTag = formatTag; - wasm_on_url: (p, at, len) => { - /* istanbul ignore next */ - return 0 - }, - wasm_on_status: (p, at, len) => { - assert.strictEqual(currentParser.ptr, p) - const start = at - currentBufferPtr + currentBufferRef.byteOffset - return currentParser.onStatus(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 - }, - wasm_on_message_begin: (p) => { - assert.strictEqual(currentParser.ptr, p) - return currentParser.onMessageBegin() || 0 - }, - wasm_on_header_field: (p, at, len) => { - assert.strictEqual(currentParser.ptr, p) - const start = at - currentBufferPtr + currentBufferRef.byteOffset - return currentParser.onHeaderField(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 - }, - wasm_on_header_value: (p, at, len) => { - assert.strictEqual(currentParser.ptr, p) - const start = at - currentBufferPtr + currentBufferRef.byteOffset - return currentParser.onHeaderValue(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 - }, - wasm_on_headers_complete: (p, statusCode, upgrade, shouldKeepAlive) => { - assert.strictEqual(currentParser.ptr, p) - return currentParser.onHeadersComplete(statusCode, Boolean(upgrade), Boolean(shouldKeepAlive)) || 0 - }, - wasm_on_body: (p, at, len) => { - assert.strictEqual(currentParser.ptr, p) - const start = at - currentBufferPtr + currentBufferRef.byteOffset - return currentParser.onBody(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 - }, - wasm_on_message_complete: (p) => { - assert.strictEqual(currentParser.ptr, p) - return currentParser.onMessageComplete() || 0 - } - /* eslint-enable camelcase */ - } - }) -} +/***/ }), -let llhttpInstance = null -let llhttpPromise = lazyllhttp() -llhttpPromise.catch() +/***/ 98423: +/***/ ((__unused_webpack_module, exports) => { -let currentParser = null -let currentBufferRef = null -let currentBufferSize = 0 -let currentBufferPtr = null +"use strict"; -const TIMEOUT_HEADERS = 1 -const TIMEOUT_BODY = 2 -const TIMEOUT_IDLE = 3 +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getStandardRegion = void 0; +function getStandardRegion(str) { + return str.replace(/^oss-/g, ''); +} +exports.getStandardRegion = getStandardRegion; -class Parser { - constructor (client, socket, { exports }) { - assert(Number.isFinite(client[kMaxHeadersSize]) && client[kMaxHeadersSize] > 0) - this.llhttp = exports - this.ptr = this.llhttp.llhttp_alloc(constants.TYPE.RESPONSE) - this.client = client - this.socket = socket - this.timeout = null - this.timeoutValue = null - this.timeoutType = null - this.statusCode = null - this.statusText = '' - this.upgrade = false - this.headers = [] - this.headersSize = 0 - this.headersMaxSize = client[kMaxHeadersSize] - this.shouldKeepAlive = false - this.paused = false - this.resume = this.resume.bind(this) +/***/ }), - this.bytesRead = 0 +/***/ 70365: +/***/ ((__unused_webpack_module, exports) => { - this.keepAlive = '' - this.contentLength = '' - this.connection = '' - this.maxResponseSize = client[kMaxResponseSize] - } +"use strict"; - setTimeout (value, type) { - this.timeoutType = type - if (value !== this.timeoutValue) { - timers.clearTimeout(this.timeout) - if (value) { - this.timeout = timers.setTimeout(onParserTimeout, value, this) - // istanbul ignore else: only for jest - if (this.timeout.unref) { - this.timeout.unref() +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getStrBytesCount = 
void 0; +function getStrBytesCount(str) { + let bytesCount = 0; + for (let i = 0; i < str.length; i++) { + const c = str.charAt(i); + if (/^[\u00-\uff]$/.test(c)) { + bytesCount += 1; + } + else { + bytesCount += 2; } - } else { - this.timeout = null - } - this.timeoutValue = value - } else if (this.timeout) { - // istanbul ignore else: only for jest - if (this.timeout.refresh) { - this.timeout.refresh() - } } - } + return bytesCount; +} +exports.getStrBytesCount = getStrBytesCount; - resume () { - if (this.socket.destroyed || !this.paused) { - return - } - assert(this.ptr != null) - assert(currentParser == null) +/***/ }), - this.llhttp.llhttp_resume(this.ptr) +/***/ 71954: +/***/ ((__unused_webpack_module, exports) => { - assert(this.timeoutType === TIMEOUT_BODY) - if (this.timeout) { - // istanbul ignore else: only for jest - if (this.timeout.refresh) { - this.timeout.refresh() - } - } +"use strict"; - this.paused = false - this.execute(this.socket.read() || EMPTY_BUF) // Flush parser. - this.readMore() - } +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isArray = void 0; +exports.isArray = obj => { + return Object.prototype.toString.call(obj) === '[object Array]'; +}; - readMore () { - while (!this.paused && this.ptr) { - const chunk = this.socket.read() - if (chunk === null) { - break - } - this.execute(chunk) - } - } - execute (data) { - assert(this.ptr != null) - assert(currentParser == null) - assert(!this.paused) +/***/ }), - const { socket, llhttp } = this +/***/ 55850: +/***/ ((__unused_webpack_module, exports) => { - if (data.length > currentBufferSize) { - if (currentBufferPtr) { - llhttp.free(currentBufferPtr) - } - currentBufferSize = Math.ceil(data.length / 4096) * 4096 - currentBufferPtr = llhttp.malloc(currentBufferSize) - } +"use strict"; - new Uint8Array(llhttp.memory.buffer, currentBufferPtr, currentBufferSize).set(data) +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isBuffer = void 0; +function isBuffer(obj) { + return Buffer.isBuffer(obj); +} +exports.isBuffer = isBuffer; - // Call `execute` on the wasm parser. - // We pass the `llhttp_parser` pointer address, the pointer address of buffer view data, - // and finally the length of bytes to parse. - // The return value is an error code or `constants.ERROR.OK`. 
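`getStrBytesCount`, added just above, approximates a string's byte length by counting characters in the Latin-1 range as one byte and everything else as two. A tiny sketch of the same idea with hypothetical inputs (the character-class escape is written out in full here).

```js
// Approximate byte counting as in getStrBytesCount: Latin-1 code units count as 1, others as 2.
// Input strings are hypothetical.
function strBytesCount(str) {
  let bytesCount = 0;
  for (let i = 0; i < str.length; i++) {
    bytesCount += /^[\u0000-\u00ff]$/.test(str.charAt(i)) ? 1 : 2;
  }
  return bytesCount;
}

console.log(strBytesCount('backup')); // 6
console.log(strBytesCount('备份'));    // 4: two characters outside the Latin-1 range
```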
- try { - let ret - try { - currentBufferRef = data - currentParser = this - ret = llhttp.llhttp_execute(this.ptr, currentBufferPtr, data.length) - /* eslint-disable-next-line no-useless-catch */ - } catch (err) { - /* istanbul ignore next: difficult to make a test case for */ - throw err - } finally { - currentParser = null - currentBufferRef = null - } +/***/ }), - const offset = llhttp.llhttp_get_error_pos(this.ptr) - currentBufferPtr +/***/ 15605: +/***/ ((__unused_webpack_module, exports) => { - if (ret === constants.ERROR.PAUSED_UPGRADE) { - this.onUpgrade(data.slice(offset)) - } else if (ret === constants.ERROR.PAUSED) { - this.paused = true - socket.unshift(data.slice(offset)) - } else if (ret !== constants.ERROR.OK) { - const ptr = llhttp.llhttp_get_error_reason(this.ptr) - let message = '' - /* istanbul ignore else: difficult to make a test case for */ - if (ptr) { - const len = new Uint8Array(llhttp.memory.buffer, ptr).indexOf(0) - message = - 'Response does not match the HTTP/1.1 protocol (' + - Buffer.from(llhttp.memory.buffer, ptr, len).toString() + - ')' - } - throw new HTTPParserError(message, constants.ERROR[ret], data.slice(offset)) - } - } catch (err) { - util.destroy(socket, err) +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isDingTalk = void 0; +function isDingTalk() { + if (process.browser && window.navigator.userAgent.toLowerCase().includes('aliapp(dingtalk')) { + return true; } - } + return false; +} +exports.isDingTalk = isDingTalk; - destroy () { - assert(this.ptr != null) - assert(currentParser == null) - this.llhttp.llhttp_free(this.ptr) - this.ptr = null +/***/ }), - timers.clearTimeout(this.timeout) - this.timeout = null - this.timeoutValue = null - this.timeoutType = null +/***/ 77169: +/***/ ((__unused_webpack_module, exports) => { - this.paused = false - } +"use strict"; - onStatus (buf) { - this.statusText = buf.toString() - } +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isFile = void 0; +exports.isFile = obj => { + return typeof File !== 'undefined' && obj instanceof File; +}; - onMessageBegin () { - const { socket, client } = this - /* istanbul ignore next: difficult to make a test case for */ - if (socket.destroyed) { - return -1 - } +/***/ }), - const request = client[kQueue][client[kRunningIdx]] - if (!request) { - return -1 - } - } +/***/ 18818: +/***/ ((__unused_webpack_module, exports) => { - onHeaderField (buf) { - const len = this.headers.length +"use strict"; - if ((len & 1) === 0) { - this.headers.push(buf) - } else { - this.headers[len - 1] = Buffer.concat([this.headers[len - 1], buf]) - } +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isFunction = void 0; +exports.isFunction = (v) => { + return typeof v === 'function'; +}; - this.trackHeader(buf.length) - } - onHeaderValue (buf) { - let len = this.headers.length +/***/ }), - if ((len & 1) === 1) { - this.headers.push(buf) - len += 1 - } else { - this.headers[len - 1] = Buffer.concat([this.headers[len - 1], buf]) - } +/***/ 42439: +/***/ ((__unused_webpack_module, exports) => { - const key = this.headers[len - 2] - if (key.length === 10 && key.toString().toLowerCase() === 'keep-alive') { - this.keepAlive += buf.toString() - } else if (key.length === 10 && key.toString().toLowerCase() === 'connection') { - this.connection += buf.toString() - } else if (key.length === 14 && key.toString().toLowerCase() === 'content-length') { - this.contentLength += buf.toString() - } +"use strict"; - 
this.trackHeader(buf.length) - } +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isIP = void 0; +// it provide commont methods for node and browser , we will add more solutions later in this file +/** + * Judge isIP include ipv4 or ipv6 + * @param {String} options + * @return {Array} the multipart uploads + */ +exports.isIP = host => { + const ipv4Regex = /^(25[0-5]|2[0-4]\d|[0-1]?\d?\d)(\.(25[0-5]|2[0-4]\d|[0-1]?\d?\d)){3}$/; + const ipv6Regex = /^\s*((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(%.+)?\s*$/; + return ipv4Regex.test(host) || ipv6Regex.test(host); +}; - trackHeader (len) { - this.headersSize += len - if (this.headersSize >= this.headersMaxSize) { - util.destroy(this.socket, new HeadersOverflowError()) - } - } - onUpgrade (head) { - const { upgrade, client, socket, headers, statusCode } = this +/***/ }), - assert(upgrade) +/***/ 81269: +/***/ ((__unused_webpack_module, exports) => { - const request = client[kQueue][client[kRunningIdx]] - assert(request) +"use strict"; - assert(!socket.destroyed) - assert(socket === client[kSocket]) - assert(!this.paused) - assert(request.upgrade || request.method === 'CONNECT') +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isObject = void 0; +exports.isObject = obj => { + return Object.prototype.toString.call(obj) === '[object Object]'; +}; - this.statusCode = null - this.statusText = '' - this.shouldKeepAlive = null - assert(this.headers.length % 2 === 0) - this.headers = [] - this.headersSize = 0 +/***/ }), - socket.unshift(head) +/***/ 75285: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - socket[kParser].destroy() - socket[kParser] = null +"use strict"; - socket[kClient] = null - socket[kError] = null - socket - .removeListener('error', onSocketError) - .removeListener('readable', onSocketReadable) - .removeListener('end', onSocketEnd) - .removeListener('close', onSocketClose) +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.lowercaseKeyHeader = void 0; +const isObject_1 = __nccwpck_require__(81269); +function lowercaseKeyHeader(headers) { + const lowercaseHeader = {}; + if (isObject_1.isObject(headers)) { + Object.keys(headers).forEach(key => { + lowercaseHeader[key.toLowerCase()] = headers[key]; + }); + } + return lowercaseHeader; +} +exports.lowercaseKeyHeader = lowercaseKeyHeader; - client[kSocket] = null - client[kQueue][client[kRunningIdx]++] = null - client.emit('disconnect', client[kUrl], [client], new 
InformationalError('upgrade')) - try { - request.onUpgrade(statusCode, headers, socket) - } catch (err) { - util.destroy(socket, err) - } +/***/ }), - resume(client) - } +/***/ 96846: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - onHeadersComplete (statusCode, upgrade, shouldKeepAlive) { - const { client, socket, headers, statusText } = this +"use strict"; - /* istanbul ignore next: difficult to make a test case for */ - if (socket.destroyed) { - return -1 +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.obj2xml = void 0; +const formatObjKey_1 = __nccwpck_require__(72095); +function type(params) { + return Object.prototype.toString + .call(params) + .replace(/(.*? |])/g, '') + .toLowerCase(); +} +function obj2xml(obj, options) { + let s = ''; + if (options && options.headers) { + s = '\n'; + } + if (options && options.firstUpperCase) { + obj = formatObjKey_1.formatObjKey(obj, 'firstUpperCase'); + } + if (type(obj) === 'object') { + Object.keys(obj).forEach(key => { + // filter undefined or null + if (type(obj[key]) !== 'undefined' && type(obj[key]) !== 'null') { + if (type(obj[key]) === 'string' || type(obj[key]) === 'number') { + s += `<${key}>${obj[key]}`; + } + else if (type(obj[key]) === 'object') { + s += `<${key}>${obj2xml(obj[key])}`; + } + else if (type(obj[key]) === 'array') { + s += obj[key].map(keyChild => `<${key}>${obj2xml(keyChild)}`).join(''); + } + else { + s += `<${key}>${obj[key].toString()}`; + } + } + }); } + else { + s += obj.toString(); + } + return s; +} +exports.obj2xml = obj2xml; - const request = client[kQueue][client[kRunningIdx]] - /* istanbul ignore next: difficult to make a test case for */ - if (!request) { - return -1 - } +/***/ }), - assert(!this.upgrade) - assert(this.statusCode < 200) +/***/ 72577: +/***/ ((__unused_webpack_module, exports) => { - if (statusCode === 100) { - util.destroy(socket, new SocketError('bad response', util.getSocketInfo(socket))) - return -1 - } +"use strict"; - /* this can only happen if server is misbehaving */ - if (upgrade && !request.upgrade) { - util.destroy(socket, new SocketError('bad upgrade', util.getSocketInfo(socket))) - return -1 +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.omit = void 0; +function omit(originalObject, keysToOmit) { + const cloneObject = Object.assign({}, originalObject); + for (const path of keysToOmit) { + delete cloneObject[path]; } + return cloneObject; +} +exports.omit = omit; - assert.strictEqual(this.timeoutType, TIMEOUT_HEADERS) - this.statusCode = statusCode - this.shouldKeepAlive = ( - shouldKeepAlive || - // Override llhttp value which does not allow keepAlive for HEAD. - (request.method === 'HEAD' && !socket[kReset] && this.connection.toLowerCase() === 'keep-alive') - ) +/***/ }), - if (this.statusCode >= 200) { - const bodyTimeout = request.bodyTimeout != null - ? 
request.bodyTimeout - : client[kBodyTimeout] - this.setTimeout(bodyTimeout, TIMEOUT_BODY) - } else if (this.timeout) { - // istanbul ignore else: only for jest - if (this.timeout.refresh) { - this.timeout.refresh() - } - } +/***/ 30061: +/***/ ((__unused_webpack_module, exports) => { - if (request.method === 'CONNECT') { - assert(client[kRunning] === 1) - this.upgrade = true - return 2 - } +"use strict"; - if (upgrade) { - assert(client[kRunning] === 1) - this.upgrade = true - return 2 +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.policy2Str = void 0; +function policy2Str(policy) { + let policyStr; + if (policy) { + if (typeof policy === 'string') { + try { + policyStr = JSON.stringify(JSON.parse(policy)); + } + catch (err) { + throw new Error(`Policy string is not a valid JSON: ${err.message}`); + } + } + else { + policyStr = JSON.stringify(policy); + } } + return policyStr; +} +exports.policy2Str = policy2Str; - assert(this.headers.length % 2 === 0) - this.headers = [] - this.headersSize = 0 - if (this.shouldKeepAlive && client[kPipelining]) { - const keepAliveTimeout = this.keepAlive ? util.parseKeepAliveTimeout(this.keepAlive) : null +/***/ }), - if (keepAliveTimeout != null) { - const timeout = Math.min( - keepAliveTimeout - client[kKeepAliveTimeoutThreshold], - client[kKeepAliveMaxTimeout] - ) - if (timeout <= 0) { - socket[kReset] = true - } else { - client[kKeepAliveTimeoutValue] = timeout - } - } else { - client[kKeepAliveTimeoutValue] = client[kKeepAliveDefaultTimeout] - } - } else { - // Stop more requests from being dispatched. - socket[kReset] = true - } +/***/ 41514: +/***/ ((__unused_webpack_module, exports) => { - const pause = request.onHeaders(statusCode, headers, this.resume, statusText) === false +"use strict"; - if (request.aborted) { - return -1 - } +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.retry = void 0; +function retry(func, retryMax, config = {}) { + let retryNum = 0; + const { retryDelay = 500, errorHandler = () => true } = config; + const funcR = (...arg) => { + return new Promise((resolve, reject) => { + func(...arg) + .then(result => { + retryNum = 0; + resolve(result); + }) + .catch(err => { + if (retryNum < retryMax && errorHandler(err)) { + retryNum++; + setTimeout(() => { + resolve(funcR(...arg)); + }, retryDelay); + } + else { + retryNum = 0; + reject(err); + } + }); + }); + }; + return funcR; +} +exports.retry = retry; - if (request.method === 'HEAD') { - return 1 - } - if (statusCode < 200) { - return 1 - } +/***/ }), - if (socket[kBlocking]) { - socket[kBlocking] = false - resume(client) +/***/ 16690: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.setRegion = void 0; +const url_1 = __importDefault(__nccwpck_require__(57310)); +const checkConfigValid_1 = __nccwpck_require__(27324); +function setRegion(region, internal = false, secure = false) { + checkConfigValid_1.checkConfigValid(region, 'region'); + const protocol = secure ? 'https://' : 'http://'; + let suffix = internal ? 
'-internal.aliyuncs.com' : '.aliyuncs.com'; + const prefix = 'vpc100-oss-cn-'; + // aliyun VPC region: https://help.aliyun.com/knowledge_detail/38740.html + if (region.substr(0, prefix.length) === prefix) { + suffix = '.aliyuncs.com'; } + return url_1.default.parse(protocol + region + suffix); +} +exports.setRegion = setRegion; - return pause ? constants.ERROR.PAUSED : 0 - } - onBody (buf) { - const { client, socket, statusCode, maxResponseSize } = this +/***/ }), - if (socket.destroyed) { - return -1 - } +/***/ 84622: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - const request = client[kQueue][client[kRunningIdx]] - assert(request) +"use strict"; - assert.strictEqual(this.timeoutType, TIMEOUT_BODY) - if (this.timeout) { - // istanbul ignore else: only for jest - if (this.timeout.refresh) { - this.timeout.refresh() - } +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.checkCredentials = exports.setSTSToken = void 0; +const formatObjKey_1 = __nccwpck_require__(72095); +async function setSTSToken() { + if (!this.options) + this.options = {}; + const now = new Date(); + if (this.stsTokenFreshTime) { + if (+now - this.stsTokenFreshTime >= this.options.refreshSTSTokenInterval) { + this.stsTokenFreshTime = now; + let credentials = await this.options.refreshSTSToken(); + credentials = formatObjKey_1.formatObjKey(credentials, 'firstLowerCase'); + if (credentials.securityToken) { + credentials.stsToken = credentials.securityToken; + } + checkCredentials(credentials); + Object.assign(this.options, credentials); + } + } + else { + this.stsTokenFreshTime = now; } + return null; +} +exports.setSTSToken = setSTSToken; +function checkCredentials(obj) { + const stsTokenKey = ['accessKeySecret', 'accessKeyId', 'stsToken']; + const objKeys = Object.keys(obj); + stsTokenKey.forEach(_ => { + if (!objKeys.find(key => key === _)) { + throw Error(`refreshSTSToken must return contains ${_}`); + } + }); +} +exports.checkCredentials = checkCredentials; - assert(statusCode >= 200) - if (maxResponseSize > -1 && this.bytesRead + buf.length > maxResponseSize) { - util.destroy(socket, new ResponseExceededMaxSizeError()) - return -1 - } +/***/ }), - this.bytesRead += buf.length +/***/ 30628: +/***/ ((module) => { - if (request.onData(buf) === false) { - return constants.ERROR.PAUSED - } +/* istanbul ignore next */ +module.exports = function (OssClient) { + /* istanbul ignore next */ + // function objectRequestParams(method, name, options) { + // options = options || {}; + // name = this._objectName(name); + // const authResource = `/${this.options.bucket}/${name}`; + // const params = { + // name, + // method, + // host: this.options.imageHost, + // resource: `/${name}`, + // timeout: options.timeout, + // authResource, + // ctx: options.ctx + // }; + // if (options.headers) { + // params.headers = options.headers; + // } + // return params; + // } + + function ImageClient(options) { + if (!(this instanceof ImageClient)) { + return new ImageClient(options); + } + if (!options.bucket) { + throw new Error('require bucket for image service instance'); + } + if (!options.imageHost) { + throw new Error('require imageHost for image service instance'); + } + + options.endpoint = options.imageHost; + this.ossClient = new OssClient(options); + this.ossClient.options.imageHost = options.imageHost; + // this.ossClient._objectRequestParams = objectRequestParams; } - onMessageComplete () { - const { client, socket, statusCode, upgrade, headers, contentLength, bytesRead, 
shouldKeepAlive } = this + /** + * Image operations + */ - if (socket.destroyed && (!statusCode || shouldKeepAlive)) { - return -1 - } + ImageClient.prototype.get = async function get(name, file, options) { + return await this.ossClient.get(name, file, options); + }; - if (upgrade) { - return - } + ImageClient.prototype.getStream = async function getStream(name, options) { + return await this.ossClient.getStream(name, options); + }; - const request = client[kQueue][client[kRunningIdx]] - assert(request) + ImageClient.prototype.getExif = async function getExif(name, options) { + const params = this.ossClient._objectRequestParams('GET', `${name}@exif`, options); + params.successStatuses = [200]; - assert(statusCode >= 100) + let result = await this.ossClient.request(params); + result = await this._parseResponse(result); + return { + res: result.res, + data: result.data + }; + }; - this.statusCode = null - this.statusText = '' - this.bytesRead = 0 - this.contentLength = '' - this.keepAlive = '' - this.connection = '' + ImageClient.prototype.getInfo = async function getInfo(name, options) { + const params = this.ossClient._objectRequestParams('GET', `${name}@infoexif`, options); + params.successStatuses = [200]; - assert(this.headers.length % 2 === 0) - this.headers = [] - this.headersSize = 0 + let result = await this.ossClient.request(params); + result = await this._parseResponse(result); + return { + res: result.res, + data: result.data + }; + }; - if (statusCode < 200) { - return - } + ImageClient.prototype.putStyle = async function putStyle(styleName, style, options) { + const params = this.ossClient._objectRequestParams('PUT', `/?style&styleName=${styleName}`, options); + params.successStatuses = [200]; + params.content = `${'\n`; - /* istanbul ignore next: should be handled by llhttp? */ - if (request.method !== 'HEAD' && contentLength && bytesRead !== parseInt(contentLength, 10)) { - util.destroy(socket, new ResponseContentLengthMismatchError()) - return -1 - } + let result = await this.ossClient.request(params); + result = await this._parseResponse(result); + return { + res: result.res, + data: result.data + }; + }; - request.onComplete(headers) + ImageClient.prototype.getStyle = async function getStyle(styleName, options) { + const params = this.ossClient._objectRequestParams('GET', `/?style&styleName=${styleName}`, options); + params.successStatuses = [200]; - client[kQueue][client[kRunningIdx]++] = null + let result = await this.ossClient.request(params); + result = await this._parseResponse(result); + return { + res: result.res, + data: result.data + }; + }; - if (socket[kWriting]) { - assert.strictEqual(client[kRunning], 0) - // Response completed before request. - util.destroy(socket, new InformationalError('reset')) - return constants.ERROR.PAUSED - } else if (!shouldKeepAlive) { - util.destroy(socket, new InformationalError('reset')) - return constants.ERROR.PAUSED - } else if (socket[kReset] && client[kRunning] === 0) { - // Destroy socket once all requests have completed. - // The request at the tail of the pipeline is the one - // that requested reset and no further requests should - // have been queued since then. - util.destroy(socket, new InformationalError('reset')) - return constants.ERROR.PAUSED - } else if (client[kPipelining] === 1) { - // We must wait a full event loop cycle to reuse this socket to make sure - // that non-spec compliant servers are not closing the connection even if they - // said they won't. 
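The `retry` helper added earlier wraps an async function so failed calls are re-attempted up to `retryMax` times, with a configurable delay and an `errorHandler` deciding whether an error is retryable. A usage sketch follows; the helper is restated so the snippet is self-contained, and `uploadOnce` plus the ECONNRESET check are hypothetical stand-ins.

```js
// Usage sketch of the retry helper (restated from the bundle above).
// uploadOnce and the retryable-error check are hypothetical.
function retry(func, retryMax, config = {}) {
  const { retryDelay = 500, errorHandler = () => true } = config;
  let retryNum = 0;
  const funcR = (...args) =>
    new Promise((resolve, reject) => {
      func(...args)
        .then(result => {
          retryNum = 0;
          resolve(result);
        })
        .catch(err => {
          if (retryNum < retryMax && errorHandler(err)) {
            retryNum++;
            setTimeout(() => resolve(funcR(...args)), retryDelay);
          } else {
            retryNum = 0;
            reject(err);
          }
        });
    });
  return funcR;
}

async function uploadOnce(url) {
  // hypothetical single attempt that may throw on transient network errors
  return `uploaded ${url}`;
}

const uploadWithRetry = retry(uploadOnce, 3, {
  retryDelay: 1000,                               // wait 1s between attempts
  errorHandler: err => err.code === 'ECONNRESET'  // only treat connection resets as retryable
});

uploadWithRetry('sd3demo.jpg').then(console.log).catch(console.error);
```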
- setImmediate(resume, client) - } else { - resume(client) - } - } -} + ImageClient.prototype.listStyle = async function listStyle(options) { + const params = this.ossClient._objectRequestParams('GET', '/?style', options); + params.successStatuses = [200]; -function onParserTimeout (parser) { - const { socket, timeoutType, client } = parser + let result = await this.ossClient.request(params); + result = await this._parseResponse(result); + return { + res: result.res, + data: result.data.Style + }; + }; - /* istanbul ignore else */ - if (timeoutType === TIMEOUT_HEADERS) { - if (!socket[kWriting] || socket.writableNeedDrain || client[kRunning] > 1) { - assert(!parser.paused, 'cannot be paused while waiting for headers') - util.destroy(socket, new HeadersTimeoutError()) - } - } else if (timeoutType === TIMEOUT_BODY) { - if (!parser.paused) { - util.destroy(socket, new BodyTimeoutError()) - } - } else if (timeoutType === TIMEOUT_IDLE) { - assert(client[kRunning] === 0 && client[kKeepAliveTimeoutValue]) - util.destroy(socket, new InformationalError('socket idle timeout')) - } -} + ImageClient.prototype.deleteStyle = async function deleteStyle(styleName, options) { + const params = this.ossClient._objectRequestParams('DELETE', `/?style&styleName=${styleName}`, options); + params.successStatuses = [204]; -function onSocketReadable () { - const { [kParser]: parser } = this - if (parser) { - parser.readMore() - } -} + const result = await this.ossClient.request(params); + return { + res: result.res + }; + }; + + ImageClient.prototype.signatureUrl = function signatureUrl(name) { + return this.ossClient.signatureUrl(name); + }; + + ImageClient.prototype._parseResponse = async function _parseResponse(result) { + const str = result.data.toString(); + const type = result.res.headers['content-type']; + + if (type === 'application/json') { + const data = JSON.parse(str); + result.data = {}; + if (data) { + Object.keys(data).forEach(key => { + result.data[key] = parseFloat(data[key].value, 10) || data[key].value; + }); + } + } else if (type === 'application/xml') { + result.data = await this.ossClient.parseXML(str); + } + return result; + }; -function onSocketError (err) { - const { [kClient]: client, [kParser]: parser } = this + return ImageClient; +}; - assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID') - if (client[kHTTPConnVersion] !== 'h2') { - // On Mac OS, we get an ECONNRESET even if there is a full body to be forwarded - // to the user. - if (err.code === 'ECONNRESET' && parser.statusCode && !parser.shouldKeepAlive) { - // We treat all incoming data so for as a valid response. - parser.onMessageComplete() - return - } - } +/***/ }), - this[kError] = err +/***/ 40107: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - onError(this[kClient], err) -} +const fs = __nccwpck_require__(57147); +const is = __nccwpck_require__(65565); +const util = __nccwpck_require__(73837); +const path = __nccwpck_require__(71017); +const mime = __nccwpck_require__(29994); +const { isFile } = __nccwpck_require__(77169); +const { isArray } = __nccwpck_require__(71954); +const { isBuffer } = __nccwpck_require__(55850); +const { retry } = __nccwpck_require__(41514); -function onError (client, err) { - if ( - client[kRunning] === 0 && - err.code !== 'UND_ERR_INFO' && - err.code !== 'UND_ERR_SOCKET' - ) { - // Error is not caused by running request and not a recoverable - // socket error. 
+const proto = exports; - assert(client[kPendingIdx] === client[kRunningIdx]) +/** + * Multipart operations + */ - const requests = client[kQueue].splice(client[kRunningIdx]) - for (let i = 0; i < requests.length; i++) { - const request = requests[i] - errorRequest(client, request, err) - } - assert(client[kSize] === 0) +/** + * Upload a file to OSS using multipart uploads + * @param {String} name + * @param {String|File|Buffer} file + * @param {Object} options + * {Object} options.callback The callback parameter is composed of a JSON string encoded in Base64 + * {String} options.callback.url the OSS sends a callback request to this URL + * {String} options.callback.host The host header value for initiating callback requests + * {String} options.callback.body The value of the request body when a callback is initiated + * {String} options.callback.contentType The Content-Type of the callback requests initiatiated + * {Object} options.callback.customValue Custom parameters are a map of key-values, e.g: + * customValue = { + * key1: 'value1', + * key2: 'value2' + * } + */ +proto.multipartUpload = async function multipartUpload(name, file, options) { + this.resetCancelFlag(); + options = options || {}; + if (options.checkpoint && options.checkpoint.uploadId) { + return await this._resumeMultipart(options.checkpoint, options); } -} - -function onSocketEnd () { - const { [kParser]: parser, [kClient]: client } = this - if (client[kHTTPConnVersion] !== 'h2') { - if (parser.statusCode && !parser.shouldKeepAlive) { - // We treat all incoming data so far as a valid response. - parser.onMessageComplete() - return + const minPartSize = 100 * 1024; + if (!options.mime) { + if (isFile(file)) { + options.mime = mime.getType(path.extname(file.name)); + } else if (isBuffer(file)) { + options.mime = ''; + } else { + options.mime = mime.getType(path.extname(file)); } } + options.headers = options.headers || {}; + this._convertMetaToHeaders(options.meta, options.headers); - util.destroy(this, new SocketError('other side closed', util.getSocketInfo(this))) -} + const fileSize = await this._getFileSize(file); + if (fileSize < minPartSize) { + options.contentLength = fileSize; + const result = await this.put(name, file, options); + if (options && options.progress) { + await options.progress(1); + } -function onSocketClose () { - const { [kClient]: client, [kParser]: parser } = this + const ret = { + res: result.res, + bucket: this.options.bucket, + name, + etag: result.res.headers.etag + }; - if (client[kHTTPConnVersion] === 'h1' && parser) { - if (!this[kError] && parser.statusCode && !parser.shouldKeepAlive) { - // We treat all incoming data so far as a valid response. - parser.onMessageComplete() + if ((options.headers && options.headers['x-oss-callback']) || options.callback) { + ret.data = result.data; } - this[kParser].destroy() - this[kParser] = null + return ret; } - const err = this[kError] || new SocketError('closed', util.getSocketInfo(this)) + if (options.partSize && !(parseInt(options.partSize, 10) === options.partSize)) { + throw new Error('partSize must be int number'); + } - client[kSocket] = null + if (options.partSize && options.partSize < minPartSize) { + throw new Error(`partSize must not be smaller than ${minPartSize}`); + } - if (client.destroyed) { - assert(client[kPending] === 0) + const initResult = await this.initMultipartUpload(name, options); + const { uploadId } = initResult; + const partSize = this._getPartSize(fileSize, options.partSize); - // Fail entire queue. 
- const requests = client[kQueue].splice(client[kRunningIdx]) - for (let i = 0; i < requests.length; i++) { - const request = requests[i] - errorRequest(client, request, err) - } - } else if (client[kRunning] > 0 && err.code !== 'UND_ERR_INFO') { - // Fail head of pipeline. - const request = client[kQueue][client[kRunningIdx]] - client[kQueue][client[kRunningIdx]++] = null + const checkpoint = { + file, + name, + fileSize, + partSize, + uploadId, + doneParts: [] + }; - errorRequest(client, request, err) + if (options && options.progress) { + await options.progress(0, checkpoint, initResult.res); } - client[kPendingIdx] = client[kRunningIdx] - - assert(client[kRunning] === 0) - - client.emit('disconnect', client[kUrl], [client], err) - - resume(client) -} - -async function connect (client) { - assert(!client[kConnecting]) - assert(!client[kSocket]) - - let { host, hostname, protocol, port } = client[kUrl] - - // Resolve ipv6 - if (hostname[0] === '[') { - const idx = hostname.indexOf(']') + return await this._resumeMultipart(checkpoint, options); +}; - assert(idx !== -1) - const ip = hostname.substring(1, idx) +/* + * Resume multipart upload from checkpoint. The checkpoint will be + * updated after each successful part upload. + * @param {Object} checkpoint the checkpoint + * @param {Object} options + */ +proto._resumeMultipart = async function _resumeMultipart(checkpoint, options) { + const that = this; + if (this.isCancel()) { + throw this._makeCancelEvent(); + } + const { file, fileSize, partSize, uploadId, doneParts, name } = checkpoint; + + const partOffs = this._divideParts(fileSize, partSize); + const numParts = partOffs.length; + let uploadPartJob = retry( + (self, partNo) => { + // eslint-disable-next-line no-async-promise-executor + return new Promise(async (resolve, reject) => { + try { + if (!self.isCancel()) { + const pi = partOffs[partNo - 1]; + const stream = await self._createStream(file, pi.start, pi.end); + const data = { + stream, + size: pi.end - pi.start + }; - assert(net.isIP(ip)) - hostname = ip - } + if (isArray(self.multipartUploadStreams)) { + self.multipartUploadStreams.push(data.stream); + } else { + self.multipartUploadStreams = [data.stream]; + } - client[kConnecting] = true + const removeStreamFromMultipartUploadStreams = function () { + if (!stream.destroyed) { + stream.destroy(); + } + const index = self.multipartUploadStreams.indexOf(stream); + if (index !== -1) { + self.multipartUploadStreams.splice(index, 1); + } + }; - if (channels.beforeConnect.hasSubscribers) { - channels.beforeConnect.publish({ - connectParams: { - host, - hostname, - protocol, - port, - servername: client[kServerName], - localAddress: client[kLocalAddress] - }, - connector: client[kConnector] - }) - } + stream.on('close', removeStreamFromMultipartUploadStreams); + stream.on('error', removeStreamFromMultipartUploadStreams); - try { - const socket = await new Promise((resolve, reject) => { - client[kConnector]({ - host, - hostname, - protocol, - port, - servername: client[kServerName], - localAddress: client[kLocalAddress] - }, (err, socket) => { - if (err) { - reject(err) - } else { - resolve(socket) + let result; + try { + result = await self._uploadPart(name, uploadId, partNo, data, options); + } catch (error) { + removeStreamFromMultipartUploadStreams(); + if (error.status === 404) { + throw self._makeAbortEvent(); + } + throw error; + } + if (!self.isCancel()) { + doneParts.push({ + number: partNo, + etag: result.res.headers.etag + }); + checkpoint.doneParts = doneParts; + + if 
(options.progress) { + await options.progress(doneParts.length / (numParts + 1), checkpoint, result.res); + } + } + } + resolve(); + } catch (err) { + err.partNum = partNo; + reject(err); } - }) - }) - - if (client.destroyed) { - util.destroy(socket.on('error', () => {}), new ClientDestroyedError()) - return - } - - client[kConnecting] = false - - assert(socket) - - const isH2 = socket.alpnProtocol === 'h2' - if (isH2) { - if (!h2ExperimentalWarned) { - h2ExperimentalWarned = true - process.emitWarning('H2 support is experimental, expect them to change at any time.', { - code: 'UNDICI-H2' - }) + }); + }, + this.options.retryMax, + { + errorHandler: err => { + const _errHandle = _err => { + const statusErr = [-1, -2].includes(_err.status); + const requestErrorRetryHandle = this.options.requestErrorRetryHandle || (() => true); + return statusErr && requestErrorRetryHandle(_err); + }; + return !!_errHandle(err); } + } + ); - const session = http2.connect(client[kUrl], { - createConnection: () => socket, - peerMaxConcurrentStreams: client[kHTTP2SessionState].maxConcurrentStreams - }) + const all = Array.from(new Array(numParts), (x, i) => i + 1); + const done = doneParts.map(p => p.number); + const todo = all.filter(p => done.indexOf(p) < 0); - client[kHTTPConnVersion] = 'h2' - session[kClient] = client - session[kSocket] = socket - session.on('error', onHttp2SessionError) - session.on('frameError', onHttp2FrameError) - session.on('end', onHttp2SessionEnd) - session.on('goaway', onHTTP2GoAway) - session.on('close', onSocketClose) - session.unref() + const defaultParallel = 5; + const parallel = options.parallel || defaultParallel; - client[kHTTP2Session] = session - socket[kHTTP2Session] = session - } else { - if (!llhttpInstance) { - llhttpInstance = await llhttpPromise - llhttpPromise = null + if (this.checkBrowserAndVersion('Internet Explorer', '10') || parallel === 1) { + for (let i = 0; i < todo.length; i++) { + if (this.isCancel()) { + throw this._makeCancelEvent(); } - - socket[kNoRef] = false - socket[kWriting] = false - socket[kReset] = false - socket[kBlocking] = false - socket[kParser] = new Parser(client, socket, llhttpInstance) + /* eslint no-await-in-loop: [0] */ + await uploadPartJob(this, todo[i]); } + } else { + // upload in parallel + const jobErr = await this._parallel(todo, parallel, value => { + return new Promise((resolve, reject) => { + uploadPartJob(that, value) + .then(() => { + resolve(); + }) + .catch(reject); + }); + }); - socket[kCounter] = 0 - socket[kMaxRequests] = client[kMaxRequests] - socket[kClient] = client - socket[kError] = null - - socket - .on('error', onSocketError) - .on('readable', onSocketReadable) - .on('end', onSocketEnd) - .on('close', onSocketClose) - - client[kSocket] = socket + const abortEvent = jobErr.find(err => err.name === 'abort'); + if (abortEvent) throw abortEvent; - if (channels.connected.hasSubscribers) { - channels.connected.publish({ - connectParams: { - host, - hostname, - protocol, - port, - servername: client[kServerName], - localAddress: client[kLocalAddress] - }, - connector: client[kConnector], - socket - }) - } - client.emit('connect', client[kUrl], [client]) - } catch (err) { - if (client.destroyed) { - return + if (this.isCancel()) { + uploadPartJob = null; + throw this._makeCancelEvent(); } - client[kConnecting] = false - - if (channels.connectError.hasSubscribers) { - channels.connectError.publish({ - connectParams: { - host, - hostname, - protocol, - port, - servername: client[kServerName], - localAddress: 
client[kLocalAddress] - }, - connector: client[kConnector], - error: err - }) + if (jobErr && jobErr.length > 0) { + jobErr[0].message = `Failed to upload some parts with error: ${jobErr[0].toString()} part_num: ${ + jobErr[0].partNum + }`; + throw jobErr[0]; } + } - if (err.code === 'ERR_TLS_CERT_ALTNAME_INVALID') { - assert(client[kRunning] === 0) - while (client[kPending] > 0 && client[kQueue][client[kPendingIdx]].servername === client[kServerName]) { - const request = client[kQueue][client[kPendingIdx]++] - errorRequest(client, request, err) - } - } else { - onError(client, err) - } + return await this.completeMultipartUpload(name, uploadId, doneParts, options); +}; - client.emit('connectionError', client[kUrl], [client], err) +/** + * Get file size + */ +proto._getFileSize = async function _getFileSize(file) { + if (isBuffer(file)) { + return file.length; + } else if (isFile(file)) { + return file.size; + } else if (is.string(file)) { + const stat = await this._statFile(file); + return stat.size; } - resume(client) -} + throw new Error('_getFileSize requires Buffer/File/String.'); +}; -function emitDrain (client) { - client[kNeedDrain] = 0 - client.emit('drain', client[kUrl], [client]) -} +/* + * Readable stream for Web File + */ +const { Readable } = __nccwpck_require__(12781); -function resume (client, sync) { - if (client[kResuming] === 2) { - return +function WebFileReadStream(file, options) { + if (!(this instanceof WebFileReadStream)) { + return new WebFileReadStream(file, options); } - client[kResuming] = 2 - - _resume(client, sync) - client[kResuming] = 0 + Readable.call(this, options); - if (client[kRunningIdx] > 256) { - client[kQueue].splice(0, client[kRunningIdx]) - client[kPendingIdx] -= client[kRunningIdx] - client[kRunningIdx] = 0 - } + this.file = file; + this.reader = new FileReader(); + this.start = 0; + this.finish = false; + this.fileBuffer = null; } +util.inherits(WebFileReadStream, Readable); -function _resume (client, sync) { - while (true) { - if (client.destroyed) { - assert(client[kPending] === 0) - return +WebFileReadStream.prototype.readFileAndPush = function readFileAndPush(size) { + if (this.fileBuffer) { + let pushRet = true; + while (pushRet && this.fileBuffer && this.start < this.fileBuffer.length) { + const { start } = this; + let end = start + size; + end = end > this.fileBuffer.length ? 
this.fileBuffer.length : end; + this.start = end; + pushRet = this.push(this.fileBuffer.slice(start, end)); } + } +}; - if (client[kClosedResolve] && !client[kSize]) { - client[kClosedResolve]() - client[kClosedResolve] = null - return +WebFileReadStream.prototype._read = function _read(size) { + if ( + (this.file && this.start >= this.file.size) || + (this.fileBuffer && this.start >= this.fileBuffer.length) || + this.finish || + (this.start === 0 && !this.file) + ) { + if (!this.finish) { + this.fileBuffer = null; + this.finish = true; } + this.push(null); + return; + } - const socket = client[kSocket] - - if (socket && !socket.destroyed && socket.alpnProtocol !== 'h2') { - if (client[kSize] === 0) { - if (!socket[kNoRef] && socket.unref) { - socket.unref() - socket[kNoRef] = true - } - } else if (socket[kNoRef] && socket.ref) { - socket.ref() - socket[kNoRef] = false - } + const defaultReadSize = 16 * 1024; + size = size || defaultReadSize; - if (client[kSize] === 0) { - if (socket[kParser].timeoutType !== TIMEOUT_IDLE) { - socket[kParser].setTimeout(client[kKeepAliveTimeoutValue], TIMEOUT_IDLE) - } - } else if (client[kRunning] > 0 && socket[kParser].statusCode < 200) { - if (socket[kParser].timeoutType !== TIMEOUT_HEADERS) { - const request = client[kQueue][client[kRunningIdx]] - const headersTimeout = request.headersTimeout != null - ? request.headersTimeout - : client[kHeadersTimeout] - socket[kParser].setTimeout(headersTimeout, TIMEOUT_HEADERS) - } - } + const that = this; + this.reader.onload = function (e) { + that.fileBuffer = Buffer.from(new Uint8Array(e.target.result)); + that.file = null; + that.readFileAndPush(size); + }; + this.reader.onerror = function onload(e) { + const error = e.srcElement && e.srcElement.error; + if (error) { + throw error; } + throw e; + }; - if (client[kBusy]) { - client[kNeedDrain] = 2 - } else if (client[kNeedDrain] === 2) { - if (sync) { - client[kNeedDrain] = 1 - process.nextTick(emitDrain, client) - } else { - emitDrain(client) + if (this.start === 0) { + this.reader.readAsArrayBuffer(this.file); + } else { + this.readFileAndPush(size); + } +}; + +proto._createStream = function _createStream(file, start, end) { + if (is.readableStream(file)) { + return file; + } else if (isFile(file)) { + return new WebFileReadStream(file.slice(start, end)); + } else if (isBuffer(file)) { + const iterable = file.subarray(start, end); + // we can't use Readable.from() since it is only support in Node v10 + return new Readable({ + read() { + this.push(iterable); + this.push(null); } - continue - } + }); + } else if (is.string(file)) { + return fs.createReadStream(file, { + start, + end: end - 1 + }); + } + throw new Error('_createStream requires Buffer/File/String.'); +}; - if (client[kPending] === 0) { - return - } +proto._getPartSize = function _getPartSize(fileSize, partSize) { + const maxNumParts = 10 * 1000; + const defaultPartSize = 1 * 1024 * 1024; - if (client[kRunning] >= (client[kPipelining] || 1)) { - return - } + if (!partSize) partSize = defaultPartSize; + const safeSize = Math.ceil(fileSize / maxNumParts); - const request = client[kQueue][client[kPendingIdx]] + if (partSize < safeSize) { + partSize = safeSize; + console.warn( + `partSize has been set to ${partSize}, because the partSize you provided causes partNumber to be greater than 10,000` + ); + } + return partSize; +}; - if (client[kUrl].protocol === 'https:' && client[kServerName] !== request.servername) { - if (client[kRunning] > 0) { - return - } +proto._divideParts = function 
_divideParts(fileSize, partSize) { + const numParts = Math.ceil(fileSize / partSize); - client[kServerName] = request.servername + const partOffs = []; + for (let i = 0; i < numParts; i++) { + const start = partSize * i; + const end = Math.min(start + partSize, fileSize); - if (socket && socket.servername !== request.servername) { - util.destroy(socket, new InformationalError('servername changed')) - return - } - } + partOffs.push({ + start, + end + }); + } - if (client[kConnecting]) { - return - } + return partOffs; +}; - if (!socket && !client[kHTTP2Session]) { - connect(client) - return - } - if (socket.destroyed || socket[kWriting] || socket[kReset] || socket[kBlocking]) { - return - } +/***/ }), - if (client[kRunning] > 0 && !request.idempotent) { - // Non-idempotent request cannot be retried. - // Ensure that no other requests are inflight and - // could cause failure. - return - } +/***/ 32557: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if (client[kRunning] > 0 && (request.upgrade || request.method === 'CONNECT')) { - // Don't dispatch an upgrade until all preceding requests have completed. - // A misbehaving server might upgrade the connection before all pipelined - // request has completed. - return - } +const debug = __nccwpck_require__(38237)('ali-oss:object'); +const fs = __nccwpck_require__(57147); +const is = __nccwpck_require__(65565); +const copy = __nccwpck_require__(30952); +const path = __nccwpck_require__(71017); +const mime = __nccwpck_require__(29994); +const callback = __nccwpck_require__(26725); +const { Transform } = __nccwpck_require__(12781); +const pump = __nccwpck_require__(18341); +const { isBuffer } = __nccwpck_require__(55850); +const { retry } = __nccwpck_require__(41514); +const { obj2xml } = __nccwpck_require__(96846); + +const proto = exports; - if (client[kRunning] > 0 && util.bodyLength(request.body) !== 0 && - (util.isStream(request.body) || util.isAsyncIterable(request.body))) { - // Request with stream or iterator body can error while other requests - // are inflight and indirectly error those as well. - // Ensure this doesn't happen by waiting for inflight - // to complete before dispatching. +/** + * Object operations + */ - // Request with stream or iterator body cannot be retried. - // Ensure that no other requests are inflight and - // could cause failure. 
- return - } +/** + * append an object from String(file path)/Buffer/ReadableStream + * @param {String} name the object key + * @param {Mixed} file String(file path)/Buffer/ReadableStream + * @param {Object} options + * @return {Object} + */ +proto.append = async function append(name, file, options) { + options = options || {}; + if (options.position === undefined) options.position = '0'; + options.subres = { + append: '', + position: options.position + }; + options.method = 'POST'; + + const result = await this.put(name, file, options); + result.nextAppendPosition = result.res.headers['x-oss-next-append-position']; + return result; +}; - if (!request.aborted && write(client, request)) { - client[kPendingIdx]++ - } else { - client[kQueue].splice(client[kPendingIdx], 1) - } +/** + * put an object from String(file path)/Buffer/ReadableStream + * @param {String} name the object key + * @param {Mixed} file String(file path)/Buffer/ReadableStream + * @param {Object} options + * {Object} options.callback The callback parameter is composed of a JSON string encoded in Base64 + * {String} options.callback.url the OSS sends a callback request to this URL + * {String} options.callback.host The host header value for initiating callback requests + * {String} options.callback.body The value of the request body when a callback is initiated + * {String} options.callback.contentType The Content-Type of the callback requests initiatiated + * {Object} options.callback.customValue Custom parameters are a map of key-values, e.g: + * customValue = { + * key1: 'value1', + * key2: 'value2' + * } + * @return {Object} + */ +proto.put = async function put(name, file, options) { + let content; + options = options || {}; + name = this._objectName(name); + + if (isBuffer(file)) { + content = file; + } else if (is.string(file)) { + const stats = fs.statSync(file); + if (!stats.isFile()) { + throw new Error(`${file} is not file`); + } + options.mime = options.mime || mime.getType(path.extname(file)); + options.contentLength = await this._getFileSize(file); + const getStream = () => fs.createReadStream(file); + const putStreamStb = (objectName, makeStream, configOption) => { + return this.putStream(objectName, makeStream(), configOption); + }; + return await retry(putStreamStb, this.options.retryMax, { + errorHandler: err => { + const _errHandle = _err => { + const statusErr = [-1, -2].includes(_err.status); + const requestErrorRetryHandle = this.options.requestErrorRetryHandle || (() => true); + return statusErr && requestErrorRetryHandle(_err); + }; + if (_errHandle(err)) return true; + return false; + } + })(name, getStream, options); + } else if (is.readableStream(file)) { + return await this.putStream(name, file, options); + } else { + throw new TypeError('Must provide String/Buffer/ReadableStream for put.'); } -} -// https://www.rfc-editor.org/rfc/rfc7230#section-3.3.2 -function shouldSendContentLength (method) { - return method !== 'GET' && method !== 'HEAD' && method !== 'OPTIONS' && method !== 'TRACE' && method !== 'CONNECT' -} + options.headers = options.headers || {}; + this._convertMetaToHeaders(options.meta, options.headers); -function write (client, request) { - if (client[kHTTPConnVersion] === 'h2') { - writeH2(client, client[kHTTP2Session], request) - return - } + const method = options.method || 'PUT'; + const params = this._objectRequestParams(method, name, options); - const { body, method, path, host, upgrade, headers, blocking, reset } = request + callback.encodeCallback(params, options); - // 
https://tools.ietf.org/html/rfc7231#section-4.3.1 - // https://tools.ietf.org/html/rfc7231#section-4.3.2 - // https://tools.ietf.org/html/rfc7231#section-4.3.5 + params.mime = options.mime; + params.content = content; + params.successStatuses = [200]; - // Sending a payload body on a request that does not - // expect it can cause undefined behavior on some - // servers and corrupt connection state. Do not - // re-use the connection for further requests. + const result = await this.request(params); - const expectsPayload = ( - method === 'PUT' || - method === 'POST' || - method === 'PATCH' - ) + const ret = { + name, + url: this._objectUrl(name), + res: result.res + }; - if (body && typeof body.read === 'function') { - // Try to read EOF in order to get length. - body.read(0) + if (params.headers && params.headers['x-oss-callback']) { + ret.data = JSON.parse(result.data.toString()); } - const bodyLength = util.bodyLength(body) + return ret; +}; - let contentLength = bodyLength +/** + * put an object from ReadableStream. If `options.contentLength` is + * not provided, chunked encoding is used. + * @param {String} name the object key + * @param {Readable} stream the ReadableStream + * @param {Object} options + * @return {Object} + */ +proto.putStream = async function putStream(name, stream, options) { + options = options || {}; + options.headers = options.headers || {}; + name = this._objectName(name); + if (options.contentLength) { + options.headers['Content-Length'] = options.contentLength; + } else { + options.headers['Transfer-Encoding'] = 'chunked'; + } + this._convertMetaToHeaders(options.meta, options.headers); + + const method = options.method || 'PUT'; + const params = this._objectRequestParams(method, name, options); + callback.encodeCallback(params, options); + params.mime = options.mime; + const transform = new Transform(); + // must remove http stream header for signature + transform._transform = function _transform(chunk, encoding, done) { + this.push(chunk); + done(); + }; + params.stream = pump(stream, transform); + params.successStatuses = [200]; + + const result = await this.request(params); + + const ret = { + name, + url: this._objectUrl(name), + res: result.res + }; - if (contentLength === null) { - contentLength = request.contentLength + if (params.headers && params.headers['x-oss-callback']) { + ret.data = JSON.parse(result.data.toString()); } - if (contentLength === 0 && !expectsPayload) { - // https://tools.ietf.org/html/rfc7230#section-3.3.2 - // A user agent SHOULD NOT send a Content-Length header field when - // the request message does not contain a payload body and the method - // semantics do not anticipate such a body. + return ret; +}; - contentLength = null +proto.getStream = async function getStream(name, options) { + options = options || {}; + + if (options.process) { + options.subres = options.subres || {}; + options.subres['x-oss-process'] = options.process; } - // https://github.com/nodejs/undici/issues/2046 - // A user agent may send a Content-Length header with 0 value, this should be allowed. 
- if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength !== null && request.contentLength !== contentLength) { - if (client[kStrictContentLength]) { - errorRequest(client, request, new RequestContentLengthMismatchError()) - return false + const params = this._objectRequestParams('GET', name, options); + params.customResponse = true; + params.successStatuses = [200, 206, 304]; + + const result = await this.request(params); + + return { + stream: result.res, + res: { + status: result.status, + headers: result.headers } + }; +}; - process.emitWarning(new RequestContentLengthMismatchError()) +proto.putMeta = async function putMeta(name, meta, options) { + return await this.copy(name, name, { + meta: meta || {}, + timeout: options && options.timeout, + ctx: options && options.ctx + }); +}; + +proto.list = async function list(query, options) { + // prefix, marker, max-keys, delimiter + + const params = this._objectRequestParams('GET', '', options); + params.query = query; + params.xmlResponse = true; + params.successStatuses = [200]; + + const result = await this.request(params); + let objects = result.data.Contents || []; + const that = this; + if (objects) { + if (!Array.isArray(objects)) { + objects = [objects]; + } + objects = objects.map(obj => ({ + name: obj.Key, + url: that._objectUrl(obj.Key), + lastModified: obj.LastModified, + etag: obj.ETag, + type: obj.Type, + size: Number(obj.Size), + storageClass: obj.StorageClass, + owner: { + id: obj.Owner.ID, + displayName: obj.Owner.DisplayName + } + })); + } + let prefixes = result.data.CommonPrefixes || null; + if (prefixes) { + if (!Array.isArray(prefixes)) { + prefixes = [prefixes]; + } + prefixes = prefixes.map(item => item.Prefix); } + return { + res: result.res, + objects, + prefixes, + nextMarker: result.data.NextMarker || null, + isTruncated: result.data.IsTruncated === 'true' + }; +}; - const socket = client[kSocket] +proto.listV2 = async function listV2(query = {}, options = {}) { + const continuation_token = query['continuation-token'] || query.continuationToken; + delete query['continuation-token']; + delete query.continuationToken; + if (continuation_token) { + options.subres = Object.assign( + { + 'continuation-token': continuation_token + }, + options.subres + ); + } + const params = this._objectRequestParams('GET', '', options); + params.query = Object.assign({ 'list-type': 2 }, query); + delete params.query['continuation-token']; + delete query.continuationToken; + params.xmlResponse = true; + params.successStatuses = [200]; + + const result = await this.request(params); + let objects = result.data.Contents || []; + const that = this; + if (objects) { + if (!Array.isArray(objects)) { + objects = [objects]; + } + + objects = objects.map(obj => { + let owner = null; + if (obj.Owner) { + owner = { + id: obj.Owner.ID, + displayName: obj.Owner.DisplayName + }; + } + return { + name: obj.Key, + url: that._objectUrl(obj.Key), + lastModified: obj.LastModified, + etag: obj.ETag, + type: obj.Type, + size: Number(obj.Size), + storageClass: obj.StorageClass, + owner + }; + }); + } + let prefixes = result.data.CommonPrefixes || null; + if (prefixes) { + if (!Array.isArray(prefixes)) { + prefixes = [prefixes]; + } + prefixes = prefixes.map(item => item.Prefix); + } + return { + res: result.res, + objects, + prefixes, + isTruncated: result.data.IsTruncated === 'true', + keyCount: +result.data.KeyCount, + continuationToken: result.data.ContinuationToken || null, + nextContinuationToken: 
result.data.NextContinuationToken || null + }; +}; - try { - request.onConnect((err) => { - if (request.aborted || request.completed) { - return +/** + * Restore Object + * @param {String} name the object key + * @param {Object} options {type : Archive or ColdArchive} + * @returns {{res}} + */ +proto.restore = async function restore(name, options = { type: 'Archive' }) { + options = options || {}; + options.subres = Object.assign({ restore: '' }, options.subres); + if (options.versionId) { + options.subres.versionId = options.versionId; + } + const params = this._objectRequestParams('POST', name, options); + if (options.type === 'ColdArchive') { + const paramsXMLObj = { + RestoreRequest: { + Days: options.Days ? options.Days : 2, + JobParameters: { + Tier: options.JobParameters ? options.JobParameters : 'Standard' + } } + }; + params.content = obj2xml(paramsXMLObj, { + headers: true + }); + params.mime = 'xml'; + } + params.successStatuses = [202]; - errorRequest(client, request, err || new RequestAbortedError()) + const result = await this.request(params); - util.destroy(socket, new InformationalError('aborted')) - }) - } catch (err) { - errorRequest(client, request, err) - } + return { + res: result.res + }; +}; - if (request.aborted) { - return false - } +proto._objectUrl = function _objectUrl(name) { + return this._getReqUrl({ bucket: this.options.bucket, object: name }); +}; - if (method === 'HEAD') { - // https://github.com/mcollina/undici/issues/258 - // Close after a HEAD request to interop with misbehaving servers - // that may send a body in the response. +/** + * generator request params + * @return {Object} params + * + * @api private + */ - socket[kReset] = true +proto._objectRequestParams = function (method, name, options) { + if (!this.options.bucket && !this.options.cname) { + throw new Error('Please create a bucket first'); } - if (upgrade || method === 'CONNECT') { - // On CONNECT or upgrade, block pipeline from dispatching further - // requests on this connection. 
+ options = options || {}; + name = this._objectName(name); + const params = { + object: name, + bucket: this.options.bucket, + method, + subres: options && options.subres, + additionalHeaders: options && options.additionalHeaders, + timeout: options && options.timeout, + ctx: options && options.ctx + }; - socket[kReset] = true + if (options.headers) { + params.headers = {}; + copy(options.headers).to(params.headers); } + return params; +}; - if (reset != null) { - socket[kReset] = reset - } +proto._objectName = function (name) { + return name.replace(/^\/+/, ''); +}; - if (client[kMaxRequests] && socket[kCounter]++ >= client[kMaxRequests]) { - socket[kReset] = true - } +proto._statFile = function (filepath) { + return new Promise((resolve, reject) => { + fs.stat(filepath, (err, stats) => { + if (err) { + reject(err); + } else { + resolve(stats); + } + }); + }); +}; - if (blocking) { - socket[kBlocking] = true +proto._convertMetaToHeaders = function (meta, headers) { + if (!meta) { + return; } - let header = `${method} ${path} HTTP/1.1\r\n` + Object.keys(meta).forEach(k => { + headers[`x-oss-meta-${k}`] = meta[k]; + }); +}; - if (typeof host === 'string') { - header += `host: ${host}\r\n` - } else { - header += client[kHostHeader] - } +proto._deleteFileSafe = function (filepath) { + return new Promise(resolve => { + fs.exists(filepath, exists => { + if (!exists) { + resolve(); + } else { + fs.unlink(filepath, err => { + if (err) { + debug('unlink %j error: %s', filepath, err); + } + resolve(); + }); + } + }); + }); +}; - if (upgrade) { - header += `connection: upgrade\r\nupgrade: ${upgrade}\r\n` - } else if (client[kPipelining] && !socket[kReset]) { - header += 'connection: keep-alive\r\n' - } else { - header += 'connection: close\r\n' - } - if (headers) { - header += headers - } +/***/ }), - if (channels.sendHeaders.hasSubscribers) { - channels.sendHeaders.publish({ request, headers: header, socket }) - } +/***/ 59164: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - /* istanbul ignore else: assertion */ - if (!body || bodyLength === 0) { - if (contentLength === 0) { - socket.write(`${header}content-length: 0\r\n\r\n`, 'latin1') - } else { - assert(contentLength === null, 'no body must not have content length') - socket.write(`${header}\r\n`, 'latin1') - } - request.onRequestSent() - } else if (util.isBuffer(body)) { - assert(contentLength === body.byteLength, 'buffer body must have content length') +/** + * Copyright(c) ali-sdk and other contributors. + * MIT Licensed + * + * Authors: + * rockuw (http://rockuw.com) + */ - socket.cork() - socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1') - socket.write(body) - socket.uncork() - request.onBodySent(body) - request.onRequestSent() - if (!expectsPayload) { - socket[kReset] = true - } - } else if (util.isBlobLike(body)) { - if (typeof body.stream === 'function') { - writeIterable({ body: body.stream(), client, request, socket, contentLength, header, expectsPayload }) - } else { - writeBlob({ body, client, request, socket, contentLength, header, expectsPayload }) - } - } else if (util.isStream(body)) { - writeStream({ body, client, request, socket, contentLength, header, expectsPayload }) - } else if (util.isIterable(body)) { - writeIterable({ body, client, request, socket, contentLength, header, expectsPayload }) - } else { - assert(false) - } +/** + * Module dependencies. 
+ */ - return true -} +const jstoxml = __nccwpck_require__(53094); +const utility = __nccwpck_require__(33877); +const copy = __nccwpck_require__(30952); +const urlutil = __nccwpck_require__(57310); -function writeH2 (client, session, request) { - const { body, method, path, host, upgrade, expectContinue, signal, headers: reqHeaders } = request +const proto = exports; - let headers - if (typeof reqHeaders === 'string') headers = Request[kHTTP2CopyHeaders](reqHeaders.trim()) - else headers = reqHeaders +/** + * RTMP operations + */ - if (upgrade) { - errorRequest(client, request, new Error('Upgrade not supported for H2')) - return false - } +/** + * Create a live channel + * @param {String} id the channel id + * @param {Object} conf the channel configuration + * @param {Object} options + * @return {Object} + */ +proto.putChannel = async function putChannel(id, conf, options) { + options = options || {}; + options.subres = 'live'; - try { - // TODO(HTTP/2): Should we call onConnect immediately or on stream ready event? - request.onConnect((err) => { - if (request.aborted || request.completed) { - return - } + const params = this._objectRequestParams('PUT', id, options); + params.xmlResponse = true; + params.content = jstoxml.toXML({ + LiveChannelConfiguration: conf + }); + params.successStatuses = [200]; - errorRequest(client, request, err || new RequestAbortedError()) - }) - } catch (err) { - errorRequest(client, request, err) - } + const result = await this.request(params); - if (request.aborted) { - return false + let publishUrls = result.data.PublishUrls.Url; + if (!Array.isArray(publishUrls)) { + publishUrls = [publishUrls]; + } + let playUrls = result.data.PlayUrls.Url; + if (!Array.isArray(playUrls)) { + playUrls = [playUrls]; } - /** @type {import('node:http2').ClientHttp2Stream} */ - let stream - const h2State = client[kHTTP2SessionState] + return { + publishUrls, + playUrls, + res: result.res + }; +}; - headers[HTTP2_HEADER_AUTHORITY] = host || client[kHost] - headers[HTTP2_HEADER_METHOD] = method +/** + * Get the channel info + * @param {String} id the channel id + * @param {Object} options + * @return {Object} + */ +proto.getChannel = async function getChannel(id, options) { + options = options || {}; + options.subres = 'live'; - if (method === 'CONNECT') { - session.ref() - // we are already connected, streams are pending, first request - // will create a new stream. 
We trigger a request to create the stream and wait until - // `ready` event is triggered - // We disabled endStream to allow the user to write to the stream - stream = session.request(headers, { endStream: false, signal }) + const params = this._objectRequestParams('GET', id, options); + params.xmlResponse = true; + params.successStatuses = [200]; - if (stream.id && !stream.pending) { - request.onUpgrade(null, null, stream) - ++h2State.openStreams - } else { - stream.once('ready', () => { - request.onUpgrade(null, null, stream) - ++h2State.openStreams - }) - } + const result = await this.request(params); - stream.once('close', () => { - h2State.openStreams -= 1 - // TODO(HTTP/2): unref only if current streams count is 0 - if (h2State.openStreams === 0) session.unref() - }) + return { + data: result.data, + res: result.res + }; +}; - return true - } +/** + * Delete the channel + * @param {String} id the channel id + * @param {Object} options + * @return {Object} + */ +proto.deleteChannel = async function deleteChannel(id, options) { + options = options || {}; + options.subres = 'live'; - // https://tools.ietf.org/html/rfc7540#section-8.3 - // :path and :scheme headers must be omited when sending CONNECT + const params = this._objectRequestParams('DELETE', id, options); + params.successStatuses = [204]; - headers[HTTP2_HEADER_PATH] = path - headers[HTTP2_HEADER_SCHEME] = 'https' + const result = await this.request(params); - // https://tools.ietf.org/html/rfc7231#section-4.3.1 - // https://tools.ietf.org/html/rfc7231#section-4.3.2 - // https://tools.ietf.org/html/rfc7231#section-4.3.5 + return { + res: result.res + }; +}; - // Sending a payload body on a request that does not - // expect it can cause undefined behavior on some - // servers and corrupt connection state. Do not - // re-use the connection for further requests. +/** + * Set the channel status + * @param {String} id the channel id + * @param {String} status the channel status + * @param {Object} options + * @return {Object} + */ +proto.putChannelStatus = async function putChannelStatus(id, status, options) { + options = options || {}; + options.subres = { + live: null, + status + }; - const expectsPayload = ( - method === 'PUT' || - method === 'POST' || - method === 'PATCH' - ) + const params = this._objectRequestParams('PUT', id, options); + params.successStatuses = [200]; - if (body && typeof body.read === 'function') { - // Try to read EOF in order to get length. - body.read(0) - } + const result = await this.request(params); - let contentLength = util.bodyLength(body) + return { + res: result.res + }; +}; - if (contentLength == null) { - contentLength = request.contentLength - } +/** + * Get the channel status + * @param {String} id the channel id + * @param {Object} options + * @return {Object} + */ +proto.getChannelStatus = async function getChannelStatus(id, options) { + options = options || {}; + options.subres = { + live: null, + comp: 'stat' + }; - if (contentLength === 0 || !expectsPayload) { - // https://tools.ietf.org/html/rfc7230#section-3.3.2 - // A user agent SHOULD NOT send a Content-Length header field when - // the request message does not contain a payload body and the method - // semantics do not anticipate such a body. 
+ const params = this._objectRequestParams('GET', id, options); + params.xmlResponse = true; + params.successStatuses = [200]; - contentLength = null - } + const result = await this.request(params); - // https://github.com/nodejs/undici/issues/2046 - // A user agent may send a Content-Length header with 0 value, this should be allowed. - if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength != null && request.contentLength !== contentLength) { - if (client[kStrictContentLength]) { - errorRequest(client, request, new RequestContentLengthMismatchError()) - return false - } + return { + data: result.data, + res: result.res + }; +}; - process.emitWarning(new RequestContentLengthMismatchError()) - } +/** + * List the channels + * @param {Object} query the query parameters + * filter options: + * - prefix {String}: the channel id prefix (returns channels with this prefix) + * - marker {String}: the channle id marker (returns channels after this id) + * - max-keys {Number}: max number of channels to return + * @param {Object} options + * @return {Object} + */ +proto.listChannels = async function listChannels(query, options) { + // prefix, marker, max-keys - if (contentLength != null) { - assert(body, 'no body must not have content length') - headers[HTTP2_HEADER_CONTENT_LENGTH] = `${contentLength}` - } + options = options || {}; + options.subres = 'live'; - session.ref() + const params = this._objectRequestParams('GET', '', options); + params.query = query; + params.xmlResponse = true; + params.successStatuses = [200]; - const shouldEndStream = method === 'GET' || method === 'HEAD' - if (expectContinue) { - headers[HTTP2_HEADER_EXPECT] = '100-continue' - stream = session.request(headers, { endStream: shouldEndStream, signal }) + const result = await this.request(params); - stream.once('continue', writeBodyH2) - } else { - stream = session.request(headers, { - endStream: shouldEndStream, - signal - }) - writeBodyH2() + let channels = result.data.LiveChannel || []; + if (!Array.isArray(channels)) { + channels = [channels]; } - // Increment counter as we have new several streams open - ++h2State.openStreams + channels = channels.map(x => { + x.PublishUrls = x.PublishUrls.Url; + if (!Array.isArray(x.PublishUrls)) { + x.PublishUrls = [x.PublishUrls]; + } + x.PlayUrls = x.PlayUrls.Url; + if (!Array.isArray(x.PlayUrls)) { + x.PlayUrls = [x.PlayUrls]; + } - stream.once('response', headers => { - const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers + return x; + }); - if (request.onHeaders(Number(statusCode), realHeaders, stream.resume.bind(stream), '') === false) { - stream.pause() - } - }) + return { + channels, + nextMarker: result.data.NextMarker || null, + isTruncated: result.data.IsTruncated === 'true', + res: result.res + }; +}; - stream.once('end', () => { - request.onComplete([]) - }) +/** + * Get the channel history + * @param {String} id the channel id + * @param {Object} options + * @return {Object} + */ +proto.getChannelHistory = async function getChannelHistory(id, options) { + options = options || {}; + options.subres = { + live: null, + comp: 'history' + }; - stream.on('data', (chunk) => { - if (request.onData(chunk) === false) { - stream.pause() - } - }) + const params = this._objectRequestParams('GET', id, options); + params.xmlResponse = true; + params.successStatuses = [200]; - stream.once('close', () => { - h2State.openStreams -= 1 - // TODO(HTTP/2): unref only if current streams count is 0 - if (h2State.openStreams === 0) { - 
session.unref() - } - }) + const result = await this.request(params); - stream.once('error', function (err) { - if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) { - h2State.streams -= 1 - util.destroy(stream, err) - } - }) + let records = result.data.LiveRecord || []; + if (!Array.isArray(records)) { + records = [records]; + } + return { + records, + res: result.res + }; +}; - stream.once('frameError', (type, code) => { - const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`) - errorRequest(client, request, err) +/** + * Create vod playlist + * @param {String} id the channel id + * @param {String} name the playlist name + * @param {Object} time the begin and end time + * time: + * - startTime {Number}: the begin time in epoch seconds + * - endTime {Number}: the end time in epoch seconds + * @param {Object} options + * @return {Object} + */ +proto.createVod = async function createVod(id, name, time, options) { + options = options || {}; + options.subres = { + vod: null + }; + copy(time).to(options.subres); - if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) { - h2State.streams -= 1 - util.destroy(stream, err) - } - }) + const params = this._objectRequestParams('POST', `${id}/${name}`, options); + params.query = time; + params.successStatuses = [200]; - // stream.on('aborted', () => { - // // TODO(HTTP/2): Support aborted - // }) + const result = await this.request(params); - // stream.on('timeout', () => { - // // TODO(HTTP/2): Support timeout - // }) + return { + res: result.res + }; +}; - // stream.on('push', headers => { - // // TODO(HTTP/2): Suppor push - // }) +/** + * Get RTMP Url + * @param {String} channelId the channel id + * @param {Object} options + * options: + * - expires {Number}: expire time in seconds + * - params {Object}: the parameters such as 'playlistName' + * @return {String} the RTMP url + */ +proto.getRtmpUrl = function (channelId, options) { + options = options || {}; + const expires = utility.timestamp() + (options.expires || 1800); + const res = { + bucket: this.options.bucket, + object: this._objectName(`live/${channelId}`) + }; + const resource = `/${res.bucket}/${channelId}`; + + options.params = options.params || {}; + const query = Object.keys(options.params) + .sort() + .map(x => `${x}:${options.params[x]}\n`) + .join(''); + + const stringToSign = `${expires}\n${query}${resource}`; + const signature = this.signature(stringToSign); + + const url = urlutil.parse(this._getReqUrl(res)); + url.protocol = 'rtmp:'; + url.query = { + OSSAccessKeyId: this.options.accessKeyId, + Expires: expires, + Signature: signature + }; + copy(options.params).to(url.query); + + return url.format(); +}; - // stream.on('trailers', headers => { - // // TODO(HTTP/2): Support trailers - // }) - return true +/***/ }), - function writeBodyH2 () { - /* istanbul ignore else: assertion */ - if (!body) { - request.onRequestSent() - } else if (util.isBuffer(body)) { - assert(contentLength === body.byteLength, 'buffer body must have content length') - stream.cork() - stream.write(body) - stream.uncork() - stream.end() - request.onBodySent(body) - request.onRequestSent() - } else if (util.isBlobLike(body)) { - if (typeof body.stream === 'function') { - writeIterable({ - client, - request, - contentLength, - h2stream: stream, - expectsPayload, - body: body.stream(), - socket: client[kSocket], - header: '' - }) - } else { - writeBlob({ - body, - client, - 
request, - contentLength, - expectsPayload, - h2stream: stream, - header: '', - socket: client[kSocket] - }) - } - } else if (util.isStream(body)) { - writeStream({ - body, - client, - request, - contentLength, - expectsPayload, - socket: client[kSocket], - h2stream: stream, - header: '' - }) - } else if (util.isIterable(body)) { - writeIterable({ - body, - client, - request, - contentLength, - expectsPayload, - header: '', - h2stream: stream, - socket: client[kSocket] - }) - } else { - assert(false) - } - } -} +/***/ 34418: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -function writeStream ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) { - assert(contentLength !== 0 || client[kRunning] === 0, 'stream body cannot be pipelined') +const debug = __nccwpck_require__(38237)('ali-oss:sts'); +const crypto = __nccwpck_require__(6113); +const querystring = __nccwpck_require__(63477); +const copy = __nccwpck_require__(30952); +const AgentKeepalive = __nccwpck_require__(34623); +const is = __nccwpck_require__(65565); +const ms = __nccwpck_require__(10845); +const urllib = __nccwpck_require__(64783); - if (client[kHTTPConnVersion] === 'h2') { - // For HTTP/2, is enough to pipe the stream - const pipe = pipeline( - body, - h2stream, - (err) => { - if (err) { - util.destroy(body, err) - util.destroy(h2stream, err) - } else { - request.onRequestSent() - } - } - ) +const globalHttpAgent = new AgentKeepalive(); - pipe.on('data', onPipeData) - pipe.once('end', () => { - pipe.removeListener('data', onPipeData) - util.destroy(pipe) - }) +function STS(options) { + if (!(this instanceof STS)) { + return new STS(options); + } - function onPipeData (chunk) { - request.onBodySent(chunk) - } + if (!options || !options.accessKeyId || !options.accessKeySecret) { + throw new Error('require accessKeyId, accessKeySecret'); + } - return + this.options = { + endpoint: options.endpoint || 'https://sts.aliyuncs.com', + format: 'JSON', + apiVersion: '2015-04-01', + sigMethod: 'HMAC-SHA1', + sigVersion: '1.0', + timeout: '60s' + }; + copy(options).to(this.options); + + // support custom agent and urllib client + if (this.options.urllib) { + this.urllib = this.options.urllib; + } else { + this.urllib = urllib; + this.agent = this.options.agent || globalHttpAgent; } +} - let finished = false +module.exports = STS; - const writer = new AsyncWriter({ socket, request, contentLength, client, expectsPayload, header }) +const proto = STS.prototype; - const onData = function (chunk) { - if (finished) { - return - } +/** + * STS opertaions + */ - try { - if (!writer.write(chunk) && this.pause) { - this.pause() +proto.assumeRole = async function assumeRole(role, policy, expiration, session, options) { + const opts = this.options; + const params = { + Action: 'AssumeRole', + RoleArn: role, + RoleSessionName: session || 'app', + DurationSeconds: expiration || 3600, + + Format: opts.format, + Version: opts.apiVersion, + AccessKeyId: opts.accessKeyId, + SignatureMethod: opts.sigMethod, + SignatureVersion: opts.sigVersion, + SignatureNonce: Math.random(), + Timestamp: new Date().toISOString() + }; + + if (policy) { + let policyStr; + if (is.string(policy)) { + try { + policyStr = JSON.stringify(JSON.parse(policy)); + } catch (err) { + throw new Error(`Policy string is not a valid JSON: ${err.message}`); } - } catch (err) { - util.destroy(this, err) + } else { + policyStr = JSON.stringify(policy); } + params.Policy = policyStr; } - const onDrain = function () { - if (finished) { - 
return - } - if (body.resume) { - body.resume() - } + const signature = this._getSignature('POST', params, opts.accessKeySecret); + params.Signature = signature; + + const reqUrl = opts.endpoint; + const reqParams = { + agent: this.agent, + timeout: ms((options && options.timeout) || opts.timeout), + method: 'POST', + content: querystring.stringify(params), + headers: { + 'Content-Type': 'application/x-www-form-urlencoded' + }, + ctx: options && options.ctx + }; + + const result = await this.urllib.request(reqUrl, reqParams); + debug('response %s %s, got %s, headers: %j', reqParams.method, reqUrl, result.status, result.headers); + + if (Math.floor(result.status / 100) !== 2) { + const err = await this._requestError(result); + err.params = reqParams; + throw err; } - const onAbort = function () { - if (finished) { - return - } - const err = new RequestAbortedError() - queueMicrotask(() => onFinished(err)) + result.data = JSON.parse(result.data); + + return { + res: result.res, + credentials: result.data.Credentials + }; +}; + +proto._requestError = async function _requestError(result) { + const err = new Error(); + err.status = result.status; + + try { + const resp = (await JSON.parse(result.data)) || {}; + err.code = resp.Code; + err.message = `${resp.Code}: ${resp.Message}`; + err.requestId = resp.RequestId; + } catch (e) { + err.message = `UnknownError: ${String(result.data)}`; } - const onFinished = function (err) { - if (finished) { - return - } - finished = true + return err; +}; - assert(socket.destroyed || (socket[kWriting] && client[kRunning] <= 1)) +proto._getSignature = function _getSignature(method, params, key) { + const that = this; + const canoQuery = Object.keys(params) + .sort() + .map(k => `${that._escape(k)}=${that._escape(params[k])}`) + .join('&'); - socket - .off('drain', onDrain) - .off('error', onFinished) + const stringToSign = `${method.toUpperCase()}&${this._escape('/')}&${this._escape(canoQuery)}`; - body - .removeListener('data', onData) - .removeListener('end', onFinished) - .removeListener('error', onFinished) - .removeListener('close', onAbort) + debug('string to sign: %s', stringToSign); - if (!err) { - try { - writer.end() - } catch (er) { - err = er - } - } + let signature = crypto.createHmac('sha1', `${key}&`); + signature = signature.update(stringToSign).digest('base64'); - writer.destroy(err) + debug('signature: %s', signature); - if (err && (err.code !== 'UND_ERR_INFO' || err.message !== 'reset')) { - util.destroy(body, err) - } else { - util.destroy(body) - } - } + return signature; +}; - body - .on('data', onData) - .on('end', onFinished) - .on('error', onFinished) - .on('close', onAbort) +/** + * Since `encodeURIComponent` doesn't encode '*', which causes + * 'SignatureDoesNotMatch'. We need do it ourselves. 
+ */ +proto._escape = function _escape(str) { + return encodeURIComponent(str) + .replace(/!/g, '%21') + .replace(/'/g, '%27') + .replace(/\(/g, '%28') + .replace(/\)/g, '%29') + .replace(/\*/g, '%2A'); +}; - if (body.resume) { - body.resume() - } - socket - .on('drain', onDrain) - .on('error', onFinished) -} +/***/ }), -async function writeBlob ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) { - assert(contentLength === body.size, 'blob body must have content length') +/***/ 55768: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - const isH2 = client[kHTTPConnVersion] === 'h2' - try { - if (contentLength != null && contentLength !== body.size) { - throw new RequestContentLengthMismatchError() - } +module.exports = __nccwpck_require__(56196)().Promise - const buffer = Buffer.from(await body.arrayBuffer()) - if (isH2) { - h2stream.cork() - h2stream.write(buffer) - h2stream.uncork() - } else { - socket.cork() - socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1') - socket.write(buffer) - socket.uncork() - } +/***/ }), - request.onBodySent(buffer) - request.onRequestSent() +/***/ 34549: +/***/ ((module) => { - if (!expectsPayload) { - socket[kReset] = true +"use strict"; + + // global key for user preferred registration +var REGISTRATION_KEY = '@@any-promise/REGISTRATION', + // Prior registration (preferred or detected) + registered = null + +/** + * Registers the given implementation. An implementation must + * be registered prior to any call to `require("any-promise")`, + * typically on application load. + * + * If called with no arguments, will return registration in + * following priority: + * + * For Node.js: + * + * 1. Previous registration + * 2. global.Promise if node.js version >= 0.12 + * 3. Auto detected promise based on first sucessful require of + * known promise libraries. Note this is a last resort, as the + * loaded library is non-deterministic. node.js >= 0.12 will + * always use global.Promise over this priority list. + * 4. Throws error. + * + * For Browser: + * + * 1. Previous registration + * 2. window.Promise + * 3. Throws error. + * + * Options: + * + * Promise: Desired Promise constructor + * global: Boolean - Should the registration be cached in a global variable to + * allow cross dependency/bundle registration? (default true) + */ +module.exports = function(root, loadImplementation){ + return function register(implementation, opts){ + implementation = implementation || null + opts = opts || {} + // global registration unless explicitly {global: false} in options (default true) + var registerGlobal = opts.global !== false; + + // load any previous global registration + if(registered === null && registerGlobal){ + registered = root[REGISTRATION_KEY] || null + } + + if(registered !== null + && implementation !== null + && registered.implementation !== implementation){ + // Throw error if attempting to redefine implementation + throw new Error('any-promise already defined as "'+registered.implementation+ + '". 
You can only register an implementation before the first '+ + ' call to require("any-promise") and an implementation cannot be changed') + } + + if(registered === null){ + // use provided implementation + if(implementation !== null && typeof opts.Promise !== 'undefined'){ + registered = { + Promise: opts.Promise, + implementation: implementation + } + } else { + // require implementation if implementation is specified but not provided + registered = loadImplementation(implementation) + } + + if(registerGlobal){ + // register preference globally in case multiple installations + root[REGISTRATION_KEY] = registered + } } - resume(client) - } catch (err) { - util.destroy(isH2 ? h2stream : socket, err) + return registered } } -async function writeIterable ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) { - assert(contentLength !== 0 || client[kRunning] === 0, 'iterator body cannot be pipelined') - let callback = null - function onDrain () { - if (callback) { - const cb = callback - callback = null - cb() +/***/ }), + +/***/ 56196: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +module.exports = __nccwpck_require__(34549)(global, loadImplementation); + +/** + * Node.js version of loadImplementation. + * + * Requires the given implementation and returns the registration + * containing {Promise, implementation} + * + * If implementation is undefined or global.Promise, loads it + * Otherwise uses require + */ +function loadImplementation(implementation){ + var impl = null + + if(shouldPreferGlobalPromise(implementation)){ + // if no implementation or env specified use global.Promise + impl = { + Promise: global.Promise, + implementation: 'global.Promise' + } + } else if(implementation){ + // if implementation specified, require it + var lib = require(implementation) + impl = { + Promise: lib.Promise || lib, + implementation: implementation } + } else { + // try to auto detect implementation. This is non-deterministic + // and should prefer other branches, but this is our last chance + // to load something without throwing error + impl = tryAutoDetect() } - const waitForDrain = () => new Promise((resolve, reject) => { - assert(callback === null) + if(impl === null){ + throw new Error('Cannot find any-promise implementation nor'+ + ' global.Promise. You must install polyfill or call'+ + ' require("any-promise/register") with your preferred'+ + ' implementation, e.g. require("any-promise/register/bluebird")'+ + ' on application load prior to any require("any-promise").') + } - if (socket[kError]) { - reject(socket[kError]) - } else { - callback = resolve - } - }) + return impl +} - if (client[kHTTPConnVersion] === 'h2') { - h2stream - .on('close', onDrain) - .on('drain', onDrain) +/** + * Determines if the global.Promise should be preferred if an implementation + * has not been registered. 
+ */ +function shouldPreferGlobalPromise(implementation){ + if(implementation){ + return implementation === 'global.Promise' + } else if(typeof global.Promise !== 'undefined'){ + // Load global promise if implementation not specified + // Versions < 0.11 did not have global Promise + // Do not use for version < 0.12 as version 0.11 contained buggy versions + var version = (/v(\d+)\.(\d+)\.(\d+)/).exec(process.version) + return !(version && +version[1] == 0 && +version[2] < 12) + } + + // do not have global.Promise or another implementation was specified + return false +} +/** + * Look for common libs as last resort there is no guarantee that + * this will return a desired implementation or even be deterministic. + * The priority is also nearly arbitrary. We are only doing this + * for older versions of Node.js <0.12 that do not have a reasonable + * global.Promise implementation and we the user has not registered + * the preference. This preserves the behavior of any-promise <= 0.1 + * and may be deprecated or removed in the future + */ +function tryAutoDetect(){ + var libs = [ + "es6-promise", + "promise", + "native-promise-only", + "bluebird", + "rsvp", + "when", + "q", + "pinkie", + "lie", + "vow"] + var i = 0, len = libs.length + for(; i < len; i++){ try { - // It's up to the user to somehow abort the async iterable. - for await (const chunk of body) { - if (socket[kError]) { - throw socket[kError] - } + return loadImplementation(libs[i]) + } catch(e){} + } + return null +} - const res = h2stream.write(chunk) - request.onBodySent(chunk) - if (!res) { - await waitForDrain() - } - } - } catch (err) { - h2stream.destroy(err) - } finally { - request.onRequestSent() - h2stream.end() - h2stream - .off('close', onDrain) - .off('drain', onDrain) - } - return - } +/***/ }), - socket - .on('close', onDrain) - .on('drain', onDrain) +/***/ 41542: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - const writer = new AsyncWriter({ socket, request, contentLength, client, expectsPayload, header }) - try { - // It's up to the user to somehow abort the async iterable. 
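// Illustrative sketch (not part of the bundled dist output): registering a preferred
// Promise implementation before the first require('any-promise'), per the registration
// priority documented above. 'bluebird' is only an example name taken from the
// library's own error message; any installed Promise library would work the same way.
require('any-promise/register')('bluebird', { Promise: require('bluebird') })
// or, equivalently, on application load:
// require('any-promise/register/bluebird')
const AnyPromise = require('any-promise') // now resolves to the registered implementation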
- for await (const chunk of body) { - if (socket[kError]) { - throw socket[kError] - } +"use strict"; - if (!writer.write(chunk)) { - await waitForDrain() - } - } +module.exports = __nccwpck_require__(80785); - writer.end() - } catch (err) { - writer.destroy(err) - } finally { - socket - .off('close', onDrain) - .off('drain', onDrain) - } -} -class AsyncWriter { - constructor ({ socket, request, contentLength, client, expectsPayload, header }) { - this.socket = socket - this.request = request - this.contentLength = contentLength - this.client = client - this.bytesWritten = 0 - this.expectsPayload = expectsPayload - this.header = header +/***/ }), - socket[kWriting] = true - } +/***/ 80785: +/***/ ((module) => { - write (chunk) { - const { socket, request, contentLength, client, bytesWritten, expectsPayload, header } = this +"use strict"; - if (socket[kError]) { - throw socket[kError] - } - if (socket.destroyed) { - return false - } +var AsyncLock = function (opts) { + opts = opts || {}; + + this.Promise = opts.Promise || Promise; + + // format: {key : [fn, fn]} + // queues[key] = null indicates no job running for key + this.queues = Object.create(null); + + // lock is reentrant for same domain + this.domainReentrant = opts.domainReentrant || false; + if (this.domainReentrant) { + if (typeof process === 'undefined' || typeof process.domain === 'undefined') { + throw new Error( + 'Domain-reentrant locks require `process.domain` to exist. Please flip `opts.domainReentrant = false`, ' + + 'use a NodeJS version that still implements Domain, or install a browser polyfill.'); + } + // domain of current running func {key : fn} + this.domains = Object.create(null); + } + + this.timeout = opts.timeout || AsyncLock.DEFAULT_TIMEOUT; + this.maxOccupationTime = opts.maxOccupationTime || AsyncLock.DEFAULT_MAX_OCCUPATION_TIME; + this.maxExecutionTime = opts.maxExecutionTime || AsyncLock.DEFAULT_MAX_EXECUTION_TIME; + if (opts.maxPending === Infinity || (Number.isInteger(opts.maxPending) && opts.maxPending >= 0)) { + this.maxPending = opts.maxPending; + } else { + this.maxPending = AsyncLock.DEFAULT_MAX_PENDING; + } +}; - const len = Buffer.byteLength(chunk) - if (!len) { - return true - } +AsyncLock.DEFAULT_TIMEOUT = 0; //Never +AsyncLock.DEFAULT_MAX_OCCUPATION_TIME = 0; //Never +AsyncLock.DEFAULT_MAX_EXECUTION_TIME = 0; //Never +AsyncLock.DEFAULT_MAX_PENDING = 1000; - // We should defer writing chunks. 
- if (contentLength !== null && bytesWritten + len > contentLength) { - if (client[kStrictContentLength]) { - throw new RequestContentLengthMismatchError() - } +/** + * Acquire Locks + * + * @param {String|Array} key resource key or keys to lock + * @param {function} fn async function + * @param {function} cb callback function, otherwise will return a promise + * @param {Object} opts options + */ +AsyncLock.prototype.acquire = function (key, fn, cb, opts) { + if (Array.isArray(key)) { + return this._acquireBatch(key, fn, cb, opts); + } + + if (typeof (fn) !== 'function') { + throw new Error('You must pass a function to execute'); + } + + // faux-deferred promise using new Promise() (as Promise.defer is deprecated) + var deferredResolve = null; + var deferredReject = null; + var deferred = null; + + if (typeof (cb) !== 'function') { + opts = cb; + cb = null; + + // will return a promise + deferred = new this.Promise(function(resolve, reject) { + deferredResolve = resolve; + deferredReject = reject; + }); + } + + opts = opts || {}; + + var resolved = false; + var timer = null; + var occupationTimer = null; + var executionTimer = null; + var self = this; + + var done = function (locked, err, ret) { + + if (occupationTimer) { + clearTimeout(occupationTimer); + occupationTimer = null; + } + + if (executionTimer) { + clearTimeout(executionTimer); + executionTimer = null; + } + + if (locked) { + if (!!self.queues[key] && self.queues[key].length === 0) { + delete self.queues[key]; + } + if (self.domainReentrant) { + delete self.domains[key]; + } + } + + if (!resolved) { + if (!deferred) { + if (typeof (cb) === 'function') { + cb(err, ret); + } + } + else { + //promise mode + if (err) { + deferredReject(err); + } + else { + deferredResolve(ret); + } + } + resolved = true; + } + + if (locked) { + //run next func + if (!!self.queues[key] && self.queues[key].length > 0) { + self.queues[key].shift()(); + } + } + }; + + var exec = function (locked) { + if (resolved) { // may due to timed out + return done(locked); + } + + if (timer) { + clearTimeout(timer); + timer = null; + } + + if (self.domainReentrant && locked) { + self.domains[key] = process.domain; + } + + var maxExecutionTime = opts.maxExecutionTime || self.maxExecutionTime; + if (maxExecutionTime) { + executionTimer = setTimeout(function () { + if (!!self.queues[key]) { + done(locked, new Error('Maximum execution time is exceeded ' + key)); + } + }, maxExecutionTime); + } + + // Callback mode + if (fn.length === 1) { + var called = false; + try { + fn(function (err, ret) { + if (!called) { + called = true; + done(locked, err, ret); + } + }); + } catch (err) { + // catching error thrown in user function fn + if (!called) { + called = true; + done(locked, err); + } + } + } + else { + // Promise mode + self._promiseTry(function () { + return fn(); + }) + .then(function(ret){ + done(locked, undefined, ret); + }, function(error){ + done(locked, error); + }); + } + }; + + if (self.domainReentrant && !!process.domain) { + exec = process.domain.bind(exec); + } + + var maxPending = opts.maxPending || self.maxPending; + + if (!self.queues[key]) { + self.queues[key] = []; + exec(true); + } + else if (self.domainReentrant && !!process.domain && process.domain === self.domains[key]) { + // If code is in the same domain of current running task, run it directly + // Since lock is re-enterable + exec(false); + } + else if (self.queues[key].length >= maxPending) { + done(false, new Error('Too many pending tasks in queue ' + key)); + } + else { + var taskFn = 
function () { + exec(true); + }; + if (opts.skipQueue) { + self.queues[key].unshift(taskFn); + } else { + self.queues[key].push(taskFn); + } + + var timeout = opts.timeout || self.timeout; + if (timeout) { + timer = setTimeout(function () { + timer = null; + done(false, new Error('async-lock timed out in queue ' + key)); + }, timeout); + } + } + + var maxOccupationTime = opts.maxOccupationTime || self.maxOccupationTime; + if (maxOccupationTime) { + occupationTimer = setTimeout(function () { + if (!!self.queues[key]) { + done(false, new Error('Maximum occupation time is exceeded in queue ' + key)); + } + }, maxOccupationTime); + } + + if (deferred) { + return deferred; + } +}; - process.emitWarning(new RequestContentLengthMismatchError()) - } +/* + * Below is how this function works: + * + * Equivalent code: + * self.acquire(key1, function(cb){ + * self.acquire(key2, function(cb){ + * self.acquire(key3, fn, cb); + * }, cb); + * }, cb); + * + * Equivalent code: + * var fn3 = getFn(key3, fn); + * var fn2 = getFn(key2, fn3); + * var fn1 = getFn(key1, fn2); + * fn1(cb); + */ +AsyncLock.prototype._acquireBatch = function (keys, fn, cb, opts) { + if (typeof (cb) !== 'function') { + opts = cb; + cb = null; + } + + var self = this; + var getFn = function (key, fn) { + return function (cb) { + self.acquire(key, fn, cb, opts); + }; + }; + + var fnx = keys.reduceRight(function (prev, key) { + return getFn(key, prev); + }, fn); + + if (typeof (cb) === 'function') { + fnx(cb); + } + else { + return new this.Promise(function (resolve, reject) { + // check for promise mode in case keys is empty array + if (fnx.length === 1) { + fnx(function (err, ret) { + if (err) { + reject(err); + } + else { + resolve(ret); + } + }); + } else { + resolve(fnx()); + } + }); + } +}; - socket.cork() +/* + * Whether there is any running or pending asyncFunc + * + * @param {String} key + */ +AsyncLock.prototype.isBusy = function (key) { + if (!key) { + return Object.keys(this.queues).length > 0; + } + else { + return !!this.queues[key]; + } +}; - if (bytesWritten === 0) { - if (!expectsPayload) { - socket[kReset] = true - } +/** + * Promise.try() implementation to become independent of Q-specific methods + */ +AsyncLock.prototype._promiseTry = function(fn) { + try { + return this.Promise.resolve(fn()); + } catch (e) { + return this.Promise.reject(e); + } +}; - if (contentLength === null) { - socket.write(`${header}transfer-encoding: chunked\r\n`, 'latin1') - } else { - socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1') - } - } +module.exports = AsyncLock; - if (contentLength === null) { - socket.write(`\r\n${len.toString(16)}\r\n`, 'latin1') - } - this.bytesWritten += len +/***/ }), - const ret = socket.write(chunk) +/***/ 14812: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - socket.uncork() +module.exports = +{ + parallel : __nccwpck_require__(8210), + serial : __nccwpck_require__(50445), + serialOrdered : __nccwpck_require__(3578) +}; - request.onBodySent(chunk) - if (!ret) { - if (socket[kParser].timeout && socket[kParser].timeoutType === TIMEOUT_HEADERS) { - // istanbul ignore else: only for jest - if (socket[kParser].timeout.refresh) { - socket[kParser].timeout.refresh() - } - } - } +/***/ }), - return ret - } +/***/ 1700: +/***/ ((module) => { - end () { - const { socket, contentLength, client, bytesWritten, expectsPayload, header, request } = this - request.onRequestSent() +// API +module.exports = abort; - socket[kWriting] = false +/** + * Aborts leftover active jobs + 
* + * @param {object} state - current state object + */ +function abort(state) +{ + Object.keys(state.jobs).forEach(clean.bind(state)); - if (socket[kError]) { - throw socket[kError] - } + // reset leftover jobs + state.jobs = {}; +} - if (socket.destroyed) { - return - } +/** + * Cleans up leftover job by invoking abort function for the provided job id + * + * @this state + * @param {string|number} key - job id to abort + */ +function clean(key) +{ + if (typeof this.jobs[key] == 'function') + { + this.jobs[key](); + } +} - if (bytesWritten === 0) { - if (expectsPayload) { - // https://tools.ietf.org/html/rfc7230#section-3.3.2 - // A user agent SHOULD send a Content-Length in a request message when - // no Transfer-Encoding is sent and the request method defines a meaning - // for an enclosed payload body. - socket.write(`${header}content-length: 0\r\n\r\n`, 'latin1') - } else { - socket.write(`${header}\r\n`, 'latin1') - } - } else if (contentLength === null) { - socket.write('\r\n0\r\n\r\n', 'latin1') - } +/***/ }), - if (contentLength !== null && bytesWritten !== contentLength) { - if (client[kStrictContentLength]) { - throw new RequestContentLengthMismatchError() - } else { - process.emitWarning(new RequestContentLengthMismatchError()) - } - } +/***/ 72794: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (socket[kParser].timeout && socket[kParser].timeoutType === TIMEOUT_HEADERS) { - // istanbul ignore else: only for jest - if (socket[kParser].timeout.refresh) { - socket[kParser].timeout.refresh() - } - } +var defer = __nccwpck_require__(15295); - resume(client) - } +// API +module.exports = async; - destroy (err) { - const { socket, client } = this +/** + * Runs provided callback asynchronously + * even if callback itself is not + * + * @param {function} callback - callback to invoke + * @returns {function} - augmented callback + */ +function async(callback) +{ + var isAsync = false; - socket[kWriting] = false + // check if async happened + defer(function() { isAsync = true; }); - if (err) { - assert(client[kRunning] <= 1, 'pipeline should only contain this request') - util.destroy(socket, err) + return function async_callback(err, result) + { + if (isAsync) + { + callback(err, result); } - } + else + { + defer(function nextTick_callback() + { + callback(err, result); + }); + } + }; } -function errorRequest (client, request, err) { - try { - request.onError(err) - assert(request.aborted) - } catch (err) { - client.emit('error', err) + +/***/ }), + +/***/ 15295: +/***/ ((module) => { + +module.exports = defer; + +/** + * Runs provided function on next iteration of the event loop + * + * @param {function} fn - function to run + */ +function defer(fn) +{ + var nextTick = typeof setImmediate == 'function' + ? setImmediate + : ( + typeof process == 'object' && typeof process.nextTick == 'function' + ? 
process.nextTick + : null + ); + + if (nextTick) + { + nextTick(fn); + } + else + { + setTimeout(fn, 0); } } -module.exports = Client - /***/ }), -/***/ 6436: +/***/ 9023: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; - - -/* istanbul ignore file: only for Node 12 */ +var async = __nccwpck_require__(72794) + , abort = __nccwpck_require__(1700) + ; -const { kConnected, kSize } = __nccwpck_require__(2785) +// API +module.exports = iterate; -class CompatWeakRef { - constructor (value) { - this.value = value - } +/** + * Iterates over each job object + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {object} state - current job status + * @param {function} callback - invoked when all elements processed + */ +function iterate(list, iterator, state, callback) +{ + // store current index + var key = state['keyedList'] ? state['keyedList'][state.index] : state.index; - deref () { - return this.value[kConnected] === 0 && this.value[kSize] === 0 - ? undefined - : this.value - } -} + state.jobs[key] = runJob(iterator, key, list[key], function(error, output) + { + // don't repeat yourself + // skip secondary callbacks + if (!(key in state.jobs)) + { + return; + } -class CompatFinalizer { - constructor (finalizer) { - this.finalizer = finalizer - } + // clean up jobs + delete state.jobs[key]; - register (dispatcher, key) { - if (dispatcher.on) { - dispatcher.on('disconnect', () => { - if (dispatcher[kConnected] === 0 && dispatcher[kSize] === 0) { - this.finalizer(key) - } - }) + if (error) + { + // don't process rest of the results + // stop still active jobs + // and reset the list + abort(state); } - } + else + { + state.results[key] = output; + } + + // return salvaged results + callback(error, state.results); + }); } -module.exports = function () { - // FIXME: remove workaround when the Node bug is fixed - // https://github.com/nodejs/node/issues/49344#issuecomment-1741776308 - if (process.env.NODE_V8_COVERAGE) { - return { - WeakRef: CompatWeakRef, - FinalizationRegistry: CompatFinalizer - } +/** + * Runs iterator over provided job element + * + * @param {function} iterator - iterator to invoke + * @param {string|number} key - key/index of the element in the list of jobs + * @param {mixed} item - job description + * @param {function} callback - invoked after iterator is done with the job + * @returns {function|mixed} - job abort function or something else + */ +function runJob(iterator, key, item, callback) +{ + var aborter; + + // allow shortcut if iterator expects only two arguments + if (iterator.length == 2) + { + aborter = iterator(item, async(callback)); } - return { - WeakRef: global.WeakRef || CompatWeakRef, - FinalizationRegistry: global.FinalizationRegistry || CompatFinalizer + // otherwise go with full three arguments + else + { + aborter = iterator(item, key, async(callback)); } + + return aborter; } /***/ }), -/***/ 663: +/***/ 42474: /***/ ((module) => { -"use strict"; - +// API +module.exports = state; -// https://wicg.github.io/cookie-store/#cookie-maximum-attribute-value-size -const maxAttributeValueSize = 1024 +/** + * Creates initial state object + * for iteration over list + * + * @param {array|object} list - list to iterate over + * @param {function|null} sortMethod - function to use for keys sort, + * or `null` to keep them as is + * @returns {object} - initial state object + */ +function state(list, sortMethod) +{ + var isNamedList = 
!Array.isArray(list) + , initState = + { + index : 0, + keyedList: isNamedList || sortMethod ? Object.keys(list) : null, + jobs : {}, + results : isNamedList ? {} : [], + size : isNamedList ? Object.keys(list).length : list.length + } + ; -// https://wicg.github.io/cookie-store/#cookie-maximum-name-value-pair-size -const maxNameValuePairSize = 4096 + if (sortMethod) + { + // sort array keys based on it's values + // sort object's keys just on own merit + initState.keyedList.sort(isNamedList ? sortMethod : function(a, b) + { + return sortMethod(list[a], list[b]); + }); + } -module.exports = { - maxAttributeValueSize, - maxNameValuePairSize + return initState; } /***/ }), -/***/ 1724: +/***/ 37942: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; - +var abort = __nccwpck_require__(1700) + , async = __nccwpck_require__(72794) + ; -const { parseSetCookie } = __nccwpck_require__(4408) -const { stringify, getHeadersList } = __nccwpck_require__(3121) -const { webidl } = __nccwpck_require__(1744) -const { Headers } = __nccwpck_require__(554) +// API +module.exports = terminator; /** - * @typedef {Object} Cookie - * @property {string} name - * @property {string} value - * @property {Date|number|undefined} expires - * @property {number|undefined} maxAge - * @property {string|undefined} domain - * @property {string|undefined} path - * @property {boolean|undefined} secure - * @property {boolean|undefined} httpOnly - * @property {'Strict'|'Lax'|'None'} sameSite - * @property {string[]} unparsed + * Terminates jobs in the attached state context + * + * @this AsyncKitState# + * @param {function} callback - final callback to invoke after termination */ +function terminator(callback) +{ + if (!Object.keys(this.jobs).length) + { + return; + } -/** - * @param {Headers} headers - * @returns {Record} - */ -function getCookies (headers) { - webidl.argumentLengthCheck(arguments, 1, { header: 'getCookies' }) + // fast forward iteration index + this.index = this.size; - webidl.brandCheck(headers, Headers, { strict: false }) + // abort jobs + abort(this); - const cookie = headers.get('cookie') - const out = {} + // send back results we have so far + async(callback)(null, this.results); +} - if (!cookie) { - return out - } - for (const piece of cookie.split(';')) { - const [name, ...value] = piece.split('=') +/***/ }), - out[name.trim()] = value.join('=') - } +/***/ 8210: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - return out -} +var iterate = __nccwpck_require__(9023) + , initState = __nccwpck_require__(42474) + , terminator = __nccwpck_require__(37942) + ; + +// Public API +module.exports = parallel; /** - * @param {Headers} headers - * @param {string} name - * @param {{ path?: string, domain?: string }|undefined} attributes - * @returns {void} + * Runs iterator over provided array elements in parallel + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator */ -function deleteCookie (headers, name, attributes) { - webidl.argumentLengthCheck(arguments, 2, { header: 'deleteCookie' }) +function parallel(list, iterator, callback) +{ + var state = initState(list); - webidl.brandCheck(headers, Headers, { strict: false }) + while (state.index < (state['keyedList'] || list).length) + { + iterate(list, iterator, state, function(error, result) + { + if (error) + { + callback(error, 
result); + return; + } - name = webidl.converters.DOMString(name) - attributes = webidl.converters.DeleteCookieAttributes(attributes) + // looks like it's the last one + if (Object.keys(state.jobs).length === 0) + { + callback(null, state.results); + return; + } + }); - // Matches behavior of - // https://github.com/denoland/deno_std/blob/63827b16330b82489a04614027c33b7904e08be5/http/cookie.ts#L278 - setCookie(headers, { - name, - value: '', - expires: new Date(0), - ...attributes - }) + state.index++; + } + + return terminator.bind(state, callback); } -/** - * @param {Headers} headers - * @returns {Cookie[]} - */ -function getSetCookies (headers) { - webidl.argumentLengthCheck(arguments, 1, { header: 'getSetCookies' }) - webidl.brandCheck(headers, Headers, { strict: false }) +/***/ }), - const cookies = getHeadersList(headers).cookies +/***/ 50445: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (!cookies) { - return [] - } +var serialOrdered = __nccwpck_require__(3578); - // In older versions of undici, cookies is a list of name:value. - return cookies.map((pair) => parseSetCookie(Array.isArray(pair) ? pair[1] : pair)) -} +// Public API +module.exports = serial; /** - * @param {Headers} headers - * @param {Cookie} cookie - * @returns {void} + * Runs iterator over provided array elements in series + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator */ -function setCookie (headers, cookie) { - webidl.argumentLengthCheck(arguments, 2, { header: 'setCookie' }) +function serial(list, iterator, callback) +{ + return serialOrdered(list, iterator, null, callback); +} - webidl.brandCheck(headers, Headers, { strict: false }) - cookie = webidl.converters.Cookie(cookie) +/***/ }), - const str = stringify(cookie) +/***/ 3578: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (str) { - headers.append('Set-Cookie', stringify(cookie)) - } -} +var iterate = __nccwpck_require__(9023) + , initState = __nccwpck_require__(42474) + , terminator = __nccwpck_require__(37942) + ; -webidl.converters.DeleteCookieAttributes = webidl.dictionaryConverter([ - { - converter: webidl.nullableConverter(webidl.converters.DOMString), - key: 'path', - defaultValue: null - }, - { - converter: webidl.nullableConverter(webidl.converters.DOMString), - key: 'domain', - defaultValue: null - } -]) +// Public API +module.exports = serialOrdered; +// sorting helpers +module.exports.ascending = ascending; +module.exports.descending = descending; -webidl.converters.Cookie = webidl.dictionaryConverter([ - { - converter: webidl.converters.DOMString, - key: 'name' - }, - { - converter: webidl.converters.DOMString, - key: 'value' - }, - { - converter: webidl.nullableConverter((value) => { - if (typeof value === 'number') { - return webidl.converters['unsigned long long'](value) - } +/** + * Runs iterator over provided sorted array elements in series + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} sortMethod - custom sort function + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator + */ +function serialOrdered(list, iterator, sortMethod, callback) +{ + var state = initState(list, sortMethod); - return new Date(value) - }), - key: 'expires', - 
defaultValue: null - }, - { - converter: webidl.nullableConverter(webidl.converters['long long']), - key: 'maxAge', - defaultValue: null - }, - { - converter: webidl.nullableConverter(webidl.converters.DOMString), - key: 'domain', - defaultValue: null - }, - { - converter: webidl.nullableConverter(webidl.converters.DOMString), - key: 'path', - defaultValue: null - }, - { - converter: webidl.nullableConverter(webidl.converters.boolean), - key: 'secure', - defaultValue: null - }, - { - converter: webidl.nullableConverter(webidl.converters.boolean), - key: 'httpOnly', - defaultValue: null - }, - { - converter: webidl.converters.USVString, - key: 'sameSite', - allowedValues: ['Strict', 'Lax', 'None'] - }, + iterate(list, iterator, state, function iteratorHandler(error, result) { - converter: webidl.sequenceConverter(webidl.converters.DOMString), - key: 'unparsed', - defaultValue: [] - } -]) - -module.exports = { - getCookies, - deleteCookie, - getSetCookies, - setCookie -} + if (error) + { + callback(error, result); + return; + } + state.index++; -/***/ }), + // are we there yet? + if (state.index < (state['keyedList'] || list).length) + { + iterate(list, iterator, state, iteratorHandler); + return; + } -/***/ 4408: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // done here + callback(null, state.results); + }); -"use strict"; + return terminator.bind(state, callback); +} +/* + * -- Sort methods + */ -const { maxNameValuePairSize, maxAttributeValueSize } = __nccwpck_require__(663) -const { isCTLExcludingHtab } = __nccwpck_require__(3121) -const { collectASequenceOfCodePointsFast } = __nccwpck_require__(685) -const assert = __nccwpck_require__(9491) +/** + * sort helper to sort array elements in ascending order + * + * @param {mixed} a - an item to compare + * @param {mixed} b - an item to compare + * @returns {number} - comparison result + */ +function ascending(a, b) +{ + return a < b ? -1 : a > b ? 1 : 0; +} /** - * @description Parses the field-value attributes of a set-cookie header string. - * @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4 - * @param {string} header - * @returns if the header is invalid, null will be returned + * sort helper to sort array elements in descending order + * + * @param {mixed} a - an item to compare + * @param {mixed} b - an item to compare + * @returns {number} - comparison result */ -function parseSetCookie (header) { - // 1. If the set-cookie-string contains a %x00-08 / %x0A-1F / %x7F - // character (CTL characters excluding HTAB): Abort these steps and - // ignore the set-cookie-string entirely. - if (isCTLExcludingHtab(header)) { - return null - } +function descending(a, b) +{ + return -1 * ascending(a, b); +} - let nameValuePair = '' - let unparsedAttributes = '' - let name = '' - let value = '' - // 2. If the set-cookie-string contains a %x3B (";") character: - if (header.includes(';')) { - // 1. The name-value-pair string consists of the characters up to, - // but not including, the first %x3B (";"), and the unparsed- - // attributes consist of the remainder of the set-cookie-string - // (including the %x3B (";") in question). - const position = { position: 0 } +/***/ }), - nameValuePair = collectASequenceOfCodePointsFast(';', header, position) - unparsedAttributes = header.slice(position.position) - } else { - // Otherwise: +/***/ 33114: +/***/ (function(module) { - // 1. 
The name-value-pair string consists of all the characters - // contained in the set-cookie-string, and the unparsed- - // attributes is the empty string. - nameValuePair = header - } +/*! + * Bowser - a browser detector + * https://github.com/ded/bowser + * MIT License | (c) Dustin Diaz 2015 + */ - // 3. If the name-value-pair string lacks a %x3D ("=") character, then - // the name string is empty, and the value string is the value of - // name-value-pair. - if (!nameValuePair.includes('=')) { - value = nameValuePair - } else { - // Otherwise, the name string consists of the characters up to, but - // not including, the first %x3D ("=") character, and the (possibly - // empty) value string consists of the characters after the first - // %x3D ("=") character. - const position = { position: 0 } - name = collectASequenceOfCodePointsFast( - '=', - nameValuePair, - position - ) - value = nameValuePair.slice(position.position + 1) - } +!function (root, name, definition) { + if ( true && module.exports) module.exports = definition() + else if (typeof define == 'function' && define.amd) define(name, definition) + else root[name] = definition() +}(this, 'bowser', function () { + /** + * See useragents.js for examples of navigator.userAgent + */ + + var t = true + + function detect(ua) { + + function getFirstMatch(regex) { + var match = ua.match(regex); + return (match && match.length > 1 && match[1]) || ''; + } + + function getSecondMatch(regex) { + var match = ua.match(regex); + return (match && match.length > 1 && match[2]) || ''; + } + + var iosdevice = getFirstMatch(/(ipod|iphone|ipad)/i).toLowerCase() + , likeAndroid = /like android/i.test(ua) + , android = !likeAndroid && /android/i.test(ua) + , nexusMobile = /nexus\s*[0-6]\s*/i.test(ua) + , nexusTablet = !nexusMobile && /nexus\s*[0-9]+/i.test(ua) + , chromeos = /CrOS/.test(ua) + , silk = /silk/i.test(ua) + , sailfish = /sailfish/i.test(ua) + , tizen = /tizen/i.test(ua) + , webos = /(web|hpw)(o|0)s/i.test(ua) + , windowsphone = /windows phone/i.test(ua) + , samsungBrowser = /SamsungBrowser/i.test(ua) + , windows = !windowsphone && /windows/i.test(ua) + , mac = !iosdevice && !silk && /macintosh/i.test(ua) + , linux = !android && !sailfish && !tizen && !webos && /linux/i.test(ua) + , edgeVersion = getSecondMatch(/edg([ea]|ios)\/(\d+(\.\d+)?)/i) + , versionIdentifier = getFirstMatch(/version\/(\d+(\.\d+)?)/i) + , tablet = /tablet/i.test(ua) && !/tablet pc/i.test(ua) + , mobile = !tablet && /[^-]mobi/i.test(ua) + , xbox = /xbox/i.test(ua) + , result + + if (/opera/i.test(ua)) { + // an old Opera + result = { + name: 'Opera' + , opera: t + , version: versionIdentifier || getFirstMatch(/(?:opera|opr|opios)[\s\/](\d+(\.\d+)?)/i) + } + } else if (/opr\/|opios/i.test(ua)) { + // a new Opera + result = { + name: 'Opera' + , opera: t + , version: getFirstMatch(/(?:opr|opios)[\s\/](\d+(\.\d+)?)/i) || versionIdentifier + } + } + else if (/SamsungBrowser/i.test(ua)) { + result = { + name: 'Samsung Internet for Android' + , samsungBrowser: t + , version: versionIdentifier || getFirstMatch(/(?:SamsungBrowser)[\s\/](\d+(\.\d+)?)/i) + } + } + else if (/Whale/i.test(ua)) { + result = { + name: 'NAVER Whale browser' + , whale: t + , version: getFirstMatch(/(?:whale)[\s\/](\d+(?:\.\d+)+)/i) + } + } + else if (/MZBrowser/i.test(ua)) { + result = { + name: 'MZ Browser' + , mzbrowser: t + , version: getFirstMatch(/(?:MZBrowser)[\s\/](\d+(?:\.\d+)+)/i) + } + } + else if (/coast/i.test(ua)) { + result = { + name: 'Opera Coast' + , coast: t + , version: 
versionIdentifier || getFirstMatch(/(?:coast)[\s\/](\d+(\.\d+)?)/i) + } + } + else if (/focus/i.test(ua)) { + result = { + name: 'Focus' + , focus: t + , version: getFirstMatch(/(?:focus)[\s\/](\d+(?:\.\d+)+)/i) + } + } + else if (/yabrowser/i.test(ua)) { + result = { + name: 'Yandex Browser' + , yandexbrowser: t + , version: versionIdentifier || getFirstMatch(/(?:yabrowser)[\s\/](\d+(\.\d+)?)/i) + } + } + else if (/ucbrowser/i.test(ua)) { + result = { + name: 'UC Browser' + , ucbrowser: t + , version: getFirstMatch(/(?:ucbrowser)[\s\/](\d+(?:\.\d+)+)/i) + } + } + else if (/mxios/i.test(ua)) { + result = { + name: 'Maxthon' + , maxthon: t + , version: getFirstMatch(/(?:mxios)[\s\/](\d+(?:\.\d+)+)/i) + } + } + else if (/epiphany/i.test(ua)) { + result = { + name: 'Epiphany' + , epiphany: t + , version: getFirstMatch(/(?:epiphany)[\s\/](\d+(?:\.\d+)+)/i) + } + } + else if (/puffin/i.test(ua)) { + result = { + name: 'Puffin' + , puffin: t + , version: getFirstMatch(/(?:puffin)[\s\/](\d+(?:\.\d+)?)/i) + } + } + else if (/sleipnir/i.test(ua)) { + result = { + name: 'Sleipnir' + , sleipnir: t + , version: getFirstMatch(/(?:sleipnir)[\s\/](\d+(?:\.\d+)+)/i) + } + } + else if (/k-meleon/i.test(ua)) { + result = { + name: 'K-Meleon' + , kMeleon: t + , version: getFirstMatch(/(?:k-meleon)[\s\/](\d+(?:\.\d+)+)/i) + } + } + else if (windowsphone) { + result = { + name: 'Windows Phone' + , osname: 'Windows Phone' + , windowsphone: t + } + if (edgeVersion) { + result.msedge = t + result.version = edgeVersion + } + else { + result.msie = t + result.version = getFirstMatch(/iemobile\/(\d+(\.\d+)?)/i) + } + } + else if (/msie|trident/i.test(ua)) { + result = { + name: 'Internet Explorer' + , msie: t + , version: getFirstMatch(/(?:msie |rv:)(\d+(\.\d+)?)/i) + } + } else if (chromeos) { + result = { + name: 'Chrome' + , osname: 'Chrome OS' + , chromeos: t + , chromeBook: t + , chrome: t + , version: getFirstMatch(/(?:chrome|crios|crmo)\/(\d+(\.\d+)?)/i) + } + } else if (/edg([ea]|ios)/i.test(ua)) { + result = { + name: 'Microsoft Edge' + , msedge: t + , version: edgeVersion + } + } + else if (/vivaldi/i.test(ua)) { + result = { + name: 'Vivaldi' + , vivaldi: t + , version: getFirstMatch(/vivaldi\/(\d+(\.\d+)?)/i) || versionIdentifier + } + } + else if (sailfish) { + result = { + name: 'Sailfish' + , osname: 'Sailfish OS' + , sailfish: t + , version: getFirstMatch(/sailfish\s?browser\/(\d+(\.\d+)?)/i) + } + } + else if (/seamonkey\//i.test(ua)) { + result = { + name: 'SeaMonkey' + , seamonkey: t + , version: getFirstMatch(/seamonkey\/(\d+(\.\d+)?)/i) + } + } + else if (/firefox|iceweasel|fxios/i.test(ua)) { + result = { + name: 'Firefox' + , firefox: t + , version: getFirstMatch(/(?:firefox|iceweasel|fxios)[ \/](\d+(\.\d+)?)/i) + } + if (/\((mobile|tablet);[^\)]*rv:[\d\.]+\)/i.test(ua)) { + result.firefoxos = t + result.osname = 'Firefox OS' + } + } + else if (silk) { + result = { + name: 'Amazon Silk' + , silk: t + , version : getFirstMatch(/silk\/(\d+(\.\d+)?)/i) + } + } + else if (/phantom/i.test(ua)) { + result = { + name: 'PhantomJS' + , phantom: t + , version: getFirstMatch(/phantomjs\/(\d+(\.\d+)?)/i) + } + } + else if (/slimerjs/i.test(ua)) { + result = { + name: 'SlimerJS' + , slimer: t + , version: getFirstMatch(/slimerjs\/(\d+(\.\d+)?)/i) + } + } + else if (/blackberry|\bbb\d+/i.test(ua) || /rim\stablet/i.test(ua)) { + result = { + name: 'BlackBerry' + , osname: 'BlackBerry OS' + , blackberry: t + , version: versionIdentifier || getFirstMatch(/blackberry[\d]+\/(\d+(\.\d+)?)/i) + } + } + else if 
(webos) { + result = { + name: 'WebOS' + , osname: 'WebOS' + , webos: t + , version: versionIdentifier || getFirstMatch(/w(?:eb)?osbrowser\/(\d+(\.\d+)?)/i) + }; + /touchpad\//i.test(ua) && (result.touchpad = t) + } + else if (/bada/i.test(ua)) { + result = { + name: 'Bada' + , osname: 'Bada' + , bada: t + , version: getFirstMatch(/dolfin\/(\d+(\.\d+)?)/i) + }; + } + else if (tizen) { + result = { + name: 'Tizen' + , osname: 'Tizen' + , tizen: t + , version: getFirstMatch(/(?:tizen\s?)?browser\/(\d+(\.\d+)?)/i) || versionIdentifier + }; + } + else if (/qupzilla/i.test(ua)) { + result = { + name: 'QupZilla' + , qupzilla: t + , version: getFirstMatch(/(?:qupzilla)[\s\/](\d+(?:\.\d+)+)/i) || versionIdentifier + } + } + else if (/chromium/i.test(ua)) { + result = { + name: 'Chromium' + , chromium: t + , version: getFirstMatch(/(?:chromium)[\s\/](\d+(?:\.\d+)?)/i) || versionIdentifier + } + } + else if (/chrome|crios|crmo/i.test(ua)) { + result = { + name: 'Chrome' + , chrome: t + , version: getFirstMatch(/(?:chrome|crios|crmo)\/(\d+(\.\d+)?)/i) + } + } + else if (android) { + result = { + name: 'Android' + , version: versionIdentifier + } + } + else if (/safari|applewebkit/i.test(ua)) { + result = { + name: 'Safari' + , safari: t + } + if (versionIdentifier) { + result.version = versionIdentifier + } + } + else if (iosdevice) { + result = { + name : iosdevice == 'iphone' ? 'iPhone' : iosdevice == 'ipad' ? 'iPad' : 'iPod' + } + // WTF: version is not part of user agent in web apps + if (versionIdentifier) { + result.version = versionIdentifier + } + } + else if(/googlebot/i.test(ua)) { + result = { + name: 'Googlebot' + , googlebot: t + , version: getFirstMatch(/googlebot\/(\d+(\.\d+))/i) || versionIdentifier + } + } + else { + result = { + name: getFirstMatch(/^(.*)\/(.*) /), + version: getSecondMatch(/^(.*)\/(.*) /) + }; + } + + // set webkit or gecko flag for browsers based on these engines + if (!result.msedge && /(apple)?webkit/i.test(ua)) { + if (/(apple)?webkit\/537\.36/i.test(ua)) { + result.name = result.name || "Blink" + result.blink = t + } else { + result.name = result.name || "Webkit" + result.webkit = t + } + if (!result.version && versionIdentifier) { + result.version = versionIdentifier + } + } else if (!result.opera && /gecko\//i.test(ua)) { + result.name = result.name || "Gecko" + result.gecko = t + result.version = result.version || getFirstMatch(/gecko\/(\d+(\.\d+)?)/i) + } + + // set OS flags for platforms that have multiple browsers + if (!result.windowsphone && (android || result.silk)) { + result.android = t + result.osname = 'Android' + } else if (!result.windowsphone && iosdevice) { + result[iosdevice] = t + result.ios = t + result.osname = 'iOS' + } else if (mac) { + result.mac = t + result.osname = 'macOS' + } else if (xbox) { + result.xbox = t + result.osname = 'Xbox' + } else if (windows) { + result.windows = t + result.osname = 'Windows' + } else if (linux) { + result.linux = t + result.osname = 'Linux' + } + + function getWindowsVersion (s) { + switch (s) { + case 'NT': return 'NT' + case 'XP': return 'XP' + case 'NT 5.0': return '2000' + case 'NT 5.1': return 'XP' + case 'NT 5.2': return '2003' + case 'NT 6.0': return 'Vista' + case 'NT 6.1': return '7' + case 'NT 6.2': return '8' + case 'NT 6.3': return '8.1' + case 'NT 10.0': return '10' + default: return undefined + } + } - // 4. Remove any leading or trailing WSP characters from the name - // string and the value string. 
- name = name.trim() - value = value.trim() + // OS version extraction + var osVersion = ''; + if (result.windows) { + osVersion = getWindowsVersion(getFirstMatch(/Windows ((NT|XP)( \d\d?.\d)?)/i)) + } else if (result.windowsphone) { + osVersion = getFirstMatch(/windows phone (?:os)?\s?(\d+(\.\d+)*)/i); + } else if (result.mac) { + osVersion = getFirstMatch(/Mac OS X (\d+([_\.\s]\d+)*)/i); + osVersion = osVersion.replace(/[_\s]/g, '.'); + } else if (iosdevice) { + osVersion = getFirstMatch(/os (\d+([_\s]\d+)*) like mac os x/i); + osVersion = osVersion.replace(/[_\s]/g, '.'); + } else if (android) { + osVersion = getFirstMatch(/android[ \/-](\d+(\.\d+)*)/i); + } else if (result.webos) { + osVersion = getFirstMatch(/(?:web|hpw)os\/(\d+(\.\d+)*)/i); + } else if (result.blackberry) { + osVersion = getFirstMatch(/rim\stablet\sos\s(\d+(\.\d+)*)/i); + } else if (result.bada) { + osVersion = getFirstMatch(/bada\/(\d+(\.\d+)*)/i); + } else if (result.tizen) { + osVersion = getFirstMatch(/tizen[\/\s](\d+(\.\d+)*)/i); + } + if (osVersion) { + result.osversion = osVersion; + } + + // device type extraction + var osMajorVersion = !result.windows && osVersion.split('.')[0]; + if ( + tablet + || nexusTablet + || iosdevice == 'ipad' + || (android && (osMajorVersion == 3 || (osMajorVersion >= 4 && !mobile))) + || result.silk + ) { + result.tablet = t + } else if ( + mobile + || iosdevice == 'iphone' + || iosdevice == 'ipod' + || android + || nexusMobile + || result.blackberry + || result.webos + || result.bada + ) { + result.mobile = t + } + + // Graded Browser Support + // http://developer.yahoo.com/yui/articles/gbs + if (result.msedge || + (result.msie && result.version >= 10) || + (result.yandexbrowser && result.version >= 15) || + (result.vivaldi && result.version >= 1.0) || + (result.chrome && result.version >= 20) || + (result.samsungBrowser && result.version >= 4) || + (result.whale && compareVersions([result.version, '1.0']) === 1) || + (result.mzbrowser && compareVersions([result.version, '6.0']) === 1) || + (result.focus && compareVersions([result.version, '1.0']) === 1) || + (result.firefox && result.version >= 20.0) || + (result.safari && result.version >= 6) || + (result.opera && result.version >= 10.0) || + (result.ios && result.osversion && result.osversion.split(".")[0] >= 6) || + (result.blackberry && result.version >= 10.1) + || (result.chromium && result.version >= 20) + ) { + result.a = t; + } + else if ((result.msie && result.version < 10) || + (result.chrome && result.version < 20) || + (result.firefox && result.version < 20.0) || + (result.safari && result.version < 6) || + (result.opera && result.version < 10.0) || + (result.ios && result.osversion && result.osversion.split(".")[0] < 6) + || (result.chromium && result.version < 20) + ) { + result.c = t + } else result.x = t - // 5. If the sum of the lengths of the name string and the value string - // is more than 4096 octets, abort these steps and ignore the set- - // cookie-string entirely. - if (name.length + value.length > maxNameValuePairSize) { - return null + return result } - // 6. The cookie-name is the name string, and the cookie-value is the - // value string. - return { - name, value, ...parseUnparsedAttributes(unparsedAttributes) - } -} + var bowser = detect(typeof navigator !== 'undefined' ? 
navigator.userAgent || '' : '') -/** - * Parses the remaining attributes of a set-cookie header - * @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4 - * @param {string} unparsedAttributes - * @param {[Object.]={}} cookieAttributeList - */ -function parseUnparsedAttributes (unparsedAttributes, cookieAttributeList = {}) { - // 1. If the unparsed-attributes string is empty, skip the rest of - // these steps. - if (unparsedAttributes.length === 0) { - return cookieAttributeList + bowser.test = function (browserList) { + for (var i = 0; i < browserList.length; ++i) { + var browserItem = browserList[i]; + if (typeof browserItem=== 'string') { + if (browserItem in bowser) { + return true; + } + } + } + return false; } - // 2. Discard the first character of the unparsed-attributes (which - // will be a %x3B (";") character). - assert(unparsedAttributes[0] === ';') - unparsedAttributes = unparsedAttributes.slice(1) + /** + * Get version precisions count + * + * @example + * getVersionPrecision("1.10.3") // 3 + * + * @param {string} version + * @return {number} + */ + function getVersionPrecision(version) { + return version.split(".").length; + } - let cookieAv = '' + /** + * Array::map polyfill + * + * @param {Array} arr + * @param {Function} iterator + * @return {Array} + */ + function map(arr, iterator) { + var result = [], i; + if (Array.prototype.map) { + return Array.prototype.map.call(arr, iterator); + } + for (i = 0; i < arr.length; i++) { + result.push(iterator(arr[i])); + } + return result; + } - // 3. If the remaining unparsed-attributes contains a %x3B (";") - // character: - if (unparsedAttributes.includes(';')) { - // 1. Consume the characters of the unparsed-attributes up to, but - // not including, the first %x3B (";") character. - cookieAv = collectASequenceOfCodePointsFast( - ';', - unparsedAttributes, - { position: 0 } - ) - unparsedAttributes = unparsedAttributes.slice(cookieAv.length) - } else { - // Otherwise: + /** + * Calculate browser version weight + * + * @example + * compareVersions(['1.10.2.1', '1.8.2.1.90']) // 1 + * compareVersions(['1.010.2.1', '1.09.2.1.90']); // 1 + * compareVersions(['1.10.2.1', '1.10.2.1']); // 0 + * compareVersions(['1.10.2.1', '1.0800.2']); // -1 + * + * @param {Array} versions versions to compare + * @return {Number} comparison result + */ + function compareVersions(versions) { + // 1) get common precision for both versions, for example for "10.0" and "9" it should be 2 + var precision = Math.max(getVersionPrecision(versions[0]), getVersionPrecision(versions[1])); + var chunks = map(versions, function (version) { + var delta = precision - getVersionPrecision(version); + + // 2) "9" -> "9.0" (for precision = 2) + version = version + new Array(delta + 1).join(".0"); + + // 3) "9.0" -> ["000000000"", "000000009"] + return map(version.split("."), function (chunk) { + return new Array(20 - chunk.length).join("0") + chunk; + }).reverse(); + }); - // 1. Consume the remainder of the unparsed-attributes. 
- cookieAv = unparsedAttributes - unparsedAttributes = '' + // iterate in reverse order by reversed chunks array + while (--precision >= 0) { + // 4) compare: "000000009" > "000000010" = false (but "9" > "10" = true) + if (chunks[0][precision] > chunks[1][precision]) { + return 1; + } + else if (chunks[0][precision] === chunks[1][precision]) { + if (precision === 0) { + // all version chunks are same + return 0; + } + } + else { + return -1; + } + } } - // Let the cookie-av string be the characters consumed in this step. + /** + * Check if browser is unsupported + * + * @example + * bowser.isUnsupportedBrowser({ + * msie: "10", + * firefox: "23", + * chrome: "29", + * safari: "5.1", + * opera: "16", + * phantom: "534" + * }); + * + * @param {Object} minVersions map of minimal version to browser + * @param {Boolean} [strictMode = false] flag to return false if browser wasn't found in map + * @param {String} [ua] user agent string + * @return {Boolean} + */ + function isUnsupportedBrowser(minVersions, strictMode, ua) { + var _bowser = bowser; - let attributeName = '' - let attributeValue = '' + // make strictMode param optional with ua param usage + if (typeof strictMode === 'string') { + ua = strictMode; + strictMode = void(0); + } - // 4. If the cookie-av string contains a %x3D ("=") character: - if (cookieAv.includes('=')) { - // 1. The (possibly empty) attribute-name string consists of the - // characters up to, but not including, the first %x3D ("=") - // character, and the (possibly empty) attribute-value string - // consists of the characters after the first %x3D ("=") - // character. - const position = { position: 0 } + if (strictMode === void(0)) { + strictMode = false; + } + if (ua) { + _bowser = detect(ua); + } - attributeName = collectASequenceOfCodePointsFast( - '=', - cookieAv, - position - ) - attributeValue = cookieAv.slice(position.position + 1) - } else { - // Otherwise: + var version = "" + _bowser.version; + for (var browser in minVersions) { + if (minVersions.hasOwnProperty(browser)) { + if (_bowser[browser]) { + if (typeof minVersions[browser] !== 'string') { + throw new Error('Browser version in the minVersion map should be a string: ' + browser + ': ' + String(minVersions)); + } - // 1. The attribute-name string consists of the entire cookie-av - // string, and the attribute-value string is empty. - attributeName = cookieAv - } + // browser version and min supported version. + return compareVersions([version, minVersions[browser]]) < 0; + } + } + } - // 5. Remove any leading or trailing WSP characters from the attribute- - // name string and the attribute-value string. - attributeName = attributeName.trim() - attributeValue = attributeValue.trim() + return strictMode; // not found + } - // 6. If the attribute-value is longer than 1024 octets, ignore the - // cookie-av string and return to Step 1 of this algorithm. - if (attributeValue.length > maxAttributeValueSize) { - return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList) + /** + * Check if browser is supported + * + * @param {Object} minVersions map of minimal version to browser + * @param {Boolean} [strictMode = false] flag to return false if browser wasn't found in map + * @param {String} [ua] user agent string + * @return {Boolean} + */ + function check(minVersions, strictMode, ua) { + return !isUnsupportedBrowser(minVersions, strictMode, ua); } - // 7. Process the attribute-name and attribute-value according to the - // requirements in the following subsections. 
(Notice that - // attributes with unrecognized attribute-names are ignored.) - const attributeNameLowercase = attributeName.toLowerCase() + bowser.isUnsupportedBrowser = isUnsupportedBrowser; + bowser.compareVersions = compareVersions; + bowser.check = check; - // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.1 - // If the attribute-name case-insensitively matches the string - // "Expires", the user agent MUST process the cookie-av as follows. - if (attributeNameLowercase === 'expires') { - // 1. Let the expiry-time be the result of parsing the attribute-value - // as cookie-date (see Section 5.1.1). - const expiryTime = new Date(attributeValue) + /* + * Set our detect method to the main bowser object so we can + * reuse it to test other user agents. + * This is needed to implement future tests. + */ + bowser._detect = detect; - // 2. If the attribute-value failed to parse as a cookie date, ignore - // the cookie-av. + /* + * Set our detect public method to the main bowser object + * This is needed to implement bowser in server side + */ + bowser.detect = detect; + return bowser +}); - cookieAttributeList.expires = expiryTime - } else if (attributeNameLowercase === 'max-age') { - // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.2 - // If the attribute-name case-insensitively matches the string "Max- - // Age", the user agent MUST process the cookie-av as follows. - // 1. If the first character of the attribute-value is not a DIGIT or a - // "-" character, ignore the cookie-av. - const charCode = attributeValue.charCodeAt(0) +/***/ }), - if ((charCode < 48 || charCode > 57) && attributeValue[0] !== '-') { - return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList) - } +/***/ 28803: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // 2. If the remainder of attribute-value contains a non-DIGIT - // character, ignore the cookie-av. - if (!/^\d+$/.test(attributeValue)) { - return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList) - } +"use strict"; - // 3. Let delta-seconds be the attribute-value converted to an integer. - const deltaSeconds = Number(attributeValue) - // 4. Let cookie-age-limit be the maximum age of the cookie (which - // SHOULD be 400 days or less, see Section 4.1.2.2). +var GetIntrinsic = __nccwpck_require__(74538); - // 5. Set delta-seconds to the smaller of its present value and cookie- - // age-limit. - // deltaSeconds = Math.min(deltaSeconds * 1000, maxExpiresMs) +var callBind = __nccwpck_require__(62977); - // 6. If delta-seconds is less than or equal to zero (0), let expiry- - // time be the earliest representable date and time. Otherwise, let - // the expiry-time be the current date and time plus delta-seconds - // seconds. - // const expiryTime = deltaSeconds <= 0 ? Date.now() : Date.now() + deltaSeconds +var $indexOf = callBind(GetIntrinsic('String.prototype.indexOf')); - // 7. Append an attribute to the cookie-attribute-list with an - // attribute-name of Max-Age and an attribute-value of expiry-time. - cookieAttributeList.maxAge = deltaSeconds - } else if (attributeNameLowercase === 'domain') { - // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.3 - // If the attribute-name case-insensitively matches the string "Domain", - // the user agent MUST process the cookie-av as follows. 
+module.exports = function callBoundIntrinsic(name, allowMissing) { + var intrinsic = GetIntrinsic(name, !!allowMissing); + if (typeof intrinsic === 'function' && $indexOf(name, '.prototype.') > -1) { + return callBind(intrinsic); + } + return intrinsic; +}; - // 1. Let cookie-domain be the attribute-value. - let cookieDomain = attributeValue - // 2. If cookie-domain starts with %x2E ("."), let cookie-domain be - // cookie-domain without its leading %x2E ("."). - if (cookieDomain[0] === '.') { - cookieDomain = cookieDomain.slice(1) - } +/***/ }), - // 3. Convert the cookie-domain to lower case. - cookieDomain = cookieDomain.toLowerCase() +/***/ 62977: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // 4. Append an attribute to the cookie-attribute-list with an - // attribute-name of Domain and an attribute-value of cookie-domain. - cookieAttributeList.domain = cookieDomain - } else if (attributeNameLowercase === 'path') { - // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.4 - // If the attribute-name case-insensitively matches the string "Path", - // the user agent MUST process the cookie-av as follows. +"use strict"; - // 1. If the attribute-value is empty or if the first character of the - // attribute-value is not %x2F ("/"): - let cookiePath = '' - if (attributeValue.length === 0 || attributeValue[0] !== '/') { - // 1. Let cookie-path be the default-path. - cookiePath = '/' - } else { - // Otherwise: - // 1. Let cookie-path be the attribute-value. - cookiePath = attributeValue - } +var bind = __nccwpck_require__(88334); +var GetIntrinsic = __nccwpck_require__(74538); +var setFunctionLength = __nccwpck_require__(64056); + +var $TypeError = __nccwpck_require__(6361); +var $apply = GetIntrinsic('%Function.prototype.apply%'); +var $call = GetIntrinsic('%Function.prototype.call%'); +var $reflectApply = GetIntrinsic('%Reflect.apply%', true) || bind.call($call, $apply); + +var $defineProperty = __nccwpck_require__(6123); +var $max = GetIntrinsic('%Math.max%'); + +module.exports = function callBind(originalFunction) { + if (typeof originalFunction !== 'function') { + throw new $TypeError('a function is required'); + } + var func = $reflectApply(bind, $call, arguments); + return setFunctionLength( + func, + 1 + $max(0, originalFunction.length - (arguments.length - 1)), + true + ); +}; - // 2. Append an attribute to the cookie-attribute-list with an - // attribute-name of Path and an attribute-value of cookie-path. - cookieAttributeList.path = cookiePath - } else if (attributeNameLowercase === 'secure') { - // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.5 - // If the attribute-name case-insensitively matches the string "Secure", - // the user agent MUST append an attribute to the cookie-attribute-list - // with an attribute-name of Secure and an empty attribute-value. +var applyBind = function applyBind() { + return $reflectApply(bind, $apply, arguments); +}; - cookieAttributeList.secure = true - } else if (attributeNameLowercase === 'httponly') { - // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.6 - // If the attribute-name case-insensitively matches the string - // "HttpOnly", the user agent MUST append an attribute to the cookie- - // attribute-list with an attribute-name of HttpOnly and an empty - // attribute-value. 
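// Usage sketch for the call-bound/call-bind helpers above: the wrapper built
// for a ".prototype." intrinsic takes the receiver as its first argument, the
// same calling convention as the $indexOf helper earlier in this module.
// `callBoundIntrinsic` here stands for the function exported just above; the
// example is never invoked.
function exampleCallBoundUsage() {
  var $slice = callBoundIntrinsic('Array.prototype.slice');
  var tail = $slice([1, 2, 3], 1);                     // [2, 3] — receiver passed explicitly
  var $hasOwn = callBind(Object.prototype.hasOwnProperty);
  var hasFoo = $hasOwn({ foo: 1 }, 'foo');             // true
  return { tail: tail, hasFoo: hasFoo };
}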
+if ($defineProperty) { + $defineProperty(module.exports, 'apply', { value: applyBind }); +} else { + module.exports.apply = applyBind; +} - cookieAttributeList.httpOnly = true - } else if (attributeNameLowercase === 'samesite') { - // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.7 - // If the attribute-name case-insensitively matches the string - // "SameSite", the user agent MUST process the cookie-av as follows: - // 1. Let enforcement be "Default". - let enforcement = 'Default' +/***/ }), - const attributeValueLowercase = attributeValue.toLowerCase() - // 2. If cookie-av's attribute-value is a case-insensitive match for - // "None", set enforcement to "None". - if (attributeValueLowercase.includes('none')) { - enforcement = 'None' - } +/***/ 43268: +/***/ ((module) => { - // 3. If cookie-av's attribute-value is a case-insensitive match for - // "Strict", set enforcement to "Strict". - if (attributeValueLowercase.includes('strict')) { - enforcement = 'Strict' - } +"use strict"; - // 4. If cookie-av's attribute-value is a case-insensitive match for - // "Lax", set enforcement to "Lax". - if (attributeValueLowercase.includes('lax')) { - enforcement = 'Lax' - } - // 5. Append an attribute to the cookie-attribute-list with an - // attribute-name of "SameSite" and an attribute-value of - // enforcement. - cookieAttributeList.sameSite = enforcement - } else { - cookieAttributeList.unparsed ??= [] +function escapeRegExp(string) { + return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // $& means the whole matched string +} - cookieAttributeList.unparsed.push(`${attributeName}=${attributeValue}`) - } +function replaceAll(str, search, replacement) { + search = search instanceof RegExp ? search : new RegExp(escapeRegExp(search), 'g'); - // 8. Return to Step 1 of this algorithm. 
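// Usage sketch for the clean-git-ref helpers being added here: escapeRegExp()
// makes a literal string safe to embed in a RegExp, and replaceAll() plus the
// CleanGitRef.clean() wrapper completed just below use it to normalise ref
// names. The values are made-up examples and the function is never invoked.
function exampleEscapeRegExpUsage() {
  var escaped = escapeRegExp('v1.0.*');                  // 'v1\\.0\\.\\*'
  var dotsPattern = new RegExp(escapeRegExp('..'), 'g'); // /\.\./g, as used by replaceAll('x..y', '..', '.')
  return { escaped: escaped, dotsPattern: dotsPattern };
}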
- return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList) + return str.replace(search, replacement); } -module.exports = { - parseSetCookie, - parseUnparsedAttributes -} +var CleanGitRef = { + clean: function clean(value) { + if (typeof value !== 'string') { + throw new Error('Expected a string, received: ' + value); + } + + value = replaceAll(value, './', '/'); + value = replaceAll(value, '..', '.'); + value = replaceAll(value, ' ', '-'); + value = replaceAll(value, /^[~^:?*\\\-]/g, ''); + value = replaceAll(value, /[~^:?*\\]/g, '-'); + value = replaceAll(value, /[~^:?*\\\-]$/g, ''); + value = replaceAll(value, '@{', '-'); + value = replaceAll(value, /\.$/g, ''); + value = replaceAll(value, /\/$/g, ''); + value = replaceAll(value, /\.lock$/g, ''); + return value; + } +}; +module.exports = CleanGitRef; /***/ }), -/***/ 3121: +/***/ 85443: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +var util = __nccwpck_require__(73837); +var Stream = (__nccwpck_require__(12781).Stream); +var DelayedStream = __nccwpck_require__(18611); +module.exports = CombinedStream; +function CombinedStream() { + this.writable = false; + this.readable = true; + this.dataSize = 0; + this.maxDataSize = 2 * 1024 * 1024; + this.pauseStreams = true; -const assert = __nccwpck_require__(9491) -const { kHeadersList } = __nccwpck_require__(2785) + this._released = false; + this._streams = []; + this._currentStream = null; + this._insideLoop = false; + this._pendingNext = false; +} +util.inherits(CombinedStream, Stream); -function isCTLExcludingHtab (value) { - if (value.length === 0) { - return false +CombinedStream.create = function(options) { + var combinedStream = new this(); + + options = options || {}; + for (var option in options) { + combinedStream[option] = options[option]; } - for (const char of value) { - const code = char.charCodeAt(0) + return combinedStream; +}; - if ( - (code >= 0x00 || code <= 0x08) || - (code >= 0x0A || code <= 0x1F) || - code === 0x7F - ) { - return false +CombinedStream.isStreamLike = function(stream) { + return (typeof stream !== 'function') + && (typeof stream !== 'string') + && (typeof stream !== 'boolean') + && (typeof stream !== 'number') + && (!Buffer.isBuffer(stream)); +}; + +CombinedStream.prototype.append = function(stream) { + var isStreamLike = CombinedStream.isStreamLike(stream); + + if (isStreamLike) { + if (!(stream instanceof DelayedStream)) { + var newStream = DelayedStream.create(stream, { + maxDataSize: Infinity, + pauseStream: this.pauseStreams, + }); + stream.on('data', this._checkDataSize.bind(this)); + stream = newStream; } - } -} -/** - CHAR = - token = 1* - separators = "(" | ")" | "<" | ">" | "@" - | "," | ";" | ":" | "\" | <"> - | "/" | "[" | "]" | "?" | "=" - | "{" | "}" | SP | HT - * @param {string} name - */ -function validateCookieName (name) { - for (const char of name) { - const code = char.charCodeAt(0) + this._handleErrors(stream); - if ( - (code <= 0x20 || code > 0x7F) || - char === '(' || - char === ')' || - char === '>' || - char === '<' || - char === '@' || - char === ',' || - char === ';' || - char === ':' || - char === '\\' || - char === '"' || - char === '/' || - char === '[' || - char === ']' || - char === '?' 
|| - char === '=' || - char === '{' || - char === '}' - ) { - throw new Error('Invalid cookie name') + if (this.pauseStreams) { + stream.pause(); } } -} -/** - cookie-value = *cookie-octet / ( DQUOTE *cookie-octet DQUOTE ) - cookie-octet = %x21 / %x23-2B / %x2D-3A / %x3C-5B / %x5D-7E - ; US-ASCII characters excluding CTLs, - ; whitespace DQUOTE, comma, semicolon, - ; and backslash - * @param {string} value - */ -function validateCookieValue (value) { - for (const char of value) { - const code = char.charCodeAt(0) + this._streams.push(stream); + return this; +}; - if ( - code < 0x21 || // exclude CTLs (0-31) - code === 0x22 || - code === 0x2C || - code === 0x3B || - code === 0x5C || - code > 0x7E // non-ascii - ) { - throw new Error('Invalid header value') - } +CombinedStream.prototype.pipe = function(dest, options) { + Stream.prototype.pipe.call(this, dest, options); + this.resume(); + return dest; +}; + +CombinedStream.prototype._getNext = function() { + this._currentStream = null; + + if (this._insideLoop) { + this._pendingNext = true; + return; // defer call } -} -/** - * path-value = - * @param {string} path - */ -function validateCookiePath (path) { - for (const char of path) { - const code = char.charCodeAt(0) + this._insideLoop = true; + try { + do { + this._pendingNext = false; + this._realGetNext(); + } while (this._pendingNext); + } finally { + this._insideLoop = false; + } +}; - if (code < 0x21 || char === ';') { - throw new Error('Invalid cookie path') - } +CombinedStream.prototype._realGetNext = function() { + var stream = this._streams.shift(); + + + if (typeof stream == 'undefined') { + this.end(); + return; } -} -/** - * I have no idea why these values aren't allowed to be honest, - * but Deno tests these. - Khafra - * @param {string} domain - */ -function validateCookieDomain (domain) { - if ( - domain.startsWith('-') || - domain.endsWith('.') || - domain.endsWith('-') - ) { - throw new Error('Invalid cookie domain') + if (typeof stream !== 'function') { + this._pipeNext(stream); + return; } -} -/** - * @see https://www.rfc-editor.org/rfc/rfc7231#section-7.1.1.1 - * @param {number|Date} date - IMF-fixdate = day-name "," SP date1 SP time-of-day SP GMT - ; fixed length/zone/capitalization subset of the format - ; see Section 3.3 of [RFC5322] + var getStream = stream; + getStream(function(stream) { + var isStreamLike = CombinedStream.isStreamLike(stream); + if (isStreamLike) { + stream.on('data', this._checkDataSize.bind(this)); + this._handleErrors(stream); + } - day-name = %x4D.6F.6E ; "Mon", case-sensitive - / %x54.75.65 ; "Tue", case-sensitive - / %x57.65.64 ; "Wed", case-sensitive - / %x54.68.75 ; "Thu", case-sensitive - / %x46.72.69 ; "Fri", case-sensitive - / %x53.61.74 ; "Sat", case-sensitive - / %x53.75.6E ; "Sun", case-sensitive - date1 = day SP month SP year - ; e.g., 02 Jun 1982 + this._pipeNext(stream); + }.bind(this)); +}; - day = 2DIGIT - month = %x4A.61.6E ; "Jan", case-sensitive - / %x46.65.62 ; "Feb", case-sensitive - / %x4D.61.72 ; "Mar", case-sensitive - / %x41.70.72 ; "Apr", case-sensitive - / %x4D.61.79 ; "May", case-sensitive - / %x4A.75.6E ; "Jun", case-sensitive - / %x4A.75.6C ; "Jul", case-sensitive - / %x41.75.67 ; "Aug", case-sensitive - / %x53.65.70 ; "Sep", case-sensitive - / %x4F.63.74 ; "Oct", case-sensitive - / %x4E.6F.76 ; "Nov", case-sensitive - / %x44.65.63 ; "Dec", case-sensitive - year = 4DIGIT +CombinedStream.prototype._pipeNext = function(stream) { + this._currentStream = stream; - GMT = %x47.4D.54 ; "GMT", case-sensitive + var 
isStreamLike = CombinedStream.isStreamLike(stream); + if (isStreamLike) { + stream.on('end', this._getNext.bind(this)); + stream.pipe(this, {end: false}); + return; + } - time-of-day = hour ":" minute ":" second - ; 00:00:00 - 23:59:60 (leap second) + var value = stream; + this.write(value); + this._getNext(); +}; - hour = 2DIGIT - minute = 2DIGIT - second = 2DIGIT - */ -function toIMFDate (date) { - if (typeof date === 'number') { - date = new Date(date) +CombinedStream.prototype._handleErrors = function(stream) { + var self = this; + stream.on('error', function(err) { + self._emitError(err); + }); +}; + +CombinedStream.prototype.write = function(data) { + this.emit('data', data); +}; + +CombinedStream.prototype.pause = function() { + if (!this.pauseStreams) { + return; } - const days = [ - 'Sun', 'Mon', 'Tue', 'Wed', - 'Thu', 'Fri', 'Sat' - ] + if(this.pauseStreams && this._currentStream && typeof(this._currentStream.pause) == 'function') this._currentStream.pause(); + this.emit('pause'); +}; - const months = [ - 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', - 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec' - ] +CombinedStream.prototype.resume = function() { + if (!this._released) { + this._released = true; + this.writable = true; + this._getNext(); + } + + if(this.pauseStreams && this._currentStream && typeof(this._currentStream.resume) == 'function') this._currentStream.resume(); + this.emit('resume'); +}; + +CombinedStream.prototype.end = function() { + this._reset(); + this.emit('end'); +}; + +CombinedStream.prototype.destroy = function() { + this._reset(); + this.emit('close'); +}; + +CombinedStream.prototype._reset = function() { + this.writable = false; + this._streams = []; + this._currentStream = null; +}; + +CombinedStream.prototype._checkDataSize = function() { + this._updateDataSize(); + if (this.dataSize <= this.maxDataSize) { + return; + } + + var message = + 'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.'; + this._emitError(new Error(message)); +}; + +CombinedStream.prototype._updateDataSize = function() { + this.dataSize = 0; + + var self = this; + this._streams.forEach(function(stream) { + if (!stream.dataSize) { + return; + } + + self.dataSize += stream.dataSize; + }); + + if (this._currentStream && this._currentStream.dataSize) { + this.dataSize += this._currentStream.dataSize; + } +}; + +CombinedStream.prototype._emitError = function(err) { + this._reset(); + this.emit('error', err); +}; + + +/***/ }), + +/***/ 99915: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; +/*! + * content-type + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ - const dayName = days[date.getUTCDay()] - const day = date.getUTCDate().toString().padStart(2, '0') - const month = months[date.getUTCMonth()] - const year = date.getUTCFullYear() - const hour = date.getUTCHours().toString().padStart(2, '0') - const minute = date.getUTCMinutes().toString().padStart(2, '0') - const second = date.getUTCSeconds().toString().padStart(2, '0') - return `${dayName}, ${day} ${month} ${year} ${hour}:${minute}:${second} GMT` -} /** - max-age-av = "Max-Age=" non-zero-digit *DIGIT - ; In practice, both expires-av and max-age-av - ; are limited to dates representable by the - ; user agent. - * @param {number} maxAge + * RegExp to match *( ";" parameter ) in RFC 7231 sec 3.1.1.1 + * + * parameter = token "=" ( token / quoted-string ) + * token = 1*tchar + * tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" + * / "+" / "-" / "." 
/ "^" / "_" / "`" / "|" / "~" + * / DIGIT / ALPHA + * ; any VCHAR, except delimiters + * quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE + * qdtext = HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text + * obs-text = %x80-FF + * quoted-pair = "\" ( HTAB / SP / VCHAR / obs-text ) */ -function validateCookieMaxAge (maxAge) { - if (maxAge < 0) { - throw new Error('Invalid cookie max-age') - } -} +var PARAM_REGEXP = /; *([!#$%&'*+.^_`|~0-9A-Za-z-]+) *= *("(?:[\u000b\u0020\u0021\u0023-\u005b\u005d-\u007e\u0080-\u00ff]|\\[\u000b\u0020-\u00ff])*"|[!#$%&'*+.^_`|~0-9A-Za-z-]+) */g // eslint-disable-line no-control-regex +var TEXT_REGEXP = /^[\u000b\u0020-\u007e\u0080-\u00ff]+$/ // eslint-disable-line no-control-regex +var TOKEN_REGEXP = /^[!#$%&'*+.^_`|~0-9A-Za-z-]+$/ /** - * @see https://www.rfc-editor.org/rfc/rfc6265#section-4.1.1 - * @param {import('./index').Cookie} cookie + * RegExp to match quoted-pair in RFC 7230 sec 3.2.6 + * + * quoted-pair = "\" ( HTAB / SP / VCHAR / obs-text ) + * obs-text = %x80-FF */ -function stringify (cookie) { - if (cookie.name.length === 0) { - return null - } +var QESC_REGEXP = /\\([\u000b\u0020-\u00ff])/g // eslint-disable-line no-control-regex - validateCookieName(cookie.name) - validateCookieValue(cookie.value) +/** + * RegExp to match chars that must be quoted-pair in RFC 7230 sec 3.2.6 + */ +var QUOTE_REGEXP = /([\\"])/g - const out = [`${cookie.name}=${cookie.value}`] +/** + * RegExp to match type in RFC 7231 sec 3.1.1.1 + * + * media-type = type "/" subtype + * type = token + * subtype = token + */ +var TYPE_REGEXP = /^[!#$%&'*+.^_`|~0-9A-Za-z-]+\/[!#$%&'*+.^_`|~0-9A-Za-z-]+$/ - // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.1 - // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.2 - if (cookie.name.startsWith('__Secure-')) { - cookie.secure = true - } +/** + * Module exports. + * @public + */ - if (cookie.name.startsWith('__Host-')) { - cookie.secure = true - cookie.domain = null - cookie.path = '/' - } +exports.format = format +exports.parse = parse - if (cookie.secure) { - out.push('Secure') - } +/** + * Format object to media type. + * + * @param {object} obj + * @return {string} + * @public + */ - if (cookie.httpOnly) { - out.push('HttpOnly') +function format (obj) { + if (!obj || typeof obj !== 'object') { + throw new TypeError('argument obj is required') } - if (typeof cookie.maxAge === 'number') { - validateCookieMaxAge(cookie.maxAge) - out.push(`Max-Age=${cookie.maxAge}`) + var parameters = obj.parameters + var type = obj.type + + if (!type || !TYPE_REGEXP.test(type)) { + throw new TypeError('invalid type') } - if (cookie.domain) { - validateCookieDomain(cookie.domain) - out.push(`Domain=${cookie.domain}`) + var string = type + + // append parameters + if (parameters && typeof parameters === 'object') { + var param + var params = Object.keys(parameters).sort() + + for (var i = 0; i < params.length; i++) { + param = params[i] + + if (!TOKEN_REGEXP.test(param)) { + throw new TypeError('invalid parameter name') + } + + string += '; ' + param + '=' + qstring(parameters[param]) + } } - if (cookie.path) { - validateCookiePath(cookie.path) - out.push(`Path=${cookie.path}`) + return string +} + +/** + * Parse media type to object. 
+ * + * @param {string|object} string + * @return {Object} + * @public + */ + +function parse (string) { + if (!string) { + throw new TypeError('argument string is required') } - if (cookie.expires && cookie.expires.toString() !== 'Invalid Date') { - out.push(`Expires=${toIMFDate(cookie.expires)}`) + // support req/res-like objects as argument + var header = typeof string === 'object' + ? getcontenttype(string) + : string + + if (typeof header !== 'string') { + throw new TypeError('argument string is required to be a string') } - if (cookie.sameSite) { - out.push(`SameSite=${cookie.sameSite}`) + var index = header.indexOf(';') + var type = index !== -1 + ? header.slice(0, index).trim() + : header.trim() + + if (!TYPE_REGEXP.test(type)) { + throw new TypeError('invalid media type') } - for (const part of cookie.unparsed) { - if (!part.includes('=')) { - throw new Error('Invalid unparsed') - } + var obj = new ContentType(type.toLowerCase()) - const [key, ...value] = part.split('=') + // parse parameters + if (index !== -1) { + var key + var match + var value - out.push(`${key.trim()}=${value.join('=')}`) + PARAM_REGEXP.lastIndex = index + + while ((match = PARAM_REGEXP.exec(header))) { + if (match.index !== index) { + throw new TypeError('invalid parameter format') + } + + index += match[0].length + key = match[1].toLowerCase() + value = match[2] + + if (value.charCodeAt(0) === 0x22 /* " */) { + // remove quotes + value = value.slice(1, -1) + + // remove escapes + if (value.indexOf('\\') !== -1) { + value = value.replace(QESC_REGEXP, '$1') + } + } + + obj.parameters[key] = value + } + + if (index !== header.length) { + throw new TypeError('invalid parameter format') + } } - return out.join('; ') + return obj } -let kHeadersListNode +/** + * Get content-type from req/res objects. + * + * @param {object} + * @return {Object} + * @private + */ -function getHeadersList (headers) { - if (headers[kHeadersList]) { - return headers[kHeadersList] +function getcontenttype (obj) { + var header + + if (typeof obj.getHeader === 'function') { + // res-like + header = obj.getHeader('content-type') + } else if (typeof obj.headers === 'object') { + // req-like + header = obj.headers && obj.headers['content-type'] } - if (!kHeadersListNode) { - kHeadersListNode = Object.getOwnPropertySymbols(headers).find( - (symbol) => symbol.description === 'headers list' - ) + if (typeof header !== 'string') { + throw new TypeError('content-type header is missing from object') + } - assert(kHeadersListNode, 'Headers cannot be parsed') + return header +} + +/** + * Quote a string if necessary. + * + * @param {string} val + * @return {string} + * @private + */ + +function qstring (val) { + var str = String(val) + + // no need to quote tokens + if (TOKEN_REGEXP.test(str)) { + return str } - const headersList = headers[kHeadersListNode] - assert(headersList) + if (str.length > 0 && !TEXT_REGEXP.test(str)) { + throw new TypeError('invalid parameter value') + } - return headersList + return '"' + str.replace(QUOTE_REGEXP, '\\$1') + '"' } -module.exports = { - isCTLExcludingHtab, - stringify, - getHeadersList +/** + * Class to represent a content type. + * @private + */ +function ContentType (type) { + this.parameters = Object.create(null) + this.type = type } /***/ }), -/***/ 2067: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 30952: +/***/ ((module) => { "use strict"; +/*! 
+ * copy-to - index.js + * Copyright(c) 2014 dead_horse + * MIT Licensed + */ -const net = __nccwpck_require__(1808) -const assert = __nccwpck_require__(9491) -const util = __nccwpck_require__(3983) -const { InvalidArgumentError, ConnectTimeoutError } = __nccwpck_require__(8045) -let tls // include tls conditionally since it is not always available +/** + * slice() reference. + */ -// TODO: session re-use does not wait for the first -// connection to resolve the session and might therefore -// resolve the same servername multiple times even when -// re-use is enabled. +var slice = Array.prototype.slice; -let SessionCache -// FIXME: remove workaround when the Node bug is fixed -// https://github.com/nodejs/node/issues/49344#issuecomment-1741776308 -if (global.FinalizationRegistry && !process.env.NODE_V8_COVERAGE) { - SessionCache = class WeakSessionCache { - constructor (maxCachedSessions) { - this._maxCachedSessions = maxCachedSessions - this._sessionCache = new Map() - this._sessionRegistry = new global.FinalizationRegistry((key) => { - if (this._sessionCache.size < this._maxCachedSessions) { - return - } +/** + * Expose copy + * + * ``` + * copy({foo: 'nar', hello: 'copy'}).to({hello: 'world'}); + * copy({foo: 'nar', hello: 'copy'}).toCover({hello: 'world'}); + * ``` + * + * @param {Object} src + * @return {Copy} + */ - const ref = this._sessionCache.get(key) - if (ref !== undefined && ref.deref() === undefined) { - this._sessionCache.delete(key) - } - }) - } +module.exports = Copy; - get (sessionKey) { - const ref = this._sessionCache.get(sessionKey) - return ref ? ref.deref() : null - } - set (sessionKey, session) { - if (this._maxCachedSessions === 0) { - return - } +/** + * Copy + * @param {Object} src + * @param {Boolean} withAccess + */ - this._sessionCache.set(sessionKey, new WeakRef(session)) - this._sessionRegistry.register(session, sessionKey) - } +function Copy(src, withAccess) { + if (!(this instanceof Copy)) return new Copy(src, withAccess); + this.src = src; + this._withAccess = withAccess; +} + +/** + * copy properties include getter and setter + * @param {[type]} val [description] + * @return {[type]} [description] + */ + +Copy.prototype.withAccess = function (w) { + this._withAccess = w !== false; + return this; +}; + +/** + * pick keys in src + * + * @api: public + */ + +Copy.prototype.pick = function(keys) { + if (!Array.isArray(keys)) { + keys = slice.call(arguments); } -} else { - SessionCache = class SimpleSessionCache { - constructor (maxCachedSessions) { - this._maxCachedSessions = maxCachedSessions - this._sessionCache = new Map() + if (keys.length) { + this.keys = keys; + } + return this; +}; + +/** + * copy src to target, + * do not cover any property target has + * @param {Object} to + * + * @api: public + */ + +Copy.prototype.to = function(to) { + to = to || {}; + + if (!this.src) return to; + var keys = this.keys || Object.keys(this.src); + + if (!this._withAccess) { + for (var i = 0; i < keys.length; i++) { + key = keys[i]; + if (to[key] !== undefined) continue; + to[key] = this.src[key]; } + return to; + } - get (sessionKey) { - return this._sessionCache.get(sessionKey) + for (var i = 0; i < keys.length; i++) { + var key = keys[i]; + if (!notDefined(to, key)) continue; + var getter = this.src.__lookupGetter__(key); + var setter = this.src.__lookupSetter__(key); + if (getter) to.__defineGetter__(key, getter); + if (setter) to.__defineSetter__(key, setter); + + if (!getter && !setter) { + to[key] = this.src[key]; } + } + return to; +}; - set (sessionKey, 
session) { - if (this._maxCachedSessions === 0) { - return - } +/** + * copy src to target, + * override any property target has + * @param {Object} to + * + * @api: public + */ - if (this._sessionCache.size >= this._maxCachedSessions) { - // remove the oldest session - const { value: oldestKey } = this._sessionCache.keys().next() - this._sessionCache.delete(oldestKey) - } +Copy.prototype.toCover = function(to) { + var keys = this.keys || Object.keys(this.src); - this._sessionCache.set(sessionKey, session) + for (var i = 0; i < keys.length; i++) { + var key = keys[i]; + delete to[key]; + var getter = this.src.__lookupGetter__(key); + var setter = this.src.__lookupSetter__(key); + if (getter) to.__defineGetter__(key, getter); + if (setter) to.__defineSetter__(key, setter); + + if (!getter && !setter) { + to[key] = this.src[key]; } } +}; + +Copy.prototype.override = Copy.prototype.toCover; + +/** + * append another object to src + * @param {Obj} obj + * @return {Copy} + */ + +Copy.prototype.and = function (obj) { + var src = {}; + this.to(src); + this.src = obj; + this.to(src); + this.src = src; + + return this; +}; + +/** + * check obj[key] if not defiend + * @param {Object} obj + * @param {String} key + * @return {Boolean} + */ + +function notDefined(obj, key) { + return obj[key] === undefined + && obj.__lookupGetter__(key) === undefined + && obj.__lookupSetter__(key) === undefined; } -function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, ...opts }) { - if (maxCachedSessions != null && (!Number.isInteger(maxCachedSessions) || maxCachedSessions < 0)) { - throw new InvalidArgumentError('maxCachedSessions must be a positive integer or zero') + +/***/ }), + +/***/ 95898: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// NOTE: These type checking functions intentionally don't use `instanceof` +// because it is fragile and can be easily faked with `Object.create()`. + +function isArray(arg) { + if (Array.isArray) { + return Array.isArray(arg); } + return objectToString(arg) === '[object Array]'; +} +exports.isArray = isArray; - const options = { path: socketPath, ...opts } - const sessionCache = new SessionCache(maxCachedSessions == null ? 100 : maxCachedSessions) - timeout = timeout == null ? 10e3 : timeout - allowH2 = allowH2 != null ? 
allowH2 : false - return function connect ({ hostname, host, protocol, port, servername, localAddress, httpSocket }, callback) { - let socket - if (protocol === 'https:') { - if (!tls) { - tls = __nccwpck_require__(4404) - } - servername = servername || options.servername || util.getServerName(host) || null +function isBoolean(arg) { + return typeof arg === 'boolean'; +} +exports.isBoolean = isBoolean; - const sessionKey = servername || hostname - const session = sessionCache.get(sessionKey) || null +function isNull(arg) { + return arg === null; +} +exports.isNull = isNull; - assert(sessionKey) +function isNullOrUndefined(arg) { + return arg == null; +} +exports.isNullOrUndefined = isNullOrUndefined; - socket = tls.connect({ - highWaterMark: 16384, // TLS in node can't have bigger HWM anyway... - ...options, - servername, - session, - localAddress, - // TODO(HTTP/2): Add support for h2c - ALPNProtocols: allowH2 ? ['http/1.1', 'h2'] : ['http/1.1'], - socket: httpSocket, // upgrade socket connection - port: port || 443, - host: hostname - }) +function isNumber(arg) { + return typeof arg === 'number'; +} +exports.isNumber = isNumber; - socket - .on('session', function (session) { - // TODO (fix): Can a session become invalid once established? Don't think so? - sessionCache.set(sessionKey, session) - }) - } else { - assert(!httpSocket, 'httpSocket can only be sent on TLS update') - socket = net.connect({ - highWaterMark: 64 * 1024, // Same as nodejs fs streams. - ...options, - localAddress, - port: port || 80, - host: hostname - }) - } +function isString(arg) { + return typeof arg === 'string'; +} +exports.isString = isString; - // Set TCP keep alive options on the socket here instead of in connect() for the case of assigning the socket - if (options.keepAlive == null || options.keepAlive) { - const keepAliveInitialDelay = options.keepAliveInitialDelay === undefined ? 60e3 : options.keepAliveInitialDelay - socket.setKeepAlive(true, keepAliveInitialDelay) - } +function isSymbol(arg) { + return typeof arg === 'symbol'; +} +exports.isSymbol = isSymbol; - const cancelTimeout = setupTimeout(() => onConnectTimeout(socket), timeout) +function isUndefined(arg) { + return arg === void 0; +} +exports.isUndefined = isUndefined; - socket - .setNoDelay(true) - .once(protocol === 'https:' ? 
'secureConnect' : 'connect', function () { - cancelTimeout() +function isRegExp(re) { + return objectToString(re) === '[object RegExp]'; +} +exports.isRegExp = isRegExp; - if (callback) { - const cb = callback - callback = null - cb(null, this) - } - }) - .on('error', function (err) { - cancelTimeout() +function isObject(arg) { + return typeof arg === 'object' && arg !== null; +} +exports.isObject = isObject; - if (callback) { - const cb = callback - callback = null - cb(err) +function isDate(d) { + return objectToString(d) === '[object Date]'; +} +exports.isDate = isDate; + +function isError(e) { + return (objectToString(e) === '[object Error]' || e instanceof Error); +} +exports.isError = isError; + +function isFunction(arg) { + return typeof arg === 'function'; +} +exports.isFunction = isFunction; + +function isPrimitive(arg) { + return arg === null || + typeof arg === 'boolean' || + typeof arg === 'number' || + typeof arg === 'string' || + typeof arg === 'symbol' || // ES6 symbol + typeof arg === 'undefined'; +} +exports.isPrimitive = isPrimitive; + +exports.isBuffer = __nccwpck_require__(14300).Buffer.isBuffer; + +function objectToString(o) { + return Object.prototype.toString.call(o); +} + + +/***/ }), + +/***/ 83201: +/***/ ((__unused_webpack_module, exports) => { + +/*! crc32.js (C) 2014-present SheetJS -- http://sheetjs.com */ +/* vim: set ts=2: */ +/*exported CRC32 */ +var CRC32; +(function (factory) { + /*jshint ignore:start */ + /*eslint-disable */ + if(typeof DO_NOT_EXPORT_CRC === 'undefined') { + if(true) { + factory(exports); + } else {} + } else { + factory(CRC32 = {}); + } + /*eslint-enable */ + /*jshint ignore:end */ +}(function(CRC32) { +CRC32.version = '1.2.2'; +/*global Int32Array */ +function signed_crc_table() { + var c = 0, table = new Array(256); + + for(var n =0; n != 256; ++n){ + c = n; + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + table[n] = c; + } + + return typeof Int32Array !== 'undefined' ? new Int32Array(table) : table; +} + +var T0 = signed_crc_table(); +function slice_by_16_tables(T) { + var c = 0, v = 0, n = 0, table = typeof Int32Array !== 'undefined' ? new Int32Array(4096) : new Array(4096) ; + + for(n = 0; n != 256; ++n) table[n] = T[n]; + for(n = 0; n != 256; ++n) { + v = T[n]; + for(c = 256 + n; c < 4096; c += 256) v = table[c] = (v >>> 8) ^ T[v & 0xFF]; + } + var out = []; + for(n = 1; n != 16; ++n) out[n - 1] = typeof Int32Array !== 'undefined' ? 
table.subarray(n * 256, n * 256 + 256) : table.slice(n * 256, n * 256 + 256); + return out; +} +var TT = slice_by_16_tables(T0); +var T1 = TT[0], T2 = TT[1], T3 = TT[2], T4 = TT[3], T5 = TT[4]; +var T6 = TT[5], T7 = TT[6], T8 = TT[7], T9 = TT[8], Ta = TT[9]; +var Tb = TT[10], Tc = TT[11], Td = TT[12], Te = TT[13], Tf = TT[14]; +function crc32_bstr(bstr, seed) { + var C = seed ^ -1; + for(var i = 0, L = bstr.length; i < L;) C = (C>>>8) ^ T0[(C^bstr.charCodeAt(i++))&0xFF]; + return ~C; +} + +function crc32_buf(B, seed) { + var C = seed ^ -1, L = B.length - 15, i = 0; + for(; i < L;) C = + Tf[B[i++] ^ (C & 255)] ^ + Te[B[i++] ^ ((C >> 8) & 255)] ^ + Td[B[i++] ^ ((C >> 16) & 255)] ^ + Tc[B[i++] ^ (C >>> 24)] ^ + Tb[B[i++]] ^ Ta[B[i++]] ^ T9[B[i++]] ^ T8[B[i++]] ^ + T7[B[i++]] ^ T6[B[i++]] ^ T5[B[i++]] ^ T4[B[i++]] ^ + T3[B[i++]] ^ T2[B[i++]] ^ T1[B[i++]] ^ T0[B[i++]]; + L += 15; + while(i < L) C = (C>>>8) ^ T0[(C^B[i++])&0xFF]; + return ~C; +} + +function crc32_str(str, seed) { + var C = seed ^ -1; + for(var i = 0, L = str.length, c = 0, d = 0; i < L;) { + c = str.charCodeAt(i++); + if(c < 0x80) { + C = (C>>>8) ^ T0[(C^c)&0xFF]; + } else if(c < 0x800) { + C = (C>>>8) ^ T0[(C ^ (192|((c>>6)&31)))&0xFF]; + C = (C>>>8) ^ T0[(C ^ (128|(c&63)))&0xFF]; + } else if(c >= 0xD800 && c < 0xE000) { + c = (c&1023)+64; d = str.charCodeAt(i++)&1023; + C = (C>>>8) ^ T0[(C ^ (240|((c>>8)&7)))&0xFF]; + C = (C>>>8) ^ T0[(C ^ (128|((c>>2)&63)))&0xFF]; + C = (C>>>8) ^ T0[(C ^ (128|((d>>6)&15)|((c&3)<<4)))&0xFF]; + C = (C>>>8) ^ T0[(C ^ (128|(d&63)))&0xFF]; + } else { + C = (C>>>8) ^ T0[(C ^ (224|((c>>12)&15)))&0xFF]; + C = (C>>>8) ^ T0[(C ^ (128|((c>>6)&63)))&0xFF]; + C = (C>>>8) ^ T0[(C ^ (128|(c&63)))&0xFF]; + } + } + return ~C; +} +CRC32.table = T0; +// $FlowIgnore +CRC32.bstr = crc32_bstr; +// $FlowIgnore +CRC32.buf = crc32_buf; +// $FlowIgnore +CRC32.str = crc32_str; +})); + + +/***/ }), + +/***/ 51512: +/***/ (function(module) { + +/* + * Date Format 1.2.3 + * (c) 2007-2009 Steven Levithan + * MIT license + * + * Includes enhancements by Scott Trenda + * and Kris Kowal + * + * Accepts a date, a mask, or a date and a mask. + * Returns a formatted version of the given date. + * The date defaults to the current date/time. + * The mask defaults to dateFormat.masks.default. + */ + +(function(global) { + 'use strict'; + + var dateFormat = (function() { + var token = /d{1,4}|m{1,4}|yy(?:yy)?|([HhMsTt])\1?|[LloSZWN]|'[^']*'|'[^']*'/g; + var timezone = /\b(?:[PMCEA][SDP]T|(?:Pacific|Mountain|Central|Eastern|Atlantic) (?:Standard|Daylight|Prevailing) Time|(?:GMT|UTC)(?:[-+]\d{4})?)\b/g; + var timezoneClip = /[^-+\dA-Z]/g; + + // Regexes and supporting functions are cached through closure + return function (date, mask, utc, gmt) { + + // You can't provide utc if you skip other args (use the 'UTC:' mask prefix) + if (arguments.length === 1 && kindOf(date) === 'string' && !/\d/.test(date)) { + mask = date; + date = undefined; } - }) + + date = date || new Date; + + if(!(date instanceof Date)) { + date = new Date(date); + } + + if (isNaN(date)) { + throw TypeError('Invalid date'); + } + + mask = String(dateFormat.masks[mask] || mask || dateFormat.masks['default']); + + // Allow setting the utc/gmt argument via the mask + var maskSlice = mask.slice(0, 4); + if (maskSlice === 'UTC:' || maskSlice === 'GMT:') { + mask = mask.slice(4); + utc = true; + if (maskSlice === 'GMT:') { + gmt = true; + } + } + + var _ = utc ? 
'getUTC' : 'get'; + var d = date[_ + 'Date'](); + var D = date[_ + 'Day'](); + var m = date[_ + 'Month'](); + var y = date[_ + 'FullYear'](); + var H = date[_ + 'Hours'](); + var M = date[_ + 'Minutes'](); + var s = date[_ + 'Seconds'](); + var L = date[_ + 'Milliseconds'](); + var o = utc ? 0 : date.getTimezoneOffset(); + var W = getWeek(date); + var N = getDayOfWeek(date); + var flags = { + d: d, + dd: pad(d), + ddd: dateFormat.i18n.dayNames[D], + dddd: dateFormat.i18n.dayNames[D + 7], + m: m + 1, + mm: pad(m + 1), + mmm: dateFormat.i18n.monthNames[m], + mmmm: dateFormat.i18n.monthNames[m + 12], + yy: String(y).slice(2), + yyyy: y, + h: H % 12 || 12, + hh: pad(H % 12 || 12), + H: H, + HH: pad(H), + M: M, + MM: pad(M), + s: s, + ss: pad(s), + l: pad(L, 3), + L: pad(Math.round(L / 10)), + t: H < 12 ? 'a' : 'p', + tt: H < 12 ? 'am' : 'pm', + T: H < 12 ? 'A' : 'P', + TT: H < 12 ? 'AM' : 'PM', + Z: gmt ? 'GMT' : utc ? 'UTC' : (String(date).match(timezone) || ['']).pop().replace(timezoneClip, ''), + o: (o > 0 ? '-' : '+') + pad(Math.floor(Math.abs(o) / 60) * 100 + Math.abs(o) % 60, 4), + S: ['th', 'st', 'nd', 'rd'][d % 10 > 3 ? 0 : (d % 100 - d % 10 != 10) * d % 10], + W: W, + N: N + }; + + return mask.replace(token, function (match) { + if (match in flags) { + return flags[match]; + } + return match.slice(1, match.length - 1); + }); + }; + })(); - return socket + dateFormat.masks = { + 'default': 'ddd mmm dd yyyy HH:MM:ss', + 'shortDate': 'm/d/yy', + 'mediumDate': 'mmm d, yyyy', + 'longDate': 'mmmm d, yyyy', + 'fullDate': 'dddd, mmmm d, yyyy', + 'shortTime': 'h:MM TT', + 'mediumTime': 'h:MM:ss TT', + 'longTime': 'h:MM:ss TT Z', + 'isoDate': 'yyyy-mm-dd', + 'isoTime': 'HH:MM:ss', + 'isoDateTime': 'yyyy-mm-dd\'T\'HH:MM:sso', + 'isoUtcDateTime': 'UTC:yyyy-mm-dd\'T\'HH:MM:ss\'Z\'', + 'expiresHeaderFormat': 'ddd, dd mmm yyyy HH:MM:ss Z' + }; + + // Internationalization strings + dateFormat.i18n = { + dayNames: [ + 'Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', + 'Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday' + ], + monthNames: [ + 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec', + 'January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December' + ] + }; + +function pad(val, len) { + val = String(val); + len = len || 2; + while (val.length < len) { + val = '0' + val; + } + return val; +} + +/** + * Get the ISO 8601 week number + * Based on comments from + * http://techblog.procurios.nl/k/n618/news/view/33796/14863/Calculate-ISO-8601-week-and-year-in-javascript.html + * + * @param {Object} `date` + * @return {Number} + */ +function getWeek(date) { + // Remove time components of date + var targetThursday = new Date(date.getFullYear(), date.getMonth(), date.getDate()); + + // Change date to Thursday same week + targetThursday.setDate(targetThursday.getDate() - ((targetThursday.getDay() + 6) % 7) + 3); + + // Take January 4th as it is always in week 1 (see ISO 8601) + var firstThursday = new Date(targetThursday.getFullYear(), 0, 4); + + // Change date to Thursday same week + firstThursday.setDate(firstThursday.getDate() - ((firstThursday.getDay() + 6) % 7) + 3); + + // Check if daylight-saving-time-switch occurred and correct for it + var ds = targetThursday.getTimezoneOffset() - firstThursday.getTimezoneOffset(); + targetThursday.setHours(targetThursday.getHours() - ds); + + // Number of weeks between target Thursday and first Thursday + var weekDiff = (targetThursday - 
firstThursday) / (86400000*7); + return 1 + Math.floor(weekDiff); +} + +/** + * Get ISO-8601 numeric representation of the day of the week + * 1 (for Monday) through 7 (for Sunday) + * + * @param {Object} `date` + * @return {Number} + */ +function getDayOfWeek(date) { + var dow = date.getDay(); + if(dow === 0) { + dow = 7; } + return dow; } -function setupTimeout (onConnectTimeout, timeout) { - if (!timeout) { - return () => {} +/** + * kind-of shortcut + * @param {*} val + * @return {String} + */ +function kindOf(val) { + if (val === null) { + return 'null'; } - let s1 = null - let s2 = null - const timeoutId = setTimeout(() => { - // setImmediate is added to make sure that we priotorise socket error events over timeouts - s1 = setImmediate(() => { - if (process.platform === 'win32') { - // Windows needs an extra setImmediate probably due to implementation differences in the socket logic - s2 = setImmediate(() => onConnectTimeout()) - } else { - onConnectTimeout() - } - }) - }, timeout) - return () => { - clearTimeout(timeoutId) - clearImmediate(s1) - clearImmediate(s2) + if (val === undefined) { + return 'undefined'; } -} -function onConnectTimeout (socket) { - util.destroy(socket, new ConnectTimeoutError()) -} + if (typeof val !== 'object') { + return typeof val; + } -module.exports = buildConnector + if (Array.isArray(val)) { + return 'array'; + } + + return {}.toString.call(val) + .slice(8, -1).toLowerCase(); +}; + + + + if (typeof define === 'function' && define.amd) { + define(function () { + return dateFormat; + }); + } else if (true) { + module.exports = dateFormat; + } else {} +})(this); /***/ }), -/***/ 4462: -/***/ ((module) => { +/***/ 28222: +/***/ ((module, exports, __nccwpck_require__) => { -"use strict"; +/* eslint-env browser */ +/** + * This is the web browser implementation of `debug()`. + */ -/** @type {Record} */ -const headerNameLowerCasedRecord = {} +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.storage = localstorage(); +exports.destroy = (() => { + let warned = false; + + return () => { + if (!warned) { + warned = true; + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. 
It will be removed in the next major version of `debug`.'); + } + }; +})(); -// https://developer.mozilla.org/docs/Web/HTTP/Headers -const wellknownHeaderNames = [ - 'Accept', - 'Accept-Encoding', - 'Accept-Language', - 'Accept-Ranges', - 'Access-Control-Allow-Credentials', - 'Access-Control-Allow-Headers', - 'Access-Control-Allow-Methods', - 'Access-Control-Allow-Origin', - 'Access-Control-Expose-Headers', - 'Access-Control-Max-Age', - 'Access-Control-Request-Headers', - 'Access-Control-Request-Method', - 'Age', - 'Allow', - 'Alt-Svc', - 'Alt-Used', - 'Authorization', - 'Cache-Control', - 'Clear-Site-Data', - 'Connection', - 'Content-Disposition', - 'Content-Encoding', - 'Content-Language', - 'Content-Length', - 'Content-Location', - 'Content-Range', - 'Content-Security-Policy', - 'Content-Security-Policy-Report-Only', - 'Content-Type', - 'Cookie', - 'Cross-Origin-Embedder-Policy', - 'Cross-Origin-Opener-Policy', - 'Cross-Origin-Resource-Policy', - 'Date', - 'Device-Memory', - 'Downlink', - 'ECT', - 'ETag', - 'Expect', - 'Expect-CT', - 'Expires', - 'Forwarded', - 'From', - 'Host', - 'If-Match', - 'If-Modified-Since', - 'If-None-Match', - 'If-Range', - 'If-Unmodified-Since', - 'Keep-Alive', - 'Last-Modified', - 'Link', - 'Location', - 'Max-Forwards', - 'Origin', - 'Permissions-Policy', - 'Pragma', - 'Proxy-Authenticate', - 'Proxy-Authorization', - 'RTT', - 'Range', - 'Referer', - 'Referrer-Policy', - 'Refresh', - 'Retry-After', - 'Sec-WebSocket-Accept', - 'Sec-WebSocket-Extensions', - 'Sec-WebSocket-Key', - 'Sec-WebSocket-Protocol', - 'Sec-WebSocket-Version', - 'Server', - 'Server-Timing', - 'Service-Worker-Allowed', - 'Service-Worker-Navigation-Preload', - 'Set-Cookie', - 'SourceMap', - 'Strict-Transport-Security', - 'Supports-Loading-Mode', - 'TE', - 'Timing-Allow-Origin', - 'Trailer', - 'Transfer-Encoding', - 'Upgrade', - 'Upgrade-Insecure-Requests', - 'User-Agent', - 'Vary', - 'Via', - 'WWW-Authenticate', - 'X-Content-Type-Options', - 'X-DNS-Prefetch-Control', - 'X-Frame-Options', - 'X-Permitted-Cross-Domain-Policies', - 'X-Powered-By', - 'X-Requested-With', - 'X-XSS-Protection' -] +/** + * Colors. + */ -for (let i = 0; i < wellknownHeaderNames.length; ++i) { - const key = wellknownHeaderNames[i] - const lowerCasedKey = key.toLowerCase() - headerNameLowerCasedRecord[key] = headerNameLowerCasedRecord[lowerCasedKey] = - lowerCasedKey -} +exports.colors = [ + '#0000CC', + '#0000FF', + '#0033CC', + '#0033FF', + '#0066CC', + '#0066FF', + '#0099CC', + '#0099FF', + '#00CC00', + '#00CC33', + '#00CC66', + '#00CC99', + '#00CCCC', + '#00CCFF', + '#3300CC', + '#3300FF', + '#3333CC', + '#3333FF', + '#3366CC', + '#3366FF', + '#3399CC', + '#3399FF', + '#33CC00', + '#33CC33', + '#33CC66', + '#33CC99', + '#33CCCC', + '#33CCFF', + '#6600CC', + '#6600FF', + '#6633CC', + '#6633FF', + '#66CC00', + '#66CC33', + '#9900CC', + '#9900FF', + '#9933CC', + '#9933FF', + '#99CC00', + '#99CC33', + '#CC0000', + '#CC0033', + '#CC0066', + '#CC0099', + '#CC00CC', + '#CC00FF', + '#CC3300', + '#CC3333', + '#CC3366', + '#CC3399', + '#CC33CC', + '#CC33FF', + '#CC6600', + '#CC6633', + '#CC9900', + '#CC9933', + '#CCCC00', + '#CCCC33', + '#FF0000', + '#FF0033', + '#FF0066', + '#FF0099', + '#FF00CC', + '#FF00FF', + '#FF3300', + '#FF3333', + '#FF3366', + '#FF3399', + '#FF33CC', + '#FF33FF', + '#FF6600', + '#FF6633', + '#FF9900', + '#FF9933', + '#FFCC00', + '#FFCC33' +]; -// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`. 
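// Sketch of the argument decoration formatArgs() above produces in a
// colour-capable browser console, e.g. for debug('hi') on a namespace 'app'
// whose selected colour is '#0000CC' (the first entry of exports.colors); the
// namespace and the "+1ms" timing suffix are made-up values and the function
// is never invoked.
function exampleFormatArgsResult() {
  return [
    '%capp %chi%c +1ms',  // format string — each %c switches the CSS that follows
    'color: #0000CC',     // colours the namespace
    'color: inherit',     // resets for the message body
    'color: #0000CC'      // re-applied before the diff suffix
  ];
}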
-Object.setPrototypeOf(headerNameLowerCasedRecord, null) +/** + * Currently only WebKit-based Web Inspectors, Firefox >= v31, + * and the Firebug extension (any Firefox version) are known + * to support "%c" CSS customizations. + * + * TODO: add a `localStorage` variable to explicitly enable/disable colors + */ -module.exports = { - wellknownHeaderNames, - headerNameLowerCasedRecord +// eslint-disable-next-line complexity +function useColors() { + // NB: In an Electron preload script, document will be defined but not fully + // initialized. Since we know we're in Chrome, we'll just detect this case + // explicitly + if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) { + return true; + } + + // Internet Explorer and Edge do not support colors. + if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { + return false; + } + + // Is webkit? http://stackoverflow.com/a/16459606/376773 + // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 + return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) || + // Is firebug? http://stackoverflow.com/a/398120/376773 + (typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) || + // Is firefox >= v31? + // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) || + // Double check webkit in userAgent just in case we are in a worker + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)); } +/** + * Colorize log arguments if enabled. + * + * @api public + */ -/***/ }), - -/***/ 8045: -/***/ ((module) => { - -"use strict"; +function formatArgs(args) { + args[0] = (this.useColors ? '%c' : '') + + this.namespace + + (this.useColors ? ' %c' : ' ') + + args[0] + + (this.useColors ? '%c ' : ' ') + + '+' + module.exports.humanize(this.diff); + + if (!this.useColors) { + return; + } + + const c = 'color: ' + this.color; + args.splice(1, 0, c, 'color: inherit'); + + // The final "%c" is somewhat tricky, because there could be other + // arguments passed either before or after the %c, so we need to + // figure out the correct index to insert the CSS into + let index = 0; + let lastC = 0; + args[0].replace(/%[a-zA-Z%]/g, match => { + if (match === '%%') { + return; + } + index++; + if (match === '%c') { + // We only are interested in the *last* %c + // (the user may have provided their own) + lastC = index; + } + }); + + args.splice(lastC, 0, c); +} +/** + * Invokes `console.debug()` when available. + * No-op when `console.debug` is not a "function". + * If `console.debug` is not available, falls back + * to `console.log`. + * + * @api public + */ +exports.log = console.debug || console.log || (() => {}); -class UndiciError extends Error { - constructor (message) { - super(message) - this.name = 'UndiciError' - this.code = 'UND_ERR' - } +/** + * Save `namespaces`. 
+ * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + try { + if (namespaces) { + exports.storage.setItem('debug', namespaces); + } else { + exports.storage.removeItem('debug'); + } + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } } -class ConnectTimeoutError extends UndiciError { - constructor (message) { - super(message) - Error.captureStackTrace(this, ConnectTimeoutError) - this.name = 'ConnectTimeoutError' - this.message = message || 'Connect Timeout Error' - this.code = 'UND_ERR_CONNECT_TIMEOUT' - } -} +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ +function load() { + let r; + try { + r = exports.storage.getItem('debug'); + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } -class HeadersTimeoutError extends UndiciError { - constructor (message) { - super(message) - Error.captureStackTrace(this, HeadersTimeoutError) - this.name = 'HeadersTimeoutError' - this.message = message || 'Headers Timeout Error' - this.code = 'UND_ERR_HEADERS_TIMEOUT' - } -} + // If debug isn't set in LS, and we're in Electron, try to load $DEBUG + if (!r && typeof process !== 'undefined' && 'env' in process) { + r = process.env.DEBUG; + } -class HeadersOverflowError extends UndiciError { - constructor (message) { - super(message) - Error.captureStackTrace(this, HeadersOverflowError) - this.name = 'HeadersOverflowError' - this.message = message || 'Headers Overflow Error' - this.code = 'UND_ERR_HEADERS_OVERFLOW' - } + return r; } -class BodyTimeoutError extends UndiciError { - constructor (message) { - super(message) - Error.captureStackTrace(this, BodyTimeoutError) - this.name = 'BodyTimeoutError' - this.message = message || 'Body Timeout Error' - this.code = 'UND_ERR_BODY_TIMEOUT' - } -} +/** + * Localstorage attempts to return the localstorage. + * + * This is necessary because safari throws + * when a user disables cookies/localstorage + * and you attempt to access it. + * + * @return {LocalStorage} + * @api private + */ -class ResponseStatusCodeError extends UndiciError { - constructor (message, statusCode, headers, body) { - super(message) - Error.captureStackTrace(this, ResponseStatusCodeError) - this.name = 'ResponseStatusCodeError' - this.message = message || 'Response Status Code Error' - this.code = 'UND_ERR_RESPONSE_STATUS_CODE' - this.body = body - this.status = statusCode - this.statusCode = statusCode - this.headers = headers - } +function localstorage() { + try { + // TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context + // The Browser also has localStorage in the global context. + return localStorage; + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? 
+ } } -class InvalidArgumentError extends UndiciError { - constructor (message) { - super(message) - Error.captureStackTrace(this, InvalidArgumentError) - this.name = 'InvalidArgumentError' - this.message = message || 'Invalid Argument Error' - this.code = 'UND_ERR_INVALID_ARG' - } -} +module.exports = __nccwpck_require__(46243)(exports); -class InvalidReturnValueError extends UndiciError { - constructor (message) { - super(message) - Error.captureStackTrace(this, InvalidReturnValueError) - this.name = 'InvalidReturnValueError' - this.message = message || 'Invalid Return Value Error' - this.code = 'UND_ERR_INVALID_RETURN_VALUE' - } -} +const {formatters} = module.exports; -class RequestAbortedError extends UndiciError { - constructor (message) { - super(message) - Error.captureStackTrace(this, RequestAbortedError) - this.name = 'AbortError' - this.message = message || 'Request aborted' - this.code = 'UND_ERR_ABORTED' - } -} +/** + * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. + */ -class InformationalError extends UndiciError { - constructor (message) { - super(message) - Error.captureStackTrace(this, InformationalError) - this.name = 'InformationalError' - this.message = message || 'Request information' - this.code = 'UND_ERR_INFO' - } -} +formatters.j = function (v) { + try { + return JSON.stringify(v); + } catch (error) { + return '[UnexpectedJSONParseError]: ' + error.message; + } +}; -class RequestContentLengthMismatchError extends UndiciError { - constructor (message) { - super(message) - Error.captureStackTrace(this, RequestContentLengthMismatchError) - this.name = 'RequestContentLengthMismatchError' - this.message = message || 'Request body length does not match content-length header' - this.code = 'UND_ERR_REQ_CONTENT_LENGTH_MISMATCH' - } -} -class ResponseContentLengthMismatchError extends UndiciError { - constructor (message) { - super(message) - Error.captureStackTrace(this, ResponseContentLengthMismatchError) - this.name = 'ResponseContentLengthMismatchError' - this.message = message || 'Response body length does not match content-length header' - this.code = 'UND_ERR_RES_CONTENT_LENGTH_MISMATCH' - } -} +/***/ }), -class ClientDestroyedError extends UndiciError { - constructor (message) { - super(message) - Error.captureStackTrace(this, ClientDestroyedError) - this.name = 'ClientDestroyedError' - this.message = message || 'The client is destroyed' - this.code = 'UND_ERR_DESTROYED' - } -} +/***/ 46243: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -class ClientClosedError extends UndiciError { - constructor (message) { - super(message) - Error.captureStackTrace(this, ClientClosedError) - this.name = 'ClientClosedError' - this.message = message || 'The client is closed' - this.code = 'UND_ERR_CLOSED' - } -} -class SocketError extends UndiciError { - constructor (message, socket) { - super(message) - Error.captureStackTrace(this, SocketError) - this.name = 'SocketError' - this.message = message || 'Socket error' - this.code = 'UND_ERR_SOCKET' - this.socket = socket - } -} +/** + * This is the common logic for both the Node.js and web browser + * implementations of `debug()`. 
+ */ -class NotSupportedError extends UndiciError { - constructor (message) { - super(message) - Error.captureStackTrace(this, NotSupportedError) - this.name = 'NotSupportedError' - this.message = message || 'Not supported error' - this.code = 'UND_ERR_NOT_SUPPORTED' - } -} +function setup(env) { + createDebug.debug = createDebug; + createDebug.default = createDebug; + createDebug.coerce = coerce; + createDebug.disable = disable; + createDebug.enable = enable; + createDebug.enabled = enabled; + createDebug.humanize = __nccwpck_require__(80900); + createDebug.destroy = destroy; + + Object.keys(env).forEach(key => { + createDebug[key] = env[key]; + }); + + /** + * The currently active debug mode names, and names to skip. + */ + + createDebug.names = []; + createDebug.skips = []; + + /** + * Map of special "%n" handling functions, for the debug "format" argument. + * + * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". + */ + createDebug.formatters = {}; + + /** + * Selects a color for a debug namespace + * @param {String} namespace The namespace string for the debug instance to be colored + * @return {Number|String} An ANSI color code for the given namespace + * @api private + */ + function selectColor(namespace) { + let hash = 0; + + for (let i = 0; i < namespace.length; i++) { + hash = ((hash << 5) - hash) + namespace.charCodeAt(i); + hash |= 0; // Convert to 32bit integer + } + + return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; + } + createDebug.selectColor = selectColor; + + /** + * Create a debugger with the given `namespace`. + * + * @param {String} namespace + * @return {Function} + * @api public + */ + function createDebug(namespace) { + let prevTime; + let enableOverride = null; + let namespacesCache; + let enabledCache; + + function debug(...args) { + // Disabled? + if (!debug.enabled) { + return; + } + + const self = debug; + + // Set `diff` timestamp + const curr = Number(new Date()); + const ms = curr - (prevTime || curr); + self.diff = ms; + self.prev = prevTime; + self.curr = curr; + prevTime = curr; + + args[0] = createDebug.coerce(args[0]); + + if (typeof args[0] !== 'string') { + // Anything else let's inspect with %O + args.unshift('%O'); + } + + // Apply any `formatters` transformations + let index = 0; + args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => { + // If we encounter an escaped % then don't increase the array index + if (match === '%%') { + return '%'; + } + index++; + const formatter = createDebug.formatters[format]; + if (typeof formatter === 'function') { + const val = args[index]; + match = formatter.call(self, val); + + // Now we need to remove `args[index]` since it's inlined in the `format` + args.splice(index, 1); + index--; + } + return match; + }); + + // Apply env-specific formatting (colors, etc.) + createDebug.formatArgs.call(self, args); + + const logFn = self.log || createDebug.log; + logFn.apply(self, args); + } + + debug.namespace = namespace; + debug.useColors = createDebug.useColors(); + debug.color = createDebug.selectColor(namespace); + debug.extend = extend; + debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release. 
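// Usage sketch for the formatter machinery wired up above, once setup()
// returns the factory: a custom single-letter formatter is applied by the
// replace loop in debug() and its consumed argument is spliced out of `args`
// (cf. formatters.j in the browser build earlier in this file). The namespace
// is a made-up example and the function is never invoked.
function exampleCustomFormatterUsage() {
  createDebug.formatters.u = v => String(v).toUpperCase();
  const log = createDebug('app:example');
  log('user %u', 'admin');   // -> "user ADMIN" (plus prefix/suffix) when the namespace is enabled
}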
+ + Object.defineProperty(debug, 'enabled', { + enumerable: true, + configurable: false, + get: () => { + if (enableOverride !== null) { + return enableOverride; + } + if (namespacesCache !== createDebug.namespaces) { + namespacesCache = createDebug.namespaces; + enabledCache = createDebug.enabled(namespace); + } + + return enabledCache; + }, + set: v => { + enableOverride = v; + } + }); + + // Env-specific initialization logic for debug instances + if (typeof createDebug.init === 'function') { + createDebug.init(debug); + } + + return debug; + } + + function extend(namespace, delimiter) { + const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace); + newDebug.log = this.log; + return newDebug; + } + + /** + * Enables a debug mode by namespaces. This can include modes + * separated by a colon and wildcards. + * + * @param {String} namespaces + * @api public + */ + function enable(namespaces) { + createDebug.save(namespaces); + createDebug.namespaces = namespaces; + + createDebug.names = []; + createDebug.skips = []; + + let i; + const split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/); + const len = split.length; + + for (i = 0; i < len; i++) { + if (!split[i]) { + // ignore empty strings + continue; + } + + namespaces = split[i].replace(/\*/g, '.*?'); + + if (namespaces[0] === '-') { + createDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$')); + } else { + createDebug.names.push(new RegExp('^' + namespaces + '$')); + } + } + } + + /** + * Disable debug output. + * + * @return {String} namespaces + * @api public + */ + function disable() { + const namespaces = [ + ...createDebug.names.map(toNamespace), + ...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace) + ].join(','); + createDebug.enable(''); + return namespaces; + } + + /** + * Returns true if the given mode name is enabled, false otherwise. + * + * @param {String} name + * @return {Boolean} + * @api public + */ + function enabled(name) { + if (name[name.length - 1] === '*') { + return true; + } + + let i; + let len; + + for (i = 0, len = createDebug.skips.length; i < len; i++) { + if (createDebug.skips[i].test(name)) { + return false; + } + } + + for (i = 0, len = createDebug.names.length; i < len; i++) { + if (createDebug.names[i].test(name)) { + return true; + } + } + + return false; + } + + /** + * Convert regexp to namespace + * + * @param {RegExp} regxep + * @return {String} namespace + * @api private + */ + function toNamespace(regexp) { + return regexp.toString() + .substring(2, regexp.toString().length - 2) + .replace(/\.\*\?$/, '*'); + } + + /** + * Coerce `val`. + * + * @param {Mixed} val + * @return {Mixed} + * @api private + */ + function coerce(val) { + if (val instanceof Error) { + return val.stack || val.message; + } + return val; + } + + /** + * XXX DO NOT USE. This is a temporary stub function. + * XXX It WILL be removed in the next major release. + */ + function destroy() { + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. 
It will be removed in the next major version of `debug`.'); + } + + createDebug.enable(createDebug.load()); + + return createDebug; +} + +module.exports = setup; -class BalancedPoolMissingUpstreamError extends UndiciError { - constructor (message) { - super(message) - Error.captureStackTrace(this, NotSupportedError) - this.name = 'MissingUpstreamError' - this.message = message || 'No upstream has been added to the BalancedPool' - this.code = 'UND_ERR_BPL_MISSING_UPSTREAM' - } -} -class HTTPParserError extends Error { - constructor (message, code, data) { - super(message) - Error.captureStackTrace(this, HTTPParserError) - this.name = 'HTTPParserError' - this.code = code ? `HPE_${code}` : undefined - this.data = data ? data.toString() : undefined - } -} +/***/ }), -class ResponseExceededMaxSizeError extends UndiciError { - constructor (message) { - super(message) - Error.captureStackTrace(this, ResponseExceededMaxSizeError) - this.name = 'ResponseExceededMaxSizeError' - this.message = message || 'Response content exceeded max size' - this.code = 'UND_ERR_RES_EXCEEDED_MAX_SIZE' - } -} +/***/ 38237: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -class RequestRetryError extends UndiciError { - constructor (message, code, { headers, data }) { - super(message) - Error.captureStackTrace(this, RequestRetryError) - this.name = 'RequestRetryError' - this.message = message || 'Request retry error' - this.code = 'UND_ERR_REQ_RETRY' - this.statusCode = code - this.data = data - this.headers = headers - } -} +/** + * Detect Electron renderer / nwjs process, which is node, but we should + * treat as a browser. + */ -module.exports = { - HTTPParserError, - UndiciError, - HeadersTimeoutError, - HeadersOverflowError, - BodyTimeoutError, - RequestContentLengthMismatchError, - ConnectTimeoutError, - ResponseStatusCodeError, - InvalidArgumentError, - InvalidReturnValueError, - RequestAbortedError, - ClientDestroyedError, - ClientClosedError, - InformationalError, - SocketError, - NotSupportedError, - ResponseContentLengthMismatchError, - BalancedPoolMissingUpstreamError, - ResponseExceededMaxSizeError, - RequestRetryError +if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) { + module.exports = __nccwpck_require__(28222); +} else { + module.exports = __nccwpck_require__(35332); } /***/ }), -/***/ 2905: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 35332: +/***/ ((module, exports, __nccwpck_require__) => { -"use strict"; +/** + * Module dependencies. + */ +const tty = __nccwpck_require__(76224); +const util = __nccwpck_require__(73837); -const { - InvalidArgumentError, - NotSupportedError -} = __nccwpck_require__(8045) -const assert = __nccwpck_require__(9491) -const { kHTTP2BuildRequest, kHTTP2CopyHeaders, kHTTP1BuildRequest } = __nccwpck_require__(2785) -const util = __nccwpck_require__(3983) +/** + * This is the Node.js implementation of `debug()`. + */ -// tokenRegExp and headerCharRegex have been lifted from -// https://github.com/nodejs/node/blob/main/lib/_http_common.js +exports.init = init; +exports.log = log; +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.destroy = util.deprecate( + () => {}, + 'Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.' 
+); /** - * Verifies that the given val is a valid HTTP token - * per the rules defined in RFC 7230 - * See https://tools.ietf.org/html/rfc7230#section-3.2.6 + * Colors. */ -const tokenRegExp = /^[\^_`a-zA-Z\-0-9!#$%&'*+.|~]+$/ + +exports.colors = [6, 2, 3, 4, 5, 1]; + +try { + // Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json) + // eslint-disable-next-line import/no-extraneous-dependencies + const supportsColor = __nccwpck_require__(59318); + + if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) { + exports.colors = [ + 20, + 21, + 26, + 27, + 32, + 33, + 38, + 39, + 40, + 41, + 42, + 43, + 44, + 45, + 56, + 57, + 62, + 63, + 68, + 69, + 74, + 75, + 76, + 77, + 78, + 79, + 80, + 81, + 92, + 93, + 98, + 99, + 112, + 113, + 128, + 129, + 134, + 135, + 148, + 149, + 160, + 161, + 162, + 163, + 164, + 165, + 166, + 167, + 168, + 169, + 170, + 171, + 172, + 173, + 178, + 179, + 184, + 185, + 196, + 197, + 198, + 199, + 200, + 201, + 202, + 203, + 204, + 205, + 206, + 207, + 208, + 209, + 214, + 215, + 220, + 221 + ]; + } +} catch (error) { + // Swallow - we only care if `supports-color` is available; it doesn't have to be. +} /** - * Matches if val contains an invalid field-vchar - * field-value = *( field-content / obs-fold ) - * field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ] - * field-vchar = VCHAR / obs-text + * Build up the default `inspectOpts` object from the environment variables. + * + * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js */ -const headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/ -// Verifies that a given path is valid does not contain control chars \x00 to \x20 -const invalidPathRegex = /[^\u0021-\u00ff]/ +exports.inspectOpts = Object.keys(process.env).filter(key => { + return /^debug_/i.test(key); +}).reduce((obj, key) => { + // Camel-case + const prop = key + .substring(6) + .toLowerCase() + .replace(/_([a-z])/g, (_, k) => { + return k.toUpperCase(); + }); + + // Coerce string value into JS value + let val = process.env[key]; + if (/^(yes|on|true|enabled)$/i.test(val)) { + val = true; + } else if (/^(no|off|false|disabled)$/i.test(val)) { + val = false; + } else if (val === 'null') { + val = null; + } else { + val = Number(val); + } + + obj[prop] = val; + return obj; +}, {}); -const kHandler = Symbol('handler') +/** + * Is stdout a TTY? Colored output is enabled when `true`. + */ -const channels = {} +function useColors() { + return 'colors' in exports.inspectOpts ? + Boolean(exports.inspectOpts.colors) : + tty.isatty(process.stderr.fd); +} -let extractBody +/** + * Adds ANSI color escape codes if enabled. + * + * @api public + */ -try { - const diagnosticsChannel = __nccwpck_require__(7643) - channels.create = diagnosticsChannel.channel('undici:request:create') - channels.bodySent = diagnosticsChannel.channel('undici:request:bodySent') - channels.headers = diagnosticsChannel.channel('undici:request:headers') - channels.trailers = diagnosticsChannel.channel('undici:request:trailers') - channels.error = diagnosticsChannel.channel('undici:request:error') -} catch { - channels.create = { hasSubscribers: false } - channels.bodySent = { hasSubscribers: false } - channels.headers = { hasSubscribers: false } - channels.trailers = { hasSubscribers: false } - channels.error = { hasSubscribers: false } +function formatArgs(args) { + const {namespace: name, useColors} = this; + + if (useColors) { + const c = this.color; + const colorCode = '\u001B[3' + (c < 8 ? 
c : '8;5;' + c); + const prefix = ` ${colorCode};1m${name} \u001B[0m`; + + args[0] = prefix + args[0].split('\n').join('\n' + prefix); + args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\u001B[0m'); + } else { + args[0] = getDate() + name + ' ' + args[0]; + } } -class Request { - constructor (origin, { - path, - method, - body, - headers, - query, - idempotent, - blocking, - upgrade, - headersTimeout, - bodyTimeout, - reset, - throwOnError, - expectContinue - }, handler) { - if (typeof path !== 'string') { - throw new InvalidArgumentError('path must be a string') - } else if ( - path[0] !== '/' && - !(path.startsWith('http://') || path.startsWith('https://')) && - method !== 'CONNECT' - ) { - throw new InvalidArgumentError('path must be an absolute URL or start with a slash') - } else if (invalidPathRegex.exec(path) !== null) { - throw new InvalidArgumentError('invalid request path') - } +function getDate() { + if (exports.inspectOpts.hideDate) { + return ''; + } + return new Date().toISOString() + ' '; +} - if (typeof method !== 'string') { - throw new InvalidArgumentError('method must be a string') - } else if (tokenRegExp.exec(method) === null) { - throw new InvalidArgumentError('invalid request method') - } +/** + * Invokes `util.format()` with the specified arguments and writes to stderr. + */ - if (upgrade && typeof upgrade !== 'string') { - throw new InvalidArgumentError('upgrade must be a string') - } +function log(...args) { + return process.stderr.write(util.format(...args) + '\n'); +} - if (headersTimeout != null && (!Number.isFinite(headersTimeout) || headersTimeout < 0)) { - throw new InvalidArgumentError('invalid headersTimeout') - } +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + if (namespaces) { + process.env.DEBUG = namespaces; + } else { + // If you set a process.env field to null or undefined, it gets cast to the + // string 'null' or 'undefined'. Just delete instead. + delete process.env.DEBUG; + } +} - if (bodyTimeout != null && (!Number.isFinite(bodyTimeout) || bodyTimeout < 0)) { - throw new InvalidArgumentError('invalid bodyTimeout') - } +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ - if (reset != null && typeof reset !== 'boolean') { - throw new InvalidArgumentError('invalid reset') - } +function load() { + return process.env.DEBUG; +} - if (expectContinue != null && typeof expectContinue !== 'boolean') { - throw new InvalidArgumentError('invalid expectContinue') - } +/** + * Init logic for `debug` instances. + * + * Create a new `inspectOpts` object in case `useColors` is set + * differently for a particular `debug` instance. + */ - this.headersTimeout = headersTimeout +function init(debug) { + debug.inspectOpts = {}; - this.bodyTimeout = bodyTimeout + const keys = Object.keys(exports.inspectOpts); + for (let i = 0; i < keys.length; i++) { + debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]]; + } +} - this.throwOnError = throwOnError === true +module.exports = __nccwpck_require__(46243)(exports); - this.method = method +const {formatters} = module.exports; - this.abort = null +/** + * Map %o to `util.inspect()`, all on a single line. 
+ */ - if (body == null) { - this.body = null - } else if (util.isStream(body)) { - this.body = body +formatters.o = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts) + .split('\n') + .map(str => str.trim()) + .join(' '); +}; - const rState = this.body._readableState - if (!rState || !rState.autoDestroy) { - this.endHandler = function autoDestroy () { - util.destroy(this) - } - this.body.on('end', this.endHandler) - } +/** + * Map %O to `util.inspect()`, allowing multiple lines if needed. + */ - this.errorHandler = err => { - if (this.abort) { - this.abort(err) - } else { - this.error = err - } - } - this.body.on('error', this.errorHandler) - } else if (util.isBuffer(body)) { - this.body = body.byteLength ? body : null - } else if (ArrayBuffer.isView(body)) { - this.body = body.buffer.byteLength ? Buffer.from(body.buffer, body.byteOffset, body.byteLength) : null - } else if (body instanceof ArrayBuffer) { - this.body = body.byteLength ? Buffer.from(body) : null - } else if (typeof body === 'string') { - this.body = body.length ? Buffer.from(body) : null - } else if (util.isFormDataLike(body) || util.isIterable(body) || util.isBlobLike(body)) { - this.body = body - } else { - throw new InvalidArgumentError('body must be a string, a Buffer, a Readable stream, an iterable, or an async iterable') - } +formatters.O = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts); +}; - this.completed = false - this.aborted = false +/***/ }), - this.upgrade = upgrade || null +/***/ 26834: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - this.path = query ? util.buildURL(path, query) : path +"use strict"; +/**! + * default-user-agent - index.js + * + * Copyright(c) fengmk2 and other contributors. + * MIT Licensed + * + * Authors: + * fengmk2 (http://fengmk2.com) + */ - this.origin = origin - this.idempotent = idempotent == null - ? method === 'HEAD' || method === 'GET' - : idempotent - this.blocking = blocking == null ? false : blocking +/** + * Module dependencies. + */ - this.reset = reset == null ? null : reset +var osName = __nccwpck_require__(54824); - this.host = null +var USER_AGENT = 'Node.js/' + process.version.slice(1) + + ' (' + osName() + '; ' + process.arch + ')'; - this.contentLength = null +module.exports = function ua(name, version) { + if (arguments.length !== 2) { + return USER_AGENT; + } + return name + '/' + version + ' ' + USER_AGENT; +}; - this.contentType = null - this.headers = '' +/***/ }), - // Only for H2 - this.expectContinue = expectContinue != null ? 
expectContinue : false +/***/ 54564: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (Array.isArray(headers)) { - if (headers.length % 2 !== 0) { - throw new InvalidArgumentError('headers array must be even') - } - for (let i = 0; i < headers.length; i += 2) { - processHeader(this, headers[i], headers[i + 1]) - } - } else if (headers && typeof headers === 'object') { - const keys = Object.keys(headers) - for (let i = 0; i < keys.length; i++) { - const key = keys[i] - processHeader(this, key, headers[key]) - } - } else if (headers != null) { - throw new InvalidArgumentError('headers must be an object or an array') - } +"use strict"; - if (util.isFormDataLike(this.body)) { - if (util.nodeMajor < 16 || (util.nodeMajor === 16 && util.nodeMinor < 8)) { - throw new InvalidArgumentError('Form-Data bodies are only supported in node v16.8 and newer.') - } - if (!extractBody) { - extractBody = (__nccwpck_require__(1472).extractBody) - } +var $defineProperty = __nccwpck_require__(6123); - const [bodyStream, contentType] = extractBody(body) - if (this.contentType == null) { - this.contentType = contentType - this.headers += `content-type: ${contentType}\r\n` - } - this.body = bodyStream.stream - this.contentLength = bodyStream.length - } else if (util.isBlobLike(body) && this.contentType == null && body.type) { - this.contentType = body.type - this.headers += `content-type: ${body.type}\r\n` - } +var $SyntaxError = __nccwpck_require__(75474); +var $TypeError = __nccwpck_require__(6361); - util.validateHandler(handler, method, upgrade) +var gopd = __nccwpck_require__(18501); - this.servername = util.getServerName(this.host) +/** @type {import('.')} */ +module.exports = function defineDataProperty( + obj, + property, + value +) { + if (!obj || (typeof obj !== 'object' && typeof obj !== 'function')) { + throw new $TypeError('`obj` must be an object or a function`'); + } + if (typeof property !== 'string' && typeof property !== 'symbol') { + throw new $TypeError('`property` must be a string or a symbol`'); + } + if (arguments.length > 3 && typeof arguments[3] !== 'boolean' && arguments[3] !== null) { + throw new $TypeError('`nonEnumerable`, if provided, must be a boolean or null'); + } + if (arguments.length > 4 && typeof arguments[4] !== 'boolean' && arguments[4] !== null) { + throw new $TypeError('`nonWritable`, if provided, must be a boolean or null'); + } + if (arguments.length > 5 && typeof arguments[5] !== 'boolean' && arguments[5] !== null) { + throw new $TypeError('`nonConfigurable`, if provided, must be a boolean or null'); + } + if (arguments.length > 6 && typeof arguments[6] !== 'boolean') { + throw new $TypeError('`loose`, if provided, must be a boolean'); + } + + var nonEnumerable = arguments.length > 3 ? arguments[3] : null; + var nonWritable = arguments.length > 4 ? arguments[4] : null; + var nonConfigurable = arguments.length > 5 ? arguments[5] : null; + var loose = arguments.length > 6 ? arguments[6] : false; + + /* @type {false | TypedPropertyDescriptor} */ + var desc = !!gopd && gopd(obj, property); + + if ($defineProperty) { + $defineProperty(obj, property, { + configurable: nonConfigurable === null && desc ? desc.configurable : !nonConfigurable, + enumerable: nonEnumerable === null && desc ? desc.enumerable : !nonEnumerable, + value: value, + writable: nonWritable === null && desc ? 
desc.writable : !nonWritable + }); + } else if (loose || (!nonEnumerable && !nonWritable && !nonConfigurable)) { + // must fall back to [[Set]], and was not explicitly asked to make non-enumerable, non-writable, or non-configurable + obj[property] = value; // eslint-disable-line no-param-reassign + } else { + throw new $SyntaxError('This environment does not support defining a property as non-configurable, non-writable, or non-enumerable.'); + } +}; - this[kHandler] = handler - if (channels.create.hasSubscribers) { - channels.create.publish({ request: this }) - } - } +/***/ }), - onBodySent (chunk) { - if (this[kHandler].onBodySent) { - try { - return this[kHandler].onBodySent(chunk) - } catch (err) { - this.abort(err) - } - } - } +/***/ 18611: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - onRequestSent () { - if (channels.bodySent.hasSubscribers) { - channels.bodySent.publish({ request: this }) - } +var Stream = (__nccwpck_require__(12781).Stream); +var util = __nccwpck_require__(73837); - if (this[kHandler].onRequestSent) { - try { - return this[kHandler].onRequestSent() - } catch (err) { - this.abort(err) - } - } - } +module.exports = DelayedStream; +function DelayedStream() { + this.source = null; + this.dataSize = 0; + this.maxDataSize = 1024 * 1024; + this.pauseStream = true; - onConnect (abort) { - assert(!this.aborted) - assert(!this.completed) + this._maxDataSizeExceeded = false; + this._released = false; + this._bufferedEvents = []; +} +util.inherits(DelayedStream, Stream); - if (this.error) { - abort(this.error) - } else { - this.abort = abort - return this[kHandler].onConnect(abort) - } +DelayedStream.create = function(source, options) { + var delayedStream = new this(); + + options = options || {}; + for (var option in options) { + delayedStream[option] = options[option]; } - onHeaders (statusCode, headers, resume, statusText) { - assert(!this.aborted) - assert(!this.completed) + delayedStream.source = source; - if (channels.headers.hasSubscribers) { - channels.headers.publish({ request: this, response: { statusCode, headers, statusText } }) - } + var realEmit = source.emit; + source.emit = function() { + delayedStream._handleEmit(arguments); + return realEmit.apply(source, arguments); + }; - try { - return this[kHandler].onHeaders(statusCode, headers, resume, statusText) - } catch (err) { - this.abort(err) - } + source.on('error', function() {}); + if (delayedStream.pauseStream) { + source.pause(); } - onData (chunk) { - assert(!this.aborted) - assert(!this.completed) + return delayedStream; +}; - try { - return this[kHandler].onData(chunk) - } catch (err) { - this.abort(err) - return false - } +Object.defineProperty(DelayedStream.prototype, 'readable', { + configurable: true, + enumerable: true, + get: function() { + return this.source.readable; } +}); - onUpgrade (statusCode, headers, socket) { - assert(!this.aborted) - assert(!this.completed) +DelayedStream.prototype.setEncoding = function() { + return this.source.setEncoding.apply(this.source, arguments); +}; - return this[kHandler].onUpgrade(statusCode, headers, socket) +DelayedStream.prototype.resume = function() { + if (!this._released) { + this.release(); } - onComplete (trailers) { - this.onFinally() - - assert(!this.aborted) + this.source.resume(); +}; - this.completed = true - if (channels.trailers.hasSubscribers) { - channels.trailers.publish({ request: this, trailers }) - } +DelayedStream.prototype.pause = function() { + this.source.pause(); +}; - try { - return 
this[kHandler].onComplete(trailers) - } catch (err) { - // TODO (fix): This might be a bad idea? - this.onError(err) - } - } +DelayedStream.prototype.release = function() { + this._released = true; - onError (error) { - this.onFinally() + this._bufferedEvents.forEach(function(args) { + this.emit.apply(this, args); + }.bind(this)); + this._bufferedEvents = []; +}; - if (channels.error.hasSubscribers) { - channels.error.publish({ request: this, error }) - } +DelayedStream.prototype.pipe = function() { + var r = Stream.prototype.pipe.apply(this, arguments); + this.resume(); + return r; +}; - if (this.aborted) { - return - } - this.aborted = true +DelayedStream.prototype._handleEmit = function(args) { + if (this._released) { + this.emit.apply(this, args); + return; + } - return this[kHandler].onError(error) + if (args[0] === 'data') { + this.dataSize += args[1].length; + this._checkIfMaxDataSizeExceeded(); } - onFinally () { - if (this.errorHandler) { - this.body.off('error', this.errorHandler) - this.errorHandler = null - } + this._bufferedEvents.push(args); +}; - if (this.endHandler) { - this.body.off('end', this.endHandler) - this.endHandler = null - } +DelayedStream.prototype._checkIfMaxDataSizeExceeded = function() { + if (this._maxDataSizeExceeded) { + return; } - // TODO: adjust to support H2 - addHeader (key, value) { - processHeader(this, key, value) - return this + if (this.dataSize <= this.maxDataSize) { + return; } - static [kHTTP1BuildRequest] (origin, opts, handler) { - // TODO: Migrate header parsing here, to make Requests - // HTTP agnostic - return new Request(origin, opts, handler) - } + this._maxDataSizeExceeded = true; + var message = + 'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.' + this.emit('error', new Error(message)); +}; - static [kHTTP2BuildRequest] (origin, opts, handler) { - const headers = opts.headers - opts = { ...opts, headers: null } - const request = new Request(origin, opts, handler) +/***/ }), - request.headers = {} +/***/ 43225: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (Array.isArray(headers)) { - if (headers.length % 2 !== 0) { - throw new InvalidArgumentError('headers array must be even') - } - for (let i = 0; i < headers.length; i += 2) { - processHeader(request, headers[i], headers[i + 1], true) - } - } else if (headers && typeof headers === 'object') { - const keys = Object.keys(headers) - for (let i = 0; i < keys.length; i++) { - const key = keys[i] - processHeader(request, key, headers[key], true) - } - } else if (headers != null) { - throw new InvalidArgumentError('headers must be an object or an array') - } +"use strict"; +/*! + * destroy + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2015-2022 Douglas Christopher Wilson + * MIT Licensed + */ - return request - } - static [kHTTP2CopyHeaders] (raw) { - const rawHeaders = raw.split('\r\n') - const headers = {} - for (const header of rawHeaders) { - const [key, value] = header.split(': ') +/** + * Module dependencies. + * @private + */ - if (value == null || value.length === 0) continue +var EventEmitter = (__nccwpck_require__(82361).EventEmitter) +var ReadStream = (__nccwpck_require__(57147).ReadStream) +var Stream = __nccwpck_require__(12781) +var Zlib = __nccwpck_require__(59796) - if (headers[key]) headers[key] += `,${value}` - else headers[key] = value - } +/** + * Module exports. + * @public + */ - return headers +module.exports = destroy + +/** + * Destroy the given stream, and optionally suppress any future `error` events. 
+ * + * @param {object} stream + * @param {boolean} suppress + * @public + */ + +function destroy (stream, suppress) { + if (isFsReadStream(stream)) { + destroyReadStream(stream) + } else if (isZlibStream(stream)) { + destroyZlibStream(stream) + } else if (hasDestroy(stream)) { + stream.destroy() + } + + if (isEventEmitter(stream) && suppress) { + stream.removeAllListeners('error') + stream.addListener('error', noop) } + + return stream } -function processHeaderValue (key, val, skipAppend) { - if (val && typeof val === 'object') { - throw new InvalidArgumentError(`invalid ${key} header`) +/** + * Destroy a ReadStream. + * + * @param {object} stream + * @private + */ + +function destroyReadStream (stream) { + stream.destroy() + + if (typeof stream.close === 'function') { + // node.js core bug work-around + stream.on('open', onOpenClose) } +} - val = val != null ? `${val}` : '' +/** + * Close a Zlib stream. + * + * Zlib streams below Node.js 4.5.5 have a buggy implementation + * of .close() when zlib encountered an error. + * + * @param {object} stream + * @private + */ - if (headerCharRegex.exec(val) !== null) { - throw new InvalidArgumentError(`invalid ${key} header`) +function closeZlibStream (stream) { + if (stream._hadError === true) { + var prop = stream._binding === null + ? '_binding' + : '_handle' + + stream[prop] = { + close: function () { this[prop] = null } + } } - return skipAppend ? val : `${key}: ${val}\r\n` + stream.close() } -function processHeader (request, key, val, skipAppend = false) { - if (val && (typeof val === 'object' && !Array.isArray(val))) { - throw new InvalidArgumentError(`invalid ${key} header`) - } else if (val === undefined) { - return - } +/** + * Destroy a Zlib stream. + * + * Zlib streams don't have a destroy function in Node.js 6. On top of that + * simply calling destroy on a zlib stream in Node.js 8+ will result in a + * memory leak. So until that is fixed, we need to call both close AND destroy. + * + * PR to fix memory leak: https://github.com/nodejs/node/pull/23734 + * + * In Node.js 6+8, it's important that destroy is called before close as the + * stream would otherwise emit the error 'zlib binding closed'. + * + * @param {object} stream + * @private + */ - if ( - request.host === null && - key.length === 4 && - key.toLowerCase() === 'host' - ) { - if (headerCharRegex.exec(val) !== null) { - throw new InvalidArgumentError(`invalid ${key} header`) - } - // Consumed by Client - request.host = val - } else if ( - request.contentLength === null && - key.length === 14 && - key.toLowerCase() === 'content-length' - ) { - request.contentLength = parseInt(val, 10) - if (!Number.isFinite(request.contentLength)) { - throw new InvalidArgumentError('invalid content-length header') - } - } else if ( - request.contentType === null && - key.length === 12 && - key.toLowerCase() === 'content-type' - ) { - request.contentType = val - if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend) - else request.headers += processHeaderValue(key, val) - } else if ( - key.length === 17 && - key.toLowerCase() === 'transfer-encoding' - ) { - throw new InvalidArgumentError('invalid transfer-encoding header') - } else if ( - key.length === 10 && - key.toLowerCase() === 'connection' - ) { - const value = typeof val === 'string' ? 
val.toLowerCase() : null - if (value !== 'close' && value !== 'keep-alive') { - throw new InvalidArgumentError('invalid connection header') - } else if (value === 'close') { - request.reset = true - } - } else if ( - key.length === 10 && - key.toLowerCase() === 'keep-alive' - ) { - throw new InvalidArgumentError('invalid keep-alive header') - } else if ( - key.length === 7 && - key.toLowerCase() === 'upgrade' - ) { - throw new InvalidArgumentError('invalid upgrade header') - } else if ( - key.length === 6 && - key.toLowerCase() === 'expect' - ) { - throw new NotSupportedError('expect header not supported') - } else if (tokenRegExp.exec(key) === null) { - throw new InvalidArgumentError('invalid header key') - } else { - if (Array.isArray(val)) { - for (let i = 0; i < val.length; i++) { - if (skipAppend) { - if (request.headers[key]) request.headers[key] += `,${processHeaderValue(key, val[i], skipAppend)}` - else request.headers[key] = processHeaderValue(key, val[i], skipAppend) - } else { - request.headers += processHeaderValue(key, val[i]) - } +function destroyZlibStream (stream) { + if (typeof stream.destroy === 'function') { + // node.js core bug work-around + // istanbul ignore if: node.js 0.8 + if (stream._binding) { + // node.js < 0.10.0 + stream.destroy() + if (stream._processing) { + stream._needDrain = true + stream.once('drain', onDrainClearBinding) + } else { + stream._binding.clear() } + } else if (stream._destroy && stream._destroy !== Stream.Transform.prototype._destroy) { + // node.js >= 12, ^11.1.0, ^10.15.1 + stream.destroy() + } else if (stream._destroy && typeof stream.close === 'function') { + // node.js 7, 8 + stream.destroyed = true + stream.close() } else { - if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend) - else request.headers += processHeaderValue(key, val) + // fallback + // istanbul ignore next + stream.destroy() } + } else if (typeof stream.close === 'function') { + // node.js < 8 fallback + closeZlibStream(stream) } } -module.exports = Request - +/** + * Determine if stream has destroy. + * @private + */ -/***/ }), +function hasDestroy (stream) { + return stream instanceof Stream && + typeof stream.destroy === 'function' +} -/***/ 2785: -/***/ ((module) => { +/** + * Determine if val is EventEmitter. 
+ * @private + */ -module.exports = { - kClose: Symbol('close'), - kDestroy: Symbol('destroy'), - kDispatch: Symbol('dispatch'), - kUrl: Symbol('url'), - kWriting: Symbol('writing'), - kResuming: Symbol('resuming'), - kQueue: Symbol('queue'), - kConnect: Symbol('connect'), - kConnecting: Symbol('connecting'), - kHeadersList: Symbol('headers list'), - kKeepAliveDefaultTimeout: Symbol('default keep alive timeout'), - kKeepAliveMaxTimeout: Symbol('max keep alive timeout'), - kKeepAliveTimeoutThreshold: Symbol('keep alive timeout threshold'), - kKeepAliveTimeoutValue: Symbol('keep alive timeout'), - kKeepAlive: Symbol('keep alive'), - kHeadersTimeout: Symbol('headers timeout'), - kBodyTimeout: Symbol('body timeout'), - kServerName: Symbol('server name'), - kLocalAddress: Symbol('local address'), - kHost: Symbol('host'), - kNoRef: Symbol('no ref'), - kBodyUsed: Symbol('used'), - kRunning: Symbol('running'), - kBlocking: Symbol('blocking'), - kPending: Symbol('pending'), - kSize: Symbol('size'), - kBusy: Symbol('busy'), - kQueued: Symbol('queued'), - kFree: Symbol('free'), - kConnected: Symbol('connected'), - kClosed: Symbol('closed'), - kNeedDrain: Symbol('need drain'), - kReset: Symbol('reset'), - kDestroyed: Symbol.for('nodejs.stream.destroyed'), - kMaxHeadersSize: Symbol('max headers size'), - kRunningIdx: Symbol('running index'), - kPendingIdx: Symbol('pending index'), - kError: Symbol('error'), - kClients: Symbol('clients'), - kClient: Symbol('client'), - kParser: Symbol('parser'), - kOnDestroyed: Symbol('destroy callbacks'), - kPipelining: Symbol('pipelining'), - kSocket: Symbol('socket'), - kHostHeader: Symbol('host header'), - kConnector: Symbol('connector'), - kStrictContentLength: Symbol('strict content length'), - kMaxRedirections: Symbol('maxRedirections'), - kMaxRequests: Symbol('maxRequestsPerClient'), - kProxy: Symbol('proxy agent options'), - kCounter: Symbol('socket request counter'), - kInterceptors: Symbol('dispatch interceptors'), - kMaxResponseSize: Symbol('max response size'), - kHTTP2Session: Symbol('http2Session'), - kHTTP2SessionState: Symbol('http2Session state'), - kHTTP2BuildRequest: Symbol('http2 build request'), - kHTTP1BuildRequest: Symbol('http1 build request'), - kHTTP2CopyHeaders: Symbol('http2 copy headers'), - kHTTPConnVersion: Symbol('http connection version'), - kRetryHandlerDefaultRetry: Symbol('retry agent default retry'), - kConstruct: Symbol('constructable') +function isEventEmitter (val) { + return val instanceof EventEmitter } +/** + * Determine if stream is fs.ReadStream stream. + * @private + */ -/***/ }), - -/***/ 3983: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +function isFsReadStream (stream) { + return stream instanceof ReadStream +} -"use strict"; +/** + * Determine if stream is Zlib stream. 
+ * @private + */ +function isZlibStream (stream) { + return stream instanceof Zlib.Gzip || + stream instanceof Zlib.Gunzip || + stream instanceof Zlib.Deflate || + stream instanceof Zlib.DeflateRaw || + stream instanceof Zlib.Inflate || + stream instanceof Zlib.InflateRaw || + stream instanceof Zlib.Unzip +} -const assert = __nccwpck_require__(9491) -const { kDestroyed, kBodyUsed } = __nccwpck_require__(2785) -const { IncomingMessage } = __nccwpck_require__(3685) -const stream = __nccwpck_require__(2781) -const net = __nccwpck_require__(1808) -const { InvalidArgumentError } = __nccwpck_require__(8045) -const { Blob } = __nccwpck_require__(4300) -const nodeUtil = __nccwpck_require__(3837) -const { stringify } = __nccwpck_require__(3477) -const { headerNameLowerCasedRecord } = __nccwpck_require__(4462) +/** + * No-op function. + * @private + */ -const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v)) +function noop () {} -function nop () {} +/** + * On drain handler to clear binding. + * @private + */ -function isStream (obj) { - return obj && typeof obj === 'object' && typeof obj.pipe === 'function' && typeof obj.on === 'function' +// istanbul ignore next: node.js 0.8 +function onDrainClearBinding () { + this._binding.clear() } -// based on https://github.com/node-fetch/fetch-blob/blob/8ab587d34080de94140b54f07168451e7d0b655e/index.js#L229-L241 (MIT License) -function isBlobLike (object) { - return (Blob && object instanceof Blob) || ( - object && - typeof object === 'object' && - (typeof object.stream === 'function' || - typeof object.arrayBuffer === 'function') && - /^(Blob|File)$/.test(object[Symbol.toStringTag]) - ) +/** + * On open handler to close stream. + * @private + */ + +function onOpenClose () { + if (typeof this.fd === 'number') { + // actually close down the fd + this.close() + } } -function buildURL (url, queryParams) { - if (url.includes('?') || url.includes('#')) { - throw new Error('Query params cannot be passed when url already contains "?" or "#".') + +/***/ }), + +/***/ 25211: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +// Copyright (c) 2006, 2008 Tony Garnock-Jones +// Copyright (c) 2006, 2008 LShift Ltd. +// +// Permission is hereby granted, free of charge, to any person +// obtaining a copy of this software and associated documentation files +// (the "Software"), to deal in the Software without restriction, +// including without limitation the rights to use, copy, modify, merge, +// publish, distribute, sublicense, and/or sell copies of the Software, +// and to permit persons to whom the Software is furnished to do so, +// subject to the following conditions: +// +// The above copyright notice and this permission notice shall be +// included in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +// BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +// ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +// CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. 
+ +var onp = __nccwpck_require__(18101); + +function longestCommonSubsequence(file1, file2) { + var diff = new onp(file1, file2); + diff.compose(); + var ses = diff.getses(); + + var root; + var prev; + var file1RevIdx = file1.length - 1, + file2RevIdx = file2.length - 1; + for (var i = ses.length - 1; i >= 0; --i) { + if (ses[i].t === diff.SES_COMMON) { + if (prev) { + prev.chain = { + file1index: file1RevIdx, + file2index: file2RevIdx, + chain: null + }; + prev = prev.chain; + } else { + root = { + file1index: file1RevIdx, + file2index: file2RevIdx, + chain: null + }; + prev = root; + } + file1RevIdx--; + file2RevIdx--; + } else if (ses[i].t === diff.SES_DELETE) { + file1RevIdx--; + } else if (ses[i].t === diff.SES_ADD) { + file2RevIdx--; + } } - const stringified = stringify(queryParams) + var tail = { + file1index: -1, + file2index: -1, + chain: null + }; - if (stringified) { - url += '?' + stringified + if (!prev) { + return tail; } - return url + prev.chain = tail; + + return root; } -function parseURL (url) { - if (typeof url === 'string') { - url = new URL(url) +function diffIndices(file1, file2) { + // We apply the LCS to give a simple representation of the + // offsets and lengths of mismatched chunks in the input + // files. This is used by diff3_merge_indices below. - if (!/^https?:/.test(url.origin || url.protocol)) { - throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.') - } + var result = []; + var tail1 = file1.length; + var tail2 = file2.length; - return url - } + for (var candidate = longestCommonSubsequence(file1, file2); candidate !== null; candidate = candidate.chain) { + var mismatchLength1 = tail1 - candidate.file1index - 1; + var mismatchLength2 = tail2 - candidate.file2index - 1; + tail1 = candidate.file1index; + tail2 = candidate.file2index; - if (!url || typeof url !== 'object') { - throw new InvalidArgumentError('Invalid URL: The URL argument must be a non-null object.') + if (mismatchLength1 || mismatchLength2) { + result.push({ + file1: [tail1 + 1, mismatchLength1], + file2: [tail2 + 1, mismatchLength2] + }); + } } - if (!/^https?:/.test(url.origin || url.protocol)) { - throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.') - } + result.reverse(); + return result; +} - if (!(url instanceof URL)) { - if (url.port != null && url.port !== '' && !Number.isFinite(parseInt(url.port))) { - throw new InvalidArgumentError('Invalid URL: port must be a valid integer or a string representation of an integer.') - } +function diff3MergeIndices(a, o, b) { + // Given three files, A, O, and B, where both A and B are + // independently derived from O, returns a fairly complicated + // internal representation of merge decisions it's taken. The + // interested reader may wish to consult + // + // Sanjeev Khanna, Keshav Kunal, and Benjamin C. Pierce. "A + // Formal Investigation of Diff3." In Arvind and Prasad, + // editors, Foundations of Software Technology and Theoretical + // Computer Science (FSTTCS), December 2007. 
+ // + // (http://www.cis.upenn.edu/~bcpierce/papers/diff3-short.pdf) + var i; - if (url.path != null && typeof url.path !== 'string') { - throw new InvalidArgumentError('Invalid URL path: the path must be a string or null/undefined.') - } + var m1 = diffIndices(o, a); + var m2 = diffIndices(o, b); - if (url.pathname != null && typeof url.pathname !== 'string') { - throw new InvalidArgumentError('Invalid URL pathname: the pathname must be a string or null/undefined.') - } + var hunks = []; - if (url.hostname != null && typeof url.hostname !== 'string') { - throw new InvalidArgumentError('Invalid URL hostname: the hostname must be a string or null/undefined.') - } + function addHunk(h, side) { + hunks.push([h.file1[0], side, h.file1[1], h.file2[0], h.file2[1]]); + } + for (i = 0; i < m1.length; i++) { + addHunk(m1[i], 0); + } + for (i = 0; i < m2.length; i++) { + addHunk(m2[i], 2); + } + hunks.sort(function(x, y) { + return x[0] - y[0] + }); - if (url.origin != null && typeof url.origin !== 'string') { - throw new InvalidArgumentError('Invalid URL origin: the origin must be a string or null/undefined.') - } + var result = []; + var commonOffset = 0; - const port = url.port != null - ? url.port - : (url.protocol === 'https:' ? 443 : 80) - let origin = url.origin != null - ? url.origin - : `${url.protocol}//${url.hostname}:${port}` - let path = url.path != null - ? url.path - : `${url.pathname || ''}${url.search || ''}` + function copyCommon(targetOffset) { + if (targetOffset > commonOffset) { + result.push([1, commonOffset, targetOffset - commonOffset]); + commonOffset = targetOffset; + } + } - if (origin.endsWith('/')) { - origin = origin.substring(0, origin.length - 1) + for (var hunkIndex = 0; hunkIndex < hunks.length; hunkIndex++) { + var firstHunkIndex = hunkIndex; + var hunk = hunks[hunkIndex]; + var regionLhs = hunk[0]; + var regionRhs = regionLhs + hunk[2]; + while (hunkIndex < hunks.length - 1) { + var maybeOverlapping = hunks[hunkIndex + 1]; + var maybeLhs = maybeOverlapping[0]; + if (maybeLhs > regionRhs) break; + regionRhs = Math.max(regionRhs, maybeLhs + maybeOverlapping[2]); + hunkIndex++; } - if (path && !path.startsWith('/')) { - path = `/${path}` + copyCommon(regionLhs); + if (firstHunkIndex == hunkIndex) { + // The "overlap" was only one hunk long, meaning that + // there's no conflict here. Either a and o were the + // same, or b and o were the same. + if (hunk[4] > 0) { + result.push([hunk[1], hunk[3], hunk[4]]); + } + } else { + // A proper conflict. Determine the extents of the + // regions involved from a, o and b. Effectively merge + // all the hunks on the left into one giant hunk, and + // do the same for the right; then, correct for skew + // in the regions of o that each side changed, and + // report appropriate spans for the three sides. 
+ var regions = { + 0: [a.length, -1, o.length, -1], + 2: [b.length, -1, o.length, -1] + }; + for (i = firstHunkIndex; i <= hunkIndex; i++) { + hunk = hunks[i]; + var side = hunk[1]; + var r = regions[side]; + var oLhs = hunk[0]; + var oRhs = oLhs + hunk[2]; + var abLhs = hunk[3]; + var abRhs = abLhs + hunk[4]; + r[0] = Math.min(abLhs, r[0]); + r[1] = Math.max(abRhs, r[1]); + r[2] = Math.min(oLhs, r[2]); + r[3] = Math.max(oRhs, r[3]); + } + var aLhs = regions[0][0] + (regionLhs - regions[0][2]); + var aRhs = regions[0][1] + (regionRhs - regions[0][3]); + var bLhs = regions[2][0] + (regionLhs - regions[2][2]); + var bRhs = regions[2][1] + (regionRhs - regions[2][3]); + result.push([-1, + aLhs, aRhs - aLhs, + regionLhs, regionRhs - regionLhs, + bLhs, bRhs - bLhs + ]); } - // new URL(path, origin) is unsafe when `path` contains an absolute URL - // From https://developer.mozilla.org/en-US/docs/Web/API/URL/URL: - // If first parameter is a relative URL, second param is required, and will be used as the base URL. - // If first parameter is an absolute URL, a given second param will be ignored. - url = new URL(origin + path) + commonOffset = regionRhs; } - return url + copyCommon(o.length); + return result; } -function parseOrigin (url) { - url = parseURL(url) +function diff3Merge(a, o, b) { + // Applies the output of Diff.diff3_merge_indices to actually + // construct the merged file; the returned result alternates + // between "ok" and "conflict" blocks. - if (url.pathname !== '/' || url.search || url.hash) { - throw new InvalidArgumentError('invalid url') - } + var result = []; + var files = [a, o, b]; + var indices = diff3MergeIndices(a, o, b); - return url -} + var okLines = []; -function getHostname (host) { - if (host[0] === '[') { - const idx = host.indexOf(']') + function flushOk() { + if (okLines.length) { + result.push({ + ok: okLines + }); + } + okLines = []; + } - assert(idx !== -1) - return host.substring(1, idx) + function pushOk(xs) { + for (var j = 0; j < xs.length; j++) { + okLines.push(xs[j]); + } } - const idx = host.indexOf(':') - if (idx === -1) return host + function isTrueConflict(rec) { + if (rec[2] != rec[6]) return true; + var aoff = rec[1]; + var boff = rec[5]; + for (var j = 0; j < rec[2]; j++) { + if (a[j + aoff] != b[j + boff]) return true; + } + return false; + } - return host.substring(0, idx) + for (var i = 0; i < indices.length; i++) { + var x = indices[i]; + var side = x[0]; + if (side == -1) { + if (!isTrueConflict(x)) { + pushOk(files[0].slice(x[1], x[1] + x[2])); + } else { + flushOk(); + result.push({ + conflict: { + a: a.slice(x[1], x[1] + x[2]), + aIndex: x[1], + o: o.slice(x[3], x[3] + x[4]), + oIndex: x[3], + b: b.slice(x[5], x[5] + x[6]), + bIndex: x[5] + } + }); + } + } else { + pushOk(files[side].slice(x[1], x[1] + x[2])); + } + } + + flushOk(); + return result; } -// IP addresses are not valid server names per RFC6066 -// > Currently, the only server names supported are DNS hostnames -function getServerName (host) { - if (!host) { - return null - } +module.exports = diff3Merge; - assert.strictEqual(typeof host, 'string') - const servername = getHostname(host) - if (net.isIP(servername)) { - return '' - } +/***/ }), - return servername -} +/***/ 18101: +/***/ ((module) => { -function deepClone (obj) { - return JSON.parse(JSON.stringify(obj)) -} +/* + * URL: https://github.com/cubicdaiya/onp + * + * Copyright (c) 2013 Tatsuhiko Kubo + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and 
associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + * THE SOFTWARE. + */ -function isAsyncIterable (obj) { - return !!(obj != null && typeof obj[Symbol.asyncIterator] === 'function') -} +/** + * The algorithm implemented here is based on "An O(NP) Sequence Comparison Algorithm" + * by described by Sun Wu, Udi Manber and Gene Myers +*/ +module.exports = function (a_, b_) { + var a = a_, + b = b_, + m = a.length, + n = b.length, + reverse = false, + ed = null, + offset = m + 1, + path = [], + pathposi = [], + ses = [], + lcs = "", + SES_DELETE = -1, + SES_COMMON = 0, + SES_ADD = 1; + + var tmp1, + tmp2; + + var init = function () { + if (m >= n) { + tmp1 = a; + tmp2 = m; + a = b; + b = tmp1; + m = n; + n = tmp2; + reverse = true; + offset = m + 1; + } + }; -function isIterable (obj) { - return !!(obj != null && (typeof obj[Symbol.iterator] === 'function' || typeof obj[Symbol.asyncIterator] === 'function')) -} + var P = function (x, y, k) { + return { + 'x' : x, + 'y' : y, + 'k' : k, + }; + }; -function bodyLength (body) { - if (body == null) { - return 0 - } else if (isStream(body)) { - const state = body._readableState - return state && state.objectMode === false && state.ended === true && Number.isFinite(state.length) - ? state.length - : null - } else if (isBlobLike(body)) { - return body.size != null ? 
+ body.size : null - } else if (isBuffer(body)) { - return body.byteLength - } + var seselem = function (elem, t) { + return { + 'elem' : elem, + 't' : t, + }; + }; - return null -} + var snake = function (k, p, pp) { + var r, x, y; + if (p > pp) { + r = path[k-1+offset]; + } else { + r = path[k+1+offset]; + } -function isDestroyed (stream) { - return !stream || !!(stream.destroyed || stream[kDestroyed]) -} + y = Math.max(p, pp); + x = y - k; + while (x < m && y < n && a[x] === b[y]) { + ++x; + ++y; + } -function isReadableAborted (stream) { - const state = stream && stream._readableState - return isDestroyed(stream) && state && !state.endEmitted -} + path[k+offset] = pathposi.length; + pathposi[pathposi.length] = new P(x, y, r); + return y; + }; -function destroy (stream, err) { - if (stream == null || !isStream(stream) || isDestroyed(stream)) { - return - } + var recordseq = function (epc) { + var x_idx, y_idx, px_idx, py_idx, i; + x_idx = y_idx = 1; + px_idx = py_idx = 0; + for (i=epc.length-1;i>=0;--i) { + while(px_idx < epc[i].x || py_idx < epc[i].y) { + if (epc[i].y - epc[i].x > py_idx - px_idx) { + if (reverse) { + ses[ses.length] = new seselem(b[py_idx], SES_DELETE); + } else { + ses[ses.length] = new seselem(b[py_idx], SES_ADD); + } + ++y_idx; + ++py_idx; + } else if (epc[i].y - epc[i].x < py_idx - px_idx) { + if (reverse) { + ses[ses.length] = new seselem(a[px_idx], SES_ADD); + } else { + ses[ses.length] = new seselem(a[px_idx], SES_DELETE); + } + ++x_idx; + ++px_idx; + } else { + ses[ses.length] = new seselem(a[px_idx], SES_COMMON); + lcs += a[px_idx]; + ++x_idx; + ++y_idx; + ++px_idx; + ++py_idx; + } + } + } + }; - if (typeof stream.destroy === 'function') { - if (Object.getPrototypeOf(stream).constructor === IncomingMessage) { - // See: https://github.com/nodejs/node/pull/38505/files - stream.socket = null - } + init(); - stream.destroy(err) - } else if (err) { - process.nextTick((stream, err) => { - stream.emit('error', err) - }, stream, err) - } + return { + SES_DELETE : -1, + SES_COMMON : 0, + SES_ADD : 1, + editdistance : function () { + return ed; + }, + getlcs : function () { + return lcs; + }, + getses : function () { + return ses; + }, + compose : function () { + var delta, size, fp, p, r, epc, i, k; + delta = n - m; + size = m + n + 3; + fp = {}; + for (i=0;i<size;++i) { + fp[i] = -1; + path[i] = -1; + } + p = -1; + do { + ++p; + for (k=-p;k<=delta-1;++k) { + fp[k+offset] = snake(k, fp[k-1+offset]+1, fp[k+1+offset]); + } + for (k=delta+p;k>=delta+1;--k) { + fp[k+offset] = snake(k, fp[k-1+offset]+1, fp[k+1+offset]); + } + fp[delta+offset] = snake(delta, fp[delta-1+offset]+1, fp[delta+1+offset]); + } while (fp[delta+offset] !== n); - if (stream.destroyed !== true) { - stream[kDestroyed] = true - } -} -const KEEPALIVE_TIMEOUT_EXPR = /timeout=(\d+)/ -function parseKeepAliveTimeout (val) { - const m = val.toString().match(KEEPALIVE_TIMEOUT_EXPR) - return m ? parseInt(m[1], 10) * 1000 : null -} + ed = delta + 2 * p; -/** - * Retrieves a header name and returns its lowercase value.
- * @param {string | Buffer} value Header name - * @returns {string} - */ -function headerNameToString (value) { - return headerNameLowerCasedRecord[value] || value.toLowerCase() -} + epc = []; + while (r !== -1) { + epc[epc.length] = new P(pathposi[r].x, pathposi[r].y, null); + r = pathposi[r].k; + } + recordseq(epc); + } + }; +}; -function parseHeaders (headers, obj = {}) { - // For H2 support - if (!Array.isArray(headers)) return headers - for (let i = 0; i < headers.length; i += 2) { - const key = headers[i].toString().toLowerCase() - let val = obj[key] +/***/ }), - if (!val) { - if (Array.isArray(headers[i + 1])) { - obj[key] = headers[i + 1].map(x => x.toString('utf8')) - } else { - obj[key] = headers[i + 1].toString('utf8') - } - } else { - if (!Array.isArray(val)) { - val = [val] - obj[key] = val - } - val.push(headers[i + 1].toString('utf8')) +/***/ 77774: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const crypto = __nccwpck_require__(6113); + +const AUTH_KEY_VALUE_RE = /(\w+)=["']?([^'"]{1,10000})["']?/; +let NC = 0; +const NC_PAD = '00000000'; + +function md5(text) { + return crypto.createHash('md5').update(text).digest('hex'); +} + +function digestAuthHeader(method, uri, wwwAuthenticate, userpass) { + const parts = wwwAuthenticate.split(','); + const opts = {}; + for (let i = 0; i < parts.length; i++) { + const m = AUTH_KEY_VALUE_RE.exec(parts[i]); + if (m) { + opts[m[1]] = m[2].replace(/["']/g, ''); } } - // See https://github.com/nodejs/node/pull/46528 - if ('content-length' in obj && 'content-disposition' in obj) { - obj['content-disposition'] = Buffer.from(obj['content-disposition']).toString('latin1') + if (!opts.realm || !opts.nonce) { + return ''; } - return obj -} + let qop = opts.qop || ''; -function parseRawHeaders (headers) { - const ret = [] - let hasContentLength = false - let contentDispositionIdx = -1 + // WWW-Authenticate: Digest realm="testrealm@host.com", + // qop="auth,auth-int", + // nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", + // opaque="5ccc069c403ebaf9f0171e9517f40e41" + // Authorization: Digest username="Mufasa", + // realm="testrealm@host.com", + // nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", + // uri="/dir/index.html", + // qop=auth, + // nc=00000001, + // cnonce="0a4f113b", + // response="6629fae49393a05397450978507c4ef1", + // opaque="5ccc069c403ebaf9f0171e9517f40e41" + // HA1 = MD5( "Mufasa:testrealm@host.com:Circle Of Life" ) + // = 939e7578ed9e3c518a452acee763bce9 + // + // HA2 = MD5( "GET:/dir/index.html" ) + // = 39aff3a2bab6126f332b942af96d3366 + // + // Response = MD5( "939e7578ed9e3c518a452acee763bce9:\ + // dcd98b7102dd2f0e8b11d0f600bfb0c093:\ + // 00000001:0a4f113b:auth:\ + // 39aff3a2bab6126f332b942af96d3366" ) + // = 6629fae49393a05397450978507c4ef1 + userpass = userpass.split(':'); - for (let n = 0; n < headers.length; n += 2) { - const key = headers[n + 0].toString() - const val = headers[n + 1].toString('utf8') + let nc = String(++NC); + nc = NC_PAD.substring(nc.length) + nc; + const cnonce = crypto.randomBytes(8).toString('hex'); - if (key.length === 14 && (key === 'content-length' || key.toLowerCase() === 'content-length')) { - ret.push(key, val) - hasContentLength = true - } else if (key.length === 19 && (key === 'content-disposition' || key.toLowerCase() === 'content-disposition')) { - contentDispositionIdx = ret.push(key, val) - 1 - } else { - ret.push(key, val) - } + const ha1 = md5(userpass[0] + ':' + opts.realm + ':' + userpass[1]); + const ha2 = md5(method.toUpperCase() + ':' + uri); + let s = ha1 + 
':' + opts.nonce; + if (qop) { + qop = qop.split(',')[0]; + s += ':' + nc + ':' + cnonce + ':' + qop; } - - // See https://github.com/nodejs/node/pull/46528 - if (hasContentLength && contentDispositionIdx !== -1) { - ret[contentDispositionIdx] = Buffer.from(ret[contentDispositionIdx]).toString('latin1') + s += ':' + ha2; + const response = md5(s); + let authstring = 'Digest username="' + userpass[0] + '", realm="' + opts.realm + + '", nonce="' + opts.nonce + '", uri="' + uri + + '", response="' + response + '"'; + if (opts.opaque) { + authstring += ', opaque="' + opts.opaque + '"'; } - - return ret + if (qop) { + authstring +=', qop=' + qop + ', nc=' + nc + ', cnonce="' + cnonce + '"'; + } + return authstring; } -function isBuffer (buffer) { - // See, https://github.com/mcollina/undici/pull/319 - return buffer instanceof Uint8Array || Buffer.isBuffer(buffer) -} +module.exports = digestAuthHeader; -function validateHandler (handler, method, upgrade) { - if (!handler || typeof handler !== 'object') { - throw new InvalidArgumentError('handler must be an object') - } - if (typeof handler.onConnect !== 'function') { - throw new InvalidArgumentError('invalid onConnect method') - } +/***/ }), - if (typeof handler.onError !== 'function') { - throw new InvalidArgumentError('invalid onError method') - } +/***/ 14401: +/***/ ((module) => { - if (typeof handler.onBodySent !== 'function' && handler.onBodySent !== undefined) { - throw new InvalidArgumentError('invalid onBodySent method') - } +"use strict"; +/*! + * ee-first + * Copyright(c) 2014 Jonathan Ong + * MIT Licensed + */ - if (upgrade || method === 'CONNECT') { - if (typeof handler.onUpgrade !== 'function') { - throw new InvalidArgumentError('invalid onUpgrade method') - } - } else { - if (typeof handler.onHeaders !== 'function') { - throw new InvalidArgumentError('invalid onHeaders method') - } - if (typeof handler.onData !== 'function') { - throw new InvalidArgumentError('invalid onData method') - } - if (typeof handler.onComplete !== 'function') { - throw new InvalidArgumentError('invalid onComplete method') - } - } -} +/** + * Module exports. + * @public + */ -// A body is disturbed if it has been read from and it cannot -// be re-used without losing state or data. -function isDisturbed (body) { - return !!(body && ( - stream.isDisturbed - ? stream.isDisturbed(body) || body[kBodyUsed] // TODO (fix): Why is body[kBodyUsed] needed? - : body[kBodyUsed] || - body.readableDidRead || - (body._readableState && body._readableState.dataEmitted) || - isReadableAborted(body) - )) -} +module.exports = first -function isErrored (body) { - return !!(body && ( - stream.isErrored - ? stream.isErrored(body) - : /state: 'errored'/.test(nodeUtil.inspect(body) - ))) -} +/** + * Get the first event in a set of event emitters and event pairs. + * + * @param {array} stuff + * @param {function} done + * @public + */ -function isReadable (body) { - return !!(body && ( - stream.isReadable - ? stream.isReadable(body) - : /state: 'readable'/.test(nodeUtil.inspect(body) - ))) -} +function first(stuff, done) { + if (!Array.isArray(stuff)) + throw new TypeError('arg must be an array of [ee, events...] 
arrays') -function getSocketInfo (socket) { - return { - localAddress: socket.localAddress, - localPort: socket.localPort, - remoteAddress: socket.remoteAddress, - remotePort: socket.remotePort, - remoteFamily: socket.remoteFamily, - timeout: socket.timeout, - bytesWritten: socket.bytesWritten, - bytesRead: socket.bytesRead + var cleanups = [] + + for (var i = 0; i < stuff.length; i++) { + var arr = stuff[i] + + if (!Array.isArray(arr) || arr.length < 2) + throw new TypeError('each array member must be [ee, events...]') + + var ee = arr[0] + + for (var j = 1; j < arr.length; j++) { + var event = arr[j] + var fn = listener(event, callback) + + // listen to the event + ee.on(event, fn) + // push this listener to the list of cleanups + cleanups.push({ + ee: ee, + event: event, + fn: fn, + }) + } } -} -async function * convertIterableToBuffer (iterable) { - for await (const chunk of iterable) { - yield Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk) + function callback() { + cleanup() + done.apply(null, arguments) } -} -let ReadableStream -function ReadableStreamFrom (iterable) { - if (!ReadableStream) { - ReadableStream = (__nccwpck_require__(5356).ReadableStream) + function cleanup() { + var x + for (var i = 0; i < cleanups.length; i++) { + x = cleanups[i] + x.ee.removeListener(x.event, x.fn) + } } - if (ReadableStream.from) { - return ReadableStream.from(convertIterableToBuffer(iterable)) + function thunk(fn) { + done = fn } - let iterator - return new ReadableStream( - { - async start () { - iterator = iterable[Symbol.asyncIterator]() - }, - async pull (controller) { - const { done, value } = await iterator.next() - if (done) { - queueMicrotask(() => { - controller.close() - }) - } else { - const buf = Buffer.isBuffer(value) ? value : Buffer.from(value) - controller.enqueue(new Uint8Array(buf)) - } - return controller.desiredSize > 0 - }, - async cancel (reason) { - await iterator.return() - } - }, - 0 - ) -} + thunk.cancel = cleanup -// The chunk should be a FormData instance and contains -// all the required methods. -function isFormDataLike (object) { - return ( - object && - typeof object === 'object' && - typeof object.append === 'function' && - typeof object.delete === 'function' && - typeof object.get === 'function' && - typeof object.getAll === 'function' && - typeof object.has === 'function' && - typeof object.set === 'function' && - object[Symbol.toStringTag] === 'FormData' - ) + return thunk } -function throwIfAborted (signal) { - if (!signal) { return } - if (typeof signal.throwIfAborted === 'function') { - signal.throwIfAborted() - } else { - if (signal.aborted) { - // DOMException not available < v17.0.0 - const err = new Error('The operation was aborted') - err.name = 'AbortError' - throw err +/** + * Create the event listener. + * @private + */ + +function listener(event, done) { + return function onevent(arg1) { + var args = new Array(arguments.length) + var ee = this + var err = event === 'error' + ? 
arg1 + : null + + // copy args to prevent arguments escaping scope + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i] } - } -} -function addAbortListener (signal, listener) { - if ('addEventListener' in signal) { - signal.addEventListener('abort', listener, { once: true }) - return () => signal.removeEventListener('abort', listener) + done(err, ee, event, args) } - signal.addListener('abort', listener) - return () => signal.removeListener('abort', listener) } -const hasToWellFormed = !!String.prototype.toWellFormed -/** - * @param {string} val - */ -function toUSVString (val) { - if (hasToWellFormed) { - return `${val}`.toWellFormed() - } else if (nodeUtil.toUSVString) { - return nodeUtil.toUSVString(val) - } +/***/ }), - return `${val}` -} +/***/ 81205: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -// Parsed accordingly to RFC 9110 -// https://www.rfc-editor.org/rfc/rfc9110#field.content-range -function parseRangeHeader (range) { - if (range == null || range === '') return { start: 0, end: null, size: null } +var once = __nccwpck_require__(1223); - const m = range ? range.match(/^bytes (\d+)-(\d+)\/(\d+)?$/) : null - return m - ? { - start: parseInt(m[1]), - end: m[2] ? parseInt(m[2]) : null, - size: m[3] ? parseInt(m[3]) : null - } - : null -} +var noop = function() {}; -const kEnumerableProperty = Object.create(null) -kEnumerableProperty.enumerable = true +var isRequest = function(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +}; -module.exports = { - kEnumerableProperty, - nop, - isDisturbed, - isErrored, - isReadable, - toUSVString, - isReadableAborted, - isBlobLike, - parseOrigin, - parseURL, - getServerName, - isStream, - isIterable, - isAsyncIterable, - isDestroyed, - headerNameToString, - parseRawHeaders, - parseHeaders, - parseKeepAliveTimeout, - destroy, - bodyLength, - deepClone, - ReadableStreamFrom, - isBuffer, - validateHandler, - getSocketInfo, - isFormDataLike, - buildURL, - throwIfAborted, - addAbortListener, - parseRangeHeader, - nodeMajor, - nodeMinor, - nodeHasAutoSelectFamily: nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 13), - safeHTTPMethods: ['GET', 'HEAD', 'OPTIONS', 'TRACE'] -} +var isChildProcess = function(stream) { + return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 +}; + +var eos = function(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + + callback = once(callback || noop); + + var ws = stream._writableState; + var rs = stream._readableState; + var readable = opts.readable || (opts.readable !== false && stream.readable); + var writable = opts.writable || (opts.writable !== false && stream.writable); + var cancelled = false; + + var onlegacyfinish = function() { + if (!stream.writable) onfinish(); + }; + + var onfinish = function() { + writable = false; + if (!readable) callback.call(stream); + }; + + var onend = function() { + readable = false; + if (!writable) callback.call(stream); + }; + + var onexit = function(exitCode) { + callback.call(stream, exitCode ? 
new Error('exited with error code: ' + exitCode) : null); + }; + + var onerror = function(err) { + callback.call(stream, err); + }; + + var onclose = function() { + process.nextTick(onclosenexttick); + }; + + var onclosenexttick = function() { + if (cancelled) return; + if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); + if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); + }; + + var onrequest = function() { + stream.req.on('finish', onfinish); + }; + + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest(); + else stream.on('request', onrequest); + } else if (writable && !ws) { // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + + if (isChildProcess(stream)) stream.on('exit', onexit); + + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + + return function() { + cancelled = true; + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('exit', onexit); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +}; + +module.exports = eos; /***/ }), -/***/ 4839: +/***/ 6123: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -const Dispatcher = __nccwpck_require__(412) -const { - ClientDestroyedError, - ClientClosedError, - InvalidArgumentError -} = __nccwpck_require__(8045) -const { kDestroy, kClose, kDispatch, kInterceptors } = __nccwpck_require__(2785) +var GetIntrinsic = __nccwpck_require__(74538); -const kDestroyed = Symbol('destroyed') -const kClosed = Symbol('closed') -const kOnDestroyed = Symbol('onDestroyed') -const kOnClosed = Symbol('onClosed') -const kInterceptedDispatch = Symbol('Intercepted Dispatch') +/** @type {import('.')} */ +var $defineProperty = GetIntrinsic('%Object.defineProperty%', true) || false; +if ($defineProperty) { + try { + $defineProperty({}, 'a', { value: 1 }); + } catch (e) { + // IE 8 has a broken defineProperty + $defineProperty = false; + } +} -class DispatcherBase extends Dispatcher { - constructor () { - super() +module.exports = $defineProperty; - this[kDestroyed] = false - this[kOnDestroyed] = null - this[kClosed] = false - this[kOnClosed] = [] - } - get destroyed () { - return this[kDestroyed] - } +/***/ }), - get closed () { - return this[kClosed] - } +/***/ 91933: +/***/ ((module) => { - get interceptors () { - return this[kInterceptors] - } +"use strict"; - set interceptors (newInterceptors) { - if (newInterceptors) { - for (let i = newInterceptors.length - 1; i >= 0; i--) { - const interceptor = this[kInterceptors][i] - if (typeof interceptor !== 'function') { - throw new InvalidArgumentError('interceptor must be an function') - } - } - } - this[kInterceptors] = newInterceptors - } +/** @type {import('./eval')} */ +module.exports = EvalError; - close (callback) { - if (callback === undefined) { - return new Promise((resolve, reject) => { - this.close((err, data) => { - return err ? 
reject(err) : resolve(data) - }) - }) - } - if (typeof callback !== 'function') { - throw new InvalidArgumentError('invalid callback') - } +/***/ }), - if (this[kDestroyed]) { - queueMicrotask(() => callback(new ClientDestroyedError(), null)) - return - } +/***/ 28015: +/***/ ((module) => { - if (this[kClosed]) { - if (this[kOnClosed]) { - this[kOnClosed].push(callback) - } else { - queueMicrotask(() => callback(null, null)) - } - return - } +"use strict"; - this[kClosed] = true - this[kOnClosed].push(callback) - const onClosed = () => { - const callbacks = this[kOnClosed] - this[kOnClosed] = null - for (let i = 0; i < callbacks.length; i++) { - callbacks[i](null, null) - } - } +/** @type {import('.')} */ +module.exports = Error; - // Should not error. - this[kClose]() - .then(() => this.destroy()) - .then(() => { - queueMicrotask(onClosed) - }) - } - destroy (err, callback) { - if (typeof err === 'function') { - callback = err - err = null - } +/***/ }), - if (callback === undefined) { - return new Promise((resolve, reject) => { - this.destroy(err, (err, data) => { - return err ? /* istanbul ignore next: should never error */ reject(err) : resolve(data) - }) - }) - } +/***/ 54415: +/***/ ((module) => { - if (typeof callback !== 'function') { - throw new InvalidArgumentError('invalid callback') - } +"use strict"; - if (this[kDestroyed]) { - if (this[kOnDestroyed]) { - this[kOnDestroyed].push(callback) - } else { - queueMicrotask(() => callback(null, null)) - } - return - } - if (!err) { - err = new ClientDestroyedError() - } +/** @type {import('./range')} */ +module.exports = RangeError; - this[kDestroyed] = true - this[kOnDestroyed] = this[kOnDestroyed] || [] - this[kOnDestroyed].push(callback) - const onDestroyed = () => { - const callbacks = this[kOnDestroyed] - this[kOnDestroyed] = null - for (let i = 0; i < callbacks.length; i++) { - callbacks[i](null, null) - } - } +/***/ }), - // Should not error. 
- this[kDestroy](err).then(() => { - queueMicrotask(onDestroyed) - }) - } +/***/ 46279: +/***/ ((module) => { - [kInterceptedDispatch] (opts, handler) { - if (!this[kInterceptors] || this[kInterceptors].length === 0) { - this[kInterceptedDispatch] = this[kDispatch] - return this[kDispatch](opts, handler) - } +"use strict"; - let dispatch = this[kDispatch].bind(this) - for (let i = this[kInterceptors].length - 1; i >= 0; i--) { - dispatch = this[kInterceptors][i](dispatch) - } - this[kInterceptedDispatch] = dispatch - return dispatch(opts, handler) - } - dispatch (opts, handler) { - if (!handler || typeof handler !== 'object') { - throw new InvalidArgumentError('handler must be an object') - } +/** @type {import('./ref')} */ +module.exports = ReferenceError; - try { - if (!opts || typeof opts !== 'object') { - throw new InvalidArgumentError('opts must be an object.') - } - if (this[kDestroyed] || this[kOnDestroyed]) { - throw new ClientDestroyedError() - } +/***/ }), - if (this[kClosed]) { - throw new ClientClosedError() - } +/***/ 75474: +/***/ ((module) => { - return this[kInterceptedDispatch](opts, handler) - } catch (err) { - if (typeof handler.onError !== 'function') { - throw new InvalidArgumentError('invalid onError method') - } +"use strict"; - handler.onError(err) - return false - } - } -} +/** @type {import('./syntax')} */ +module.exports = SyntaxError; + + +/***/ }), + +/***/ 6361: +/***/ ((module) => { + +"use strict"; -module.exports = DispatcherBase + +/** @type {import('./type')} */ +module.exports = TypeError; /***/ }), -/***/ 412: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 5065: +/***/ ((module) => { "use strict"; -const EventEmitter = __nccwpck_require__(2361) +/** @type {import('./uri')} */ +module.exports = URIError; -class Dispatcher extends EventEmitter { - dispatch () { - throw new Error('not implemented') - } - close () { - throw new Error('not implemented') - } +/***/ }), - destroy () { - throw new Error('not implemented') +/***/ 94070: +/***/ ((module) => { + +"use strict"; +/*! + * escape-html + * Copyright(c) 2012-2013 TJ Holowaychuk + * Copyright(c) 2015 Andreas Lubbe + * Copyright(c) 2015 Tiancheng "Timothy" Gu + * MIT Licensed + */ + + + +/** + * Module variables. + * @private + */ + +var matchHtmlRegExp = /["'&<>]/; + +/** + * Module exports. + * @public + */ + +module.exports = escapeHtml; + +/** + * Escape special characters in the given string of html. + * + * @param {string} string The string to escape for inserting into HTML + * @return {string} + * @public + */ + +function escapeHtml(string) { + var str = '' + string; + var match = matchHtmlRegExp.exec(str); + + if (!match) { + return str; + } + + var escape; + var html = ''; + var index = 0; + var lastIndex = 0; + + for (index = match.index; index < str.length; index++) { + switch (str.charCodeAt(index)) { + case 34: // " + escape = '"'; + break; + case 38: // & + escape = '&'; + break; + case 39: // ' + escape = '''; + break; + case 60: // < + escape = '<'; + break; + case 62: // > + escape = '>'; + break; + default: + continue; + } + + if (lastIndex !== index) { + html += str.substring(lastIndex, index); + } + + lastIndex = index + 1; + html += escape; } -} -module.exports = Dispatcher + return lastIndex !== index + ? 
html + str.substring(lastIndex, index) + : html; +} /***/ }), -/***/ 1472: +/***/ 67512: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -const Busboy = __nccwpck_require__(727) -const util = __nccwpck_require__(3983) -const { - ReadableStreamFrom, - isBlobLike, - isReadableStreamLike, - readableStreamClose, - createDeferredPromise, - fullyReadBody -} = __nccwpck_require__(2538) -const { FormData } = __nccwpck_require__(2015) -const { kState } = __nccwpck_require__(5861) -const { webidl } = __nccwpck_require__(1744) -const { DOMException, structuredClone } = __nccwpck_require__(1037) -const { Blob, File: NativeFile } = __nccwpck_require__(4300) -const { kBodyUsed } = __nccwpck_require__(2785) -const assert = __nccwpck_require__(9491) -const { isErrored } = __nccwpck_require__(3983) -const { isUint8Array, isArrayBuffer } = __nccwpck_require__(9830) -const { File: UndiciFile } = __nccwpck_require__(8511) -const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(685) +var isObject = __nccwpck_require__(70429); -let ReadableStream = globalThis.ReadableStream +module.exports = function extend(o/*, objects*/) { + if (!isObject(o)) { o = {}; } -/** @type {globalThis['File']} */ -const File = NativeFile ?? UndiciFile -const textEncoder = new TextEncoder() -const textDecoder = new TextDecoder() + var len = arguments.length; + for (var i = 1; i < len; i++) { + var obj = arguments[i]; -// https://fetch.spec.whatwg.org/#concept-bodyinit-extract -function extractBody (object, keepalive = false) { - if (!ReadableStream) { - ReadableStream = (__nccwpck_require__(5356).ReadableStream) + if (isObject(obj)) { + assign(o, obj); + } } + return o; +}; - // 1. Let stream be null. - let stream = null - - // 2. If object is a ReadableStream object, then set stream to object. - if (object instanceof ReadableStream) { - stream = object - } else if (isBlobLike(object)) { - // 3. Otherwise, if object is a Blob object, set stream to the - // result of running object’s get stream. - stream = object.stream() - } else { - // 4. Otherwise, set stream to a new ReadableStream object, and set - // up stream. - stream = new ReadableStream({ - async pull (controller) { - controller.enqueue( - typeof source === 'string' ? textEncoder.encode(source) : source - ) - queueMicrotask(() => readableStreamClose(controller)) - }, - start () {}, - type: undefined - }) +function assign(a, b) { + for (var key in b) { + if (hasOwn(b, key)) { + a[key] = b[key]; + } } +} - // 5. Assert: stream is a ReadableStream object. - assert(isReadableStreamLike(stream)) +/** + * Returns true if the given `key` is an own property of `obj`. + */ - // 6. Let action be null. - let action = null +function hasOwn(obj, key) { + return Object.prototype.hasOwnProperty.call(obj, key); +} - // 7. Let source be null. - let source = null - // 8. Let length be null. - let length = null +/***/ }), - // 9. Let type be null. - let type = null +/***/ 31133: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // 10. Switch on object: - if (typeof object === 'string') { - // Set source to the UTF-8 encoding of object. - // Note: setting source to a Uint8Array here breaks some mocking assumptions. - source = object +var debug; - // Set type to `text/plain;charset=UTF-8`. 
- type = 'text/plain;charset=UTF-8' - } else if (object instanceof URLSearchParams) { - // URLSearchParams +module.exports = function () { + if (!debug) { + try { + /* eslint global-require: off */ + debug = __nccwpck_require__(38237)("follow-redirects"); + } + catch (error) { /* */ } + if (typeof debug !== "function") { + debug = function () { /* */ }; + } + } + debug.apply(null, arguments); +}; - // spec says to run application/x-www-form-urlencoded on body.list - // this is implemented in Node.js as apart of an URLSearchParams instance toString method - // See: https://github.com/nodejs/node/blob/e46c680bf2b211bbd52cf959ca17ee98c7f657f5/lib/internal/url.js#L490 - // and https://github.com/nodejs/node/blob/e46c680bf2b211bbd52cf959ca17ee98c7f657f5/lib/internal/url.js#L1100 - // Set source to the result of running the application/x-www-form-urlencoded serializer with object’s list. - source = object.toString() +/***/ }), - // Set type to `application/x-www-form-urlencoded;charset=UTF-8`. - type = 'application/x-www-form-urlencoded;charset=UTF-8' - } else if (isArrayBuffer(object)) { - // BufferSource/ArrayBuffer +/***/ 67707: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // Set source to a copy of the bytes held by object. - source = new Uint8Array(object.slice()) - } else if (ArrayBuffer.isView(object)) { - // BufferSource/ArrayBufferView +var url = __nccwpck_require__(57310); +var URL = url.URL; +var http = __nccwpck_require__(13685); +var https = __nccwpck_require__(95687); +var Writable = (__nccwpck_require__(12781).Writable); +var assert = __nccwpck_require__(39491); +var debug = __nccwpck_require__(31133); - // Set source to a copy of the bytes held by object. - source = new Uint8Array(object.buffer.slice(object.byteOffset, object.byteOffset + object.byteLength)) - } else if (util.isFormDataLike(object)) { - const boundary = `----formdata-undici-0${`${Math.floor(Math.random() * 1e11)}`.padStart(11, '0')}` - const prefix = `--${boundary}\r\nContent-Disposition: form-data` +// Whether to use the native URL object or the legacy url module +var useNativeURL = false; +try { + assert(new URL()); +} +catch (error) { + useNativeURL = error.code === "ERR_INVALID_URL"; +} + +// URL fields to preserve in copy operations +var preservedUrlFields = [ + "auth", + "host", + "hostname", + "href", + "path", + "pathname", + "port", + "protocol", + "query", + "search", + "hash", +]; - /*! formdata-polyfill. MIT License. Jimmy Wärting */ - const escape = (str) => - str.replace(/\n/g, '%0A').replace(/\r/g, '%0D').replace(/"/g, '%22') - const normalizeLinefeeds = (value) => value.replace(/\r?\n|\r/g, '\r\n') +// Create handlers that pass events from native requests +var events = ["abort", "aborted", "connect", "error", "socket", "timeout"]; +var eventHandlers = Object.create(null); +events.forEach(function (event) { + eventHandlers[event] = function (arg1, arg2, arg3) { + this._redirectable.emit(event, arg1, arg2, arg3); + }; +}); - // Set action to this step: run the multipart/form-data - // encoding algorithm, with object’s entry list and UTF-8. - // - This ensures that the body is immutable and can't be changed afterwords - // - That the content-length is calculated in advance. - // - And that all parts are pre-encoded and ready to be sent. 
+// Error types with codes +var InvalidUrlError = createErrorType( + "ERR_INVALID_URL", + "Invalid URL", + TypeError +); +var RedirectionError = createErrorType( + "ERR_FR_REDIRECTION_FAILURE", + "Redirected request failed" +); +var TooManyRedirectsError = createErrorType( + "ERR_FR_TOO_MANY_REDIRECTS", + "Maximum number of redirects exceeded", + RedirectionError +); +var MaxBodyLengthExceededError = createErrorType( + "ERR_FR_MAX_BODY_LENGTH_EXCEEDED", + "Request body larger than maxBodyLength limit" +); +var WriteAfterEndError = createErrorType( + "ERR_STREAM_WRITE_AFTER_END", + "write after end" +); + +// istanbul ignore next +var destroy = Writable.prototype.destroy || noop; + +// An HTTP(S) request that can be redirected +function RedirectableRequest(options, responseCallback) { + // Initialize the request + Writable.call(this); + this._sanitizeOptions(options); + this._options = options; + this._ended = false; + this._ending = false; + this._redirectCount = 0; + this._redirects = []; + this._requestBodyLength = 0; + this._requestBodyBuffers = []; + + // Attach a callback if passed + if (responseCallback) { + this.on("response", responseCallback); + } + + // React to responses of native requests + var self = this; + this._onNativeResponse = function (response) { + try { + self._processResponse(response); + } + catch (cause) { + self.emit("error", cause instanceof RedirectionError ? + cause : new RedirectionError({ cause: cause })); + } + }; - const blobParts = [] - const rn = new Uint8Array([13, 10]) // '\r\n' - length = 0 - let hasUnknownSizeValue = false + // Perform the first request + this._performRequest(); +} +RedirectableRequest.prototype = Object.create(Writable.prototype); - for (const [name, value] of object) { - if (typeof value === 'string') { - const chunk = textEncoder.encode(prefix + - `; name="${escape(normalizeLinefeeds(name))}"` + - `\r\n\r\n${normalizeLinefeeds(value)}\r\n`) - blobParts.push(chunk) - length += chunk.byteLength - } else { - const chunk = textEncoder.encode(`${prefix}; name="${escape(normalizeLinefeeds(name))}"` + - (value.name ? `; filename="${escape(value.name)}"` : '') + '\r\n' + - `Content-Type: ${ - value.type || 'application/octet-stream' - }\r\n\r\n`) - blobParts.push(chunk, value, rn) - if (typeof value.size === 'number') { - length += chunk.byteLength + value.size + rn.byteLength - } else { - hasUnknownSizeValue = true - } - } - } +RedirectableRequest.prototype.abort = function () { + destroyRequest(this._currentRequest); + this._currentRequest.abort(); + this.emit("abort"); +}; - const chunk = textEncoder.encode(`--${boundary}--`) - blobParts.push(chunk) - length += chunk.byteLength - if (hasUnknownSizeValue) { - length = null - } +RedirectableRequest.prototype.destroy = function (error) { + destroyRequest(this._currentRequest, error); + destroy.call(this, error); + return this; +}; - // Set source to object. 
- source = object +// Writes buffered data to the current native request +RedirectableRequest.prototype.write = function (data, encoding, callback) { + // Writing is not allowed if end has been called + if (this._ending) { + throw new WriteAfterEndError(); + } - action = async function * () { - for (const part of blobParts) { - if (part.stream) { - yield * part.stream() - } else { - yield part - } - } + // Validate input and shift parameters if necessary + if (!isString(data) && !isBuffer(data)) { + throw new TypeError("data should be a string, Buffer or Uint8Array"); + } + if (isFunction(encoding)) { + callback = encoding; + encoding = null; + } + + // Ignore empty buffers, since writing them doesn't invoke the callback + // https://github.com/nodejs/node/issues/22066 + if (data.length === 0) { + if (callback) { + callback(); } + return; + } + // Only write when we don't exceed the maximum body length + if (this._requestBodyLength + data.length <= this._options.maxBodyLength) { + this._requestBodyLength += data.length; + this._requestBodyBuffers.push({ data: data, encoding: encoding }); + this._currentRequest.write(data, encoding, callback); + } + // Error when we exceed the maximum body length + else { + this.emit("error", new MaxBodyLengthExceededError()); + this.abort(); + } +}; - // Set type to `multipart/form-data; boundary=`, - // followed by the multipart/form-data boundary string generated - // by the multipart/form-data encoding algorithm. - type = 'multipart/form-data; boundary=' + boundary - } else if (isBlobLike(object)) { - // Blob +// Ends the current native request +RedirectableRequest.prototype.end = function (data, encoding, callback) { + // Shift parameters if necessary + if (isFunction(data)) { + callback = data; + data = encoding = null; + } + else if (isFunction(encoding)) { + callback = encoding; + encoding = null; + } + + // Write data if needed and end + if (!data) { + this._ended = this._ending = true; + this._currentRequest.end(null, null, callback); + } + else { + var self = this; + var currentRequest = this._currentRequest; + this.write(data, encoding, function () { + self._ended = true; + currentRequest.end(null, null, callback); + }); + this._ending = true; + } +}; - // Set source to object. - source = object +// Sets a header value on the current native request +RedirectableRequest.prototype.setHeader = function (name, value) { + this._options.headers[name] = value; + this._currentRequest.setHeader(name, value); +}; - // Set length to object’s size. - length = object.size +// Clears a header value on the current native request +RedirectableRequest.prototype.removeHeader = function (name) { + delete this._options.headers[name]; + this._currentRequest.removeHeader(name); +}; - // If object’s type attribute is not the empty byte sequence, set - // type to its value. - if (object.type) { - type = object.type - } - } else if (typeof object[Symbol.asyncIterator] === 'function') { - // If keepalive is true, then throw a TypeError. 
- if (keepalive) { - throw new TypeError('keepalive') +// Global timeout for all underlying requests +RedirectableRequest.prototype.setTimeout = function (msecs, callback) { + var self = this; + + // Destroys the socket on timeout + function destroyOnTimeout(socket) { + socket.setTimeout(msecs); + socket.removeListener("timeout", socket.destroy); + socket.addListener("timeout", socket.destroy); + } + + // Sets up a timer to trigger a timeout event + function startTimer(socket) { + if (self._timeout) { + clearTimeout(self._timeout); } + self._timeout = setTimeout(function () { + self.emit("timeout"); + clearTimer(); + }, msecs); + destroyOnTimeout(socket); + } - // If object is disturbed or locked, then throw a TypeError. - if (util.isDisturbed(object) || object.locked) { - throw new TypeError( - 'Response body object should not be disturbed or locked' - ) + // Stops a timeout from triggering + function clearTimer() { + // Clear the timeout + if (self._timeout) { + clearTimeout(self._timeout); + self._timeout = null; } - stream = - object instanceof ReadableStream ? object : ReadableStreamFrom(object) + // Clean up all attached listeners + self.removeListener("abort", clearTimer); + self.removeListener("error", clearTimer); + self.removeListener("response", clearTimer); + self.removeListener("close", clearTimer); + if (callback) { + self.removeListener("timeout", callback); + } + if (!self.socket) { + self._currentRequest.removeListener("socket", startTimer); + } } - // 11. If source is a byte sequence, then set action to a - // step that returns source and length to source’s length. - if (typeof source === 'string' || util.isBuffer(source)) { - length = Buffer.byteLength(source) + // Attach callback if passed + if (callback) { + this.on("timeout", callback); } - // 12. If action is non-null, then run these steps in in parallel: - if (action != null) { - // Run action. - let iterator - stream = new ReadableStream({ - async start () { - iterator = action(object)[Symbol.asyncIterator]() - }, - async pull (controller) { - const { value, done } = await iterator.next() - if (done) { - // When running action is done, close stream. - queueMicrotask(() => { - controller.close() - }) - } else { - // Whenever one or more bytes are available and stream is not errored, - // enqueue a Uint8Array wrapping an ArrayBuffer containing the available - // bytes into stream. - if (!isErrored(stream)) { - controller.enqueue(new Uint8Array(value)) - } - } - return controller.desiredSize > 0 - }, - async cancel (reason) { - await iterator.return() - }, - type: undefined - }) + // Start the timer if or when the socket is opened + if (this.socket) { + startTimer(this.socket); + } + else { + this._currentRequest.once("socket", startTimer); } - // 13. Let body be a body whose stream is stream, source is source, - // and length is length. - const body = { stream, source, length } + // Clean up on events + this.on("socket", destroyOnTimeout); + this.on("abort", clearTimer); + this.on("error", clearTimer); + this.on("response", clearTimer); + this.on("close", clearTimer); - // 14. Return (body, type). 
- return [body, type] -} + return this; +}; -// https://fetch.spec.whatwg.org/#bodyinit-safely-extract -function safelyExtractBody (object, keepalive = false) { - if (!ReadableStream) { - // istanbul ignore next - ReadableStream = (__nccwpck_require__(5356).ReadableStream) +// Proxy all other public ClientRequest methods +[ + "flushHeaders", "getHeader", + "setNoDelay", "setSocketKeepAlive", +].forEach(function (method) { + RedirectableRequest.prototype[method] = function (a, b) { + return this._currentRequest[method](a, b); + }; +}); + +// Proxy all public ClientRequest properties +["aborted", "connection", "socket"].forEach(function (property) { + Object.defineProperty(RedirectableRequest.prototype, property, { + get: function () { return this._currentRequest[property]; }, + }); +}); + +RedirectableRequest.prototype._sanitizeOptions = function (options) { + // Ensure headers are always present + if (!options.headers) { + options.headers = {}; } - // To safely extract a body and a `Content-Type` value from - // a byte sequence or BodyInit object object, run these steps: + // Since http.request treats host as an alias of hostname, + // but the url module interprets host as hostname plus port, + // eliminate the host property to avoid confusion. + if (options.host) { + // Use hostname if set, because it has precedence + if (!options.hostname) { + options.hostname = options.host; + } + delete options.host; + } - // 1. If object is a ReadableStream object, then: - if (object instanceof ReadableStream) { - // Assert: object is neither disturbed nor locked. - // istanbul ignore next - assert(!util.isDisturbed(object), 'The body has already been consumed.') - // istanbul ignore next - assert(!object.locked, 'The stream is locked.') + // Complete the URL object when necessary + if (!options.pathname && options.path) { + var searchPos = options.path.indexOf("?"); + if (searchPos < 0) { + options.pathname = options.path; + } + else { + options.pathname = options.path.substring(0, searchPos); + options.search = options.path.substring(searchPos); + } } +}; - // 2. Return the results of extracting object. - return extractBody(object, keepalive) -} -function cloneBody (body) { - // To clone a body body, run these steps: +// Executes the next native request (initial or redirect) +RedirectableRequest.prototype._performRequest = function () { + // Load the native protocol + var protocol = this._options.protocol; + var nativeProtocol = this._options.nativeProtocols[protocol]; + if (!nativeProtocol) { + throw new TypeError("Unsupported protocol " + protocol); + } + + // If specified, use the agent corresponding to the protocol + // (HTTP and HTTPS use different types of agents) + if (this._options.agents) { + var scheme = protocol.slice(0, -1); + this._options.agent = this._options.agents[scheme]; + } + + // Create the native request and set up its event handlers + var request = this._currentRequest = + nativeProtocol.request(this._options, this._onNativeResponse); + request._redirectable = this; + for (var event of events) { + request.on(event, eventHandlers[event]); + } + + // RFC7230§5.3.1: When making a request directly to an origin server, […] + // a client MUST send only the absolute path […] as the request-target. + this._currentUrl = /^\//.test(this._options.path) ? + url.format(this._options) : + // When making a request to a proxy, […] + // a client MUST send the target URI in absolute-form […]. 
+ this._options.path; + + // End a redirected request + // (The first request must be ended explicitly with RedirectableRequest#end) + if (this._isRedirect) { + // Write the request entity and end + var i = 0; + var self = this; + var buffers = this._requestBodyBuffers; + (function writeNext(error) { + // Only write if this request has not been redirected yet + /* istanbul ignore else */ + if (request === self._currentRequest) { + // Report any write errors + /* istanbul ignore if */ + if (error) { + self.emit("error", error); + } + // Write the next buffer if there are still left + else if (i < buffers.length) { + var buffer = buffers[i++]; + /* istanbul ignore else */ + if (!request.finished) { + request.write(buffer.data, buffer.encoding, writeNext); + } + } + // End the request if `end` has been called on us + else if (self._ended) { + request.end(); + } + } + }()); + } +}; - // https://fetch.spec.whatwg.org/#concept-body-clone +// Processes a response from the current native request +RedirectableRequest.prototype._processResponse = function (response) { + // Store the redirected response + var statusCode = response.statusCode; + if (this._options.trackRedirects) { + this._redirects.push({ + url: this._currentUrl, + headers: response.headers, + statusCode: statusCode, + }); + } - // 1. Let « out1, out2 » be the result of teeing body’s stream. - const [out1, out2] = body.stream.tee() - const out2Clone = structuredClone(out2, { transfer: [out2] }) - // This, for whatever reasons, unrefs out2Clone which allows - // the process to exit by itself. - const [, finalClone] = out2Clone.tee() + // RFC7231§6.4: The 3xx (Redirection) class of status code indicates + // that further action needs to be taken by the user agent in order to + // fulfill the request. If a Location header field is provided, + // the user agent MAY automatically redirect its request to the URI + // referenced by the Location field value, + // even if the specific status code is not understood. - // 2. Set body’s stream to out1. - body.stream = out1 + // If the response is not a redirect; return it as-is + var location = response.headers.location; + if (!location || this._options.followRedirects === false || + statusCode < 300 || statusCode >= 400) { + response.responseUrl = this._currentUrl; + response.redirects = this._redirects; + this.emit("response", response); - // 3. Return a body whose stream is out2 and other members are copied from body. - return { - stream: finalClone, - length: body.length, - source: body.source + // Clean up + this._requestBodyBuffers = []; + return; } -} -async function * consumeBody (body) { - if (body) { - if (isUint8Array(body)) { - yield body - } else { - const stream = body.stream + // The response is a redirect, so abort the current request + destroyRequest(this._currentRequest); + // Discard the remainder of the response to avoid waiting for data + response.destroy(); + + // RFC7231§6.4: A client SHOULD detect and intervene + // in cyclical redirections (i.e., "infinite" redirection loops). 
+ if (++this._redirectCount > this._options.maxRedirects) { + throw new TooManyRedirectsError(); + } + + // Store the request headers if applicable + var requestHeaders; + var beforeRedirect = this._options.beforeRedirect; + if (beforeRedirect) { + requestHeaders = Object.assign({ + // The Host header was set by nativeProtocol.request + Host: response.req.getHeader("host"), + }, this._options.headers); + } + + // RFC7231§6.4: Automatic redirection needs to done with + // care for methods not known to be safe, […] + // RFC7231§6.4.2–3: For historical reasons, a user agent MAY change + // the request method from POST to GET for the subsequent request. + var method = this._options.method; + if ((statusCode === 301 || statusCode === 302) && this._options.method === "POST" || + // RFC7231§6.4.4: The 303 (See Other) status code indicates that + // the server is redirecting the user agent to a different resource […] + // A user agent can perform a retrieval request targeting that URI + // (a GET or HEAD request if using HTTP) […] + (statusCode === 303) && !/^(?:GET|HEAD)$/.test(this._options.method)) { + this._options.method = "GET"; + // Drop a possible entity and headers related to it + this._requestBodyBuffers = []; + removeMatchingHeaders(/^content-/i, this._options.headers); + } + + // Drop the Host header, as the redirect might lead to a different host + var currentHostHeader = removeMatchingHeaders(/^host$/i, this._options.headers); + + // If the redirect is relative, carry over the host of the last request + var currentUrlParts = parseUrl(this._currentUrl); + var currentHost = currentHostHeader || currentUrlParts.host; + var currentUrl = /^\w+:/.test(location) ? this._currentUrl : + url.format(Object.assign(currentUrlParts, { host: currentHost })); + + // Create the redirected request + var redirectUrl = resolveUrl(location, currentUrl); + debug("redirecting to", redirectUrl.href); + this._isRedirect = true; + spreadUrlObject(redirectUrl, this._options); + + // Drop confidential headers when redirecting to a less secure protocol + // or to a different domain that is not a superdomain + if (redirectUrl.protocol !== currentUrlParts.protocol && + redirectUrl.protocol !== "https:" || + redirectUrl.host !== currentHost && + !isSubdomain(redirectUrl.host, currentHost)) { + removeMatchingHeaders(/^(?:(?:proxy-)?authorization|cookie)$/i, this._options.headers); + } + + // Evaluate the beforeRedirect callback + if (isFunction(beforeRedirect)) { + var responseDetails = { + headers: response.headers, + statusCode: statusCode, + }; + var requestDetails = { + url: currentUrl, + method: method, + headers: requestHeaders, + }; + beforeRedirect(this._options, responseDetails, requestDetails); + this._sanitizeOptions(this._options); + } - if (util.isDisturbed(stream)) { - throw new TypeError('The body has already been consumed.') + // Perform the redirected request + this._performRequest(); +}; + +// Wraps the key/value object of protocols with redirect functionality +function wrap(protocols) { + // Default settings + var exports = { + maxRedirects: 21, + maxBodyLength: 10 * 1024 * 1024, + }; + + // Wrap each protocol + var nativeProtocols = {}; + Object.keys(protocols).forEach(function (scheme) { + var protocol = scheme + ":"; + var nativeProtocol = nativeProtocols[protocol] = protocols[scheme]; + var wrappedProtocol = exports[scheme] = Object.create(nativeProtocol); + + // Executes a request, following redirects + function request(input, options, callback) { + // Parse parameters, ensuring that input is 
an object + if (isURL(input)) { + input = spreadUrlObject(input); + } + else if (isString(input)) { + input = spreadUrlObject(parseUrl(input)); + } + else { + callback = options; + options = validateUrl(input); + input = { protocol: protocol }; + } + if (isFunction(options)) { + callback = options; + options = null; } - if (stream.locked) { - throw new TypeError('The stream is locked.') + // Set defaults + options = Object.assign({ + maxRedirects: exports.maxRedirects, + maxBodyLength: exports.maxBodyLength, + }, input, options); + options.nativeProtocols = nativeProtocols; + if (!isString(options.host) && !isString(options.hostname)) { + options.hostname = "::1"; } - // Compat. - stream[kBodyUsed] = true + assert.equal(options.protocol, protocol, "protocol mismatch"); + debug("options", options); + return new RedirectableRequest(options, callback); + } - yield * stream + // Executes a GET request, following redirects + function get(input, options, callback) { + var wrappedRequest = wrappedProtocol.request(input, options, callback); + wrappedRequest.end(); + return wrappedRequest; + } + + // Expose the properties on the wrapped protocol + Object.defineProperties(wrappedProtocol, { + request: { value: request, configurable: true, enumerable: true, writable: true }, + get: { value: get, configurable: true, enumerable: true, writable: true }, + }); + }); + return exports; +} + +function noop() { /* empty */ } + +function parseUrl(input) { + var parsed; + /* istanbul ignore else */ + if (useNativeURL) { + parsed = new URL(input); + } + else { + // Ensure the URL is valid and absolute + parsed = validateUrl(url.parse(input)); + if (!isString(parsed.protocol)) { + throw new InvalidUrlError({ input }); } } + return parsed; } -function throwIfAborted (state) { - if (state.aborted) { - throw new DOMException('The operation was aborted.', 'AbortError') +function resolveUrl(relative, base) { + /* istanbul ignore next */ + return useNativeURL ? new URL(relative, base) : parseUrl(url.resolve(base, relative)); +} + +function validateUrl(input) { + if (/^\[/.test(input.hostname) && !/^\[[:0-9a-f]+\]$/i.test(input.hostname)) { + throw new InvalidUrlError({ input: input.href || input }); } + if (/^\[/.test(input.host) && !/^\[[:0-9a-f]+\](:\d+)?$/i.test(input.host)) { + throw new InvalidUrlError({ input: input.href || input }); + } + return input; } -function bodyMixinMethods (instance) { - const methods = { - blob () { - // The blob() method steps are to return the result of - // running consume body with this and the following step - // given a byte sequence bytes: return a Blob whose - // contents are bytes and whose type attribute is this’s - // MIME type. - return specConsumeBody(this, (bytes) => { - let mimeType = bodyMimeType(this) +function spreadUrlObject(urlObject, target) { + var spread = target || {}; + for (var key of preservedUrlFields) { + spread[key] = urlObject[key]; + } - if (mimeType === 'failure') { - mimeType = '' - } else if (mimeType) { - mimeType = serializeAMimeType(mimeType) - } + // Fix IPv6 hostname + if (spread.hostname.startsWith("[")) { + spread.hostname = spread.hostname.slice(1, -1); + } + // Ensure port is a number + if (spread.port !== "") { + spread.port = Number(spread.port); + } + // Concatenate path + spread.path = spread.search ? spread.pathname + spread.search : spread.pathname; - // Return a Blob whose contents are bytes and type attribute - // is mimeType. 
- return new Blob([bytes], { type: mimeType }) - }, instance) - }, + return spread; +} - arrayBuffer () { - // The arrayBuffer() method steps are to return the result - // of running consume body with this and the following step - // given a byte sequence bytes: return a new ArrayBuffer - // whose contents are bytes. - return specConsumeBody(this, (bytes) => { - return new Uint8Array(bytes).buffer - }, instance) - }, +function removeMatchingHeaders(regex, headers) { + var lastValue; + for (var header in headers) { + if (regex.test(header)) { + lastValue = headers[header]; + delete headers[header]; + } + } + return (lastValue === null || typeof lastValue === "undefined") ? + undefined : String(lastValue).trim(); +} - text () { - // The text() method steps are to return the result of running - // consume body with this and UTF-8 decode. - return specConsumeBody(this, utf8DecodeBytes, instance) - }, +function createErrorType(code, message, baseClass) { + // Create constructor + function CustomError(properties) { + Error.captureStackTrace(this, this.constructor); + Object.assign(this, properties || {}); + this.code = code; + this.message = this.cause ? message + ": " + this.cause.message : message; + } - json () { - // The json() method steps are to return the result of running - // consume body with this and parse JSON from bytes. - return specConsumeBody(this, parseJSONFromBytes, instance) + // Attach constructor and set default properties + CustomError.prototype = new (baseClass || Error)(); + Object.defineProperties(CustomError.prototype, { + constructor: { + value: CustomError, + enumerable: false, + }, + name: { + value: "Error [" + code + "]", + enumerable: false, }, + }); + return CustomError; +} - async formData () { - webidl.brandCheck(this, instance) +function destroyRequest(request, error) { + for (var event of events) { + request.removeListener(event, eventHandlers[event]); + } + request.on("error", noop); + request.destroy(error); +} - throwIfAborted(this[kState]) +function isSubdomain(subdomain, domain) { + assert(isString(subdomain) && isString(domain)); + var dot = subdomain.length - domain.length - 1; + return dot > 0 && subdomain[dot] === "." 
&& subdomain.endsWith(domain); +} - const contentType = this.headers.get('Content-Type') +function isString(value) { + return typeof value === "string" || value instanceof String; +} - // If mimeType’s essence is "multipart/form-data", then: - if (/multipart\/form-data/.test(contentType)) { - const headers = {} - for (const [key, value] of this.headers) headers[key.toLowerCase()] = value - - const responseFormData = new FormData() - - let busboy - - try { - busboy = new Busboy({ - headers, - preservePath: true - }) - } catch (err) { - throw new DOMException(`${err}`, 'AbortError') - } +function isFunction(value) { + return typeof value === "function"; +} - busboy.on('field', (name, value) => { - responseFormData.append(name, value) - }) - busboy.on('file', (name, value, filename, encoding, mimeType) => { - const chunks = [] +function isBuffer(value) { + return typeof value === "object" && ("length" in value); +} - if (encoding === 'base64' || encoding.toLowerCase() === 'base64') { - let base64chunk = '' +function isURL(value) { + return URL && value instanceof URL; +} - value.on('data', (chunk) => { - base64chunk += chunk.toString().replace(/[\r\n]/gm, '') +// Exports +module.exports = wrap({ http: http, https: https }); +module.exports.wrap = wrap; - const end = base64chunk.length - base64chunk.length % 4 - chunks.push(Buffer.from(base64chunk.slice(0, end), 'base64')) - base64chunk = base64chunk.slice(end) - }) - value.on('end', () => { - chunks.push(Buffer.from(base64chunk, 'base64')) - responseFormData.append(name, new File(chunks, filename, { type: mimeType })) - }) - } else { - value.on('data', (chunk) => { - chunks.push(chunk) - }) - value.on('end', () => { - responseFormData.append(name, new File(chunks, filename, { type: mimeType })) - }) - } - }) +/***/ }), - const busboyResolve = new Promise((resolve, reject) => { - busboy.on('finish', resolve) - busboy.on('error', (err) => reject(new TypeError(err))) - }) +/***/ 64334: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (this.body !== null) for await (const chunk of consumeBody(this[kState].body)) busboy.write(chunk) - busboy.end() - await busboyResolve +var CombinedStream = __nccwpck_require__(85443); +var util = __nccwpck_require__(73837); +var path = __nccwpck_require__(71017); +var http = __nccwpck_require__(13685); +var https = __nccwpck_require__(95687); +var parseUrl = (__nccwpck_require__(57310).parse); +var fs = __nccwpck_require__(57147); +var Stream = (__nccwpck_require__(12781).Stream); +var mime = __nccwpck_require__(43583); +var asynckit = __nccwpck_require__(14812); +var populate = __nccwpck_require__(17142); - return responseFormData - } else if (/application\/x-www-form-urlencoded/.test(contentType)) { - // Otherwise, if mimeType’s essence is "application/x-www-form-urlencoded", then: +// Public API +module.exports = FormData; - // 1. Let entries be the result of parsing bytes. - let entries - try { - let text = '' - // application/x-www-form-urlencoded parser will keep the BOM. 
- // https://url.spec.whatwg.org/#concept-urlencoded-parser - // Note that streaming decoder is stateful and cannot be reused - const streamingDecoder = new TextDecoder('utf-8', { ignoreBOM: true }) +// make it a Stream +util.inherits(FormData, CombinedStream); - for await (const chunk of consumeBody(this[kState].body)) { - if (!isUint8Array(chunk)) { - throw new TypeError('Expected Uint8Array chunk') - } - text += streamingDecoder.decode(chunk, { stream: true }) - } - text += streamingDecoder.decode() - entries = new URLSearchParams(text) - } catch (err) { - // istanbul ignore next: Unclear when new URLSearchParams can fail on a string. - // 2. If entries is failure, then throw a TypeError. - throw Object.assign(new TypeError(), { cause: err }) - } +/** + * Create readable "multipart/form-data" streams. + * Can be used to submit forms + * and file uploads to other web applications. + * + * @constructor + * @param {Object} options - Properties to be added/overriden for FormData and CombinedStream + */ +function FormData(options) { + if (!(this instanceof FormData)) { + return new FormData(options); + } - // 3. Return a new FormData object whose entries are entries. - const formData = new FormData() - for (const [name, value] of entries) { - formData.append(name, value) - } - return formData - } else { - // Wait a tick before checking if the request has been aborted. - // Otherwise, a TypeError can be thrown when an AbortError should. - await Promise.resolve() + this._overheadLength = 0; + this._valueLength = 0; + this._valuesToMeasure = []; - throwIfAborted(this[kState]) + CombinedStream.call(this); - // Otherwise, throw a TypeError. - throw webidl.errors.exception({ - header: `${instance.name}.formData`, - message: 'Could not parse content as FormData.' - }) - } - } + options = options || {}; + for (var option in options) { + this[option] = options[option]; } - - return methods } -function mixinBody (prototype) { - Object.assign(prototype.prototype, bodyMixinMethods(prototype)) -} +FormData.LINE_BREAK = '\r\n'; +FormData.DEFAULT_CONTENT_TYPE = 'application/octet-stream'; -/** - * @see https://fetch.spec.whatwg.org/#concept-body-consume-body - * @param {Response|Request} object - * @param {(value: unknown) => unknown} convertBytesToJSValue - * @param {Response|Request} instance - */ -async function specConsumeBody (object, convertBytesToJSValue, instance) { - webidl.brandCheck(object, instance) +FormData.prototype.append = function(field, value, options) { - throwIfAborted(object[kState]) + options = options || {}; - // 1. If object is unusable, then return a promise rejected - // with a TypeError. - if (bodyUnusable(object[kState].body)) { - throw new TypeError('Body is unusable') + // allow filename as single option + if (typeof options == 'string') { + options = {filename: options}; } - // 2. Let promise be a new promise. - const promise = createDeferredPromise() - - // 3. Let errorSteps given error be to reject promise with error. - const errorSteps = (error) => promise.reject(error) + var append = CombinedStream.prototype.append.bind(this); - // 4. Let successSteps given a byte sequence data be to resolve - // promise with the result of running convertBytesToJSValue - // with data. If that threw an exception, then run errorSteps - // with that exception. 
- const successSteps = (data) => { - try { - promise.resolve(convertBytesToJSValue(data)) - } catch (e) { - errorSteps(e) - } + // all that streamy business can't handle numbers + if (typeof value == 'number') { + value = '' + value; } - // 5. If object’s body is null, then run successSteps with an - // empty byte sequence. - if (object[kState].body == null) { - successSteps(new Uint8Array()) - return promise.promise + // https://github.com/felixge/node-form-data/issues/38 + if (util.isArray(value)) { + // Please convert your array into string + // the way web server expects it + this._error(new Error('Arrays are not supported.')); + return; } - // 6. Otherwise, fully read object’s body given successSteps, - // errorSteps, and object’s relevant global object. - await fullyReadBody(object[kState].body, successSteps, errorSteps) + var header = this._multiPartHeader(field, value, options); + var footer = this._multiPartFooter(); - // 7. Return promise. - return promise.promise -} + append(header); + append(value); + append(footer); -// https://fetch.spec.whatwg.org/#body-unusable -function bodyUnusable (body) { - // An object including the Body interface mixin is - // said to be unusable if its body is non-null and - // its body’s stream is disturbed or locked. - return body != null && (body.stream.locked || util.isDisturbed(body.stream)) -} + // pass along options.knownLength + this._trackLength(header, value, options); +}; -/** - * @see https://encoding.spec.whatwg.org/#utf-8-decode - * @param {Buffer} buffer - */ -function utf8DecodeBytes (buffer) { - if (buffer.length === 0) { - return '' +FormData.prototype._trackLength = function(header, value, options) { + var valueLength = 0; + + // used w/ getLengthSync(), when length is known. + // e.g. for streaming directly from a remote server, + // w/ a known file a size, and not wanting to wait for + // incoming file to finish to get its size. + if (options.knownLength != null) { + valueLength += +options.knownLength; + } else if (Buffer.isBuffer(value)) { + valueLength = value.length; + } else if (typeof value === 'string') { + valueLength = Buffer.byteLength(value); } - // 1. Let buffer be the result of peeking three bytes from - // ioQueue, converted to a byte sequence. + this._valueLength += valueLength; - // 2. If buffer is 0xEF 0xBB 0xBF, then read three - // bytes from ioQueue. (Do nothing with those bytes.) - if (buffer[0] === 0xEF && buffer[1] === 0xBB && buffer[2] === 0xBF) { - buffer = buffer.subarray(3) + // @check why add CRLF? does this account for custom/multiple CRLFs? + this._overheadLength += + Buffer.byteLength(header) + + FormData.LINE_BREAK.length; + + // empty or either doesn't have path or not an http response or not a stream + if (!value || ( !value.path && !(value.readable && value.hasOwnProperty('httpVersion')) && !(value instanceof Stream))) { + return; } - // 3. Process a queue with an instance of UTF-8’s - // decoder, ioQueue, output, and "replacement". - const output = textDecoder.decode(buffer) + // no need to bother with the length + if (!options.knownLength) { + this._valuesToMeasure.push(value); + } +}; - // 4. Return output. 
- return output -} +FormData.prototype._lengthRetriever = function(value, callback) { -/** - * @see https://infra.spec.whatwg.org/#parse-json-bytes-to-a-javascript-value - * @param {Uint8Array} bytes - */ -function parseJSONFromBytes (bytes) { - return JSON.parse(utf8DecodeBytes(bytes)) -} + if (value.hasOwnProperty('fd')) { -/** - * @see https://fetch.spec.whatwg.org/#concept-body-mime-type - * @param {import('./response').Response|import('./request').Request} object - */ -function bodyMimeType (object) { - const { headersList } = object[kState] - const contentType = headersList.get('content-type') + // take read range into a account + // `end` = Infinity –> read file till the end + // + // TODO: Looks like there is bug in Node fs.createReadStream + // it doesn't respect `end` options without `start` options + // Fix it when node fixes it. + // https://github.com/joyent/node/issues/7819 + if (value.end != undefined && value.end != Infinity && value.start != undefined) { + + // when end specified + // no need to calculate range + // inclusive, starts with 0 + callback(null, value.end + 1 - (value.start ? value.start : 0)); + + // not that fast snoopy + } else { + // still need to fetch file size from fs + fs.stat(value.path, function(err, stat) { - if (contentType === null) { - return 'failure' - } + var fileSize; - return parseMIMEType(contentType) -} + if (err) { + callback(err); + return; + } -module.exports = { - extractBody, - safelyExtractBody, - cloneBody, - mixinBody -} + // update final size based on the range options + fileSize = stat.size - (value.start ? value.start : 0); + callback(null, fileSize); + }); + } + // or http response + } else if (value.hasOwnProperty('httpVersion')) { + callback(null, +value.headers['content-length']); -/***/ }), + // or request stream http://github.com/mikeal/request + } else if (value.hasOwnProperty('httpModule')) { + // wait till response come back + value.on('response', function(response) { + value.pause(); + callback(null, +response.headers['content-length']); + }); + value.resume(); -/***/ 1037: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // something else + } else { + callback('Unknown stream'); + } +}; -"use strict"; +FormData.prototype._multiPartHeader = function(field, value, options) { + // custom header specified (as string)? + // it becomes responsible for boundary + // (e.g. to handle extra CRLFs on .NET servers) + if (typeof options.header == 'string') { + return options.header; + } + var contentDisposition = this._getContentDisposition(value, options); + var contentType = this._getContentType(value, options); -const { MessageChannel, receiveMessageOnPort } = __nccwpck_require__(1267) + var contents = ''; + var headers = { + // add custom disposition as third element or keep it two elements if not + 'Content-Disposition': ['form-data', 'name="' + field + '"'].concat(contentDisposition || []), + // if no content type. allow it to be empty array + 'Content-Type': [].concat(contentType || []) + }; -const corsSafeListedMethods = ['GET', 'HEAD', 'POST'] -const corsSafeListedMethodsSet = new Set(corsSafeListedMethods) + // allow custom headers. + if (typeof options.header == 'object') { + populate(headers, options.header); + } -const nullBodyStatus = [101, 204, 205, 304] + var header; + for (var prop in headers) { + if (!headers.hasOwnProperty(prop)) continue; + header = headers[prop]; -const redirectStatus = [301, 302, 303, 307, 308] -const redirectStatusSet = new Set(redirectStatus) + // skip nullish headers. 
+ if (header == null) { + continue; + } -// https://fetch.spec.whatwg.org/#block-bad-port -const badPorts = [ - '1', '7', '9', '11', '13', '15', '17', '19', '20', '21', '22', '23', '25', '37', '42', '43', '53', '69', '77', '79', - '87', '95', '101', '102', '103', '104', '109', '110', '111', '113', '115', '117', '119', '123', '135', '137', - '139', '143', '161', '179', '389', '427', '465', '512', '513', '514', '515', '526', '530', '531', '532', - '540', '548', '554', '556', '563', '587', '601', '636', '989', '990', '993', '995', '1719', '1720', '1723', - '2049', '3659', '4045', '5060', '5061', '6000', '6566', '6665', '6666', '6667', '6668', '6669', '6697', - '10080' -] + // convert all headers to arrays. + if (!Array.isArray(header)) { + header = [header]; + } -const badPortsSet = new Set(badPorts) + // add non-empty headers. + if (header.length) { + contents += prop + ': ' + header.join('; ') + FormData.LINE_BREAK; + } + } -// https://w3c.github.io/webappsec-referrer-policy/#referrer-policies -const referrerPolicy = [ - '', - 'no-referrer', - 'no-referrer-when-downgrade', - 'same-origin', - 'origin', - 'strict-origin', - 'origin-when-cross-origin', - 'strict-origin-when-cross-origin', - 'unsafe-url' -] -const referrerPolicySet = new Set(referrerPolicy) + return '--' + this.getBoundary() + FormData.LINE_BREAK + contents + FormData.LINE_BREAK; +}; -const requestRedirect = ['follow', 'manual', 'error'] +FormData.prototype._getContentDisposition = function(value, options) { -const safeMethods = ['GET', 'HEAD', 'OPTIONS', 'TRACE'] -const safeMethodsSet = new Set(safeMethods) + var filename + , contentDisposition + ; -const requestMode = ['navigate', 'same-origin', 'no-cors', 'cors'] + if (typeof options.filepath === 'string') { + // custom filepath for relative paths + filename = path.normalize(options.filepath).replace(/\\/g, '/'); + } else if (options.filename || value.name || value.path) { + // custom filename take precedence + // formidable and the browser add a name property + // fs- and request- streams have path property + filename = path.basename(options.filename || value.name || value.path); + } else if (value.readable && value.hasOwnProperty('httpVersion')) { + // or try http response + filename = path.basename(value.client._httpMessage.path || ''); + } -const requestCredentials = ['omit', 'same-origin', 'include'] + if (filename) { + contentDisposition = 'filename="' + filename + '"'; + } -const requestCache = [ - 'default', - 'no-store', - 'reload', - 'no-cache', - 'force-cache', - 'only-if-cached' -] + return contentDisposition; +}; -// https://fetch.spec.whatwg.org/#request-body-header-name -const requestBodyHeader = [ - 'content-encoding', - 'content-language', - 'content-location', - 'content-type', - // See https://github.com/nodejs/undici/issues/2021 - // 'Content-Length' is a forbidden header name, which is typically - // removed in the Headers implementation. However, undici doesn't - // filter out headers, so we add it here. 
- 'content-length' -] +FormData.prototype._getContentType = function(value, options) { -// https://fetch.spec.whatwg.org/#enumdef-requestduplex -const requestDuplex = [ - 'half' -] + // use custom content-type above all + var contentType = options.contentType; -// http://fetch.spec.whatwg.org/#forbidden-method -const forbiddenMethods = ['CONNECT', 'TRACE', 'TRACK'] -const forbiddenMethodsSet = new Set(forbiddenMethods) + // or try `name` from formidable, browser + if (!contentType && value.name) { + contentType = mime.lookup(value.name); + } -const subresource = [ - 'audio', - 'audioworklet', - 'font', - 'image', - 'manifest', - 'paintworklet', - 'script', - 'style', - 'track', - 'video', - 'xslt', - '' -] -const subresourceSet = new Set(subresource) + // or try `path` from fs-, request- streams + if (!contentType && value.path) { + contentType = mime.lookup(value.path); + } -/** @type {globalThis['DOMException']} */ -const DOMException = globalThis.DOMException ?? (() => { - // DOMException was only made a global in Node v17.0.0, - // but fetch supports >= v16.8. - try { - atob('~') - } catch (err) { - return Object.getPrototypeOf(err).constructor + // or if it's http-reponse + if (!contentType && value.readable && value.hasOwnProperty('httpVersion')) { + contentType = value.headers['content-type']; } -})() -let channel + // or guess it from the filepath or filename + if (!contentType && (options.filepath || options.filename)) { + contentType = mime.lookup(options.filepath || options.filename); + } -/** @type {globalThis['structuredClone']} */ -const structuredClone = - globalThis.structuredClone ?? - // https://github.com/nodejs/node/blob/b27ae24dcc4251bad726d9d84baf678d1f707fed/lib/internal/structured_clone.js - // structuredClone was added in v17.0.0, but fetch supports v16.8 - function structuredClone (value, options = undefined) { - if (arguments.length === 0) { - throw new TypeError('missing argument') + // fallback to the default content type if `value` is not simple value + if (!contentType && typeof value == 'object') { + contentType = FormData.DEFAULT_CONTENT_TYPE; + } + + return contentType; +}; + +FormData.prototype._multiPartFooter = function() { + return function(next) { + var footer = FormData.LINE_BREAK; + + var lastPart = (this._streams.length === 0); + if (lastPart) { + footer += this._lastBoundary(); } - if (!channel) { - channel = new MessageChannel() + next(footer); + }.bind(this); +}; + +FormData.prototype._lastBoundary = function() { + return '--' + this.getBoundary() + '--' + FormData.LINE_BREAK; +}; + +FormData.prototype.getHeaders = function(userHeaders) { + var header; + var formHeaders = { + 'content-type': 'multipart/form-data; boundary=' + this.getBoundary() + }; + + for (header in userHeaders) { + if (userHeaders.hasOwnProperty(header)) { + formHeaders[header.toLowerCase()] = userHeaders[header]; } - channel.port1.unref() - channel.port2.unref() - channel.port1.postMessage(value, options?.transfer) - return receiveMessageOnPort(channel.port2).message } -module.exports = { - DOMException, - structuredClone, - subresource, - forbiddenMethods, - requestBodyHeader, - referrerPolicy, - requestRedirect, - requestMode, - requestCredentials, - requestCache, - redirectStatus, - corsSafeListedMethods, - nullBodyStatus, - safeMethods, - badPorts, - requestDuplex, - subresourceSet, - badPortsSet, - redirectStatusSet, - corsSafeListedMethodsSet, - safeMethodsSet, - forbiddenMethodsSet, - referrerPolicySet -} + return formHeaders; +}; +FormData.prototype.setBoundary = 
function(boundary) { + this._boundary = boundary; +}; -/***/ }), +FormData.prototype.getBoundary = function() { + if (!this._boundary) { + this._generateBoundary(); + } -/***/ 685: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + return this._boundary; +}; -const assert = __nccwpck_require__(9491) -const { atob } = __nccwpck_require__(4300) -const { isomorphicDecode } = __nccwpck_require__(2538) +FormData.prototype.getBuffer = function() { + var dataBuffer = new Buffer.alloc( 0 ); + var boundary = this.getBoundary(); -const encoder = new TextEncoder() + // Create the form content. Add Line breaks to the end of data. + for (var i = 0, len = this._streams.length; i < len; i++) { + if (typeof this._streams[i] !== 'function') { -/** - * @see https://mimesniff.spec.whatwg.org/#http-token-code-point - */ -const HTTP_TOKEN_CODEPOINTS = /^[!#$%&'*+-.^_|~A-Za-z0-9]+$/ -const HTTP_WHITESPACE_REGEX = /(\u000A|\u000D|\u0009|\u0020)/ // eslint-disable-line -/** - * @see https://mimesniff.spec.whatwg.org/#http-quoted-string-token-code-point - */ -const HTTP_QUOTED_STRING_TOKENS = /[\u0009|\u0020-\u007E|\u0080-\u00FF]/ // eslint-disable-line + // Add content to the buffer. + if(Buffer.isBuffer(this._streams[i])) { + dataBuffer = Buffer.concat( [dataBuffer, this._streams[i]]); + }else { + dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(this._streams[i])]); + } -// https://fetch.spec.whatwg.org/#data-url-processor -/** @param {URL} dataURL */ -function dataURLProcessor (dataURL) { - // 1. Assert: dataURL’s scheme is "data". - assert(dataURL.protocol === 'data:') + // Add break after content. + if (typeof this._streams[i] !== 'string' || this._streams[i].substring( 2, boundary.length + 2 ) !== boundary) { + dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(FormData.LINE_BREAK)] ); + } + } + } - // 2. Let input be the result of running the URL - // serializer on dataURL with exclude fragment - // set to true. - let input = URLSerializer(dataURL, true) + // Add the footer and return the Buffer object. + return Buffer.concat( [dataBuffer, Buffer.from(this._lastBoundary())] ); +}; - // 3. Remove the leading "data:" string from input. - input = input.slice(5) +FormData.prototype._generateBoundary = function() { + // This generates a 50 character boundary similar to those used by Firefox. + // They are optimized for boyer-moore parsing. + var boundary = '--------------------------'; + for (var i = 0; i < 24; i++) { + boundary += Math.floor(Math.random() * 10).toString(16); + } - // 4. Let position point at the start of input. - const position = { position: 0 } + this._boundary = boundary; +}; - // 5. Let mimeType be the result of collecting a - // sequence of code points that are not equal - // to U+002C (,), given position. - let mimeType = collectASequenceOfCodePointsFast( - ',', - input, - position - ) +// Note: getLengthSync DOESN'T calculate streams length +// As workaround one can calculate file size manually +// and add it as knownLength option +FormData.prototype.getLengthSync = function() { + var knownLength = this._overheadLength + this._valueLength; - // 6. Strip leading and trailing ASCII whitespace - // from mimeType. 
- // Undici implementation note: we need to store the - // length because if the mimetype has spaces removed, - // the wrong amount will be sliced from the input in - // step #9 - const mimeTypeLength = mimeType.length - mimeType = removeASCIIWhitespace(mimeType, true, true) + // Don't get confused, there are 3 "internal" streams for each keyval pair + // so it basically checks if there is any value added to the form + if (this._streams.length) { + knownLength += this._lastBoundary().length; + } - // 7. If position is past the end of input, then - // return failure - if (position.position >= input.length) { - return 'failure' + // https://github.com/form-data/form-data/issues/40 + if (!this.hasKnownLength()) { + // Some async length retrievers are present + // therefore synchronous length calculation is false. + // Please use getLength(callback) to get proper length + this._error(new Error('Cannot calculate proper length in synchronous way.')); } - // 8. Advance position by 1. - position.position++ + return knownLength; +}; - // 9. Let encodedBody be the remainder of input. - const encodedBody = input.slice(mimeTypeLength + 1) +// Public API to check if length of added values is known +// https://github.com/form-data/form-data/issues/196 +// https://github.com/form-data/form-data/issues/262 +FormData.prototype.hasKnownLength = function() { + var hasKnownLength = true; - // 10. Let body be the percent-decoding of encodedBody. - let body = stringPercentDecode(encodedBody) + if (this._valuesToMeasure.length) { + hasKnownLength = false; + } - // 11. If mimeType ends with U+003B (;), followed by - // zero or more U+0020 SPACE, followed by an ASCII - // case-insensitive match for "base64", then: - if (/;(\u0020){0,}base64$/i.test(mimeType)) { - // 1. Let stringBody be the isomorphic decode of body. - const stringBody = isomorphicDecode(body) + return hasKnownLength; +}; - // 2. Set body to the forgiving-base64 decode of - // stringBody. - body = forgivingBase64(stringBody) +FormData.prototype.getLength = function(cb) { + var knownLength = this._overheadLength + this._valueLength; - // 3. If body is failure, then return failure. - if (body === 'failure') { - return 'failure' + if (this._streams.length) { + knownLength += this._lastBoundary().length; + } + + if (!this._valuesToMeasure.length) { + process.nextTick(cb.bind(this, null, knownLength)); + return; + } + + asynckit.parallel(this._valuesToMeasure, this._lengthRetriever, function(err, values) { + if (err) { + cb(err); + return; } - // 4. Remove the last 6 code points from mimeType. - mimeType = mimeType.slice(0, -6) + values.forEach(function(length) { + knownLength += length; + }); - // 5. Remove trailing U+0020 SPACE code points from mimeType, - // if any. - mimeType = mimeType.replace(/(\u0020)+$/, '') + cb(null, knownLength); + }); +}; - // 6. Remove the last U+003B (;) code point from mimeType. - mimeType = mimeType.slice(0, -1) - } +FormData.prototype.submit = function(params, cb) { + var request + , options + , defaults = {method: 'post'} + ; + + // parse provided url if it's string + // or treat it as options object + if (typeof params == 'string') { + + params = parseUrl(params); + options = populate({ + port: params.port, + path: params.pathname, + host: params.hostname, + protocol: params.protocol + }, defaults); + + // use custom params + } else { - // 12. If mimeType starts with U+003B (;), then prepend - // "text/plain" to mimeType. 
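The `getLengthSync`/`getLength`/`hasKnownLength` trio above can only report an exact size when every appended value has a measurable length; a bare stream forces the asynchronous path unless an explicit `knownLength` is supplied. A minimal usage sketch of the `form-data` API shown above (the file path is a placeholder):

```js
const fs = require('fs');
const FormData = require('form-data');

const form = new FormData();
form.append('field', 'value');
// Without knownLength the stream's size is unknown, so getLengthSync()
// cannot return a correct total (see the hasKnownLength() check above).
form.append('file', fs.createReadStream('./upload.bin'), {
  knownLength: fs.statSync('./upload.bin').size
});

// Asynchronous path: _lengthRetriever stats files and reads
// content-length headers for HTTP responses.
form.getLength((err, length) => {
  if (err) throw err;
  console.log('Content-Length:', length);
});
```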
- if (mimeType.startsWith(';')) { - mimeType = 'text/plain' + mimeType + options = populate(params, defaults); + // if no port provided use default one + if (!options.port) { + options.port = options.protocol == 'https:' ? 443 : 80; + } } - // 13. Let mimeTypeRecord be the result of parsing - // mimeType. - let mimeTypeRecord = parseMIMEType(mimeType) + // put that good code in getHeaders to some use + options.headers = this.getHeaders(params.headers); - // 14. If mimeTypeRecord is failure, then set - // mimeTypeRecord to text/plain;charset=US-ASCII. - if (mimeTypeRecord === 'failure') { - mimeTypeRecord = parseMIMEType('text/plain;charset=US-ASCII') + // https if specified, fallback to http in any other case + if (options.protocol == 'https:') { + request = https.request(options); + } else { + request = http.request(options); } - // 15. Return a new data: URL struct whose MIME - // type is mimeTypeRecord and body is body. - // https://fetch.spec.whatwg.org/#data-url-struct - return { mimeType: mimeTypeRecord, body } -} + // get content length and fire away + this.getLength(function(err, length) { + if (err && err !== 'Unknown stream') { + this._error(err); + return; + } -// https://url.spec.whatwg.org/#concept-url-serializer -/** - * @param {URL} url - * @param {boolean} excludeFragment - */ -function URLSerializer (url, excludeFragment = false) { - if (!excludeFragment) { - return url.href - } + // add content length + if (length) { + request.setHeader('Content-Length', length); + } - const href = url.href - const hashLength = url.hash.length + this.pipe(request); + if (cb) { + var onResponse; - return hashLength === 0 ? href : href.substring(0, href.length - hashLength) -} + var callback = function (error, responce) { + request.removeListener('error', callback); + request.removeListener('response', onResponse); -// https://infra.spec.whatwg.org/#collect-a-sequence-of-code-points -/** - * @param {(char: string) => boolean} condition - * @param {string} input - * @param {{ position: number }} position - */ -function collectASequenceOfCodePoints (condition, input, position) { - // 1. Let result be the empty string. - let result = '' + return cb.call(this, error, responce); + }; - // 2. While position doesn’t point past the end of input and the - // code point at position within input meets the condition condition: - while (position.position < input.length && condition(input[position.position])) { - // 1. Append that code point to the end of result. - result += input[position.position] + onResponse = callback.bind(this, null); - // 2. Advance position by 1. - position.position++ - } + request.on('error', callback); + request.on('response', onResponse); + } + }.bind(this)); - // 3. Return result. - return result -} + return request; +}; + +FormData.prototype._error = function(err) { + if (!this.error) { + this.error = err; + this.pause(); + this.emit('error', err); + } +}; + +FormData.prototype.toString = function () { + return '[object FormData]'; +}; + + +/***/ }), + +/***/ 17142: +/***/ ((module) => { + +// populates missing values +module.exports = function(dst, src) { + + Object.keys(src).forEach(function(prop) + { + dst[prop] = dst[prop] || src[prop]; + }); + + return dst; +}; + + +/***/ }), + +/***/ 75454: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +"use strict"; /** - * A faster collectASequenceOfCodePoints that only works when comparing a single character. 
- * @param {string} char - * @param {string} input - * @param {{ position: number }} position + * Form Data format: + * + +```txt +--FormStreamBoundary1349886663601\r\n +Content-Disposition: form-data; name="foo"\r\n +\r\n +\r\n +--FormStreamBoundary1349886663601\r\n +Content-Disposition: form-data; name="data"\r\n +Content-Type: application/json\r\n +\r\n +\r\n +--FormStreamBoundary1349886663601\r\n +Content-Disposition: form-data; name="file"; filename="formstream.test.js"\r\n +Content-Type: application/javascript\r\n +\r\n + +... + +\r\n +--FormStreamBoundary1349886663601\r\n +Content-Disposition: form-data; name="pic"; filename="fawave.png"\r\n +Content-Type: image/png\r\n +\r\n +\r\n +--FormStreamBoundary1349886663601-- +``` + + * */ -function collectASequenceOfCodePointsFast (char, input, position) { - const idx = input.indexOf(char, position.position) - const start = position.position - if (idx === -1) { - position.position = input.length - return input.slice(start) + + +var debug = (__nccwpck_require__(73837).debuglog)('formstream'); +var Stream = __nccwpck_require__(12781); +var parseStream = __nccwpck_require__(11473); +var util = __nccwpck_require__(73837); +var mime = __nccwpck_require__(29994); +var path = __nccwpck_require__(71017); +var fs = __nccwpck_require__(57147); +var destroy = __nccwpck_require__(43225); +var hex = __nccwpck_require__(26002); + +var PADDING = '--'; +var NEW_LINE = '\r\n'; +var NEW_LINE_BUFFER = Buffer.from(NEW_LINE); + +function FormStream(options) { + if (!(this instanceof FormStream)) { + return new FormStream(options); } - position.position = idx - return input.slice(start, position.position) -} + FormStream.super_.call(this); -// https://url.spec.whatwg.org/#string-percent-decode -/** @param {string} input */ -function stringPercentDecode (input) { - // 1. Let bytes be the UTF-8 encoding of input. - const bytes = encoder.encode(input) + this._boundary = this._generateBoundary(); + this._streams = []; + this._buffers = []; + this._endData = Buffer.from(PADDING + this._boundary + PADDING + NEW_LINE); + this._contentLength = 0; + this._isAllStreamSizeKnown = true; + this._knownStreamSize = 0; + this._minChunkSize = options && options.minChunkSize || 0; - // 2. Return the percent-decoding of bytes. - return percentDecode(bytes) + this.isFormStream = true; + debug('start boundary\n%s', this._boundary); } -// https://url.spec.whatwg.org/#percent-decode -/** @param {Uint8Array} input */ -function percentDecode (input) { - // 1. Let output be an empty byte sequence. - /** @type {number[]} */ - const output = [] +util.inherits(FormStream, Stream); +module.exports = FormStream; - // 2. For each byte byte in input: - for (let i = 0; i < input.length; i++) { - const byte = input[i] +FormStream.prototype._generateBoundary = function() { + // https://github.com/felixge/node-form-data/blob/master/lib/form_data.js#L162 + // This generates a 50 character boundary similar to those used by Firefox. + // They are optimized for boyer-moore parsing. + var boundary = '--------------------------'; + for (var i = 0; i < 24; i++) { + boundary += Math.floor(Math.random() * 10).toString(16); + } - // 1. If byte is not 0x25 (%), then append byte to output. - if (byte !== 0x25) { - output.push(byte) + return boundary; +}; - // 2. Otherwise, if byte is 0x25 (%) and the next two bytes - // after byte in input are not in the ranges - // 0x30 (0) to 0x39 (9), 0x41 (A) to 0x46 (F), - // and 0x61 (a) to 0x66 (f), all inclusive, append byte - // to output. 
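The boundary framing documented in the comment block above is plain text; a hand-rolled sketch of a single field followed by the terminating boundary (the boundary value here is made up) shows what FormStream ultimately emits:

```js
const CRLF = '\r\n';
const boundary = '--------------------------012345678901234567890123';

// One text field plus the closing "--boundary--" marker, mirroring the
// format shown in the comment block above.
const body =
  '--' + boundary + CRLF +
  'Content-Disposition: form-data; name="foo"' + CRLF +
  CRLF +
  'bar' + CRLF +
  '--' + boundary + '--' + CRLF;

// The request header must advertise the same boundary:
//   Content-Type: multipart/form-data; boundary=<boundary>
console.log(Buffer.byteLength(body)); // this total is what Content-Length reports
```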
- } else if ( - byte === 0x25 && - !/^[0-9A-Fa-f]{2}$/i.test(String.fromCharCode(input[i + 1], input[i + 2])) - ) { - output.push(0x25) +FormStream.prototype.setTotalStreamSize = function (size) { + // this method should not make any sense if the length of each stream is known. + if (this._isAllStreamSizeKnown) { + return this; + } - // 3. Otherwise: - } else { - // 1. Let bytePoint be the two bytes after byte in input, - // decoded, and then interpreted as hexadecimal number. - const nextTwoBytes = String.fromCharCode(input[i + 1], input[i + 2]) - const bytePoint = Number.parseInt(nextTwoBytes, 16) + size = size || 0; - // 2. Append a byte whose value is bytePoint to output. - output.push(bytePoint) + for (var i = 0; i < this._streams.length; i++) { + size += this._streams[i][0].length; + size += NEW_LINE_BUFFER.length; // stream field end padding size + } - // 3. Skip the next two bytes in input. - i += 2 + this._knownStreamSize = size; + this._isAllStreamSizeKnown = true; + debug('set total size: %s', size); + return this; +}; + +FormStream.prototype.headers = function (options) { + var headers = { + 'Content-Type': 'multipart/form-data; boundary=' + this._boundary + }; + + // calculate total stream size + this._contentLength += this._knownStreamSize; + // calculate length of end padding + this._contentLength += this._endData.length; + + if (this._isAllStreamSizeKnown) { + headers['Content-Length'] = String(this._contentLength); + } + + if (options) { + for (var k in options) { + headers[k] = options[k]; } } - // 3. Return output. - return Uint8Array.from(output) -} + debug('headers: %j', headers); + return headers; +}; -// https://mimesniff.spec.whatwg.org/#parse-a-mime-type -/** @param {string} input */ -function parseMIMEType (input) { - // 1. Remove any leading and trailing HTTP whitespace - // from input. - input = removeHTTPWhitespace(input, true, true) +FormStream.prototype.file = function (name, filepath, filename, filesize) { + if (typeof filename === 'number' && !filesize) { + filesize = filename; + filename = path.basename(filepath); + } + if (!filename) { + filename = path.basename(filepath); + } - // 2. Let position be a position variable for input, - // initially pointing at the start of input. - const position = { position: 0 } + var mimeType = mime.getType(filename); + var stream = fs.createReadStream(filepath); - // 3. Let type be the result of collecting a sequence - // of code points that are not U+002F (/) from - // input, given position. - const type = collectASequenceOfCodePointsFast( - '/', - input, - position - ) + return this.stream(name, stream, filename, mimeType, filesize); +}; - // 4. If type is the empty string or does not solely - // contain HTTP token code points, then return failure. - // https://mimesniff.spec.whatwg.org/#http-token-code-point - if (type.length === 0 || !HTTP_TOKEN_CODEPOINTS.test(type)) { - return 'failure' +/** + * Add a form field + * @param {String} name field name + * @param {String|Buffer} value field value + * @param {String} [mimeType] field mimeType + * @return {this} + */ +FormStream.prototype.field = function (name, value, mimeType) { + if (!Buffer.isBuffer(value)) { + // field(String, Number) + // https://github.com/qiniu/nodejs-sdk/issues/123 + if (typeof value === 'number') { + value = String(value); + } + value = Buffer.from(value); } + return this.buffer(name, value, null, mimeType); +}; - // 5. 
If position is past the end of input, then return - // failure - if (position.position > input.length) { - return 'failure' +FormStream.prototype.stream = function (name, stream, filename, mimeType, size) { + if (typeof mimeType === 'number' && !size) { + size = mimeType; + mimeType = mime.getType(filename); + } else if (!mimeType) { + mimeType = mime.getType(filename); } - // 6. Advance position by 1. (This skips past U+002F (/).) - position.position++ + stream.once('error', this.emit.bind(this, 'error')); + // if form stream destroy, also destroy the source stream + this.once('destroy', function () { + destroy(stream); + }); - // 7. Let subtype be the result of collecting a sequence of - // code points that are not U+003B (;) from input, given - // position. - let subtype = collectASequenceOfCodePointsFast( - ';', - input, - position - ) + var leading = this._leading({ name: name, filename: filename }, mimeType); - // 8. Remove any trailing HTTP whitespace from subtype. - subtype = removeHTTPWhitespace(subtype, false, true) + var ps = parseStream().pause(); + stream.pipe(ps); - // 9. If subtype is the empty string or does not solely - // contain HTTP token code points, then return failure. - if (subtype.length === 0 || !HTTP_TOKEN_CODEPOINTS.test(subtype)) { - return 'failure' + this._streams.push([leading, ps]); + + // if the size of this stream is known, plus the total content-length; + // otherwise, content-length is unknown. + if (typeof size === 'number') { + this._knownStreamSize += leading.length; + this._knownStreamSize += size; + this._knownStreamSize += NEW_LINE_BUFFER.length; + } else { + this._isAllStreamSizeKnown = false; } - const typeLowercase = type.toLowerCase() - const subtypeLowercase = subtype.toLowerCase() + process.nextTick(this.resume.bind(this)); - // 10. Let mimeType be a new MIME type record whose type - // is type, in ASCII lowercase, and subtype is subtype, - // in ASCII lowercase. - // https://mimesniff.spec.whatwg.org/#mime-type - const mimeType = { - type: typeLowercase, - subtype: subtypeLowercase, - /** @type {Map} */ - parameters: new Map(), - // https://mimesniff.spec.whatwg.org/#mime-type-essence - essence: `${typeLowercase}/${subtypeLowercase}` + return this; +}; + +FormStream.prototype.buffer = function (name, buffer, filename, mimeType) { + if (filename && !mimeType) { + mimeType = mime.getType(filename); } - // 11. While position is not past the end of input: - while (position.position < input.length) { - // 1. Advance position by 1. (This skips past U+003B (;).) - position.position++ + var disposition = { name: name }; + if (filename) { + disposition.filename = filename; + } - // 2. Collect a sequence of code points that are HTTP - // whitespace from input given position. - collectASequenceOfCodePoints( - // https://fetch.spec.whatwg.org/#http-whitespace - char => HTTP_WHITESPACE_REGEX.test(char), - input, - position - ) + var leading = this._leading(disposition, mimeType); - // 3. Let parameterName be the result of collecting a - // sequence of code points that are not U+003B (;) - // or U+003D (=) from input, given position. - let parameterName = collectASequenceOfCodePoints( - (char) => char !== ';' && char !== '=', - input, - position - ) + // plus buffer length to total content-length + var bufferSize = leading.length + buffer.length + NEW_LINE_BUFFER.length; + this._buffers.push(Buffer.concat([leading, buffer, NEW_LINE_BUFFER], bufferSize)); + this._contentLength += bufferSize; - // 4. 
Set parameterName to parameterName, in ASCII - // lowercase. - parameterName = parameterName.toLowerCase() + process.nextTick(this.resume.bind(this)); + if (debug.enabled) { + if (buffer.length > 512) { + debug('new buffer field, content size: %d\n%s%s', + buffer.length, leading.toString(), hex(buffer.slice(0, 512))); + } else { + debug('new buffer field, content size: %d\n%s%s', + buffer.length, leading.toString(), hex(buffer)); + } + } + return this; +}; - // 5. If position is not past the end of input, then: - if (position.position < input.length) { - // 1. If the code point at position within input is - // U+003B (;), then continue. - if (input[position.position] === ';') { - continue - } +FormStream.prototype._leading = function (disposition, type) { + var leading = [PADDING + this._boundary]; - // 2. Advance position by 1. (This skips past U+003D (=).) - position.position++ - } + var dispositions = []; - // 6. If position is past the end of input, then break. - if (position.position > input.length) { - break + if (disposition) { + for (var k in disposition) { + dispositions.push(k + '="' + disposition[k] + '"'); } + } - // 7. Let parameterValue be null. - let parameterValue = null + leading.push('Content-Disposition: form-data; ' + dispositions.join('; ')); + if (type) { + leading.push('Content-Type: ' + type); + } - // 8. If the code point at position within input is - // U+0022 ("), then: - if (input[position.position] === '"') { - // 1. Set parameterValue to the result of collecting - // an HTTP quoted string from input, given position - // and the extract-value flag. - parameterValue = collectAnHTTPQuotedString(input, position, true) + leading.push(''); + leading.push(''); + return Buffer.from(leading.join(NEW_LINE)); +}; - // 2. Collect a sequence of code points that are not - // U+003B (;) from input, given position. - collectASequenceOfCodePointsFast( - ';', - input, - position - ) +FormStream.prototype._emitBuffers = function () { + if (!this._buffers.length) { + return; + } - // 9. Otherwise: - } else { - // 1. Set parameterValue to the result of collecting - // a sequence of code points that are not U+003B (;) - // from input, given position. - parameterValue = collectASequenceOfCodePointsFast( - ';', - input, - position - ) + for (var i = 0; i < this._buffers.length; i++) { + this.emit('data', this._buffers[i]); + } + this._buffers = []; +}; - // 2. Remove any trailing HTTP whitespace from parameterValue. - parameterValue = removeHTTPWhitespace(parameterValue, false, true) +FormStream.prototype._emitStream = function (item) { + var self = this; + // item: [ leading, stream ] + var streamSize = 0; + var chunkCount = 0; + const leading = item[0]; + self.emit('data', leading); + chunkCount++; + if (debug.enabled) { + debug('new stream, chunk index %d\n%s', chunkCount, leading.toString()); + } + + var stream = item[1]; + stream.on('data', function (data) { + self.emit('data', data); + streamSize += leading.length; + chunkCount++; + if (debug.enabled) { + if (data.length > 512) { + debug('stream chunk, size %d, chunk index %d, stream size %d\n%s...... 
only show 512 bytes ......', + data.length, chunkCount, streamSize, hex(data.slice(0, 512))); + } else { + debug('stream chunk, size %d, chunk index %d, stream size %d\n%s', + data.length, chunkCount, streamSize, hex(data)); + } + } + }); + stream.on('end', function () { + self.emit('data', NEW_LINE_BUFFER); + chunkCount++; + debug('stream end, chunk index %d, stream size %d', chunkCount, streamSize); + return process.nextTick(self.drain.bind(self)); + }); + stream.resume(); +}; - // 3. If parameterValue is the empty string, then continue. - if (parameterValue.length === 0) { - continue +FormStream.prototype._emitStreamWithChunkSize = function (item, minChunkSize) { + var self = this; + // item: [ leading, stream ] + var streamSize = 0; + var chunkCount = 0; + var bufferSize = 0; + var buffers = []; + const leading = item[0]; + buffers.push(leading); + bufferSize += leading.length; + if (debug.enabled) { + debug('new stream, with min chunk size: %d\n%s', minChunkSize, leading.toString()); + } + + var stream = item[1]; + stream.on('data', function (data) { + if (typeof data === 'string') { + data = Buffer.from(data, 'utf-8'); + } + buffers.push(data); + bufferSize += data.length; + streamSize += data.length; + debug('got stream data size %d, buffer size %d, stream size %d', + data.length, bufferSize, streamSize); + if (bufferSize >= minChunkSize) { + const chunk = Buffer.concat(buffers, bufferSize); + buffers = []; + bufferSize = 0; + self.emit('data', chunk); + chunkCount++; + if (debug.enabled) { + if (chunk.length > 512) { + debug('stream chunk, size %d, chunk index %d, stream size %d\n%s...... only show 512 bytes ......', + chunk.length, chunkCount, streamSize, hex(chunk.slice(0, 512))); + } else { + debug('stream chunk, size %d, chunk index %d, stream size %d\n%s', + chunk.length, chunkCount, streamSize, hex(chunk)); + } } } + }); + stream.on('end', function () { + buffers.push(NEW_LINE_BUFFER); + bufferSize += NEW_LINE_BUFFER.length; + const chunk = Buffer.concat(buffers, bufferSize); + self.emit('data', chunk); + chunkCount++; + if (chunk.length > 512) { + debug('stream end, size %d, chunk index %d, stream size %d\n%s...... only show 512 bytes ......', + chunk.length, chunkCount, streamSize, hex(chunk.slice(0, 512))); + } else { + debug('stream end, size %d, chunk index %d, stream size %d\n%s', + chunk.length, chunkCount, streamSize, hex(chunk)); + } + return process.nextTick(self.drain.bind(self)); + }); + stream.resume(); +}; - // 10. If all of the following are true - // - parameterName is not the empty string - // - parameterName solely contains HTTP token code points - // - parameterValue solely contains HTTP quoted-string token code points - // - mimeType’s parameters[parameterName] does not exist - // then set mimeType’s parameters[parameterName] to parameterValue. 
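Taken together, `field`/`file`/`stream`, `headers` and the emit helpers above are normally driven as in the sketch below (host and paths are placeholders). Passing the file size keeps `Content-Length` computable, and a positive `minChunkSize` enables the chunk-batching branch in `_emitStreamWithChunkSize`:

```js
const fs = require('fs');
const http = require('http');
const formstream = require('formstream');

// minChunkSize batches small upstream 'data' events into >= 1 MB chunks.
const form = formstream({ minChunkSize: 1024 * 1024 });
form.field('name', 'value'); // buffered field, length always known
form.file('file', './photo.png', 'photo.png',
  fs.statSync('./photo.png').size); // explicit size keeps Content-Length known

const req = http.request({
  method: 'POST',
  host: 'example.com',
  path: '/upload',
  headers: form.headers() // multipart/form-data; boundary=... (+ Content-Length)
});
req.on('response', res => console.log(res.statusCode));
form.pipe(req);
```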
- if ( - parameterName.length !== 0 && - HTTP_TOKEN_CODEPOINTS.test(parameterName) && - (parameterValue.length === 0 || HTTP_QUOTED_STRING_TOKENS.test(parameterValue)) && - !mimeType.parameters.has(parameterName) - ) { - mimeType.parameters.set(parameterName, parameterValue) +FormStream.prototype._emitEnd = function () { + // ending format: + // + // --{boundary}--\r\n + this.emit('data', this._endData); + this.emit('end'); + if (debug.enabled) { + debug('end boundary\n%s', this._endData.toString()); + } +}; + +FormStream.prototype.drain = function () { + // debug('drain'); + this._emitBuffers(); + + var item = this._streams.shift(); + if (item) { + if (this._minChunkSize && this._minChunkSize > 0) { + this._emitStreamWithChunkSize(item, this._minChunkSize); + } else { + this._emitStream(item); } + } else { + this._emitEnd(); } - // 12. Return mimeType. - return mimeType -} + return this; +}; -// https://infra.spec.whatwg.org/#forgiving-base64-decode -/** @param {string} data */ -function forgivingBase64 (data) { - // 1. Remove all ASCII whitespace from data. - data = data.replace(/[\u0009\u000A\u000C\u000D\u0020]/g, '') // eslint-disable-line +FormStream.prototype.resume = function () { + // debug('resume'); + this.paused = false; - // 2. If data’s code point length divides by 4 leaving - // no remainder, then: - if (data.length % 4 === 0) { - // 1. If data ends with one or two U+003D (=) code points, - // then remove them from data. - data = data.replace(/=?=$/, '') + if (!this._draining) { + this._draining = true; + this.drain(); } - // 3. If data’s code point length divides by 4 leaving - // a remainder of 1, then return failure. - if (data.length % 4 === 1) { - return 'failure' - } + return this; +}; - // 4. If data contains a code point that is not one of - // U+002B (+) - // U+002F (/) - // ASCII alphanumeric - // then return failure. - if (/[^+/0-9A-Za-z]/.test(data)) { - return 'failure' - } +FormStream.prototype.close = FormStream.prototype.destroy = function () { + this.emit('destroy'); + // debug('destroy or close'); +}; - const binary = atob(data) - const bytes = new Uint8Array(binary.length) - for (let byte = 0; byte < binary.length; byte++) { - bytes[byte] = binary.charCodeAt(byte) - } +/***/ }), - return bytes -} +/***/ 19320: +/***/ ((module) => { -// https://fetch.spec.whatwg.org/#collect-an-http-quoted-string -// tests: https://fetch.spec.whatwg.org/#example-http-quoted-string -/** - * @param {string} input - * @param {{ position: number }} position - * @param {boolean?} extractValue - */ -function collectAnHTTPQuotedString (input, position, extractValue) { - // 1. Let positionStart be position. - const positionStart = position.position +"use strict"; - // 2. Let value be the empty string. - let value = '' - // 3. Assert: the code point at position within input - // is U+0022 ("). - assert(input[position.position] === '"') +/* eslint no-invalid-this: 1 */ - // 4. Advance position by 1. - position.position++ +var ERROR_MESSAGE = 'Function.prototype.bind called on incompatible '; +var toStr = Object.prototype.toString; +var max = Math.max; +var funcType = '[object Function]'; - // 5. While true: - while (true) { - // 1. Append the result of collecting a sequence of code points - // that are not U+0022 (") or U+005C (\) from input, given - // position, to value. - value += collectASequenceOfCodePoints( - (char) => char !== '"' && char !== '\\', - input, - position - ) +var concatty = function concatty(a, b) { + var arr = []; - // 2. 
If position is past the end of input, then break. - if (position.position >= input.length) { - break + for (var i = 0; i < a.length; i += 1) { + arr[i] = a[i]; + } + for (var j = 0; j < b.length; j += 1) { + arr[j + a.length] = b[j]; } - // 3. Let quoteOrBackslash be the code point at position within - // input. - const quoteOrBackslash = input[position.position] + return arr; +}; - // 4. Advance position by 1. - position.position++ +var slicy = function slicy(arrLike, offset) { + var arr = []; + for (var i = offset || 0, j = 0; i < arrLike.length; i += 1, j += 1) { + arr[j] = arrLike[i]; + } + return arr; +}; - // 5. If quoteOrBackslash is U+005C (\), then: - if (quoteOrBackslash === '\\') { - // 1. If position is past the end of input, then append - // U+005C (\) to value and break. - if (position.position >= input.length) { - value += '\\' - break - } +var joiny = function (arr, joiner) { + var str = ''; + for (var i = 0; i < arr.length; i += 1) { + str += arr[i]; + if (i + 1 < arr.length) { + str += joiner; + } + } + return str; +}; - // 2. Append the code point at position within input to value. - value += input[position.position] +module.exports = function bind(that) { + var target = this; + if (typeof target !== 'function' || toStr.apply(target) !== funcType) { + throw new TypeError(ERROR_MESSAGE + target); + } + var args = slicy(arguments, 1); + + var bound; + var binder = function () { + if (this instanceof bound) { + var result = target.apply( + this, + concatty(args, arguments) + ); + if (Object(result) === result) { + return result; + } + return this; + } + return target.apply( + that, + concatty(args, arguments) + ); - // 3. Advance position by 1. - position.position++ + }; - // 6. Otherwise: - } else { - // 1. Assert: quoteOrBackslash is U+0022 ("). - assert(quoteOrBackslash === '"') + var boundLength = max(0, target.length - args.length); + var boundArgs = []; + for (var i = 0; i < boundLength; i++) { + boundArgs[i] = '$' + i; + } - // 2. Break. - break + bound = Function('binder', 'return function (' + joiny(boundArgs, ',') + '){ return binder.apply(this,arguments); }')(binder); + + if (target.prototype) { + var Empty = function Empty() {}; + Empty.prototype = target.prototype; + bound.prototype = new Empty(); + Empty.prototype = null; } - } - // 6. If the extract-value flag is set, then return value. - if (extractValue) { - return value - } + return bound; +}; - // 7. Return the code points from positionStart to position, - // inclusive, within input. - return input.slice(positionStart, position.position) -} -/** - * @see https://mimesniff.spec.whatwg.org/#serialize-a-mime-type - */ -function serializeAMimeType (mimeType) { - assert(mimeType !== 'failure') - const { parameters, essence } = mimeType +/***/ }), - // 1. Let serialization be the concatenation of mimeType’s - // type, U+002F (/), and mimeType’s subtype. - let serialization = essence +/***/ 88334: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // 2. For each name → value of mimeType’s parameters: - for (let [name, value] of parameters.entries()) { - // 1. Append U+003B (;) to serialization. - serialization += ';' +"use strict"; - // 2. Append name to serialization. - serialization += name - // 3. Append U+003D (=) to serialization. - serialization += '=' +var implementation = __nccwpck_require__(19320); - // 4. If value does not solely contain HTTP token code - // points or value is the empty string, then: - if (!HTTP_TOKEN_CODEPOINTS.test(value)) { - // 1. 
Precede each occurence of U+0022 (") or - // U+005C (\) in value with U+005C (\). - value = value.replace(/(\\|")/g, '\\$1') +module.exports = Function.prototype.bind || implementation; - // 2. Prepend U+0022 (") to value. - value = '"' + value - // 3. Append U+0022 (") to value. - value += '"' - } +/***/ }), - // 5. Append value to serialization. - serialization += value - } +/***/ 74538: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // 3. Return serialization. - return serialization -} +"use strict"; -/** - * @see https://fetch.spec.whatwg.org/#http-whitespace - * @param {string} char - */ -function isHTTPWhiteSpace (char) { - return char === '\r' || char === '\n' || char === '\t' || char === ' ' -} -/** - * @see https://fetch.spec.whatwg.org/#http-whitespace - * @param {string} str - */ -function removeHTTPWhitespace (str, leading = true, trailing = true) { - let lead = 0 - let trail = str.length - 1 +var undefined; - if (leading) { - for (; lead < str.length && isHTTPWhiteSpace(str[lead]); lead++); - } +var $Error = __nccwpck_require__(28015); +var $EvalError = __nccwpck_require__(91933); +var $RangeError = __nccwpck_require__(54415); +var $ReferenceError = __nccwpck_require__(46279); +var $SyntaxError = __nccwpck_require__(75474); +var $TypeError = __nccwpck_require__(6361); +var $URIError = __nccwpck_require__(5065); - if (trailing) { - for (; trail > 0 && isHTTPWhiteSpace(str[trail]); trail--); - } +var $Function = Function; - return str.slice(lead, trail + 1) -} +// eslint-disable-next-line consistent-return +var getEvalledConstructor = function (expressionSyntax) { + try { + return $Function('"use strict"; return (' + expressionSyntax + ').constructor;')(); + } catch (e) {} +}; -/** - * @see https://infra.spec.whatwg.org/#ascii-whitespace - * @param {string} char - */ -function isASCIIWhitespace (char) { - return char === '\r' || char === '\n' || char === '\t' || char === '\f' || char === ' ' +var $gOPD = Object.getOwnPropertyDescriptor; +if ($gOPD) { + try { + $gOPD({}, ''); + } catch (e) { + $gOPD = null; // this is IE 8, which has a broken gOPD + } } -/** - * @see https://infra.spec.whatwg.org/#strip-leading-and-trailing-ascii-whitespace - */ -function removeASCIIWhitespace (str, leading = true, trailing = true) { - let lead = 0 - let trail = str.length - 1 +var throwTypeError = function () { + throw new $TypeError(); +}; +var ThrowTypeError = $gOPD + ? (function () { + try { + // eslint-disable-next-line no-unused-expressions, no-caller, no-restricted-properties + arguments.callee; // IE 8 does not throw here + return throwTypeError; + } catch (calleeThrows) { + try { + // IE 8 throws on Object.getOwnPropertyDescriptor(arguments, '') + return $gOPD(arguments, 'callee').get; + } catch (gOPDthrows) { + return throwTypeError; + } + } + }()) + : throwTypeError; + +var hasSymbols = __nccwpck_require__(40587)(); +var hasProto = __nccwpck_require__(45894)(); + +var getProto = Object.getPrototypeOf || ( + hasProto + ? function (x) { return x.__proto__; } // eslint-disable-line no-proto + : null +); + +var needsEval = {}; + +var TypedArray = typeof Uint8Array === 'undefined' || !getProto ? undefined : getProto(Uint8Array); + +var INTRINSICS = { + __proto__: null, + '%AggregateError%': typeof AggregateError === 'undefined' ? undefined : AggregateError, + '%Array%': Array, + '%ArrayBuffer%': typeof ArrayBuffer === 'undefined' ? undefined : ArrayBuffer, + '%ArrayIteratorPrototype%': hasSymbols && getProto ? 
getProto([][Symbol.iterator]()) : undefined, + '%AsyncFromSyncIteratorPrototype%': undefined, + '%AsyncFunction%': needsEval, + '%AsyncGenerator%': needsEval, + '%AsyncGeneratorFunction%': needsEval, + '%AsyncIteratorPrototype%': needsEval, + '%Atomics%': typeof Atomics === 'undefined' ? undefined : Atomics, + '%BigInt%': typeof BigInt === 'undefined' ? undefined : BigInt, + '%BigInt64Array%': typeof BigInt64Array === 'undefined' ? undefined : BigInt64Array, + '%BigUint64Array%': typeof BigUint64Array === 'undefined' ? undefined : BigUint64Array, + '%Boolean%': Boolean, + '%DataView%': typeof DataView === 'undefined' ? undefined : DataView, + '%Date%': Date, + '%decodeURI%': decodeURI, + '%decodeURIComponent%': decodeURIComponent, + '%encodeURI%': encodeURI, + '%encodeURIComponent%': encodeURIComponent, + '%Error%': $Error, + '%eval%': eval, // eslint-disable-line no-eval + '%EvalError%': $EvalError, + '%Float32Array%': typeof Float32Array === 'undefined' ? undefined : Float32Array, + '%Float64Array%': typeof Float64Array === 'undefined' ? undefined : Float64Array, + '%FinalizationRegistry%': typeof FinalizationRegistry === 'undefined' ? undefined : FinalizationRegistry, + '%Function%': $Function, + '%GeneratorFunction%': needsEval, + '%Int8Array%': typeof Int8Array === 'undefined' ? undefined : Int8Array, + '%Int16Array%': typeof Int16Array === 'undefined' ? undefined : Int16Array, + '%Int32Array%': typeof Int32Array === 'undefined' ? undefined : Int32Array, + '%isFinite%': isFinite, + '%isNaN%': isNaN, + '%IteratorPrototype%': hasSymbols && getProto ? getProto(getProto([][Symbol.iterator]())) : undefined, + '%JSON%': typeof JSON === 'object' ? JSON : undefined, + '%Map%': typeof Map === 'undefined' ? undefined : Map, + '%MapIteratorPrototype%': typeof Map === 'undefined' || !hasSymbols || !getProto ? undefined : getProto(new Map()[Symbol.iterator]()), + '%Math%': Math, + '%Number%': Number, + '%Object%': Object, + '%parseFloat%': parseFloat, + '%parseInt%': parseInt, + '%Promise%': typeof Promise === 'undefined' ? undefined : Promise, + '%Proxy%': typeof Proxy === 'undefined' ? undefined : Proxy, + '%RangeError%': $RangeError, + '%ReferenceError%': $ReferenceError, + '%Reflect%': typeof Reflect === 'undefined' ? undefined : Reflect, + '%RegExp%': RegExp, + '%Set%': typeof Set === 'undefined' ? undefined : Set, + '%SetIteratorPrototype%': typeof Set === 'undefined' || !hasSymbols || !getProto ? undefined : getProto(new Set()[Symbol.iterator]()), + '%SharedArrayBuffer%': typeof SharedArrayBuffer === 'undefined' ? undefined : SharedArrayBuffer, + '%String%': String, + '%StringIteratorPrototype%': hasSymbols && getProto ? getProto(''[Symbol.iterator]()) : undefined, + '%Symbol%': hasSymbols ? Symbol : undefined, + '%SyntaxError%': $SyntaxError, + '%ThrowTypeError%': ThrowTypeError, + '%TypedArray%': TypedArray, + '%TypeError%': $TypeError, + '%Uint8Array%': typeof Uint8Array === 'undefined' ? undefined : Uint8Array, + '%Uint8ClampedArray%': typeof Uint8ClampedArray === 'undefined' ? undefined : Uint8ClampedArray, + '%Uint16Array%': typeof Uint16Array === 'undefined' ? undefined : Uint16Array, + '%Uint32Array%': typeof Uint32Array === 'undefined' ? undefined : Uint32Array, + '%URIError%': $URIError, + '%WeakMap%': typeof WeakMap === 'undefined' ? undefined : WeakMap, + '%WeakRef%': typeof WeakRef === 'undefined' ? undefined : WeakRef, + '%WeakSet%': typeof WeakSet === 'undefined' ? 
undefined : WeakSet +}; - if (leading) { - for (; lead < str.length && isASCIIWhitespace(str[lead]); lead++); - } +if (getProto) { + try { + null.error; // eslint-disable-line no-unused-expressions + } catch (e) { + // https://github.com/tc39/proposal-shadowrealm/pull/384#issuecomment-1364264229 + var errorProto = getProto(getProto(e)); + INTRINSICS['%Error.prototype%'] = errorProto; + } +} + +var doEval = function doEval(name) { + var value; + if (name === '%AsyncFunction%') { + value = getEvalledConstructor('async function () {}'); + } else if (name === '%GeneratorFunction%') { + value = getEvalledConstructor('function* () {}'); + } else if (name === '%AsyncGeneratorFunction%') { + value = getEvalledConstructor('async function* () {}'); + } else if (name === '%AsyncGenerator%') { + var fn = doEval('%AsyncGeneratorFunction%'); + if (fn) { + value = fn.prototype; + } + } else if (name === '%AsyncIteratorPrototype%') { + var gen = doEval('%AsyncGenerator%'); + if (gen && getProto) { + value = getProto(gen.prototype); + } + } + + INTRINSICS[name] = value; + + return value; +}; - if (trailing) { - for (; trail > 0 && isASCIIWhitespace(str[trail]); trail--); +var LEGACY_ALIASES = { + __proto__: null, + '%ArrayBufferPrototype%': ['ArrayBuffer', 'prototype'], + '%ArrayPrototype%': ['Array', 'prototype'], + '%ArrayProto_entries%': ['Array', 'prototype', 'entries'], + '%ArrayProto_forEach%': ['Array', 'prototype', 'forEach'], + '%ArrayProto_keys%': ['Array', 'prototype', 'keys'], + '%ArrayProto_values%': ['Array', 'prototype', 'values'], + '%AsyncFunctionPrototype%': ['AsyncFunction', 'prototype'], + '%AsyncGenerator%': ['AsyncGeneratorFunction', 'prototype'], + '%AsyncGeneratorPrototype%': ['AsyncGeneratorFunction', 'prototype', 'prototype'], + '%BooleanPrototype%': ['Boolean', 'prototype'], + '%DataViewPrototype%': ['DataView', 'prototype'], + '%DatePrototype%': ['Date', 'prototype'], + '%ErrorPrototype%': ['Error', 'prototype'], + '%EvalErrorPrototype%': ['EvalError', 'prototype'], + '%Float32ArrayPrototype%': ['Float32Array', 'prototype'], + '%Float64ArrayPrototype%': ['Float64Array', 'prototype'], + '%FunctionPrototype%': ['Function', 'prototype'], + '%Generator%': ['GeneratorFunction', 'prototype'], + '%GeneratorPrototype%': ['GeneratorFunction', 'prototype', 'prototype'], + '%Int8ArrayPrototype%': ['Int8Array', 'prototype'], + '%Int16ArrayPrototype%': ['Int16Array', 'prototype'], + '%Int32ArrayPrototype%': ['Int32Array', 'prototype'], + '%JSONParse%': ['JSON', 'parse'], + '%JSONStringify%': ['JSON', 'stringify'], + '%MapPrototype%': ['Map', 'prototype'], + '%NumberPrototype%': ['Number', 'prototype'], + '%ObjectPrototype%': ['Object', 'prototype'], + '%ObjProto_toString%': ['Object', 'prototype', 'toString'], + '%ObjProto_valueOf%': ['Object', 'prototype', 'valueOf'], + '%PromisePrototype%': ['Promise', 'prototype'], + '%PromiseProto_then%': ['Promise', 'prototype', 'then'], + '%Promise_all%': ['Promise', 'all'], + '%Promise_reject%': ['Promise', 'reject'], + '%Promise_resolve%': ['Promise', 'resolve'], + '%RangeErrorPrototype%': ['RangeError', 'prototype'], + '%ReferenceErrorPrototype%': ['ReferenceError', 'prototype'], + '%RegExpPrototype%': ['RegExp', 'prototype'], + '%SetPrototype%': ['Set', 'prototype'], + '%SharedArrayBufferPrototype%': ['SharedArrayBuffer', 'prototype'], + '%StringPrototype%': ['String', 'prototype'], + '%SymbolPrototype%': ['Symbol', 'prototype'], + '%SyntaxErrorPrototype%': ['SyntaxError', 'prototype'], + '%TypedArrayPrototype%': ['TypedArray', 'prototype'], + 
'%TypeErrorPrototype%': ['TypeError', 'prototype'], + '%Uint8ArrayPrototype%': ['Uint8Array', 'prototype'], + '%Uint8ClampedArrayPrototype%': ['Uint8ClampedArray', 'prototype'], + '%Uint16ArrayPrototype%': ['Uint16Array', 'prototype'], + '%Uint32ArrayPrototype%': ['Uint32Array', 'prototype'], + '%URIErrorPrototype%': ['URIError', 'prototype'], + '%WeakMapPrototype%': ['WeakMap', 'prototype'], + '%WeakSetPrototype%': ['WeakSet', 'prototype'] +}; + +var bind = __nccwpck_require__(88334); +var hasOwn = __nccwpck_require__(62157); +var $concat = bind.call(Function.call, Array.prototype.concat); +var $spliceApply = bind.call(Function.apply, Array.prototype.splice); +var $replace = bind.call(Function.call, String.prototype.replace); +var $strSlice = bind.call(Function.call, String.prototype.slice); +var $exec = bind.call(Function.call, RegExp.prototype.exec); + +/* adapted from https://github.com/lodash/lodash/blob/4.17.15/dist/lodash.js#L6735-L6744 */ +var rePropName = /[^%.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|%$))/g; +var reEscapeChar = /\\(\\)?/g; /** Used to match backslashes in property paths. */ +var stringToPath = function stringToPath(string) { + var first = $strSlice(string, 0, 1); + var last = $strSlice(string, -1); + if (first === '%' && last !== '%') { + throw new $SyntaxError('invalid intrinsic syntax, expected closing `%`'); + } else if (last === '%' && first !== '%') { + throw new $SyntaxError('invalid intrinsic syntax, expected opening `%`'); + } + var result = []; + $replace(string, rePropName, function (match, number, quote, subString) { + result[result.length] = quote ? $replace(subString, reEscapeChar, '$1') : number || match; + }); + return result; +}; +/* end adaptation */ + +var getBaseIntrinsic = function getBaseIntrinsic(name, allowMissing) { + var intrinsicName = name; + var alias; + if (hasOwn(LEGACY_ALIASES, intrinsicName)) { + alias = LEGACY_ALIASES[intrinsicName]; + intrinsicName = '%' + alias[0] + '%'; + } + + if (hasOwn(INTRINSICS, intrinsicName)) { + var value = INTRINSICS[intrinsicName]; + if (value === needsEval) { + value = doEval(intrinsicName); + } + if (typeof value === 'undefined' && !allowMissing) { + throw new $TypeError('intrinsic ' + name + ' exists, but is not available. Please file an issue!'); + } + + return { + alias: alias, + name: intrinsicName, + value: value + }; + } + + throw new $SyntaxError('intrinsic ' + name + ' does not exist!'); +}; + +module.exports = function GetIntrinsic(name, allowMissing) { + if (typeof name !== 'string' || name.length === 0) { + throw new $TypeError('intrinsic name must be a non-empty string'); + } + if (arguments.length > 1 && typeof allowMissing !== 'boolean') { + throw new $TypeError('"allowMissing" argument must be a boolean'); + } + + if ($exec(/^%?[^%]*%?$/, name) === null) { + throw new $SyntaxError('`%` may not be present anywhere but at the beginning and end of the intrinsic name'); + } + var parts = stringToPath(name); + var intrinsicBaseName = parts.length > 0 ? 
parts[0] : ''; + + var intrinsic = getBaseIntrinsic('%' + intrinsicBaseName + '%', allowMissing); + var intrinsicRealName = intrinsic.name; + var value = intrinsic.value; + var skipFurtherCaching = false; + + var alias = intrinsic.alias; + if (alias) { + intrinsicBaseName = alias[0]; + $spliceApply(parts, $concat([0, 1], alias)); + } + + for (var i = 1, isOwn = true; i < parts.length; i += 1) { + var part = parts[i]; + var first = $strSlice(part, 0, 1); + var last = $strSlice(part, -1); + if ( + ( + (first === '"' || first === "'" || first === '`') + || (last === '"' || last === "'" || last === '`') + ) + && first !== last + ) { + throw new $SyntaxError('property names with quotes must have matching quotes'); + } + if (part === 'constructor' || !isOwn) { + skipFurtherCaching = true; + } + + intrinsicBaseName += '.' + part; + intrinsicRealName = '%' + intrinsicBaseName + '%'; + + if (hasOwn(INTRINSICS, intrinsicRealName)) { + value = INTRINSICS[intrinsicRealName]; + } else if (value != null) { + if (!(part in value)) { + if (!allowMissing) { + throw new $TypeError('base intrinsic for ' + name + ' exists, but the property is not available.'); + } + return void undefined; + } + if ($gOPD && (i + 1) >= parts.length) { + var desc = $gOPD(value, part); + isOwn = !!desc; + + // By convention, when a data property is converted to an accessor + // property to emulate a data property that does not suffer from + // the override mistake, that accessor's getter is marked with + // an `originalValue` property. Here, when we detect this, we + // uphold the illusion by pretending to see that original data + // property, i.e., returning the value rather than the getter + // itself. + if (isOwn && 'get' in desc && !('originalValue' in desc.get)) { + value = desc.get; + } else { + value = value[part]; + } + } else { + isOwn = hasOwn(value, part); + value = value[part]; + } + + if (isOwn && !skipFurtherCaching) { + INTRINSICS[intrinsicRealName] = value; + } + } + } + return value; +}; + + +/***/ }), + +/***/ 95284: +/***/ ((module) => { + +"use strict"; + + +function ready(flagOrFunction) { + this._ready = !!this._ready; + this._readyCallbacks = this._readyCallbacks || []; + + if (arguments.length === 0) { + // return a promise + // support `this.ready().then(onready);` and `yield this.ready()`; + return new Promise(function (resolve) { + if (this._ready) { + return resolve(); + } + this._readyCallbacks.push(resolve); + }.bind(this)); + } else if (typeof flagOrFunction === 'function') { + this._readyCallbacks.push(flagOrFunction); + } else { + this._ready = !!flagOrFunction; } - return str.slice(lead, trail + 1) + if (this._ready) { + this._readyCallbacks.splice(0, Infinity).forEach(function(callback) { + process.nextTick(callback); + }); + } } -module.exports = { - dataURLProcessor, - URLSerializer, - collectASequenceOfCodePoints, - collectASequenceOfCodePointsFast, - stringPercentDecode, - parseMIMEType, - collectAnHTTPQuotedString, - serializeAMimeType +function mixin(object) { + object.ready = ready; } +module.exports = mixin; +module.exports.mixin = mixin; + /***/ }), -/***/ 8511: +/***/ 18501: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -const { Blob, File: NativeFile } = __nccwpck_require__(4300) -const { types } = __nccwpck_require__(3837) -const { kState } = __nccwpck_require__(5861) -const { isBlobLike } = __nccwpck_require__(2538) -const { webidl } = __nccwpck_require__(1744) -const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(685) -const { 
kEnumerableProperty } = __nccwpck_require__(3983) -const encoder = new TextEncoder() +var GetIntrinsic = __nccwpck_require__(74538); -class File extends Blob { - constructor (fileBits, fileName, options = {}) { - // The File constructor is invoked with two or three parameters, depending - // on whether the optional dictionary parameter is used. When the File() - // constructor is invoked, user agents must run the following steps: - webidl.argumentLengthCheck(arguments, 2, { header: 'File constructor' }) - - fileBits = webidl.converters['sequence'](fileBits) - fileName = webidl.converters.USVString(fileName) - options = webidl.converters.FilePropertyBag(options) +var $gOPD = GetIntrinsic('%Object.getOwnPropertyDescriptor%', true); - // 1. Let bytes be the result of processing blob parts given fileBits and - // options. - // Note: Blob handles this for us +if ($gOPD) { + try { + $gOPD([], 'length'); + } catch (e) { + // IE 8 has a broken gOPD + $gOPD = null; + } +} - // 2. Let n be the fileName argument to the constructor. - const n = fileName +module.exports = $gOPD; - // 3. Process FilePropertyBag dictionary argument by running the following - // substeps: - // 1. If the type member is provided and is not the empty string, let t - // be set to the type dictionary member. If t contains any characters - // outside the range U+0020 to U+007E, then set t to the empty string - // and return from these substeps. - // 2. Convert every character in t to ASCII lowercase. - let t = options.type - let d +/***/ }), - // eslint-disable-next-line no-labels - substep: { - if (t) { - t = parseMIMEType(t) +/***/ 67356: +/***/ ((module) => { - if (t === 'failure') { - t = '' - // eslint-disable-next-line no-labels - break substep - } +"use strict"; - t = serializeAMimeType(t).toLowerCase() - } - // 3. If the lastModified member is provided, let d be set to the - // lastModified dictionary member. If it is not provided, set d to the - // current date and time represented as the number of milliseconds since - // the Unix Epoch (which is the equivalent of Date.now() [ECMA-262]). - d = options.lastModified - } +module.exports = clone - // 4. Return a new File object F such that: - // F refers to the bytes byte sequence. - // F.size is set to the number of total bytes in bytes. - // F.name is set to n. - // F.type is set to t. - // F.lastModified is set to d. 
+var getPrototypeOf = Object.getPrototypeOf || function (obj) { + return obj.__proto__ +} - super(processBlobParts(fileBits, options), { type: t }) - this[kState] = { - name: n, - lastModified: d, - type: t - } - } +function clone (obj) { + if (obj === null || typeof obj !== 'object') + return obj - get name () { - webidl.brandCheck(this, File) + if (obj instanceof Object) + var copy = { __proto__: getPrototypeOf(obj) } + else + var copy = Object.create(null) - return this[kState].name - } + Object.getOwnPropertyNames(obj).forEach(function (key) { + Object.defineProperty(copy, key, Object.getOwnPropertyDescriptor(obj, key)) + }) - get lastModified () { - webidl.brandCheck(this, File) + return copy +} - return this[kState].lastModified - } - get type () { - webidl.brandCheck(this, File) +/***/ }), - return this[kState].type - } -} +/***/ 77758: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -class FileLike { - constructor (blobLike, fileName, options = {}) { - // TODO: argument idl type check +var fs = __nccwpck_require__(57147) +var polyfills = __nccwpck_require__(20263) +var legacy = __nccwpck_require__(73086) +var clone = __nccwpck_require__(67356) - // The File constructor is invoked with two or three parameters, depending - // on whether the optional dictionary parameter is used. When the File() - // constructor is invoked, user agents must run the following steps: +var util = __nccwpck_require__(73837) - // 1. Let bytes be the result of processing blob parts given fileBits and - // options. +/* istanbul ignore next - node 0.x polyfill */ +var gracefulQueue +var previousSymbol - // 2. Let n be the fileName argument to the constructor. - const n = fileName +/* istanbul ignore else - node 0.x polyfill */ +if (typeof Symbol === 'function' && typeof Symbol.for === 'function') { + gracefulQueue = Symbol.for('graceful-fs.queue') + // This is used in testing by future versions + previousSymbol = Symbol.for('graceful-fs.previous') +} else { + gracefulQueue = '___graceful-fs.queue' + previousSymbol = '___graceful-fs.previous' +} - // 3. Process FilePropertyBag dictionary argument by running the following - // substeps: +function noop () {} - // 1. If the type member is provided and is not the empty string, let t - // be set to the type dictionary member. If t contains any characters - // outside the range U+0020 to U+007E, then set t to the empty string - // and return from these substeps. - // TODO - const t = options.type +function publishQueue(context, queue) { + Object.defineProperty(context, gracefulQueue, { + get: function() { + return queue + } + }) +} - // 2. Convert every character in t to ASCII lowercase. - // TODO +var debug = noop +if (util.debuglog) + debug = util.debuglog('gfs4') +else if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) + debug = function() { + var m = util.format.apply(util, arguments) + m = 'GFS4: ' + m.split(/\n/).join('\nGFS4: ') + console.error(m) + } + +// Once time initialization +if (!fs[gracefulQueue]) { + // This queue can be shared by multiple loaded instances + var queue = global[gracefulQueue] || [] + publishQueue(fs, queue) + + // Patch fs.close/closeSync to shared queue version, because we need + // to retry() whenever a close happens *anywhere* in the program. + // This is essential when multiple graceful-fs instances are + // in play at the same time. 
+ fs.close = (function (fs$close) { + function close (fd, cb) { + return fs$close.call(fs, fd, function (err) { + // This function uses the graceful-fs shared queue + if (!err) { + resetQueue() + } - // 3. If the lastModified member is provided, let d be set to the - // lastModified dictionary member. If it is not provided, set d to the - // current date and time represented as the number of milliseconds since - // the Unix Epoch (which is the equivalent of Date.now() [ECMA-262]). - const d = options.lastModified ?? Date.now() + if (typeof cb === 'function') + cb.apply(this, arguments) + }) + } - // 4. Return a new File object F such that: - // F refers to the bytes byte sequence. - // F.size is set to the number of total bytes in bytes. - // F.name is set to n. - // F.type is set to t. - // F.lastModified is set to d. + Object.defineProperty(close, previousSymbol, { + value: fs$close + }) + return close + })(fs.close) - this[kState] = { - blobLike, - name: n, - type: t, - lastModified: d + fs.closeSync = (function (fs$closeSync) { + function closeSync (fd) { + // This function uses the graceful-fs shared queue + fs$closeSync.apply(fs, arguments) + resetQueue() } - } - stream (...args) { - webidl.brandCheck(this, FileLike) + Object.defineProperty(closeSync, previousSymbol, { + value: fs$closeSync + }) + return closeSync + })(fs.closeSync) - return this[kState].blobLike.stream(...args) + if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) { + process.on('exit', function() { + debug(fs[gracefulQueue]) + __nccwpck_require__(39491).equal(fs[gracefulQueue].length, 0) + }) } +} - arrayBuffer (...args) { - webidl.brandCheck(this, FileLike) +if (!global[gracefulQueue]) { + publishQueue(global, fs[gracefulQueue]); +} - return this[kState].blobLike.arrayBuffer(...args) - } +module.exports = patch(clone(fs)) +if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs.__patched) { + module.exports = patch(fs) + fs.__patched = true; +} - slice (...args) { - webidl.brandCheck(this, FileLike) +function patch (fs) { + // Everything that references the open() function needs to be in here + polyfills(fs) + fs.gracefulify = patch - return this[kState].blobLike.slice(...args) + fs.createReadStream = createReadStream + fs.createWriteStream = createWriteStream + var fs$readFile = fs.readFile + fs.readFile = readFile + function readFile (path, options, cb) { + if (typeof options === 'function') + cb = options, options = null + + return go$readFile(path, options, cb) + + function go$readFile (path, options, cb, startTime) { + return fs$readFile(path, options, function (err) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$readFile, [path, options, cb], err, startTime || Date.now(), Date.now()]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + } + }) + } } - text (...args) { - webidl.brandCheck(this, FileLike) + var fs$writeFile = fs.writeFile + fs.writeFile = writeFile + function writeFile (path, data, options, cb) { + if (typeof options === 'function') + cb = options, options = null - return this[kState].blobLike.text(...args) + return go$writeFile(path, data, options, cb) + + function go$writeFile (path, data, options, cb, startTime) { + return fs$writeFile(path, data, options, function (err) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$writeFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + } + }) + } } - get size () { - 
webidl.brandCheck(this, FileLike) + var fs$appendFile = fs.appendFile + if (fs$appendFile) + fs.appendFile = appendFile + function appendFile (path, data, options, cb) { + if (typeof options === 'function') + cb = options, options = null - return this[kState].blobLike.size + return go$appendFile(path, data, options, cb) + + function go$appendFile (path, data, options, cb, startTime) { + return fs$appendFile(path, data, options, function (err) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$appendFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + } + }) + } } - get type () { - webidl.brandCheck(this, FileLike) + var fs$copyFile = fs.copyFile + if (fs$copyFile) + fs.copyFile = copyFile + function copyFile (src, dest, flags, cb) { + if (typeof flags === 'function') { + cb = flags + flags = 0 + } + return go$copyFile(src, dest, flags, cb) - return this[kState].blobLike.type + function go$copyFile (src, dest, flags, cb, startTime) { + return fs$copyFile(src, dest, flags, function (err) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$copyFile, [src, dest, flags, cb], err, startTime || Date.now(), Date.now()]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + } + }) + } } - get name () { - webidl.brandCheck(this, FileLike) + var fs$readdir = fs.readdir + fs.readdir = readdir + var noReaddirOptionVersions = /^v[0-5]\./ + function readdir (path, options, cb) { + if (typeof options === 'function') + cb = options, options = null - return this[kState].name + var go$readdir = noReaddirOptionVersions.test(process.version) + ? function go$readdir (path, options, cb, startTime) { + return fs$readdir(path, fs$readdirCallback( + path, options, cb, startTime + )) + } + : function go$readdir (path, options, cb, startTime) { + return fs$readdir(path, options, fs$readdirCallback( + path, options, cb, startTime + )) + } + + return go$readdir(path, options, cb) + + function fs$readdirCallback (path, options, cb, startTime) { + return function (err, files) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([ + go$readdir, + [path, options, cb], + err, + startTime || Date.now(), + Date.now() + ]) + else { + if (files && files.sort) + files.sort() + + if (typeof cb === 'function') + cb.call(this, err, files) + } + } + } } - get lastModified () { - webidl.brandCheck(this, FileLike) + if (process.version.substr(0, 4) === 'v0.8') { + var legStreams = legacy(fs) + ReadStream = legStreams.ReadStream + WriteStream = legStreams.WriteStream + } - return this[kState].lastModified + var fs$ReadStream = fs.ReadStream + if (fs$ReadStream) { + ReadStream.prototype = Object.create(fs$ReadStream.prototype) + ReadStream.prototype.open = ReadStream$open } - get [Symbol.toStringTag] () { - return 'File' + var fs$WriteStream = fs.WriteStream + if (fs$WriteStream) { + WriteStream.prototype = Object.create(fs$WriteStream.prototype) + WriteStream.prototype.open = WriteStream$open } -} -Object.defineProperties(File.prototype, { - [Symbol.toStringTag]: { - value: 'File', + Object.defineProperty(fs, 'ReadStream', { + get: function () { + return ReadStream + }, + set: function (val) { + ReadStream = val + }, + enumerable: true, configurable: true - }, - name: kEnumerableProperty, - lastModified: kEnumerableProperty -}) - -webidl.converters.Blob = webidl.interfaceConverter(Blob) + }) + Object.defineProperty(fs, 'WriteStream', { + get: function 
() { + return WriteStream + }, + set: function (val) { + WriteStream = val + }, + enumerable: true, + configurable: true + }) -webidl.converters.BlobPart = function (V, opts) { - if (webidl.util.Type(V) === 'Object') { - if (isBlobLike(V)) { - return webidl.converters.Blob(V, { strict: false }) - } + // legacy names + var FileReadStream = ReadStream + Object.defineProperty(fs, 'FileReadStream', { + get: function () { + return FileReadStream + }, + set: function (val) { + FileReadStream = val + }, + enumerable: true, + configurable: true + }) + var FileWriteStream = WriteStream + Object.defineProperty(fs, 'FileWriteStream', { + get: function () { + return FileWriteStream + }, + set: function (val) { + FileWriteStream = val + }, + enumerable: true, + configurable: true + }) - if ( - ArrayBuffer.isView(V) || - types.isAnyArrayBuffer(V) - ) { - return webidl.converters.BufferSource(V, opts) - } + function ReadStream (path, options) { + if (this instanceof ReadStream) + return fs$ReadStream.apply(this, arguments), this + else + return ReadStream.apply(Object.create(ReadStream.prototype), arguments) } - return webidl.converters.USVString(V, opts) -} + function ReadStream$open () { + var that = this + open(that.path, that.flags, that.mode, function (err, fd) { + if (err) { + if (that.autoClose) + that.destroy() -webidl.converters['sequence'] = webidl.sequenceConverter( - webidl.converters.BlobPart -) + that.emit('error', err) + } else { + that.fd = fd + that.emit('open', fd) + that.read() + } + }) + } -// https://www.w3.org/TR/FileAPI/#dfn-FilePropertyBag -webidl.converters.FilePropertyBag = webidl.dictionaryConverter([ - { - key: 'lastModified', - converter: webidl.converters['long long'], - get defaultValue () { - return Date.now() - } - }, - { - key: 'type', - converter: webidl.converters.DOMString, - defaultValue: '' - }, - { - key: 'endings', - converter: (value) => { - value = webidl.converters.DOMString(value) - value = value.toLowerCase() + function WriteStream (path, options) { + if (this instanceof WriteStream) + return fs$WriteStream.apply(this, arguments), this + else + return WriteStream.apply(Object.create(WriteStream.prototype), arguments) + } - if (value !== 'native') { - value = 'transparent' + function WriteStream$open () { + var that = this + open(that.path, that.flags, that.mode, function (err, fd) { + if (err) { + that.destroy() + that.emit('error', err) + } else { + that.fd = fd + that.emit('open', fd) } + }) + } - return value - }, - defaultValue: 'transparent' + function createReadStream (path, options) { + return new fs.ReadStream(path, options) } -]) -/** - * @see https://www.w3.org/TR/FileAPI/#process-blob-parts - * @param {(NodeJS.TypedArray|Blob|string)[]} parts - * @param {{ type: string, endings: string }} options - */ -function processBlobParts (parts, options) { - // 1. Let bytes be an empty sequence of bytes. - /** @type {NodeJS.TypedArray[]} */ - const bytes = [] + function createWriteStream (path, options) { + return new fs.WriteStream(path, options) + } - // 2. For each element in parts: - for (const element of parts) { - // 1. If element is a USVString, run the following substeps: - if (typeof element === 'string') { - // 1. Let s be element. - let s = element + var fs$open = fs.open + fs.open = open + function open (path, flags, mode, cb) { + if (typeof mode === 'function') + cb = mode, mode = null - // 2. If the endings member of options is "native", set s - // to the result of converting line endings to native - // of element. 
- if (options.endings === 'native') { - s = convertLineEndingsNative(s) - } + return go$open(path, flags, mode, cb) - // 3. Append the result of UTF-8 encoding s to bytes. - bytes.push(encoder.encode(s)) - } else if ( - types.isAnyArrayBuffer(element) || - types.isTypedArray(element) - ) { - // 2. If element is a BufferSource, get a copy of the - // bytes held by the buffer source, and append those - // bytes to bytes. - if (!element.buffer) { // ArrayBuffer - bytes.push(new Uint8Array(element)) - } else { - bytes.push( - new Uint8Array(element.buffer, element.byteOffset, element.byteLength) - ) - } - } else if (isBlobLike(element)) { - // 3. If element is a Blob, append the bytes it represents - // to bytes. - bytes.push(element) + function go$open (path, flags, mode, cb, startTime) { + return fs$open(path, flags, mode, function (err, fd) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$open, [path, flags, mode, cb], err, startTime || Date.now(), Date.now()]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + } + }) } } - // 3. Return bytes. - return bytes + return fs } -/** - * @see https://www.w3.org/TR/FileAPI/#convert-line-endings-to-native - * @param {string} s - */ -function convertLineEndingsNative (s) { - // 1. Let native line ending be be the code point U+000A LF. - let nativeLineEnding = '\n' +function enqueue (elem) { + debug('ENQUEUE', elem[0].name, elem[1]) + fs[gracefulQueue].push(elem) + retry() +} - // 2. If the underlying platform’s conventions are to - // represent newlines as a carriage return and line feed - // sequence, set native line ending to the code point - // U+000D CR followed by the code point U+000A LF. - if (process.platform === 'win32') { - nativeLineEnding = '\r\n' - } +// keep track of the timeout between retry() calls +var retryTimer - return s.replace(/\r?\n/g, nativeLineEnding) +// reset the startTime and lastTime to now +// this resets the start of the 60 second overall timeout as well as the +// delay between attempts so that we'll retry these jobs sooner +function resetQueue () { + var now = Date.now() + for (var i = 0; i < fs[gracefulQueue].length; ++i) { + // entries that are only a length of 2 are from an older version, don't + // bother modifying those since they'll be retried anyway. + if (fs[gracefulQueue][i].length > 2) { + fs[gracefulQueue][i][3] = now // startTime + fs[gracefulQueue][i][4] = now // lastTime + } + } + // call retry to make sure we're actively processing the queue + retry() } -// If this function is moved to ./util.js, some tools (such as -// rollup) will warn about circular dependencies. 
See: -// https://github.com/nodejs/undici/issues/1629 -function isFileLike (object) { - return ( - (NativeFile && object instanceof NativeFile) || - object instanceof File || ( - object && - (typeof object.stream === 'function' || - typeof object.arrayBuffer === 'function') && - object[Symbol.toStringTag] === 'File' - ) - ) -} +function retry () { + // clear the timer and remove it to help prevent unintended concurrency + clearTimeout(retryTimer) + retryTimer = undefined -module.exports = { File, FileLike, isFileLike } + if (fs[gracefulQueue].length === 0) + return + + var elem = fs[gracefulQueue].shift() + var fn = elem[0] + var args = elem[1] + // these items may be unset if they were added by an older graceful-fs + var err = elem[2] + var startTime = elem[3] + var lastTime = elem[4] + + // if we don't have a startTime we have no way of knowing if we've waited + // long enough, so go ahead and retry this item now + if (startTime === undefined) { + debug('RETRY', fn.name, args) + fn.apply(null, args) + } else if (Date.now() - startTime >= 60000) { + // it's been more than 60 seconds total, bail now + debug('TIMEOUT', fn.name, args) + var cb = args.pop() + if (typeof cb === 'function') + cb.call(null, err) + } else { + // the amount of time between the last attempt and right now + var sinceAttempt = Date.now() - lastTime + // the amount of time between when we first tried, and when we last tried + // rounded up to at least 1 + var sinceStart = Math.max(lastTime - startTime, 1) + // backoff. wait longer than the total time we've been retrying, but only + // up to a maximum of 100ms + var desiredDelay = Math.min(sinceStart * 1.2, 100) + // it's been long enough since the last retry, do it again + if (sinceAttempt >= desiredDelay) { + debug('RETRY', fn.name, args) + fn.apply(null, args.concat([startTime])) + } else { + // if we can't do this job yet, push it to the end of the queue + // and let the next iteration check again + fs[gracefulQueue].push(elem) + } + } + + // schedule our next run if one isn't already scheduled + if (retryTimer === undefined) { + retryTimer = setTimeout(retry, 0) + } +} /***/ }), -/***/ 2015: +/***/ 73086: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +var Stream = (__nccwpck_require__(12781).Stream) +module.exports = legacy + +function legacy (fs) { + return { + ReadStream: ReadStream, + WriteStream: WriteStream + } -const { isBlobLike, toUSVString, makeIterator } = __nccwpck_require__(2538) -const { kState } = __nccwpck_require__(5861) -const { File: UndiciFile, FileLike, isFileLike } = __nccwpck_require__(8511) -const { webidl } = __nccwpck_require__(1744) -const { Blob, File: NativeFile } = __nccwpck_require__(4300) + function ReadStream (path, options) { + if (!(this instanceof ReadStream)) return new ReadStream(path, options); -/** @type {globalThis['File']} */ -const File = NativeFile ?? 
UndiciFile + Stream.call(this); -// https://xhr.spec.whatwg.org/#formdata -class FormData { - constructor (form) { - if (form !== undefined) { - throw webidl.errors.conversionFailed({ - prefix: 'FormData constructor', - argument: 'Argument 1', - types: ['undefined'] - }) - } + var self = this; - this[kState] = [] - } + this.path = path; + this.fd = null; + this.readable = true; + this.paused = false; - append (name, value, filename = undefined) { - webidl.brandCheck(this, FormData) + this.flags = 'r'; + this.mode = 438; /*=0666*/ + this.bufferSize = 64 * 1024; - webidl.argumentLengthCheck(arguments, 2, { header: 'FormData.append' }) + options = options || {}; - if (arguments.length === 3 && !isBlobLike(value)) { - throw new TypeError( - "Failed to execute 'append' on 'FormData': parameter 2 is not of type 'Blob'" - ) + // Mixin options into this + var keys = Object.keys(options); + for (var index = 0, length = keys.length; index < length; index++) { + var key = keys[index]; + this[key] = options[key]; } - // 1. Let value be value if given; otherwise blobValue. - - name = webidl.converters.USVString(name) - value = isBlobLike(value) - ? webidl.converters.Blob(value, { strict: false }) - : webidl.converters.USVString(value) - filename = arguments.length === 3 - ? webidl.converters.USVString(filename) - : undefined + if (this.encoding) this.setEncoding(this.encoding); - // 2. Let entry be the result of creating an entry with - // name, value, and filename if given. - const entry = makeEntry(name, value, filename) + if (this.start !== undefined) { + if ('number' !== typeof this.start) { + throw TypeError('start must be a Number'); + } + if (this.end === undefined) { + this.end = Infinity; + } else if ('number' !== typeof this.end) { + throw TypeError('end must be a Number'); + } - // 3. Append entry to this’s entry list. - this[kState].push(entry) - } + if (this.start > this.end) { + throw new Error('start must be <= end'); + } - delete (name) { - webidl.brandCheck(this, FormData) + this.pos = this.start; + } - webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.delete' }) + if (this.fd !== null) { + process.nextTick(function() { + self._read(); + }); + return; + } - name = webidl.converters.USVString(name) + fs.open(this.path, this.flags, this.mode, function (err, fd) { + if (err) { + self.emit('error', err); + self.readable = false; + return; + } - // The delete(name) method steps are to remove all entries whose name - // is name from this’s entry list. - this[kState] = this[kState].filter(entry => entry.name !== name) + self.fd = fd; + self.emit('open', fd); + self._read(); + }) } - get (name) { - webidl.brandCheck(this, FormData) + function WriteStream (path, options) { + if (!(this instanceof WriteStream)) return new WriteStream(path, options); - webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.get' }) + Stream.call(this); - name = webidl.converters.USVString(name) + this.path = path; + this.fd = null; + this.writable = true; - // 1. If there is no entry whose name is name in this’s entry list, - // then return null. - const idx = this[kState].findIndex((entry) => entry.name === name) - if (idx === -1) { - return null - } + this.flags = 'w'; + this.encoding = 'binary'; + this.mode = 438; /*=0666*/ + this.bytesWritten = 0; - // 2. Return the value of the first entry whose name is name from - // this’s entry list. 
- return this[kState][idx].value - } + options = options || {}; - getAll (name) { - webidl.brandCheck(this, FormData) + // Mixin options into this + var keys = Object.keys(options); + for (var index = 0, length = keys.length; index < length; index++) { + var key = keys[index]; + this[key] = options[key]; + } - webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.getAll' }) + if (this.start !== undefined) { + if ('number' !== typeof this.start) { + throw TypeError('start must be a Number'); + } + if (this.start < 0) { + throw new Error('start must be >= zero'); + } - name = webidl.converters.USVString(name) + this.pos = this.start; + } - // 1. If there is no entry whose name is name in this’s entry list, - // then return the empty list. - // 2. Return the values of all entries whose name is name, in order, - // from this’s entry list. - return this[kState] - .filter((entry) => entry.name === name) - .map((entry) => entry.value) + this.busy = false; + this._queue = []; + + if (this.fd === null) { + this._open = fs.open; + this._queue.push([this._open, this.path, this.flags, this.mode, undefined]); + this.flush(); + } } +} - has (name) { - webidl.brandCheck(this, FormData) - webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.has' }) +/***/ }), - name = webidl.converters.USVString(name) +/***/ 20263: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // The has(name) method steps are to return true if there is an entry - // whose name is name in this’s entry list; otherwise false. - return this[kState].findIndex((entry) => entry.name === name) !== -1 - } +var constants = __nccwpck_require__(22057) - set (name, value, filename = undefined) { - webidl.brandCheck(this, FormData) +var origCwd = process.cwd +var cwd = null - webidl.argumentLengthCheck(arguments, 2, { header: 'FormData.set' }) +var platform = process.env.GRACEFUL_FS_PLATFORM || process.platform - if (arguments.length === 3 && !isBlobLike(value)) { - throw new TypeError( - "Failed to execute 'set' on 'FormData': parameter 2 is not of type 'Blob'" - ) +process.cwd = function() { + if (!cwd) + cwd = origCwd.call(process) + return cwd +} +try { + process.cwd() +} catch (er) {} + +// This check is needed until node.js 12 is required +if (typeof process.chdir === 'function') { + var chdir = process.chdir + process.chdir = function (d) { + cwd = null + chdir.call(process, d) + } + if (Object.setPrototypeOf) Object.setPrototypeOf(process.chdir, chdir) +} + +module.exports = patch + +function patch (fs) { + // (re-)implement some things that are known busted or missing. + + // lchmod, broken prior to 0.6.2 + // back-port the fix here. + if (constants.hasOwnProperty('O_SYMLINK') && + process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) { + patchLchmod(fs) + } + + // lutimes implementation, or no-op + if (!fs.lutimes) { + patchLutimes(fs) + } + + // https://github.com/isaacs/node-graceful-fs/issues/4 + // Chown should not fail on einval or eperm if non-root. + // It should not fail on enosys ever, as this just indicates + // that a fs doesn't support the intended operation. 
+ + fs.chown = chownFix(fs.chown) + fs.fchown = chownFix(fs.fchown) + fs.lchown = chownFix(fs.lchown) + + fs.chmod = chmodFix(fs.chmod) + fs.fchmod = chmodFix(fs.fchmod) + fs.lchmod = chmodFix(fs.lchmod) + + fs.chownSync = chownFixSync(fs.chownSync) + fs.fchownSync = chownFixSync(fs.fchownSync) + fs.lchownSync = chownFixSync(fs.lchownSync) + + fs.chmodSync = chmodFixSync(fs.chmodSync) + fs.fchmodSync = chmodFixSync(fs.fchmodSync) + fs.lchmodSync = chmodFixSync(fs.lchmodSync) + + fs.stat = statFix(fs.stat) + fs.fstat = statFix(fs.fstat) + fs.lstat = statFix(fs.lstat) + + fs.statSync = statFixSync(fs.statSync) + fs.fstatSync = statFixSync(fs.fstatSync) + fs.lstatSync = statFixSync(fs.lstatSync) + + // if lchmod/lchown do not exist, then make them no-ops + if (fs.chmod && !fs.lchmod) { + fs.lchmod = function (path, mode, cb) { + if (cb) process.nextTick(cb) + } + fs.lchmodSync = function () {} + } + if (fs.chown && !fs.lchown) { + fs.lchown = function (path, uid, gid, cb) { + if (cb) process.nextTick(cb) + } + fs.lchownSync = function () {} + } + + // on Windows, A/V software can lock the directory, causing this + // to fail with an EACCES or EPERM if the directory contains newly + // created files. Try again on failure, for up to 60 seconds. + + // Set the timeout this long because some Windows Anti-Virus, such as Parity + // bit9, may lock files for up to a minute, causing npm package install + // failures. Also, take care to yield the scheduler. Windows scheduling gives + // CPU to a busy looping process, which can cause the program causing the lock + // contention to be starved of CPU by node, so the contention doesn't resolve. + if (platform === "win32") { + fs.rename = typeof fs.rename !== 'function' ? fs.rename + : (function (fs$rename) { + function rename (from, to, cb) { + var start = Date.now() + var backoff = 0; + fs$rename(from, to, function CB (er) { + if (er + && (er.code === "EACCES" || er.code === "EPERM" || er.code === "EBUSY") + && Date.now() - start < 60000) { + setTimeout(function() { + fs.stat(to, function (stater, st) { + if (stater && stater.code === "ENOENT") + fs$rename(from, to, CB); + else + cb(er) + }) + }, backoff) + if (backoff < 100) + backoff += 10; + return; + } + if (cb) cb(er) + }) + } + if (Object.setPrototypeOf) Object.setPrototypeOf(rename, fs$rename) + return rename + })(fs.rename) + } + + // if read() returns EAGAIN, then just try it again. + fs.read = typeof fs.read !== 'function' ? fs.read + : (function (fs$read) { + function read (fd, buffer, offset, length, position, callback_) { + var callback + if (callback_ && typeof callback_ === 'function') { + var eagCounter = 0 + callback = function (er, _, __) { + if (er && er.code === 'EAGAIN' && eagCounter < 10) { + eagCounter ++ + return fs$read.call(fs, fd, buffer, offset, length, position, callback) + } + callback_.apply(this, arguments) + } + } + return fs$read.call(fs, fd, buffer, offset, length, position, callback) } - // The set(name, value) and set(name, blobValue, filename) method steps - // are: + // This ensures `util.promisify` works as it does for native `fs.read`. + if (Object.setPrototypeOf) Object.setPrototypeOf(read, fs$read) + return read + })(fs.read) - // 1. Let value be value if given; otherwise blobValue. + fs.readSync = typeof fs.readSync !== 'function' ? 
fs.readSync + : (function (fs$readSync) { return function (fd, buffer, offset, length, position) { + var eagCounter = 0 + while (true) { + try { + return fs$readSync.call(fs, fd, buffer, offset, length, position) + } catch (er) { + if (er.code === 'EAGAIN' && eagCounter < 10) { + eagCounter ++ + continue + } + throw er + } + } + }})(fs.readSync) - name = webidl.converters.USVString(name) - value = isBlobLike(value) - ? webidl.converters.Blob(value, { strict: false }) - : webidl.converters.USVString(value) - filename = arguments.length === 3 - ? toUSVString(filename) - : undefined + function patchLchmod (fs) { + fs.lchmod = function (path, mode, callback) { + fs.open( path + , constants.O_WRONLY | constants.O_SYMLINK + , mode + , function (err, fd) { + if (err) { + if (callback) callback(err) + return + } + // prefer to return the chmod error, if one occurs, + // but still try to close, and report closing errors if they occur. + fs.fchmod(fd, mode, function (err) { + fs.close(fd, function(err2) { + if (callback) callback(err || err2) + }) + }) + }) + } - // 2. Let entry be the result of creating an entry with name, value, and - // filename if given. - const entry = makeEntry(name, value, filename) + fs.lchmodSync = function (path, mode) { + var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode) - // 3. If there are entries in this’s entry list whose name is name, then - // replace the first such entry with entry and remove the others. - const idx = this[kState].findIndex((entry) => entry.name === name) - if (idx !== -1) { - this[kState] = [ - ...this[kState].slice(0, idx), - entry, - ...this[kState].slice(idx + 1).filter((entry) => entry.name !== name) - ] - } else { - // 4. Otherwise, append entry to this’s entry list. - this[kState].push(entry) + // prefer to return the chmod error, if one occurs, + // but still try to close, and report closing errors if they occur. 
+ var threw = true + var ret + try { + ret = fs.fchmodSync(fd, mode) + threw = false + } finally { + if (threw) { + try { + fs.closeSync(fd) + } catch (er) {} + } else { + fs.closeSync(fd) + } + } + return ret } } - entries () { - webidl.brandCheck(this, FormData) - - return makeIterator( - () => this[kState].map(pair => [pair.name, pair.value]), - 'FormData', - 'key+value' - ) - } + function patchLutimes (fs) { + if (constants.hasOwnProperty("O_SYMLINK") && fs.futimes) { + fs.lutimes = function (path, at, mt, cb) { + fs.open(path, constants.O_SYMLINK, function (er, fd) { + if (er) { + if (cb) cb(er) + return + } + fs.futimes(fd, at, mt, function (er) { + fs.close(fd, function (er2) { + if (cb) cb(er || er2) + }) + }) + }) + } - keys () { - webidl.brandCheck(this, FormData) + fs.lutimesSync = function (path, at, mt) { + var fd = fs.openSync(path, constants.O_SYMLINK) + var ret + var threw = true + try { + ret = fs.futimesSync(fd, at, mt) + threw = false + } finally { + if (threw) { + try { + fs.closeSync(fd) + } catch (er) {} + } else { + fs.closeSync(fd) + } + } + return ret + } - return makeIterator( - () => this[kState].map(pair => [pair.name, pair.value]), - 'FormData', - 'key' - ) + } else if (fs.futimes) { + fs.lutimes = function (_a, _b, _c, cb) { if (cb) process.nextTick(cb) } + fs.lutimesSync = function () {} + } } - values () { - webidl.brandCheck(this, FormData) - - return makeIterator( - () => this[kState].map(pair => [pair.name, pair.value]), - 'FormData', - 'value' - ) + function chmodFix (orig) { + if (!orig) return orig + return function (target, mode, cb) { + return orig.call(fs, target, mode, function (er) { + if (chownErOk(er)) er = null + if (cb) cb.apply(this, arguments) + }) + } } - /** - * @param {(value: string, key: string, self: FormData) => void} callbackFn - * @param {unknown} thisArg - */ - forEach (callbackFn, thisArg = globalThis) { - webidl.brandCheck(this, FormData) + function chmodFixSync (orig) { + if (!orig) return orig + return function (target, mode) { + try { + return orig.call(fs, target, mode) + } catch (er) { + if (!chownErOk(er)) throw er + } + } + } - webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.forEach' }) - if (typeof callbackFn !== 'function') { - throw new TypeError( - "Failed to execute 'forEach' on 'FormData': parameter 1 is not of type 'Function'." - ) + function chownFix (orig) { + if (!orig) return orig + return function (target, uid, gid, cb) { + return orig.call(fs, target, uid, gid, function (er) { + if (chownErOk(er)) er = null + if (cb) cb.apply(this, arguments) + }) } + } - for (const [key, value] of this) { - callbackFn.apply(thisArg, [value, key, this]) + function chownFixSync (orig) { + if (!orig) return orig + return function (target, uid, gid) { + try { + return orig.call(fs, target, uid, gid) + } catch (er) { + if (!chownErOk(er)) throw er + } } } -} - -FormData.prototype[Symbol.iterator] = FormData.prototype.entries -Object.defineProperties(FormData.prototype, { - [Symbol.toStringTag]: { - value: 'FormData', - configurable: true + function statFix (orig) { + if (!orig) return orig + // Older versions of Node erroneously returned signed integers for + // uid + gid. + return function (target, options, cb) { + if (typeof options === 'function') { + cb = options + options = null + } + function callback (er, stats) { + if (stats) { + if (stats.uid < 0) stats.uid += 0x100000000 + if (stats.gid < 0) stats.gid += 0x100000000 + } + if (cb) cb.apply(this, arguments) + } + return options ? 
orig.call(fs, target, options, callback) + : orig.call(fs, target, callback) + } } -}) -/** - * @see https://html.spec.whatwg.org/multipage/form-control-infrastructure.html#create-an-entry - * @param {string} name - * @param {string|Blob} value - * @param {?string} filename - * @returns - */ -function makeEntry (name, value, filename) { - // 1. Set name to the result of converting name into a scalar value string. - // "To convert a string into a scalar value string, replace any surrogates - // with U+FFFD." - // see: https://nodejs.org/dist/latest-v18.x/docs/api/buffer.html#buftostringencoding-start-end - name = Buffer.from(name).toString('utf8') + function statFixSync (orig) { + if (!orig) return orig + // Older versions of Node erroneously returned signed integers for + // uid + gid. + return function (target, options) { + var stats = options ? orig.call(fs, target, options) + : orig.call(fs, target) + if (stats) { + if (stats.uid < 0) stats.uid += 0x100000000 + if (stats.gid < 0) stats.gid += 0x100000000 + } + return stats; + } + } + + // ENOSYS means that the fs doesn't support the op. Just ignore + // that, because it doesn't matter. + // + // if there's no getuid, or if getuid() is something other + // than 0, and the error is EINVAL or EPERM, then just ignore + // it. + // + // This specific case is a silent failure in cp, install, tar, + // and most other unix tools that manage permissions. + // + // When running as root, or if other types of errors are + // encountered, then it's strict. + function chownErOk (er) { + if (!er) + return true - // 2. If value is a string, then set value to the result of converting - // value into a scalar value string. - if (typeof value === 'string') { - value = Buffer.from(value).toString('utf8') - } else { - // 3. Otherwise: + if (er.code === "ENOSYS") + return true - // 1. If value is not a File object, then set value to a new File object, - // representing the same bytes, whose name attribute value is "blob" - if (!isFileLike(value)) { - value = value instanceof Blob - ? new File([value], 'blob', { type: value.type }) - : new FileLike(value, 'blob', { type: value.type }) + var nonroot = !process.getuid || process.getuid() !== 0 + if (nonroot) { + if (er.code === "EINVAL" || er.code === "EPERM") + return true } - // 2. If filename is given, then set value to a new File object, - // representing the same bytes, whose name attribute is filename. - if (filename !== undefined) { - /** @type {FilePropertyBag} */ - const options = { - type: value.type, - lastModified: value.lastModified - } - - value = (NativeFile && value instanceof NativeFile) || value instanceof UndiciFile - ? new File([value], filename, options) - : new FileLike(value, filename, options) - } + return false } - - // 4. Return an entry whose name is name and whose value is value. - return { name, value } } -module.exports = { FormData } - /***/ }), -/***/ 1246: +/***/ 31621: /***/ ((module) => { "use strict"; -// In case of breaking changes, increase the version -// number to avoid conflicts. -const globalOrigin = Symbol.for('undici.globalOrigin.1') +module.exports = (flag, argv = process.argv) => { + const prefix = flag.startsWith('-') ? '' : (flag.length === 1 ? 
'-' : '--'); + const position = argv.indexOf(prefix + flag); + const terminatorPosition = argv.indexOf('--'); + return position !== -1 && (terminatorPosition === -1 || position < terminatorPosition); +}; -function getGlobalOrigin () { - return globalThis[globalOrigin] -} -function setGlobalOrigin (newOrigin) { - if (newOrigin === undefined) { - Object.defineProperty(globalThis, globalOrigin, { - value: undefined, - writable: true, - enumerable: false, - configurable: false - }) +/***/ }), - return - } +/***/ 90176: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - const parsedURL = new URL(newOrigin) +"use strict"; - if (parsedURL.protocol !== 'http:' && parsedURL.protocol !== 'https:') { - throw new TypeError(`Only http & https urls are allowed, received ${parsedURL.protocol}`) - } - Object.defineProperty(globalThis, globalOrigin, { - value: parsedURL, - writable: true, - enumerable: false, - configurable: false - }) -} +var $defineProperty = __nccwpck_require__(6123); -module.exports = { - getGlobalOrigin, - setGlobalOrigin -} +var hasPropertyDescriptors = function hasPropertyDescriptors() { + return !!$defineProperty; +}; + +hasPropertyDescriptors.hasArrayLengthDefineBug = function hasArrayLengthDefineBug() { + // node v0.6 has a bug where array lengths can be Set but not Defined + if (!$defineProperty) { + return null; + } + try { + return $defineProperty([], 'length', { value: 1 }).length !== 1; + } catch (e) { + // In Firefox 4-22, defining length on an array throws an exception. + return true; + } +}; + +module.exports = hasPropertyDescriptors; /***/ }), -/***/ 554: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 45894: +/***/ ((module) => { "use strict"; -// https://github.com/Ethan-Arrowood/undici-fetch +var test = { + __proto__: null, + foo: {} +}; -const { kHeadersList, kConstruct } = __nccwpck_require__(2785) -const { kGuard } = __nccwpck_require__(5861) -const { kEnumerableProperty } = __nccwpck_require__(3983) -const { - makeIterator, - isValidHeaderName, - isValidHeaderValue -} = __nccwpck_require__(2538) -const { webidl } = __nccwpck_require__(1744) -const assert = __nccwpck_require__(9491) +var $Object = Object; -const kHeadersMap = Symbol('headers map') -const kHeadersSortedMap = Symbol('headers map sorted') +/** @type {import('.')} */ +module.exports = function hasProto() { + // @ts-expect-error: TS errors on an inherited property for some reason + return { __proto__: test }.foo === test.foo + && !(test instanceof $Object); +}; -/** - * @param {number} code - */ -function isHTTPWhiteSpaceCharCode (code) { - return code === 0x00a || code === 0x00d || code === 0x009 || code === 0x020 -} -/** - * @see https://fetch.spec.whatwg.org/#concept-header-value-normalize - * @param {string} potentialValue - */ -function headerValueNormalize (potentialValue) { - // To normalize a byte sequence potentialValue, remove - // any leading and trailing HTTP whitespace bytes from - // potentialValue. - let i = 0; let j = potentialValue.length +/***/ }), - while (j > i && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(j - 1))) --j - while (j > i && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(i))) ++i +/***/ 40587: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - return i === 0 && j === potentialValue.length ? potentialValue : potentialValue.substring(i, j) -} +"use strict"; -function fill (headers, object) { - // To fill a Headers object headers with a given object object, run these steps: - // 1. 
If object is a sequence, then for each header in object: - // Note: webidl conversion to array has already been done. - if (Array.isArray(object)) { - for (let i = 0; i < object.length; ++i) { - const header = object[i] - // 1. If header does not contain exactly two items, then throw a TypeError. - if (header.length !== 2) { - throw webidl.errors.exception({ - header: 'Headers constructor', - message: `expected name/value pair to be length 2, found ${header.length}.` - }) - } +var origSymbol = typeof Symbol !== 'undefined' && Symbol; +var hasSymbolSham = __nccwpck_require__(57747); - // 2. Append (header’s first item, header’s second item) to headers. - appendHeader(headers, header[0], header[1]) - } - } else if (typeof object === 'object' && object !== null) { - // Note: null should throw +module.exports = function hasNativeSymbols() { + if (typeof origSymbol !== 'function') { return false; } + if (typeof Symbol !== 'function') { return false; } + if (typeof origSymbol('foo') !== 'symbol') { return false; } + if (typeof Symbol('bar') !== 'symbol') { return false; } - // 2. Otherwise, object is a record, then for each key → value in object, - // append (key, value) to headers - const keys = Object.keys(object) - for (let i = 0; i < keys.length; ++i) { - appendHeader(headers, keys[i], object[keys[i]]) - } - } else { - throw webidl.errors.conversionFailed({ - prefix: 'Headers constructor', - argument: 'Argument 1', - types: ['sequence>', 'record'] - }) - } -} + return hasSymbolSham(); +}; -/** - * @see https://fetch.spec.whatwg.org/#concept-headers-append - */ -function appendHeader (headers, name, value) { - // 1. Normalize value. - value = headerValueNormalize(value) - // 2. If name is not a header name or value is not a - // header value, then throw a TypeError. - if (!isValidHeaderName(name)) { - throw webidl.errors.invalidArgument({ - prefix: 'Headers.append', - value: name, - type: 'header name' - }) - } else if (!isValidHeaderValue(value)) { - throw webidl.errors.invalidArgument({ - prefix: 'Headers.append', - value, - type: 'header value' - }) - } +/***/ }), - // 3. If headers’s guard is "immutable", then throw a TypeError. - // 4. Otherwise, if headers’s guard is "request" and name is a - // forbidden header name, return. - // Note: undici does not implement forbidden header names - if (headers[kGuard] === 'immutable') { - throw new TypeError('immutable') - } else if (headers[kGuard] === 'request-no-cors') { - // 5. Otherwise, if headers’s guard is "request-no-cors": - // TODO - } +/***/ 57747: +/***/ ((module) => { - // 6. Otherwise, if headers’s guard is "response" and name is a - // forbidden response-header name, return. +"use strict"; - // 7. Append (name, value) to headers’s header list. - return headers[kHeadersList].append(name, value) - // 8. 
If headers’s guard is "request-no-cors", then remove - // privileged no-CORS request headers from headers -} +/* eslint complexity: [2, 18], max-statements: [2, 33] */ +module.exports = function hasSymbols() { + if (typeof Symbol !== 'function' || typeof Object.getOwnPropertySymbols !== 'function') { return false; } + if (typeof Symbol.iterator === 'symbol') { return true; } -class HeadersList { - /** @type {[string, string][]|null} */ - cookies = null + var obj = {}; + var sym = Symbol('test'); + var symObj = Object(sym); + if (typeof sym === 'string') { return false; } - constructor (init) { - if (init instanceof HeadersList) { - this[kHeadersMap] = new Map(init[kHeadersMap]) - this[kHeadersSortedMap] = init[kHeadersSortedMap] - this.cookies = init.cookies === null ? null : [...init.cookies] - } else { - this[kHeadersMap] = new Map(init) - this[kHeadersSortedMap] = null - } - } + if (Object.prototype.toString.call(sym) !== '[object Symbol]') { return false; } + if (Object.prototype.toString.call(symObj) !== '[object Symbol]') { return false; } - // https://fetch.spec.whatwg.org/#header-list-contains - contains (name) { - // A header list list contains a header name name if list - // contains a header whose name is a byte-case-insensitive - // match for name. - name = name.toLowerCase() + // temp disabled per https://github.com/ljharb/object.assign/issues/17 + // if (sym instanceof Symbol) { return false; } + // temp disabled per https://github.com/WebReflection/get-own-property-symbols/issues/4 + // if (!(symObj instanceof Symbol)) { return false; } - return this[kHeadersMap].has(name) - } + // if (typeof Symbol.prototype.toString !== 'function') { return false; } + // if (String(sym) !== Symbol.prototype.toString.call(sym)) { return false; } - clear () { - this[kHeadersMap].clear() - this[kHeadersSortedMap] = null - this.cookies = null - } + var symVal = 42; + obj[sym] = symVal; + for (sym in obj) { return false; } // eslint-disable-line no-restricted-syntax, no-unreachable-loop + if (typeof Object.keys === 'function' && Object.keys(obj).length !== 0) { return false; } - // https://fetch.spec.whatwg.org/#concept-header-list-append - append (name, value) { - this[kHeadersSortedMap] = null + if (typeof Object.getOwnPropertyNames === 'function' && Object.getOwnPropertyNames(obj).length !== 0) { return false; } - // 1. If list contains name, then set name to the first such - // header’s name. - const lowercaseName = name.toLowerCase() - const exists = this[kHeadersMap].get(lowercaseName) + var syms = Object.getOwnPropertySymbols(obj); + if (syms.length !== 1 || syms[0] !== sym) { return false; } - // 2. Append (name, value) to list. - if (exists) { - const delimiter = lowercaseName === 'cookie' ? 
'; ' : ', ' - this[kHeadersMap].set(lowercaseName, { - name: exists.name, - value: `${exists.value}${delimiter}${value}` - }) - } else { - this[kHeadersMap].set(lowercaseName, { name, value }) - } + if (!Object.prototype.propertyIsEnumerable.call(obj, sym)) { return false; } - if (lowercaseName === 'set-cookie') { - this.cookies ??= [] - this.cookies.push(value) - } - } + if (typeof Object.getOwnPropertyDescriptor === 'function') { + var descriptor = Object.getOwnPropertyDescriptor(obj, sym); + if (descriptor.value !== symVal || descriptor.enumerable !== true) { return false; } + } - // https://fetch.spec.whatwg.org/#concept-header-list-set - set (name, value) { - this[kHeadersSortedMap] = null - const lowercaseName = name.toLowerCase() + return true; +}; - if (lowercaseName === 'set-cookie') { - this.cookies = [value] - } - // 1. If list contains name, then set the value of - // the first such header to value and remove the - // others. - // 2. Otherwise, append header (name, value) to list. - this[kHeadersMap].set(lowercaseName, { name, value }) - } +/***/ }), - // https://fetch.spec.whatwg.org/#concept-header-list-delete - delete (name) { - this[kHeadersSortedMap] = null +/***/ 62157: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - name = name.toLowerCase() +"use strict"; - if (name === 'set-cookie') { - this.cookies = null - } - this[kHeadersMap].delete(name) - } +var call = Function.prototype.call; +var $hasOwn = Object.prototype.hasOwnProperty; +var bind = __nccwpck_require__(88334); - // https://fetch.spec.whatwg.org/#concept-header-list-get - get (name) { - const value = this[kHeadersMap].get(name.toLowerCase()) +/** @type {import('.')} */ +module.exports = bind.call(call, $hasOwn); - // 1. If list does not contain name, then return null. - // 2. Return the values of all headers in list whose name - // is a byte-case-insensitive match for name, - // separated from each other by 0x2C 0x20, in order. - return value === undefined ? null : value.value - } - * [Symbol.iterator] () { - // use the lowercased name - for (const [name, { value }] of this[kHeadersMap]) { - yield [name, value] - } - } +/***/ }), - get entries () { - const headers = {} +/***/ 10845: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (this[kHeadersMap].size) { - for (const { name, value } of this[kHeadersMap].values()) { - headers[name] = value - } - } +"use strict"; +/*! + * humanize-ms - index.js + * Copyright(c) 2014 dead_horse + * MIT Licensed + */ - return headers - } -} -// https://fetch.spec.whatwg.org/#headers-class -class Headers { - constructor (init = undefined) { - if (init === kConstruct) { - return - } - this[kHeadersList] = new HeadersList() - // The new Headers(init) constructor steps are: +/** + * Module dependencies. + */ - // 1. Set this’s guard to "none". - this[kGuard] = 'none' +var util = __nccwpck_require__(73837); +var ms = __nccwpck_require__(80900); - // 2. If init is given, then fill this with init. 
- if (init !== undefined) { - init = webidl.converters.HeadersInit(init) - fill(this, init) - } +module.exports = function (t) { + if (typeof t === 'number') return t; + var r = ms(t); + if (r === undefined) { + var err = new Error(util.format('humanize-ms(%j) result undefined', t)); + console.warn(err.stack); } + return r; +}; - // https://fetch.spec.whatwg.org/#dom-headers-append - append (name, value) { - webidl.brandCheck(this, Headers) - webidl.argumentLengthCheck(arguments, 2, { header: 'Headers.append' }) +/***/ }), - name = webidl.converters.ByteString(name) - value = webidl.converters.ByteString(value) +/***/ 39695: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - return appendHeader(this, name, value) - } +"use strict"; - // https://fetch.spec.whatwg.org/#dom-headers-delete - delete (name) { - webidl.brandCheck(this, Headers) +var Buffer = (__nccwpck_require__(15118).Buffer); + +// Multibyte codec. In this scheme, a character is represented by 1 or more bytes. +// Our codec supports UTF-16 surrogates, extensions for GB18030 and unicode sequences. +// To save memory and loading time, we read table files only when requested. + +exports._dbcs = DBCSCodec; + +var UNASSIGNED = -1, + GB18030_CODE = -2, + SEQ_START = -10, + NODE_START = -1000, + UNASSIGNED_NODE = new Array(0x100), + DEF_CHAR = -1; + +for (var i = 0; i < 0x100; i++) + UNASSIGNED_NODE[i] = UNASSIGNED; + + +// Class DBCSCodec reads and initializes mapping tables. +function DBCSCodec(codecOptions, iconv) { + this.encodingName = codecOptions.encodingName; + if (!codecOptions) + throw new Error("DBCS codec is called without the data.") + if (!codecOptions.table) + throw new Error("Encoding '" + this.encodingName + "' has no data."); + + // Load tables. + var mappingTable = codecOptions.table(); + + + // Decode tables: MBCS -> Unicode. + + // decodeTables is a trie, encoded as an array of arrays of integers. Internal arrays are trie nodes and all have len = 256. + // Trie root is decodeTables[0]. + // Values: >= 0 -> unicode character code. can be > 0xFFFF + // == UNASSIGNED -> unknown/unassigned sequence. + // == GB18030_CODE -> this is the end of a GB18030 4-byte sequence. + // <= NODE_START -> index of the next node in our trie to process next byte. + // <= SEQ_START -> index of the start of a character code sequence, in decodeTableSeq. + this.decodeTables = []; + this.decodeTables[0] = UNASSIGNED_NODE.slice(0); // Create root node. + + // Sometimes a MBCS char corresponds to a sequence of unicode chars. We store them as arrays of integers here. + this.decodeTableSeq = []; + + // Actual mapping tables consist of chunks. Use them to fill up decode tables. + for (var i = 0; i < mappingTable.length; i++) + this._addDecodeChunk(mappingTable[i]); + + this.defaultCharUnicode = iconv.defaultCharUnicode; + + + // Encode tables: Unicode -> DBCS. + + // `encodeTable` is array mapping from unicode char to encoded char. All its values are integers for performance. + // Because it can be sparse, it is represented as array of buckets by 256 chars each. Bucket can be null. + // Values: >= 0 -> it is a normal char. Write the value (if <=256 then 1 byte, if <=65536 then 2 bytes, etc.). + // == UNASSIGNED -> no conversion found. Output a default char. + // <= SEQ_START -> it's an index in encodeTableSeq, see below. The character starts a sequence. + this.encodeTable = []; + + // `encodeTableSeq` is used when a sequence of unicode characters is encoded as a single code. 
We use a tree of + // objects where keys correspond to characters in sequence and leafs are the encoded dbcs values. A special DEF_CHAR key + // means end of sequence (needed when one sequence is a strict subsequence of another). + // Objects are kept separately from encodeTable to increase performance. + this.encodeTableSeq = []; + + // Some chars can be decoded, but need not be encoded. + var skipEncodeChars = {}; + if (codecOptions.encodeSkipVals) + for (var i = 0; i < codecOptions.encodeSkipVals.length; i++) { + var val = codecOptions.encodeSkipVals[i]; + if (typeof val === 'number') + skipEncodeChars[val] = true; + else + for (var j = val.from; j <= val.to; j++) + skipEncodeChars[j] = true; + } + + // Use decode trie to recursively fill out encode tables. + this._fillEncodeTable(0, 0, skipEncodeChars); - webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.delete' }) + // Add more encoding pairs when needed. + if (codecOptions.encodeAdd) { + for (var uChar in codecOptions.encodeAdd) + if (Object.prototype.hasOwnProperty.call(codecOptions.encodeAdd, uChar)) + this._setEncodeChar(uChar.charCodeAt(0), codecOptions.encodeAdd[uChar]); + } - name = webidl.converters.ByteString(name) + this.defCharSB = this.encodeTable[0][iconv.defaultCharSingleByte.charCodeAt(0)]; + if (this.defCharSB === UNASSIGNED) this.defCharSB = this.encodeTable[0]['?']; + if (this.defCharSB === UNASSIGNED) this.defCharSB = "?".charCodeAt(0); - // 1. If name is not a header name, then throw a TypeError. - if (!isValidHeaderName(name)) { - throw webidl.errors.invalidArgument({ - prefix: 'Headers.delete', - value: name, - type: 'header name' - }) - } - // 2. If this’s guard is "immutable", then throw a TypeError. - // 3. Otherwise, if this’s guard is "request" and name is a - // forbidden header name, return. - // 4. Otherwise, if this’s guard is "request-no-cors", name - // is not a no-CORS-safelisted request-header name, and - // name is not a privileged no-CORS request-header name, - // return. - // 5. Otherwise, if this’s guard is "response" and name is - // a forbidden response-header name, return. - // Note: undici does not implement forbidden header names - if (this[kGuard] === 'immutable') { - throw new TypeError('immutable') - } else if (this[kGuard] === 'request-no-cors') { - // TODO - } + // Load & create GB18030 tables when needed. + if (typeof codecOptions.gb18030 === 'function') { + this.gb18030 = codecOptions.gb18030(); // Load GB18030 ranges. - // 6. If this’s header list does not contain name, then - // return. - if (!this[kHeadersList].contains(name)) { - return - } + // Add GB18030 decode tables. + var thirdByteNodeIdx = this.decodeTables.length; + var thirdByteNode = this.decodeTables[thirdByteNodeIdx] = UNASSIGNED_NODE.slice(0); - // 7. Delete name from this’s header list. - // 8. If this’s guard is "request-no-cors", then remove - // privileged no-CORS request headers from this. 
- this[kHeadersList].delete(name) - } + var fourthByteNodeIdx = this.decodeTables.length; + var fourthByteNode = this.decodeTables[fourthByteNodeIdx] = UNASSIGNED_NODE.slice(0); - // https://fetch.spec.whatwg.org/#dom-headers-get - get (name) { - webidl.brandCheck(this, Headers) + for (var i = 0x81; i <= 0xFE; i++) { + var secondByteNodeIdx = NODE_START - this.decodeTables[0][i]; + var secondByteNode = this.decodeTables[secondByteNodeIdx]; + for (var j = 0x30; j <= 0x39; j++) + secondByteNode[j] = NODE_START - thirdByteNodeIdx; + } + for (var i = 0x81; i <= 0xFE; i++) + thirdByteNode[i] = NODE_START - fourthByteNodeIdx; + for (var i = 0x30; i <= 0x39; i++) + fourthByteNode[i] = GB18030_CODE + } +} + +DBCSCodec.prototype.encoder = DBCSEncoder; +DBCSCodec.prototype.decoder = DBCSDecoder; + +// Decoder helpers +DBCSCodec.prototype._getDecodeTrieNode = function(addr) { + var bytes = []; + for (; addr > 0; addr >>= 8) + bytes.push(addr & 0xFF); + if (bytes.length == 0) + bytes.push(0); + + var node = this.decodeTables[0]; + for (var i = bytes.length-1; i > 0; i--) { // Traverse nodes deeper into the trie. + var val = node[bytes[i]]; + + if (val == UNASSIGNED) { // Create new node. + node[bytes[i]] = NODE_START - this.decodeTables.length; + this.decodeTables.push(node = UNASSIGNED_NODE.slice(0)); + } + else if (val <= NODE_START) { // Existing node. + node = this.decodeTables[NODE_START - val]; + } + else + throw new Error("Overwrite byte in " + this.encodingName + ", addr: " + addr.toString(16)); + } + return node; +} - webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.get' }) - name = webidl.converters.ByteString(name) +DBCSCodec.prototype._addDecodeChunk = function(chunk) { + // First element of chunk is the hex mbcs code where we start. + var curAddr = parseInt(chunk[0], 16); - // 1. If name is not a header name, then throw a TypeError. - if (!isValidHeaderName(name)) { - throw webidl.errors.invalidArgument({ - prefix: 'Headers.get', - value: name, - type: 'header name' - }) + // Choose the decoding node where we'll write our chars. + var writeTable = this._getDecodeTrieNode(curAddr); + curAddr = curAddr & 0xFF; + + // Write all other elements of the chunk to the table. + for (var k = 1; k < chunk.length; k++) { + var part = chunk[k]; + if (typeof part === "string") { // String, write as-is. + for (var l = 0; l < part.length;) { + var code = part.charCodeAt(l++); + if (0xD800 <= code && code < 0xDC00) { // Decode surrogate + var codeTrail = part.charCodeAt(l++); + if (0xDC00 <= codeTrail && codeTrail < 0xE000) + writeTable[curAddr++] = 0x10000 + (code - 0xD800) * 0x400 + (codeTrail - 0xDC00); + else + throw new Error("Incorrect surrogate pair in " + this.encodingName + " at chunk " + chunk[0]); + } + else if (0x0FF0 < code && code <= 0x0FFF) { // Character sequence (our own encoding used) + var len = 0xFFF - code + 2; + var seq = []; + for (var m = 0; m < len; m++) + seq.push(part.charCodeAt(l++)); // Simple variation: don't support surrogates or subsequences in seq. + + writeTable[curAddr++] = SEQ_START - this.decodeTableSeq.length; + this.decodeTableSeq.push(seq); + } + else + writeTable[curAddr++] = code; // Basic char + } + } + else if (typeof part === "number") { // Integer, meaning increasing sequence starting with prev character. 
+ var charCode = writeTable[curAddr - 1] + 1; + for (var l = 0; l < part; l++) + writeTable[curAddr++] = charCode++; + } + else + throw new Error("Incorrect type '" + typeof part + "' given in " + this.encodingName + " at chunk " + chunk[0]); } + if (curAddr > 0xFF) + throw new Error("Incorrect chunk in " + this.encodingName + " at addr " + chunk[0] + ": too long" + curAddr); +} - // 2. Return the result of getting name from this’s header - // list. - return this[kHeadersList].get(name) - } +// Encoder helpers +DBCSCodec.prototype._getEncodeBucket = function(uCode) { + var high = uCode >> 8; // This could be > 0xFF because of astral characters. + if (this.encodeTable[high] === undefined) + this.encodeTable[high] = UNASSIGNED_NODE.slice(0); // Create bucket on demand. + return this.encodeTable[high]; +} - // https://fetch.spec.whatwg.org/#dom-headers-has - has (name) { - webidl.brandCheck(this, Headers) +DBCSCodec.prototype._setEncodeChar = function(uCode, dbcsCode) { + var bucket = this._getEncodeBucket(uCode); + var low = uCode & 0xFF; + if (bucket[low] <= SEQ_START) + this.encodeTableSeq[SEQ_START-bucket[low]][DEF_CHAR] = dbcsCode; // There's already a sequence, set a single-char subsequence of it. + else if (bucket[low] == UNASSIGNED) + bucket[low] = dbcsCode; +} - webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.has' }) +DBCSCodec.prototype._setEncodeSequence = function(seq, dbcsCode) { + + // Get the root of character tree according to first character of the sequence. + var uCode = seq[0]; + var bucket = this._getEncodeBucket(uCode); + var low = uCode & 0xFF; - name = webidl.converters.ByteString(name) + var node; + if (bucket[low] <= SEQ_START) { + // There's already a sequence with - use it. + node = this.encodeTableSeq[SEQ_START-bucket[low]]; + } + else { + // There was no sequence object - allocate a new one. + node = {}; + if (bucket[low] !== UNASSIGNED) node[DEF_CHAR] = bucket[low]; // If a char was set before - make it a single-char subsequence. + bucket[low] = SEQ_START - this.encodeTableSeq.length; + this.encodeTableSeq.push(node); + } + + // Traverse the character tree, allocating new nodes as needed. + for (var j = 1; j < seq.length-1; j++) { + var oldVal = node[uCode]; + if (typeof oldVal === 'object') + node = oldVal; + else { + node = node[uCode] = {} + if (oldVal !== undefined) + node[DEF_CHAR] = oldVal + } + } - // 1. If name is not a header name, then throw a TypeError. - if (!isValidHeaderName(name)) { - throw webidl.errors.invalidArgument({ - prefix: 'Headers.has', - value: name, - type: 'header name' - }) + // Set the leaf to given dbcsCode. + uCode = seq[seq.length-1]; + node[uCode] = dbcsCode; +} + +DBCSCodec.prototype._fillEncodeTable = function(nodeIdx, prefix, skipEncodeChars) { + var node = this.decodeTables[nodeIdx]; + for (var i = 0; i < 0x100; i++) { + var uCode = node[i]; + var mbCode = prefix + i; + if (skipEncodeChars[mbCode]) + continue; + + if (uCode >= 0) + this._setEncodeChar(uCode, mbCode); + else if (uCode <= NODE_START) + this._fillEncodeTable(NODE_START - uCode, mbCode << 8, skipEncodeChars); + else if (uCode <= SEQ_START) + this._setEncodeSequence(this.decodeTableSeq[SEQ_START - uCode], mbCode); } +} - // 2. Return true if this’s header list contains name; - // otherwise false. 
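The `_getEncodeBucket`/`_setEncodeChar` helpers above implement a two-level sparse table: the high byte of a code point selects a lazily allocated 256-slot bucket, the low byte indexes into it, and the first mapping written to a slot wins. A rough TypeScript sketch of the same shape, with an invented mapping value:

```typescript
// Sketch of the bucketed sparse Unicode -> DBCS table used above.
// The mapping below is illustrative; real values come from the data files.
const UNASSIGNED = -1;

class SparseEncodeTable {
  private buckets: (Int32Array | undefined)[] = [];

  private bucketFor(codePoint: number): Int32Array {
    const high = codePoint >> 8; // can exceed 0xFF for astral characters
    let bucket = this.buckets[high];
    if (!bucket) {
      bucket = new Int32Array(256).fill(UNASSIGNED);
      this.buckets[high] = bucket; // allocated only when first touched
    }
    return bucket;
  }

  set(codePoint: number, encoded: number): void {
    const bucket = this.bucketFor(codePoint);
    const low = codePoint & 0xff;
    if (bucket[low] === UNASSIGNED) bucket[low] = encoded; // first mapping wins
  }

  get(codePoint: number): number {
    const bucket = this.buckets[codePoint >> 8];
    return bucket ? bucket[codePoint & 0xff] : UNASSIGNED;
  }
}

const table = new SparseEncodeTable();
table.set(0x4e00, 0xd2bb); // hypothetical double-byte value for '一'
console.log(table.get(0x4e00).toString(16)); // 'd2bb'
console.log(table.get(0x0041));              // -1 (no bucket allocated for ASCII here)
```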
- return this[kHeadersList].contains(name) - } - // https://fetch.spec.whatwg.org/#dom-headers-set - set (name, value) { - webidl.brandCheck(this, Headers) - webidl.argumentLengthCheck(arguments, 2, { header: 'Headers.set' }) +// == Encoder ================================================================== - name = webidl.converters.ByteString(name) - value = webidl.converters.ByteString(value) +function DBCSEncoder(options, codec) { + // Encoder state + this.leadSurrogate = -1; + this.seqObj = undefined; + + // Static data + this.encodeTable = codec.encodeTable; + this.encodeTableSeq = codec.encodeTableSeq; + this.defaultCharSingleByte = codec.defCharSB; + this.gb18030 = codec.gb18030; +} - // 1. Normalize value. - value = headerValueNormalize(value) +DBCSEncoder.prototype.write = function(str) { + var newBuf = Buffer.alloc(str.length * (this.gb18030 ? 4 : 3)), + leadSurrogate = this.leadSurrogate, + seqObj = this.seqObj, nextChar = -1, + i = 0, j = 0; - // 2. If name is not a header name or value is not a - // header value, then throw a TypeError. - if (!isValidHeaderName(name)) { - throw webidl.errors.invalidArgument({ - prefix: 'Headers.set', - value: name, - type: 'header name' - }) - } else if (!isValidHeaderValue(value)) { - throw webidl.errors.invalidArgument({ - prefix: 'Headers.set', - value, - type: 'header value' - }) - } + while (true) { + // 0. Get next character. + if (nextChar === -1) { + if (i == str.length) break; + var uCode = str.charCodeAt(i++); + } + else { + var uCode = nextChar; + nextChar = -1; + } - // 3. If this’s guard is "immutable", then throw a TypeError. - // 4. Otherwise, if this’s guard is "request" and name is a - // forbidden header name, return. - // 5. Otherwise, if this’s guard is "request-no-cors" and - // name/value is not a no-CORS-safelisted request-header, - // return. - // 6. Otherwise, if this’s guard is "response" and name is a - // forbidden response-header name, return. - // Note: undici does not implement forbidden header names - if (this[kGuard] === 'immutable') { - throw new TypeError('immutable') - } else if (this[kGuard] === 'request-no-cors') { - // TODO + // 1. Handle surrogates. + if (0xD800 <= uCode && uCode < 0xE000) { // Char is one of surrogates. + if (uCode < 0xDC00) { // We've got lead surrogate. + if (leadSurrogate === -1) { + leadSurrogate = uCode; + continue; + } else { + leadSurrogate = uCode; + // Double lead surrogate found. + uCode = UNASSIGNED; + } + } else { // We've got trail surrogate. + if (leadSurrogate !== -1) { + uCode = 0x10000 + (leadSurrogate - 0xD800) * 0x400 + (uCode - 0xDC00); + leadSurrogate = -1; + } else { + // Incomplete surrogate pair - only trail surrogate found. + uCode = UNASSIGNED; + } + + } + } + else if (leadSurrogate !== -1) { + // Incomplete surrogate pair - only lead surrogate found. + nextChar = uCode; uCode = UNASSIGNED; // Write an error, then current char. + leadSurrogate = -1; + } + + // 2. Convert uCode character. + var dbcsCode = UNASSIGNED; + if (seqObj !== undefined && uCode != UNASSIGNED) { // We are in the middle of the sequence + var resCode = seqObj[uCode]; + if (typeof resCode === 'object') { // Sequence continues. + seqObj = resCode; + continue; + + } else if (typeof resCode == 'number') { // Sequence finished. Write it. + dbcsCode = resCode; + + } else if (resCode == undefined) { // Current character is not part of the sequence. + + // Try default character for this sequence + resCode = seqObj[DEF_CHAR]; + if (resCode !== undefined) { + dbcsCode = resCode; // Found. Write it. 
+ nextChar = uCode; // Current character will be written too in the next iteration. + + } else { + // TODO: What if we have no default? (resCode == undefined) + // Then, we should write first char of the sequence as-is and try the rest recursively. + // Didn't do it for now because no encoding has this situation yet. + // Currently, just skip the sequence and write current char. + } + } + seqObj = undefined; + } + else if (uCode >= 0) { // Regular character + var subtable = this.encodeTable[uCode >> 8]; + if (subtable !== undefined) + dbcsCode = subtable[uCode & 0xFF]; + + if (dbcsCode <= SEQ_START) { // Sequence start + seqObj = this.encodeTableSeq[SEQ_START-dbcsCode]; + continue; + } + + if (dbcsCode == UNASSIGNED && this.gb18030) { + // Use GB18030 algorithm to find character(s) to write. + var idx = findIdx(this.gb18030.uChars, uCode); + if (idx != -1) { + var dbcsCode = this.gb18030.gbChars[idx] + (uCode - this.gb18030.uChars[idx]); + newBuf[j++] = 0x81 + Math.floor(dbcsCode / 12600); dbcsCode = dbcsCode % 12600; + newBuf[j++] = 0x30 + Math.floor(dbcsCode / 1260); dbcsCode = dbcsCode % 1260; + newBuf[j++] = 0x81 + Math.floor(dbcsCode / 10); dbcsCode = dbcsCode % 10; + newBuf[j++] = 0x30 + dbcsCode; + continue; + } + } + } + + // 3. Write dbcsCode character. + if (dbcsCode === UNASSIGNED) + dbcsCode = this.defaultCharSingleByte; + + if (dbcsCode < 0x100) { + newBuf[j++] = dbcsCode; + } + else if (dbcsCode < 0x10000) { + newBuf[j++] = dbcsCode >> 8; // high byte + newBuf[j++] = dbcsCode & 0xFF; // low byte + } + else { + newBuf[j++] = dbcsCode >> 16; + newBuf[j++] = (dbcsCode >> 8) & 0xFF; + newBuf[j++] = dbcsCode & 0xFF; + } } - // 7. Set (name, value) in this’s header list. - // 8. If this’s guard is "request-no-cors", then remove - // privileged no-CORS request headers from this - this[kHeadersList].set(name, value) - } + this.seqObj = seqObj; + this.leadSurrogate = leadSurrogate; + return newBuf.slice(0, j); +} - // https://fetch.spec.whatwg.org/#dom-headers-getsetcookie - getSetCookie () { - webidl.brandCheck(this, Headers) +DBCSEncoder.prototype.end = function() { + if (this.leadSurrogate === -1 && this.seqObj === undefined) + return; // All clean. Most often case. - // 1. If this’s header list does not contain `Set-Cookie`, then return « ». - // 2. Return the values of all headers in this’s header list whose name is - // a byte-case-insensitive match for `Set-Cookie`, in order. + var newBuf = Buffer.alloc(10), j = 0; - const list = this[kHeadersList].cookies + if (this.seqObj) { // We're in the sequence. + var dbcsCode = this.seqObj[DEF_CHAR]; + if (dbcsCode !== undefined) { // Write beginning of the sequence. + if (dbcsCode < 0x100) { + newBuf[j++] = dbcsCode; + } + else { + newBuf[j++] = dbcsCode >> 8; // high byte + newBuf[j++] = dbcsCode & 0xFF; // low byte + } + } else { + // See todo above. + } + this.seqObj = undefined; + } - if (list) { - return [...list] + if (this.leadSurrogate !== -1) { + // Incomplete surrogate pair - only lead surrogate found. + newBuf[j++] = this.defaultCharSingleByte; + this.leadSurrogate = -1; } + + return newBuf.slice(0, j); +} - return [] - } +// Export for testing +DBCSEncoder.prototype.findIdx = findIdx; - // https://fetch.spec.whatwg.org/#concept-header-list-sort-and-combine - get [kHeadersSortedMap] () { - if (this[kHeadersList][kHeadersSortedMap]) { - return this[kHeadersList][kHeadersSortedMap] - } - // 1. Let headers be an empty list of headers with the key being the name - // and value the value. 
- const headers = [] +// == Decoder ================================================================== - // 2. Let names be the result of convert header names to a sorted-lowercase - // set with all the names of the headers in list. - const names = [...this[kHeadersList]].sort((a, b) => a[0] < b[0] ? -1 : 1) - const cookies = this[kHeadersList].cookies +function DBCSDecoder(options, codec) { + // Decoder state + this.nodeIdx = 0; + this.prevBuf = Buffer.alloc(0); - // 3. For each name of names: - for (let i = 0; i < names.length; ++i) { - const [name, value] = names[i] - // 1. If name is `set-cookie`, then: - if (name === 'set-cookie') { - // 1. Let values be a list of all values of headers in list whose name - // is a byte-case-insensitive match for name, in order. + // Static data + this.decodeTables = codec.decodeTables; + this.decodeTableSeq = codec.decodeTableSeq; + this.defaultCharUnicode = codec.defaultCharUnicode; + this.gb18030 = codec.gb18030; +} - // 2. For each value of values: - // 1. Append (name, value) to headers. - for (let j = 0; j < cookies.length; ++j) { - headers.push([name, cookies[j]]) +DBCSDecoder.prototype.write = function(buf) { + var newBuf = Buffer.alloc(buf.length*2), + nodeIdx = this.nodeIdx, + prevBuf = this.prevBuf, prevBufOffset = this.prevBuf.length, + seqStart = -this.prevBuf.length, // idx of the start of current parsed sequence. + uCode; + + if (prevBufOffset > 0) // Make prev buf overlap a little to make it easier to slice later. + prevBuf = Buffer.concat([prevBuf, buf.slice(0, 10)]); + + for (var i = 0, j = 0; i < buf.length; i++) { + var curByte = (i >= 0) ? buf[i] : prevBuf[i + prevBufOffset]; + + // Lookup in current trie node. + var uCode = this.decodeTables[nodeIdx][curByte]; + + if (uCode >= 0) { + // Normal character, just use it. } - } else { - // 2. Otherwise: + else if (uCode === UNASSIGNED) { // Unknown char. + // TODO: Callback with seq. + //var curSeq = (seqStart >= 0) ? buf.slice(seqStart, i+1) : prevBuf.slice(seqStart + prevBufOffset, i+1 + prevBufOffset); + i = seqStart; // Try to parse again, after skipping first byte of the sequence ('i' will be incremented by 'for' cycle). + uCode = this.defaultCharUnicode.charCodeAt(0); + } + else if (uCode === GB18030_CODE) { + var curSeq = (seqStart >= 0) ? buf.slice(seqStart, i+1) : prevBuf.slice(seqStart + prevBufOffset, i+1 + prevBufOffset); + var ptr = (curSeq[0]-0x81)*12600 + (curSeq[1]-0x30)*1260 + (curSeq[2]-0x81)*10 + (curSeq[3]-0x30); + var idx = findIdx(this.gb18030.gbChars, ptr); + uCode = this.gb18030.uChars[idx] + ptr - this.gb18030.gbChars[idx]; + } + else if (uCode <= NODE_START) { // Go to next trie node. + nodeIdx = NODE_START - uCode; + continue; + } + else if (uCode <= SEQ_START) { // Output a sequence of chars. + var seq = this.decodeTableSeq[SEQ_START - uCode]; + for (var k = 0; k < seq.length - 1; k++) { + uCode = seq[k]; + newBuf[j++] = uCode & 0xFF; + newBuf[j++] = uCode >> 8; + } + uCode = seq[seq.length-1]; + } + else + throw new Error("iconv-lite internal error: invalid decoding table value " + uCode + " at " + nodeIdx + "/" + curByte); - // 1. Let value be the result of getting name from list. + // Write the character to buffer, handling higher planes using surrogate pair. + if (uCode > 0xFFFF) { + uCode -= 0x10000; + var uCodeLead = 0xD800 + Math.floor(uCode / 0x400); + newBuf[j++] = uCodeLead & 0xFF; + newBuf[j++] = uCodeLead >> 8; - // 2. Assert: value is non-null. 
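Both the encoder chunk earlier (combining a lead/trail pair into one code point) and the decoder above (splitting a supplementary-plane code point back into two UTF-16 units) rely on the same surrogate arithmetic. A self-contained round-trip check, plain arithmetic with no library APIs involved:

```typescript
// Round-trip check for the surrogate arithmetic used in the encoder/decoder above.
function toSurrogatePair(codePoint: number): [number, number] {
  const offset = codePoint - 0x10000;
  return [0xd800 + Math.floor(offset / 0x400), 0xdc00 + (offset % 0x400)];
}

function fromSurrogatePair(lead: number, trail: number): number {
  return 0x10000 + (lead - 0xd800) * 0x400 + (trail - 0xdc00);
}

const cp = 0x20bb7; // '𠮷', a Plane 2 character
const [lead, trail] = toSurrogatePair(cp);
console.log(lead.toString(16), trail.toString(16)); // d842 dfb7
console.log(fromSurrogatePair(lead, trail) === cp); // true
console.log(String.fromCharCode(lead, trail));      // '𠮷'
```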
- assert(value !== null) + uCode = 0xDC00 + uCode % 0x400; + } + newBuf[j++] = uCode & 0xFF; + newBuf[j++] = uCode >> 8; - // 3. Append (name, value) to headers. - headers.push([name, value]) - } + // Reset trie node. + nodeIdx = 0; seqStart = i+1; } - this[kHeadersList][kHeadersSortedMap] = headers + this.nodeIdx = nodeIdx; + this.prevBuf = (seqStart >= 0) ? buf.slice(seqStart) : prevBuf.slice(seqStart + prevBufOffset); + return newBuf.slice(0, j).toString('ucs2'); +} - // 4. Return headers. - return headers - } +DBCSDecoder.prototype.end = function() { + var ret = ''; - keys () { - webidl.brandCheck(this, Headers) + // Try to parse all remaining chars. + while (this.prevBuf.length > 0) { + // Skip 1 character in the buffer. + ret += this.defaultCharUnicode; + var buf = this.prevBuf.slice(1); - if (this[kGuard] === 'immutable') { - const value = this[kHeadersSortedMap] - return makeIterator(() => value, 'Headers', - 'key') + // Parse remaining as usual. + this.prevBuf = Buffer.alloc(0); + this.nodeIdx = 0; + if (buf.length > 0) + ret += this.write(buf); } - return makeIterator( - () => [...this[kHeadersSortedMap].values()], - 'Headers', - 'key' - ) - } + this.nodeIdx = 0; + return ret; +} - values () { - webidl.brandCheck(this, Headers) +// Binary search for GB18030. Returns largest i such that table[i] <= val. +function findIdx(table, val) { + if (table[0] > val) + return -1; - if (this[kGuard] === 'immutable') { - const value = this[kHeadersSortedMap] - return makeIterator(() => value, 'Headers', - 'value') + var l = 0, r = table.length; + while (l < r-1) { // always table[l] <= val < table[r] + var mid = l + Math.floor((r-l+1)/2); + if (table[mid] <= val) + l = mid; + else + r = mid; } + return l; +} - return makeIterator( - () => [...this[kHeadersSortedMap].values()], - 'Headers', - 'value' - ) - } - entries () { - webidl.brandCheck(this, Headers) - if (this[kGuard] === 'immutable') { - const value = this[kHeadersSortedMap] - return makeIterator(() => value, 'Headers', - 'key+value') - } +/***/ }), - return makeIterator( - () => [...this[kHeadersSortedMap].values()], - 'Headers', - 'key+value' - ) - } +/***/ 91386: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - /** - * @param {(value: string, key: string, self: Headers) => void} callbackFn - * @param {unknown} thisArg - */ - forEach (callbackFn, thisArg = globalThis) { - webidl.brandCheck(this, Headers) +"use strict"; - webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.forEach' }) - if (typeof callbackFn !== 'function') { - throw new TypeError( - "Failed to execute 'forEach' on 'Headers': parameter 1 is not of type 'Function'." - ) - } +// Description of supported double byte encodings and aliases. +// Tables are not require()-d until they are needed to speed up library load. +// require()-s are direct to support Browserify. - for (const [key, value] of this) { - callbackFn.apply(thisArg, [value, key, this]) - } - } +module.exports = { + + // == Japanese/ShiftJIS ==================================================== + // All japanese encodings are based on JIS X set of standards: + // JIS X 0201 - Single-byte encoding of ASCII + ¥ + Kana chars at 0xA1-0xDF. + // JIS X 0208 - Main set of 6879 characters, placed in 94x94 plane, to be encoded by 2 bytes. + // Has several variations in 1978, 1983, 1990 and 1997. + // JIS X 0212 - Supplementary plane of 6067 chars in 94x94 plane. 1990. Effectively dead. + // JIS X 0213 - Extension and modern replacement of 0208 and 0212. Total chars: 11233. 
+ // 2 planes, first is superset of 0208, second - revised 0212. + // Introduced in 2000, revised 2004. Some characters are in Unicode Plane 2 (0x2xxxx) + + // Byte encodings are: + // * Shift_JIS: Compatible with 0201, uses not defined chars in top half as lead bytes for double-byte + // encoding of 0208. Lead byte ranges: 0x81-0x9F, 0xE0-0xEF; Trail byte ranges: 0x40-0x7E, 0x80-0x9E, 0x9F-0xFC. + // Windows CP932 is a superset of Shift_JIS. Some companies added more chars, notably KDDI. + // * EUC-JP: Up to 3 bytes per character. Used mostly on *nixes. + // 0x00-0x7F - lower part of 0201 + // 0x8E, 0xA1-0xDF - upper part of 0201 + // (0xA1-0xFE)x2 - 0208 plane (94x94). + // 0x8F, (0xA1-0xFE)x2 - 0212 plane (94x94). + // * JIS X 208: 7-bit, direct encoding of 0208. Byte ranges: 0x21-0x7E (94 values). Uncommon. + // Used as-is in ISO2022 family. + // * ISO2022-JP: Stateful encoding, with escape sequences to switch between ASCII, + // 0201-1976 Roman, 0208-1978, 0208-1983. + // * ISO2022-JP-1: Adds esc seq for 0212-1990. + // * ISO2022-JP-2: Adds esc seq for GB2313-1980, KSX1001-1992, ISO8859-1, ISO8859-7. + // * ISO2022-JP-3: Adds esc seq for 0201-1976 Kana set, 0213-2000 Planes 1, 2. + // * ISO2022-JP-2004: Adds 0213-2004 Plane 1. + // + // After JIS X 0213 appeared, Shift_JIS-2004, EUC-JISX0213 and ISO2022-JP-2004 followed, with just changing the planes. + // + // Overall, it seems that it's a mess :( http://www8.plala.or.jp/tkubota1/unicode-symbols-map2.html - [Symbol.for('nodejs.util.inspect.custom')] () { - webidl.brandCheck(this, Headers) + 'shiftjis': { + type: '_dbcs', + table: function() { return __nccwpck_require__(27014) }, + encodeAdd: {'\u00a5': 0x5C, '\u203E': 0x7E}, + encodeSkipVals: [{from: 0xED40, to: 0xF940}], + }, + 'csshiftjis': 'shiftjis', + 'mskanji': 'shiftjis', + 'sjis': 'shiftjis', + 'windows31j': 'shiftjis', + 'ms31j': 'shiftjis', + 'xsjis': 'shiftjis', + 'windows932': 'shiftjis', + 'ms932': 'shiftjis', + '932': 'shiftjis', + 'cp932': 'shiftjis', + + 'eucjp': { + type: '_dbcs', + table: function() { return __nccwpck_require__(31532) }, + encodeAdd: {'\u00a5': 0x5C, '\u203E': 0x7E}, + }, - return this[kHeadersList] - } -} + // TODO: KDDI extension to Shift_JIS + // TODO: IBM CCSID 942 = CP932, but F0-F9 custom chars and other char changes. + // TODO: IBM CCSID 943 = Shift_JIS = CP932 with original Shift_JIS lower 128 chars. + + + // == Chinese/GBK ========================================================== + // http://en.wikipedia.org/wiki/GBK + // We mostly implement W3C recommendation: https://www.w3.org/TR/encoding/#gbk-encoder + + // Oldest GB2312 (1981, ~7600 chars) is a subset of CP936 + 'gb2312': 'cp936', + 'gb231280': 'cp936', + 'gb23121980': 'cp936', + 'csgb2312': 'cp936', + 'csiso58gb231280': 'cp936', + 'euccn': 'cp936', + + // Microsoft's CP936 is a subset and approximation of GBK. + 'windows936': 'cp936', + 'ms936': 'cp936', + '936': 'cp936', + 'cp936': { + type: '_dbcs', + table: function() { return __nccwpck_require__(13336) }, + }, -Headers.prototype[Symbol.iterator] = Headers.prototype.entries + // GBK (~22000 chars) is an extension of CP936 that added user-mapped chars and some other. + 'gbk': { + type: '_dbcs', + table: function() { return (__nccwpck_require__(13336).concat)(__nccwpck_require__(44346)) }, + }, + 'xgbk': 'gbk', + 'isoir58': 'gbk', + + // GB18030 is an algorithmic extension of GBK. 
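A GB18030 4-byte sequence carries a linear pointer in mixed radix (bytes 1 and 3 run 0x81–0xFE, bytes 2 and 4 run 0x30–0x39), and the codec shifts that pointer through `uChars`/`gbChars` range tables found via a largest-index-≤-value binary search (`findIdx` in the code above). A sketch of just that arithmetic — the table passed to `findIdx` here is a made-up stand-in, not real GB18030 range data:

```typescript
// Pointer <-> 4-byte arithmetic and the range-lookup search used by the GB18030 path above.
function pointerToBytes(ptr: number): [number, number, number, number] {
  const b1 = 0x81 + Math.floor(ptr / 12600); ptr %= 12600;
  const b2 = 0x30 + Math.floor(ptr / 1260);  ptr %= 1260;
  const b3 = 0x81 + Math.floor(ptr / 10);
  return [b1, b2, b3, 0x30 + (ptr % 10)];
}

function bytesToPointer(b: [number, number, number, number]): number {
  return (b[0] - 0x81) * 12600 + (b[1] - 0x30) * 1260 + (b[2] - 0x81) * 10 + (b[3] - 0x30);
}

// Largest index i such that table[i] <= val, as in findIdx above; -1 if none.
function findIdx(table: number[], val: number): number {
  if (table[0] > val) return -1;
  let l = 0, r = table.length;
  while (l < r - 1) {
    const mid = l + Math.floor((r - l + 1) / 2);
    if (table[mid] <= val) l = mid; else r = mid;
  }
  return l;
}

const ptr = 7456; // arbitrary pointer value for the round trip
const bytes = pointerToBytes(ptr);
console.log(bytes.map(x => x.toString(16))); // [ '81', '35', 'f4', '36' ]
console.log(bytesToPointer(bytes) === ptr);  // true
console.log(findIdx([0, 36, 38, 45, 10000], 44)); // 2 (38 <= 44 < 45), toy table only
```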
+ // Main source: https://www.w3.org/TR/encoding/#gbk-encoder + // http://icu-project.org/docs/papers/gb18030.html + // http://source.icu-project.org/repos/icu/data/trunk/charset/data/xml/gb-18030-2000.xml + // http://www.khngai.com/chinese/charmap/tblgbk.php?page=0 + 'gb18030': { + type: '_dbcs', + table: function() { return (__nccwpck_require__(13336).concat)(__nccwpck_require__(44346)) }, + gb18030: function() { return __nccwpck_require__(36258) }, + encodeSkipVals: [0x80], + encodeAdd: {'€': 0xA2E3}, + }, -Object.defineProperties(Headers.prototype, { - append: kEnumerableProperty, - delete: kEnumerableProperty, - get: kEnumerableProperty, - has: kEnumerableProperty, - set: kEnumerableProperty, - getSetCookie: kEnumerableProperty, - keys: kEnumerableProperty, - values: kEnumerableProperty, - entries: kEnumerableProperty, - forEach: kEnumerableProperty, - [Symbol.iterator]: { enumerable: false }, - [Symbol.toStringTag]: { - value: 'Headers', - configurable: true - } -}) + 'chinese': 'gb18030', -webidl.converters.HeadersInit = function (V) { - if (webidl.util.Type(V) === 'Object') { - if (V[Symbol.iterator]) { - return webidl.converters['sequence>'](V) - } - return webidl.converters['record'](V) - } + // == Korean =============================================================== + // EUC-KR, KS_C_5601 and KS X 1001 are exactly the same. + 'windows949': 'cp949', + 'ms949': 'cp949', + '949': 'cp949', + 'cp949': { + type: '_dbcs', + table: function() { return __nccwpck_require__(77348) }, + }, - throw webidl.errors.conversionFailed({ - prefix: 'Headers constructor', - argument: 'Argument 1', - types: ['sequence>', 'record'] - }) -} + 'cseuckr': 'cp949', + 'csksc56011987': 'cp949', + 'euckr': 'cp949', + 'isoir149': 'cp949', + 'korean': 'cp949', + 'ksc56011987': 'cp949', + 'ksc56011989': 'cp949', + 'ksc5601': 'cp949', + + + // == Big5/Taiwan/Hong Kong ================================================ + // There are lots of tables for Big5 and cp950. Please see the following links for history: + // http://moztw.org/docs/big5/ http://www.haible.de/bruno/charsets/conversion-tables/Big5.html + // Variations, in roughly number of defined chars: + // * Windows CP 950: Microsoft variant of Big5. Canonical: http://www.unicode.org/Public/MAPPINGS/VENDORS/MICSFT/WINDOWS/CP950.TXT + // * Windows CP 951: Microsoft variant of Big5-HKSCS-2001. Seems to be never public. http://me.abelcheung.org/articles/research/what-is-cp951/ + // * Big5-2003 (Taiwan standard) almost superset of cp950. + // * Unicode-at-on (UAO) / Mozilla 1.8. Falling out of use on the Web. Not supported by other browsers. + // * Big5-HKSCS (-2001, -2004, -2008). Hong Kong standard. + // many unicode code points moved from PUA to Supplementary plane (U+2XXXX) over the years. + // Plus, it has 4 combining sequences. + // Seems that Mozilla refused to support it for 10 yrs. https://bugzilla.mozilla.org/show_bug.cgi?id=162431 https://bugzilla.mozilla.org/show_bug.cgi?id=310299 + // because big5-hkscs is the only encoding to include astral characters in non-algorithmic way. + // Implementations are not consistent within browsers; sometimes labeled as just big5. + // MS Internet Explorer switches from big5 to big5-hkscs when a patch applied. + // Great discussion & recap of what's going on https://bugzilla.mozilla.org/show_bug.cgi?id=912470#c31 + // In the encoder, it might make sense to support encoding old PUA mappings to Big5 bytes seq-s. 
+ // Official spec: http://www.ogcio.gov.hk/en/business/tech_promotion/ccli/terms/doc/2003cmp_2008.txt + // http://www.ogcio.gov.hk/tc/business/tech_promotion/ccli/terms/doc/hkscs-2008-big5-iso.txt + // + // Current understanding of how to deal with Big5(-HKSCS) is in the Encoding Standard, http://encoding.spec.whatwg.org/#big5-encoder + // Unicode mapping (http://www.unicode.org/Public/MAPPINGS/OBSOLETE/EASTASIA/OTHER/BIG5.TXT) is said to be wrong. + + 'windows950': 'cp950', + 'ms950': 'cp950', + '950': 'cp950', + 'cp950': { + type: '_dbcs', + table: function() { return __nccwpck_require__(74284) }, + }, -module.exports = { - fill, - Headers, - HeadersList + // Big5 has many variations and is an extension of cp950. We use Encoding Standard's as a consensus. + 'big5': 'big5hkscs', + 'big5hkscs': { + type: '_dbcs', + table: function() { return (__nccwpck_require__(74284).concat)(__nccwpck_require__(63480)) }, + encodeSkipVals: [0xa2cc], + }, + + 'cnbig5': 'big5hkscs', + 'csbig5': 'big5hkscs', + 'xxbig5': 'big5hkscs', +}; + + +/***/ }), + +/***/ 82733: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +// Update this array if you add/rename/remove files in this directory. +// We support Browserify by skipping automatic module discovery and requiring modules directly. +var modules = [ + __nccwpck_require__(12376), + __nccwpck_require__(11155), + __nccwpck_require__(51644), + __nccwpck_require__(26657), + __nccwpck_require__(41080), + __nccwpck_require__(21012), + __nccwpck_require__(39695), + __nccwpck_require__(91386), +]; + +// Put all encoding/alias/codec definitions to single object and export it. +for (var i = 0; i < modules.length; i++) { + var module = modules[i]; + for (var enc in module) + if (Object.prototype.hasOwnProperty.call(module, enc)) + exports[enc] = module[enc]; } /***/ }), -/***/ 4881: +/***/ 12376: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -// https://github.com/Ethan-Arrowood/undici-fetch +var Buffer = (__nccwpck_require__(15118).Buffer); +// Export Node.js internal encodings. 
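The index module above merges every codec module into one flat table in which a value is either a definition object or a string naming another entry (for example `'gb2312': 'cp936'`). A generic sketch of resolving such aliases — the entries and the `resolve` helper are illustrative only, not the library's own lookup code, which also normalizes names and caches codecs:

```typescript
// Resolving the merged encoding table sketched above: string values are aliases
// that chain to a canonical definition object. Entries here are made up.
interface CodecDefinition {
  type: string;
  // table loaders, options, etc. omitted
}

type EncodingTable = Record<string, CodecDefinition | string>;

const encodings: EncodingTable = {
  cp936: { type: '_dbcs' },
  gb2312: 'cp936',
  euccn: 'cp936',
};

function resolve(table: EncodingTable, name: string): CodecDefinition {
  let hops = 0;
  let entry = table[name];
  while (typeof entry === 'string') {
    if (++hops > 10) throw new Error(`Alias loop while resolving '${name}'`);
    entry = table[entry];
  }
  if (!entry) throw new Error(`Encoding '${name}' not found`);
  return entry;
}

console.log(resolve(encodings, 'gb2312')); // { type: '_dbcs' }
console.log(resolve(encodings, 'cp936'));  // { type: '_dbcs' }
```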
-const { - Response, - makeNetworkError, - makeAppropriateNetworkError, - filterResponse, - makeResponse -} = __nccwpck_require__(7823) -const { Headers } = __nccwpck_require__(554) -const { Request, makeRequest } = __nccwpck_require__(8359) -const zlib = __nccwpck_require__(9796) -const { - bytesMatch, - makePolicyContainer, - clonePolicyContainer, - requestBadPort, - TAOCheck, - appendRequestOriginHeader, - responseLocationURL, - requestCurrentURL, - setRequestReferrerPolicyOnRedirect, - tryUpgradeRequestToAPotentiallyTrustworthyURL, - createOpaqueTimingInfo, - appendFetchMetadata, - corsCheck, - crossOriginResourcePolicyCheck, - determineRequestsReferrer, - coarsenedSharedCurrentTime, - createDeferredPromise, - isBlobLike, - sameOrigin, - isCancelled, - isAborted, - isErrorLike, - fullyReadBody, - readableStreamClose, - isomorphicEncode, - urlIsLocal, - urlIsHttpHttpsScheme, - urlHasHttpsScheme -} = __nccwpck_require__(2538) -const { kState, kHeaders, kGuard, kRealm } = __nccwpck_require__(5861) -const assert = __nccwpck_require__(9491) -const { safelyExtractBody } = __nccwpck_require__(1472) -const { - redirectStatusSet, - nullBodyStatus, - safeMethodsSet, - requestBodyHeader, - subresourceSet, - DOMException -} = __nccwpck_require__(1037) -const { kHeadersList } = __nccwpck_require__(2785) -const EE = __nccwpck_require__(2361) -const { Readable, pipeline } = __nccwpck_require__(2781) -const { addAbortListener, isErrored, isReadable, nodeMajor, nodeMinor } = __nccwpck_require__(3983) -const { dataURLProcessor, serializeAMimeType } = __nccwpck_require__(685) -const { TransformStream } = __nccwpck_require__(5356) -const { getGlobalDispatcher } = __nccwpck_require__(1892) -const { webidl } = __nccwpck_require__(1744) -const { STATUS_CODES } = __nccwpck_require__(3685) -const GET_OR_HEAD = ['GET', 'HEAD'] +module.exports = { + // Encodings + utf8: { type: "_internal", bomAware: true}, + cesu8: { type: "_internal", bomAware: true}, + unicode11utf8: "utf8", -/** @type {import('buffer').resolveObjectURL} */ -let resolveObjectURL -let ReadableStream = globalThis.ReadableStream + ucs2: { type: "_internal", bomAware: true}, + utf16le: "ucs2", -class Fetch extends EE { - constructor (dispatcher) { - super() + binary: { type: "_internal" }, + base64: { type: "_internal" }, + hex: { type: "_internal" }, - this.dispatcher = dispatcher - this.connection = null - this.dump = false - this.state = 'ongoing' - // 2 terminated listeners get added per request, - // but only 1 gets removed. If there are 20 redirects, - // 21 listeners will be added. - // See https://github.com/nodejs/undici/issues/1711 - // TODO (fix): Find and fix root cause for leaked listener. - this.setMaxListeners(21) - } + // Codec. + _internal: InternalCodec, +}; - terminate (reason) { - if (this.state !== 'ongoing') { - return - } +//------------------------------------------------------------------------------ - this.state = 'terminated' - this.connection?.destroy(reason) - this.emit('terminated', reason) - } +function InternalCodec(codecOptions, iconv) { + this.enc = codecOptions.encodingName; + this.bomAware = codecOptions.bomAware; - // https://fetch.spec.whatwg.org/#fetch-controller-abort - abort (error) { - if (this.state !== 'ongoing') { - return + if (this.enc === "base64") + this.encoder = InternalEncoderBase64; + else if (this.enc === "cesu8") { + this.enc = "utf8"; // Use utf8 for decoding. 
+ this.encoder = InternalEncoderCesu8; + + // Add decoder for versions of Node not supporting CESU-8 + if (Buffer.from('eda0bdedb2a9', 'hex').toString() !== '💩') { + this.decoder = InternalDecoderCesu8; + this.defaultCharUnicode = iconv.defaultCharUnicode; + } } +} - // 1. Set controller’s state to "aborted". - this.state = 'aborted' +InternalCodec.prototype.encoder = InternalEncoder; +InternalCodec.prototype.decoder = InternalDecoder; - // 2. Let fallbackError be an "AbortError" DOMException. - // 3. Set error to fallbackError if it is not given. - if (!error) { - error = new DOMException('The operation was aborted.', 'AbortError') - } +//------------------------------------------------------------------------------ - // 4. Let serializedError be StructuredSerialize(error). - // If that threw an exception, catch it, and let - // serializedError be StructuredSerialize(fallbackError). +// We use node.js internal decoder. Its signature is the same as ours. +var StringDecoder = (__nccwpck_require__(71576).StringDecoder); - // 5. Set controller’s serialized abort reason to serializedError. - this.serializedAbortReason = error +if (!StringDecoder.prototype.end) // Node v0.8 doesn't have this method. + StringDecoder.prototype.end = function() {}; - this.connection?.destroy(error) - this.emit('terminated', error) - } -} -// https://fetch.spec.whatwg.org/#fetch-method -function fetch (input, init = {}) { - webidl.argumentLengthCheck(arguments, 1, { header: 'globalThis.fetch' }) +function InternalDecoder(options, codec) { + StringDecoder.call(this, codec.enc); +} - // 1. Let p be a new promise. - const p = createDeferredPromise() +InternalDecoder.prototype = StringDecoder.prototype; - // 2. Let requestObject be the result of invoking the initial value of - // Request as constructor with input and init as arguments. If this throws - // an exception, reject p with it and return p. - let requestObject - try { - requestObject = new Request(input, init) - } catch (e) { - p.reject(e) - return p.promise - } +//------------------------------------------------------------------------------ +// Encoder is mostly trivial - // 3. Let request be requestObject’s request. - const request = requestObject[kState] +function InternalEncoder(options, codec) { + this.enc = codec.enc; +} - // 4. If requestObject’s signal’s aborted flag is set, then: - if (requestObject.signal.aborted) { - // 1. Abort the fetch() call with p, request, null, and - // requestObject’s signal’s abort reason. - abortFetch(p, request, null, requestObject.signal.reason) +InternalEncoder.prototype.write = function(str) { + return Buffer.from(str, this.enc); +} - // 2. Return p. - return p.promise - } +InternalEncoder.prototype.end = function() { +} - // 5. Let globalObject be request’s client’s global object. - const globalObject = request.client.globalObject - // 6. If globalObject is a ServiceWorkerGlobalScope object, then set - // request’s service-workers mode to "none". - if (globalObject?.constructor?.name === 'ServiceWorkerGlobalScope') { - request.serviceWorkers = 'none' - } +//------------------------------------------------------------------------------ +// Except base64 encoder, which must keep its state. - // 7. Let responseObject be null. - let responseObject = null +function InternalEncoderBase64(options, codec) { + this.prevStr = ''; +} - // 8. Let relevantRealm be this’s relevant Realm. 
- const relevantRealm = null +InternalEncoderBase64.prototype.write = function(str) { + str = this.prevStr + str; + var completeQuads = str.length - (str.length % 4); + this.prevStr = str.slice(completeQuads); + str = str.slice(0, completeQuads); - // 9. Let locallyAborted be false. - let locallyAborted = false + return Buffer.from(str, "base64"); +} - // 10. Let controller be null. - let controller = null +InternalEncoderBase64.prototype.end = function() { + return Buffer.from(this.prevStr, "base64"); +} - // 11. Add the following abort steps to requestObject’s signal: - addAbortListener( - requestObject.signal, - () => { - // 1. Set locallyAborted to true. - locallyAborted = true - // 2. Assert: controller is non-null. - assert(controller != null) +//------------------------------------------------------------------------------ +// CESU-8 encoder is also special. - // 3. Abort controller with requestObject’s signal’s abort reason. - controller.abort(requestObject.signal.reason) +function InternalEncoderCesu8(options, codec) { +} - // 4. Abort the fetch() call with p, request, responseObject, - // and requestObject’s signal’s abort reason. - abortFetch(p, request, responseObject, requestObject.signal.reason) +InternalEncoderCesu8.prototype.write = function(str) { + var buf = Buffer.alloc(str.length * 3), bufIdx = 0; + for (var i = 0; i < str.length; i++) { + var charCode = str.charCodeAt(i); + // Naive implementation, but it works because CESU-8 is especially easy + // to convert from UTF-16 (which all JS strings are encoded in). + if (charCode < 0x80) + buf[bufIdx++] = charCode; + else if (charCode < 0x800) { + buf[bufIdx++] = 0xC0 + (charCode >>> 6); + buf[bufIdx++] = 0x80 + (charCode & 0x3f); + } + else { // charCode will always be < 0x10000 in javascript. + buf[bufIdx++] = 0xE0 + (charCode >>> 12); + buf[bufIdx++] = 0x80 + ((charCode >>> 6) & 0x3f); + buf[bufIdx++] = 0x80 + (charCode & 0x3f); + } } - ) + return buf.slice(0, bufIdx); +} - // 12. Let handleFetchDone given response response be to finalize and - // report timing with response, globalObject, and "fetch". - const handleFetchDone = (response) => - finalizeAndReportTiming(response, 'fetch') +InternalEncoderCesu8.prototype.end = function() { +} - // 13. Set controller to the result of calling fetch given request, - // with processResponseEndOfBody set to handleFetchDone, and processResponse - // given response being these substeps: +//------------------------------------------------------------------------------ +// CESU-8 decoder is not implemented in Node v4.0+ - const processResponse = (response) => { - // 1. If locallyAborted is true, terminate these substeps. 
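The removed fetch() wiring above creates a deferred promise, rejects it immediately when the request's signal is already aborted, and otherwise registers an abort listener that tears the operation down later. A compact sketch of that general shape using only standard `AbortSignal`/`Promise` APIs (Node 17.2+ assumed for `signal.reason` and `abort(reason)`), not undici's internals:

```typescript
// Generic deferred-promise + abort-listener shape, mirroring the removed code above.
interface Deferred<T> {
  promise: Promise<T>;
  resolve: (value: T) => void;
  reject: (reason?: unknown) => void;
}

function createDeferred<T>(): Deferred<T> {
  let resolve!: (value: T) => void;
  let reject!: (reason?: unknown) => void;
  const promise = new Promise<T>((res, rej) => {
    resolve = res;
    reject = rej;
  });
  return { promise, resolve, reject };
}

function startOperation(signal: AbortSignal): Promise<string> {
  const deferred = createDeferred<string>();

  // Already aborted: reject immediately, mirroring the early-return step above.
  if (signal.aborted) {
    deferred.reject(signal.reason ?? new Error('The operation was aborted.'));
    return deferred.promise;
  }

  // Abort later: reject and stop whatever work is in flight.
  signal.addEventListener(
    'abort',
    () => deferred.reject(signal.reason ?? new Error('The operation was aborted.')),
    { once: true }
  );

  // Simulated asynchronous work standing in for the actual request.
  setTimeout(() => deferred.resolve('done'), 50);
  return deferred.promise;
}

const controller = new AbortController();
startOperation(controller.signal).then(console.log, err => console.error('aborted:', err));
controller.abort(new Error('caller went away')); // logs: aborted: Error: caller went away
```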
- if (locallyAborted) { - return Promise.resolve() +function InternalDecoderCesu8(options, codec) { + this.acc = 0; + this.contBytes = 0; + this.accBytes = 0; + this.defaultCharUnicode = codec.defaultCharUnicode; +} + +InternalDecoderCesu8.prototype.write = function(buf) { + var acc = this.acc, contBytes = this.contBytes, accBytes = this.accBytes, + res = ''; + for (var i = 0; i < buf.length; i++) { + var curByte = buf[i]; + if ((curByte & 0xC0) !== 0x80) { // Leading byte + if (contBytes > 0) { // Previous code is invalid + res += this.defaultCharUnicode; + contBytes = 0; + } + + if (curByte < 0x80) { // Single-byte code + res += String.fromCharCode(curByte); + } else if (curByte < 0xE0) { // Two-byte code + acc = curByte & 0x1F; + contBytes = 1; accBytes = 1; + } else if (curByte < 0xF0) { // Three-byte code + acc = curByte & 0x0F; + contBytes = 2; accBytes = 1; + } else { // Four or more are not supported for CESU-8. + res += this.defaultCharUnicode; + } + } else { // Continuation byte + if (contBytes > 0) { // We're waiting for it. + acc = (acc << 6) | (curByte & 0x3f); + contBytes--; accBytes++; + if (contBytes === 0) { + // Check for overlong encoding, but support Modified UTF-8 (encoding NULL as C0 80) + if (accBytes === 2 && acc < 0x80 && acc > 0) + res += this.defaultCharUnicode; + else if (accBytes === 3 && acc < 0x800) + res += this.defaultCharUnicode; + else + // Actually add character. + res += String.fromCharCode(acc); + } + } else { // Unexpected continuation byte + res += this.defaultCharUnicode; + } + } } + this.acc = acc; this.contBytes = contBytes; this.accBytes = accBytes; + return res; +} - // 2. If response’s aborted flag is set, then: - if (response.aborted) { - // 1. Let deserializedError be the result of deserialize a serialized - // abort reason given controller’s serialized abort reason and - // relevantRealm. +InternalDecoderCesu8.prototype.end = function() { + var res = 0; + if (this.contBytes > 0) + res += this.defaultCharUnicode; + return res; +} - // 2. Abort the fetch() call with p, request, responseObject, and - // deserializedError. - abortFetch(p, request, responseObject, controller.serializedAbortReason) - return Promise.resolve() - } +/***/ }), - // 3. If response is a network error, then reject p with a TypeError - // and terminate these substeps. - if (response.type === 'error') { - p.reject( - Object.assign(new TypeError('fetch failed'), { cause: response.error }) - ) - return Promise.resolve() +/***/ 26657: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +var Buffer = (__nccwpck_require__(15118).Buffer); + +// Single-byte codec. Needs a 'chars' string parameter that contains 256 or 128 chars that +// correspond to encoded bytes (if 128 - then lower half is ASCII). + +exports._sbcs = SBCSCodec; +function SBCSCodec(codecOptions, iconv) { + if (!codecOptions) + throw new Error("SBCS codec is called without the data.") + + // Prepare char buffer for decoding. + if (!codecOptions.chars || (codecOptions.chars.length !== 128 && codecOptions.chars.length !== 256)) + throw new Error("Encoding '"+codecOptions.type+"' has incorrect 'chars' (must be of len 128 or 256)"); + + if (codecOptions.chars.length === 128) { + var asciiString = ""; + for (var i = 0; i < 128; i++) + asciiString += String.fromCharCode(i); + codecOptions.chars = asciiString + codecOptions.chars; } - // 4. Set responseObject to the result of creating a Response object, - // given response, "immutable", and relevantRealm. 
- responseObject = new Response() - responseObject[kState] = response - responseObject[kRealm] = relevantRealm - responseObject[kHeaders][kHeadersList] = response.headersList - responseObject[kHeaders][kGuard] = 'immutable' - responseObject[kHeaders][kRealm] = relevantRealm + this.decodeBuf = Buffer.from(codecOptions.chars, 'ucs2'); + + // Encoding buffer. + var encodeBuf = Buffer.alloc(65536, iconv.defaultCharSingleByte.charCodeAt(0)); - // 5. Resolve p with responseObject. - p.resolve(responseObject) - } + for (var i = 0; i < codecOptions.chars.length; i++) + encodeBuf[codecOptions.chars.charCodeAt(i)] = i; - controller = fetching({ - request, - processResponseEndOfBody: handleFetchDone, - processResponse, - dispatcher: init.dispatcher ?? getGlobalDispatcher() // undici - }) + this.encodeBuf = encodeBuf; +} - // 14. Return p. - return p.promise +SBCSCodec.prototype.encoder = SBCSEncoder; +SBCSCodec.prototype.decoder = SBCSDecoder; + + +function SBCSEncoder(options, codec) { + this.encodeBuf = codec.encodeBuf; } -// https://fetch.spec.whatwg.org/#finalize-and-report-timing -function finalizeAndReportTiming (response, initiatorType = 'other') { - // 1. If response is an aborted network error, then return. - if (response.type === 'error' && response.aborted) { - return - } +SBCSEncoder.prototype.write = function(str) { + var buf = Buffer.alloc(str.length); + for (var i = 0; i < str.length; i++) + buf[i] = this.encodeBuf[str.charCodeAt(i)]; + + return buf; +} - // 2. If response’s URL list is null or empty, then return. - if (!response.urlList?.length) { - return - } +SBCSEncoder.prototype.end = function() { +} - // 3. Let originalURL be response’s URL list[0]. - const originalURL = response.urlList[0] - // 4. Let timingInfo be response’s timing info. - let timingInfo = response.timingInfo +function SBCSDecoder(options, codec) { + this.decodeBuf = codec.decodeBuf; +} - // 5. Let cacheState be response’s cache state. - let cacheState = response.cacheState +SBCSDecoder.prototype.write = function(buf) { + // Strings are immutable in JS -> we use ucs2 buffer to speed up computations. + var decodeBuf = this.decodeBuf; + var newBuf = Buffer.alloc(buf.length*2); + var idx1 = 0, idx2 = 0; + for (var i = 0; i < buf.length; i++) { + idx1 = buf[i]*2; idx2 = i*2; + newBuf[idx2] = decodeBuf[idx1]; + newBuf[idx2+1] = decodeBuf[idx1+1]; + } + return newBuf.toString('ucs2'); +} - // 6. If originalURL’s scheme is not an HTTP(S) scheme, then return. - if (!urlIsHttpHttpsScheme(originalURL)) { - return - } +SBCSDecoder.prototype.end = function() { +} - // 7. If timingInfo is null, then return. - if (timingInfo === null) { - return - } - // 8. If response’s timing allow passed flag is not set, then: - if (!response.timingAllowPassed) { - // 1. Set timingInfo to a the result of creating an opaque timing info for timingInfo. - timingInfo = createOpaqueTimingInfo({ - startTime: timingInfo.startTime - }) +/***/ }), - // 2. Set cacheState to the empty string. - cacheState = '' - } +/***/ 21012: +/***/ ((module) => { - // 9. Set timingInfo’s end time to the coarsened shared current time - // given global’s relevant settings object’s cross-origin isolated - // capability. - // TODO: given global’s relevant settings object’s cross-origin isolated - // capability? - timingInfo.endTime = coarsenedSharedCurrentTime() +"use strict"; - // 10. Set response’s timing info to timingInfo. - response.timingInfo = timingInfo - // 11. Mark resource timing for timingInfo, originalURL, initiatorType, - // global, and cacheState. 
- markResourceTiming( - timingInfo, - originalURL, - initiatorType, - globalThis, - cacheState - ) -} - -// https://w3c.github.io/resource-timing/#dfn-mark-resource-timing -function markResourceTiming (timingInfo, originalURL, initiatorType, globalThis, cacheState) { - if (nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 2)) { - performance.markResourceTiming(timingInfo, originalURL.href, initiatorType, globalThis, cacheState) +// Generated data for sbcs codec. Don't edit manually. Regenerate using generation/gen-sbcs.js script. +module.exports = { + "437": "cp437", + "737": "cp737", + "775": "cp775", + "850": "cp850", + "852": "cp852", + "855": "cp855", + "856": "cp856", + "857": "cp857", + "858": "cp858", + "860": "cp860", + "861": "cp861", + "862": "cp862", + "863": "cp863", + "864": "cp864", + "865": "cp865", + "866": "cp866", + "869": "cp869", + "874": "windows874", + "922": "cp922", + "1046": "cp1046", + "1124": "cp1124", + "1125": "cp1125", + "1129": "cp1129", + "1133": "cp1133", + "1161": "cp1161", + "1162": "cp1162", + "1163": "cp1163", + "1250": "windows1250", + "1251": "windows1251", + "1252": "windows1252", + "1253": "windows1253", + "1254": "windows1254", + "1255": "windows1255", + "1256": "windows1256", + "1257": "windows1257", + "1258": "windows1258", + "28591": "iso88591", + "28592": "iso88592", + "28593": "iso88593", + "28594": "iso88594", + "28595": "iso88595", + "28596": "iso88596", + "28597": "iso88597", + "28598": "iso88598", + "28599": "iso88599", + "28600": "iso885910", + "28601": "iso885911", + "28603": "iso885913", + "28604": "iso885914", + "28605": "iso885915", + "28606": "iso885916", + "windows874": { + "type": "_sbcs", + "chars": "€����…�����������‘’“”•–—�������� กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู����฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛����" + }, + "win874": "windows874", + "cp874": "windows874", + "windows1250": { + "type": "_sbcs", + "chars": "€�‚�„…†‡�‰Š‹ŚŤŽŹ�‘’“”•–—�™š›śťžź ˇ˘Ł¤Ą¦§¨©Ş«¬­®Ż°±˛ł´µ¶·¸ąş»Ľ˝ľżŔÁÂĂÄĹĆÇČÉĘËĚÍÎĎĐŃŇÓÔŐÖ×ŘŮÚŰÜÝŢßŕáâăäĺćçčéęëěíîďđńňóôőö÷řůúűüýţ˙" + }, + "win1250": "windows1250", + "cp1250": "windows1250", + "windows1251": { + "type": "_sbcs", + "chars": "ЂЃ‚ѓ„…†‡€‰Љ‹ЊЌЋЏђ‘’“”•–—�™љ›њќћџ ЎўЈ¤Ґ¦§Ё©Є«¬­®Ї°±Ііґµ¶·ё№є»јЅѕїАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя" + }, + "win1251": "windows1251", + "cp1251": "windows1251", + "windows1252": { + "type": "_sbcs", + "chars": "€�‚ƒ„…†‡ˆ‰Š‹Œ�Ž��‘’“”•–—˜™š›œ�žŸ ¡¢£¤¥¦§¨©ª«¬­®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖ×ØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ" + }, + "win1252": "windows1252", + "cp1252": "windows1252", + "windows1253": { + "type": "_sbcs", + "chars": "€�‚ƒ„…†‡�‰�‹�����‘’“”•–—�™�›���� ΅Ά£¤¥¦§¨©�«¬­®―°±²³΄µ¶·ΈΉΊ»Ό½ΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡ�ΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύώ�" + }, + "win1253": "windows1253", + "cp1253": "windows1253", + "windows1254": { + "type": "_sbcs", + "chars": "€�‚ƒ„…†‡ˆ‰Š‹Œ����‘’“”•–—˜™š›œ��Ÿ ¡¢£¤¥¦§¨©ª«¬­®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏĞÑÒÓÔÕÖ×ØÙÚÛÜİŞßàáâãäåæçèéêëìíîïğñòóôõö÷øùúûüışÿ" + }, + "win1254": "windows1254", + "cp1254": "windows1254", + "windows1255": { + "type": "_sbcs", + "chars": "€�‚ƒ„…†‡ˆ‰�‹�����‘’“”•–—˜™�›���� ¡¢£₪¥¦§¨©×«¬­®¯°±²³´µ¶·¸¹÷»¼½¾¿ְֱֲֳִֵֶַָֹֺֻּֽ־ֿ׀ׁׂ׃װױײ׳״�������אבגדהוזחטיךכלםמןנסעףפץצקרשת��‎‏�" + }, + "win1255": "windows1255", + "cp1255": "windows1255", + "windows1256": { + "type": "_sbcs", + "chars": "€پ‚ƒ„…†‡ˆ‰ٹ‹Œچژڈگ‘’“”•–—ک™ڑ›œ‌‍ں ،¢£¤¥¦§¨©ھ«¬­®¯°±²³´µ¶·¸¹؛»¼½¾؟ہءآأؤإئابةتثجحخدذرزسشصض×طظعغـفقكàلâمنهوçèéêëىيîïًٌٍَôُِ÷ّùْûü‎‏ے" + }, + "win1256": "windows1256", + "cp1256": 
"windows1256", + "windows1257": { + "type": "_sbcs", + "chars": "€�‚�„…†‡�‰�‹�¨ˇ¸�‘’“”•–—�™�›�¯˛� �¢£¤�¦§Ø©Ŗ«¬­®Æ°±²³´µ¶·ø¹ŗ»¼½¾æĄĮĀĆÄÅĘĒČÉŹĖĢĶĪĻŠŃŅÓŌÕÖ×ŲŁŚŪÜŻŽßąįāćäåęēčéźėģķīļšńņóōõö÷ųłśūüżž˙" + }, + "win1257": "windows1257", + "cp1257": "windows1257", + "windows1258": { + "type": "_sbcs", + "chars": "€�‚ƒ„…†‡ˆ‰�‹Œ����‘’“”•–—˜™�›œ��Ÿ ¡¢£¤¥¦§¨©ª«¬­®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂĂÄÅÆÇÈÉÊË̀ÍÎÏĐÑ̉ÓÔƠÖ×ØÙÚÛÜỮßàáâăäåæçèéêë́íîïđṇ̃óôơö÷øùúûüư₫ÿ" + }, + "win1258": "windows1258", + "cp1258": "windows1258", + "iso88591": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ¡¢£¤¥¦§¨©ª«¬­®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖ×ØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ" + }, + "cp28591": "iso88591", + "iso88592": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ Ą˘Ł¤ĽŚ§¨ŠŞŤŹ­ŽŻ°ą˛ł´ľśˇ¸šşťź˝žżŔÁÂĂÄĹĆÇČÉĘËĚÍÎĎĐŃŇÓÔŐÖ×ŘŮÚŰÜÝŢßŕáâăäĺćçčéęëěíîďđńňóôőö÷řůúűüýţ˙" + }, + "cp28592": "iso88592", + "iso88593": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ Ħ˘£¤�Ĥ§¨İŞĞĴ­�Ż°ħ²³´µĥ·¸ışğĵ½�żÀÁÂ�ÄĊĈÇÈÉÊËÌÍÎÏ�ÑÒÓÔĠÖ×ĜÙÚÛÜŬŜßàáâ�äċĉçèéêëìíîï�ñòóôġö÷ĝùúûüŭŝ˙" + }, + "cp28593": "iso88593", + "iso88594": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ĄĸŖ¤ĨĻ§¨ŠĒĢŦ­Ž¯°ą˛ŗ´ĩļˇ¸šēģŧŊžŋĀÁÂÃÄÅÆĮČÉĘËĖÍÎĪĐŅŌĶÔÕÖ×ØŲÚÛÜŨŪßāáâãäåæįčéęëėíîīđņōķôõö÷øųúûüũū˙" + }, + "cp28594": "iso88594", + "iso88595": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ЁЂЃЄЅІЇЈЉЊЋЌ­ЎЏАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя№ёђѓєѕіїјљњћќ§ўџ" + }, + "cp28595": "iso88595", + "iso88596": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ���¤�������،­�������������؛���؟�ءآأؤإئابةتثجحخدذرزسشصضطظعغ�����ـفقكلمنهوىيًٌٍَُِّْ�������������" + }, + "cp28596": "iso88596", + "iso88597": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ‘’£€₯¦§¨©ͺ«¬­�―°±²³΄΅Ά·ΈΉΊ»Ό½ΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡ�ΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύώ�" + }, + "cp28597": "iso88597", + "iso88598": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ �¢£¤¥¦§¨©×«¬­®¯°±²³´µ¶·¸¹÷»¼½¾��������������������������������‗אבגדהוזחטיךכלםמןנסעףפץצקרשת��‎‏�" + }, + "cp28598": "iso88598", + "iso88599": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ¡¢£¤¥¦§¨©ª«¬­®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏĞÑÒÓÔÕÖ×ØÙÚÛÜİŞßàáâãäåæçèéêëìíîïğñòóôõö÷øùúûüışÿ" + }, + "cp28599": "iso88599", + "iso885910": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ĄĒĢĪĨĶ§ĻĐŠŦŽ­ŪŊ°ąēģīĩķ·ļđšŧž―ūŋĀÁÂÃÄÅÆĮČÉĘËĖÍÎÏÐŅŌÓÔÕÖŨØŲÚÛÜÝÞßāáâãäåæįčéęëėíîïðņōóôõöũøųúûüýþĸ" + }, + "cp28600": "iso885910", + "iso885911": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู����฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛����" + }, + "cp28601": "iso885911", + "iso885913": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ”¢£¤„¦§Ø©Ŗ«¬­®Æ°±²³“µ¶·ø¹ŗ»¼½¾æĄĮĀĆÄÅĘĒČÉŹĖĢĶĪĻŠŃŅÓŌÕÖ×ŲŁŚŪÜŻŽßąįāćäåęēčéźėģķīļšńņóōõö÷ųłśūüżž’" + }, + "cp28603": "iso885913", + "iso885914": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ Ḃḃ£ĊċḊ§Ẁ©ẂḋỲ­®ŸḞḟĠġṀṁ¶ṖẁṗẃṠỳẄẅṡÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏŴÑÒÓÔÕÖṪØÙÚÛÜÝŶßàáâãäåæçèéêëìíîïŵñòóôõöṫøùúûüýŷÿ" + }, + "cp28604": "iso885914", + "iso885915": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ¡¢£€¥Š§š©ª«¬­®¯°±²³Žµ¶·ž¹º»ŒœŸ¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖ×ØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ" + }, + "cp28605": "iso885915", + "iso885916": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ 
ĄąŁ€„Š§š©Ș«Ź­źŻ°±ČłŽ”¶·žčș»ŒœŸżÀÁÂĂÄĆÆÇÈÉÊËÌÍÎÏĐŃÒÓÔŐÖŚŰÙÚÛÜĘȚßàáâăäćæçèéêëìíîïđńòóôőöśűùúûüęțÿ" + }, + "cp28606": "iso885916", + "cp437": { + "type": "_sbcs", + "chars": "ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜ¢£¥₧ƒáíóúñѪº¿⌐¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ " + }, + "ibm437": "cp437", + "csibm437": "cp437", + "cp737": { + "type": "_sbcs", + "chars": "ΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩαβγδεζηθικλμνξοπρσςτυφχψ░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀ωάέήϊίόύϋώΆΈΉΊΌΎΏ±≥≤ΪΫ÷≈°∙·√ⁿ²■ " + }, + "ibm737": "cp737", + "csibm737": "cp737", + "cp775": { + "type": "_sbcs", + "chars": "ĆüéāäģåćłēŖŗīŹÄÅÉæÆōöĢ¢ŚśÖÜø£ØפĀĪóŻżź”¦©®¬½¼Ł«»░▒▓│┤ĄČĘĖ╣║╗╝ĮŠ┐└┴┬├─┼ŲŪ╚╔╩╦╠═╬Žąčęėįšųūž┘┌█▄▌▐▀ÓßŌŃõÕµńĶķĻļņĒŅ’­±“¾¶§÷„°∙·¹³²■ " + }, + "ibm775": "cp775", + "csibm775": "cp775", + "cp850": { + "type": "_sbcs", + "chars": "ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜø£Ø׃áíóúñѪº¿®¬½¼¡«»░▒▓│┤ÁÂÀ©╣║╗╝¢¥┐└┴┬├─┼ãÃ╚╔╩╦╠═╬¤ðÐÊËÈıÍÎÏ┘┌█▄¦Ì▀ÓßÔÒõÕµþÞÚÛÙýݯ´­±‗¾¶§÷¸°¨·¹³²■ " + }, + "ibm850": "cp850", + "csibm850": "cp850", + "cp852": { + "type": "_sbcs", + "chars": "ÇüéâäůćçłëŐőîŹÄĆÉĹĺôöĽľŚśÖÜŤťŁ×čáíóúĄąŽžĘ꬟Ⱥ«»░▒▓│┤ÁÂĚŞ╣║╗╝Żż┐└┴┬├─┼Ăă╚╔╩╦╠═╬¤đĐĎËďŇÍÎě┘┌█▄ŢŮ▀ÓßÔŃńňŠšŔÚŕŰýÝţ´­˝˛ˇ˘§÷¸°¨˙űŘř■ " + }, + "ibm852": "cp852", + "csibm852": "cp852", + "cp855": { + "type": "_sbcs", + "chars": "ђЂѓЃёЁєЄѕЅіІїЇјЈљЉњЊћЋќЌўЎџЏюЮъЪаАбБцЦдДеЕфФгГ«»░▒▓│┤хХиИ╣║╗╝йЙ┐└┴┬├─┼кК╚╔╩╦╠═╬¤лЛмМнНоОп┘┌█▄Пя▀ЯрРсСтТуУжЖвВьЬ№­ыЫзЗшШэЭщЩчЧ§■ " + }, + "ibm855": "cp855", + "csibm855": "cp855", + "cp856": { + "type": "_sbcs", + "chars": "אבגדהוזחטיךכלםמןנסעףפץצקרשת�£�×����������®¬½¼�«»░▒▓│┤���©╣║╗╝¢¥┐└┴┬├─┼��╚╔╩╦╠═╬¤���������┘┌█▄¦�▀������µ�������¯´­±‗¾¶§÷¸°¨·¹³²■ " + }, + "ibm856": "cp856", + "csibm856": "cp856", + "cp857": { + "type": "_sbcs", + "chars": "ÇüéâäàåçêëèïîıÄÅÉæÆôöòûùİÖÜø£ØŞşáíóúñÑĞ𿮬½¼¡«»░▒▓│┤ÁÂÀ©╣║╗╝¢¥┐└┴┬├─┼ãÃ╚╔╩╦╠═╬¤ºªÊËÈ�ÍÎÏ┘┌█▄¦Ì▀ÓßÔÒõÕµ�×ÚÛÙìÿ¯´­±�¾¶§÷¸°¨·¹³²■ " + }, + "ibm857": "cp857", + "csibm857": "cp857", + "cp858": { + "type": "_sbcs", + "chars": "ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜø£Ø׃áíóúñѪº¿®¬½¼¡«»░▒▓│┤ÁÂÀ©╣║╗╝¢¥┐└┴┬├─┼ãÃ╚╔╩╦╠═╬¤ðÐÊËÈ€ÍÎÏ┘┌█▄¦Ì▀ÓßÔÒõÕµþÞÚÛÙýݯ´­±‗¾¶§÷¸°¨·¹³²■ " + }, + "ibm858": "cp858", + "csibm858": "cp858", + "cp860": { + "type": "_sbcs", + "chars": "ÇüéâãàÁçêÊèÍÔìÃÂÉÀÈôõòÚùÌÕÜ¢£Ù₧ÓáíóúñѪº¿Ò¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ " + }, + "ibm860": "cp860", + "csibm860": "cp860", + "cp861": { + "type": "_sbcs", + "chars": "ÇüéâäàåçêëèÐðÞÄÅÉæÆôöþûÝýÖÜø£Ø₧ƒáíóúÁÍÓÚ¿⌐¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ " + }, + "ibm861": "cp861", + "csibm861": "cp861", + "cp862": { + "type": "_sbcs", + "chars": "אבגדהוזחטיךכלםמןנסעףפץצקרשת¢£¥₧ƒáíóúñѪº¿⌐¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ " + }, + "ibm862": "cp862", + "csibm862": "cp862", + "cp863": { + "type": "_sbcs", + "chars": "ÇüéâÂà¶çêëèïî‗À§ÉÈÊôËÏûù¤ÔÜ¢£ÙÛƒ¦´óú¨¸³¯Î⌐¬½¼¾«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ " + }, + "ibm863": "cp863", + "csibm863": "cp863", + "cp864": { + "type": "_sbcs", + "chars": "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\"#$٪&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~°·∙√▒─│┼┤┬├┴┐┌└┘β∞φ±½¼≈«»ﻷﻸ��ﻻﻼ� ­ﺂ£¤ﺄ��ﺎﺏﺕﺙ،ﺝﺡﺥ٠١٢٣٤٥٦٧٨٩ﻑ؛ﺱﺵﺹ؟¢ﺀﺁﺃﺅﻊﺋﺍﺑﺓﺗﺛﺟﺣﺧﺩﺫﺭﺯﺳﺷﺻﺿﻁﻅﻋﻏ¦¬÷×ﻉـﻓﻗﻛﻟﻣﻧﻫﻭﻯﻳﺽﻌﻎﻍﻡﹽّﻥﻩﻬﻰﻲﻐﻕﻵﻶﻝﻙﻱ■�" + }, + "ibm864": "cp864", + "csibm864": "cp864", + "cp865": { + "type": "_sbcs", + "chars": 
"ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜø£Ø₧ƒáíóúñѪº¿⌐¬½¼¡«¤░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ " + }, + "ibm865": "cp865", + "csibm865": "cp865", + "cp866": { + "type": "_sbcs", + "chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмноп░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀рстуфхцчшщъыьэюяЁёЄєЇїЎў°∙·√№¤■ " + }, + "ibm866": "cp866", + "csibm866": "cp866", + "cp869": { + "type": "_sbcs", + "chars": "������Ά�·¬¦‘’Έ―ΉΊΪΌ��ΎΫ©Ώ²³ά£έήίϊΐόύΑΒΓΔΕΖΗ½ΘΙ«»░▒▓│┤ΚΛΜΝ╣║╗╝ΞΟ┐└┴┬├─┼ΠΡ╚╔╩╦╠═╬ΣΤΥΦΧΨΩαβγ┘┌█▄δε▀ζηθικλμνξοπρσςτ΄­±υφχ§ψ΅°¨ωϋΰώ■ " + }, + "ibm869": "cp869", + "csibm869": "cp869", + "cp922": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ¡¢£¤¥¦§¨©ª«¬­®‾°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏŠÑÒÓÔÕÖ×ØÙÚÛÜÝŽßàáâãäåæçèéêëìíîïšñòóôõö÷øùúûüýžÿ" + }, + "ibm922": "cp922", + "csibm922": "cp922", + "cp1046": { + "type": "_sbcs", + "chars": "ﺈ×÷ﹱˆ■│─┐┌└┘ﹹﹻﹽﹿﹷﺊﻰﻳﻲﻎﻏﻐﻶﻸﻺﻼ ¤ﺋﺑﺗﺛﺟﺣ،­ﺧﺳ٠١٢٣٤٥٦٧٨٩ﺷ؛ﺻﺿﻊ؟ﻋءآأؤإئابةتثجحخدذرزسشصضطﻇعغﻌﺂﺄﺎﻓـفقكلمنهوىيًٌٍَُِّْﻗﻛﻟﻵﻷﻹﻻﻣﻧﻬﻩ�" + }, + "ibm1046": "cp1046", + "csibm1046": "cp1046", + "cp1124": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ЁЂҐЄЅІЇЈЉЊЋЌ­ЎЏАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя№ёђґєѕіїјљњћќ§ўџ" + }, + "ibm1124": "cp1124", + "csibm1124": "cp1124", + "cp1125": { + "type": "_sbcs", + "chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмноп░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀рстуфхцчшщъыьэюяЁёҐґЄєІіЇї·√№¤■ " + }, + "ibm1125": "cp1125", + "csibm1125": "cp1125", + "cp1129": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ¡¢£¤¥¦§œ©ª«¬­®¯°±²³Ÿµ¶·Œ¹º»¼½¾¿ÀÁÂĂÄÅÆÇÈÉÊË̀ÍÎÏĐÑ̉ÓÔƠÖ×ØÙÚÛÜỮßàáâăäåæçèéêë́íîïđṇ̃óôơö÷øùúûüư₫ÿ" + }, + "ibm1129": "cp1129", + "csibm1129": "cp1129", + "cp1133": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ກຂຄງຈສຊຍດຕຖທນບປຜຝພຟມຢຣລວຫອຮ���ຯະາຳິີຶືຸູຼັົຽ���ເແໂໃໄ່້໊໋໌ໍໆ�ໜໝ₭����������������໐໑໒໓໔໕໖໗໘໙��¢¬¦�" + }, + "ibm1133": "cp1133", + "csibm1133": "cp1133", + "cp1161": { + "type": "_sbcs", + "chars": "��������������������������������่กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู้๊๋€฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛¢¬¦ " + }, + "ibm1161": "cp1161", + "csibm1161": "cp1161", + "cp1162": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู����฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛����" + }, + "ibm1162": "cp1162", + "csibm1162": "cp1162", + "cp1163": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ¡¢£€¥¦§œ©ª«¬­®¯°±²³Ÿµ¶·Œ¹º»¼½¾¿ÀÁÂĂÄÅÆÇÈÉÊË̀ÍÎÏĐÑ̉ÓÔƠÖ×ØÙÚÛÜỮßàáâăäåæçèéêë́íîïđṇ̃óôơö÷øùúûüư₫ÿ" + }, + "ibm1163": "cp1163", + "csibm1163": "cp1163", + "maccroatian": { + "type": "_sbcs", + "chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®Š™´¨≠ŽØ∞±≤≥∆µ∂∑∏š∫ªºΩžø¿¡¬√ƒ≈Ć«Č… ÀÃÕŒœĐ—“”‘’÷◊�©⁄¤‹›Æ»–·‚„‰ÂćÁčÈÍÎÏÌÓÔđÒÚÛÙıˆ˜¯πË˚¸Êæˇ" + }, + "maccyrillic": { + "type": "_sbcs", + "chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯ†°¢£§•¶І®©™Ђђ≠Ѓѓ∞±≤≥іµ∂ЈЄєЇїЉљЊњјЅ¬√ƒ≈∆«»… ЋћЌќѕ–—“”‘’÷„ЎўЏџ№Ёёяабвгдежзийклмнопрстуфхцчшщъыьэю¤" + }, + "macgreek": { + "type": "_sbcs", + "chars": "Ĺ²É³ÖÜ΅àâä΄¨çéèê룙î‰ôö¦­ùûü†ΓΔΘΛΞΠß®©ΣΪ§≠°·Α±≤≥¥ΒΕΖΗΙΚΜΦΫΨΩάΝ¬ΟΡ≈Τ«»… ΥΧΆΈœ–―“”‘’÷ΉΊΌΎέήίόΏύαβψδεφγηιξκλμνοπώρστθωςχυζϊϋΐΰ�" + }, + "maciceland": { + "type": "_sbcs", + "chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûüÝ°¢£§•¶ß®©™´¨≠ÆØ∞±≤≥¥µ∂∑∏π∫ªºΩæø¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸ⁄¤ÐðÞþý·‚„‰ÂÊÁËÈÍÎÏÌÓÔ�ÒÚÛÙıˆ˜¯˘˙˚¸˝˛ˇ" + }, + "macroman": { + "type": "_sbcs", + "chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®©™´¨≠ÆØ∞±≤≥¥µ∂∑∏π∫ªºΩæø¿¡¬√ƒ≈∆«»… 
ÀÃÕŒœ–—“”‘’÷◊ÿŸ⁄¤‹›fifl‡·‚„‰ÂÊÁËÈÍÎÏÌÓÔ�ÒÚÛÙıˆ˜¯˘˙˚¸˝˛ˇ" + }, + "macromania": { + "type": "_sbcs", + "chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®©™´¨≠ĂŞ∞±≤≥¥µ∂∑∏π∫ªºΩăş¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸ⁄¤‹›Ţţ‡·‚„‰ÂÊÁËÈÍÎÏÌÓÔ�ÒÚÛÙıˆ˜¯˘˙˚¸˝˛ˇ" + }, + "macthai": { + "type": "_sbcs", + "chars": "«»…“”�•‘’� กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู​–—฿เแโใไๅๆ็่้๊๋์ํ™๏๐๑๒๓๔๕๖๗๘๙®©����" + }, + "macturkish": { + "type": "_sbcs", + "chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®©™´¨≠ÆØ∞±≤≥¥µ∂∑∏π∫ªºΩæø¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸĞğİıŞş‡·‚„‰ÂÊÁËÈÍÎÏÌÓÔ�ÒÚÛÙ�ˆ˜¯˘˙˚¸˝˛ˇ" + }, + "macukraine": { + "type": "_sbcs", + "chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯ†°Ґ£§•¶І®©™Ђђ≠Ѓѓ∞±≤≥іµґЈЄєЇїЉљЊњјЅ¬√ƒ≈∆«»… ЋћЌќѕ–—“”‘’÷„ЎўЏџ№Ёёяабвгдежзийклмнопрстуфхцчшщъыьэю¤" + }, + "koi8r": { + "type": "_sbcs", + "chars": "─│┌┐└┘├┤┬┴┼▀▄█▌▐░▒▓⌠■∙√≈≤≥ ⌡°²·÷═║╒ё╓╔╕╖╗╘╙╚╛╜╝╞╟╠╡Ё╢╣╤╥╦╧╨╩╪╫╬©юабцдефгхийклмнопярстужвьызшэщчъЮАБЦДЕФГХИЙКЛМНОПЯРСТУЖВЬЫЗШЭЩЧЪ" + }, + "koi8u": { + "type": "_sbcs", + "chars": "─│┌┐└┘├┤┬┴┼▀▄█▌▐░▒▓⌠■∙√≈≤≥ ⌡°²·÷═║╒ёє╔ії╗╘╙╚╛ґ╝╞╟╠╡ЁЄ╣ІЇ╦╧╨╩╪Ґ╬©юабцдефгхийклмнопярстужвьызшэщчъЮАБЦДЕФГХИЙКЛМНОПЯРСТУЖВЬЫЗШЭЩЧЪ" + }, + "koi8ru": { + "type": "_sbcs", + "chars": "─│┌┐└┘├┤┬┴┼▀▄█▌▐░▒▓⌠■∙√≈≤≥ ⌡°²·÷═║╒ёє╔ії╗╘╙╚╛ґў╞╟╠╡ЁЄ╣ІЇ╦╧╨╩╪ҐЎ©юабцдефгхийклмнопярстужвьызшэщчъЮАБЦДЕФГХИЙКЛМНОПЯРСТУЖВЬЫЗШЭЩЧЪ" + }, + "koi8t": { + "type": "_sbcs", + "chars": "қғ‚Ғ„…†‡�‰ҳ‹ҲҷҶ�Қ‘’“”•–—�™�›�����ӯӮё¤ӣ¦§���«¬­®�°±²Ё�Ӣ¶·�№�»���©юабцдефгхийклмнопярстужвьызшэщчъЮАБЦДЕФГХИЙКЛМНОПЯРСТУЖВЬЫЗШЭЩЧЪ" + }, + "armscii8": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ �և։)(»«—.՝,-֊…՜՛՞ԱաԲբԳգԴդԵեԶզԷէԸըԹթԺժԻիԼլԽխԾծԿկՀհՁձՂղՃճՄմՅյՆնՇշՈոՉչՊպՋջՌռՍսՎվՏտՐրՑցՒւՓփՔքՕօՖֆ՚�" + }, + "rk1048": { + "type": "_sbcs", + "chars": "ЂЃ‚ѓ„…†‡€‰Љ‹ЊҚҺЏђ‘’“”•–—�™љ›њқһџ ҰұӘ¤Ө¦§Ё©Ғ«¬­®Ү°±Ііөµ¶·ё№ғ»әҢңүАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя" + }, + "tcvn": { + "type": "_sbcs", + "chars": "\u0000ÚỤ\u0003ỪỬỮ\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010ỨỰỲỶỸÝỴ\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~ÀẢÃÁẠẶẬÈẺẼÉẸỆÌỈĨÍỊÒỎÕÓỌỘỜỞỠỚỢÙỦŨ ĂÂÊÔƠƯĐăâêôơưđẶ̀̀̉̃́àảãáạẲằẳẵắẴẮẦẨẪẤỀặầẩẫấậèỂẻẽéẹềểễếệìỉỄẾỒĩíịòỔỏõóọồổỗốộờởỡớợùỖủũúụừửữứựỳỷỹýỵỐ" + }, + "georgianacademy": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ¡¢£¤¥¦§¨©ª«¬­®¯°±²³´µ¶·¸¹º»¼½¾¿აბგდევზთიკლმნოპჟრსტუფქღყშჩცძწჭხჯჰჱჲჳჴჵჶçèéêëìíîïðñòóôõö÷øùúûüýþÿ" + }, + "georgianps": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ¡¢£¤¥¦§¨©ª«¬­®¯°±²³´µ¶·¸¹º»¼½¾¿აბგდევზჱთიკლმნჲოპჟრსტჳუფქღყშჩცძწჭხჴჯჰჵæçèéêëìíîïðñòóôõö÷øùúûüýþÿ" + }, + "pt154": { + "type": "_sbcs", + "chars": "ҖҒӮғ„…ҶҮҲүҠӢҢҚҺҸҗ‘’“”•–—ҳҷҡӣңқһҹ ЎўЈӨҘҰ§Ё©Ә«¬ӯ®Ҝ°ұІіҙө¶·ё№ә»јҪҫҝАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя" + }, + "viscii": { + "type": "_sbcs", + "chars": "\u0000\u0001Ẳ\u0003\u0004ẴẪ\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013Ỷ\u0015\u0016\u0017\u0018Ỹ\u001a\u001b\u001c\u001dỴ\u001f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~ẠẮẰẶẤẦẨẬẼẸẾỀỂỄỆỐỒỔỖỘỢỚỜỞỊỎỌỈỦŨỤỲÕắằặấầẩậẽẹếềểễệốồổỗỠƠộờởịỰỨỪỬơớƯÀÁÂÃẢĂẳẵÈÉÊẺÌÍĨỳĐứÒÓÔạỷừửÙÚỹỵÝỡưàáâãảăữẫèéêẻìíĩỉđựòóôõỏọụùúũủýợỮ" + }, + "iso646cn": { + "type": "_sbcs", + "chars": "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f 
!\"#¥%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}‾��������������������������������������������������������������������������������������������������������������������������������" + }, + "iso646jp": { + "type": "_sbcs", + "chars": "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[¥]^_`abcdefghijklmnopqrstuvwxyz{|}‾��������������������������������������������������������������������������������������������������������������������������������" + }, + "hproman8": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ÀÂÈÊËÎÏ´ˋˆ¨˜ÙÛ₤¯Ýý°ÇçÑñ¡¿¤£¥§ƒ¢âêôûáéóúàèòùäëöüÅîØÆåíøæÄìÖÜÉïßÔÁÃãÐðÍÌÓÒÕõŠšÚŸÿÞþ·µ¶¾—¼½ªº«■»±�" + }, + "macintosh": { + "type": "_sbcs", + "chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®©™´¨≠ÆØ∞±≤≥¥µ∂∑∏π∫ªºΩæø¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸ⁄¤‹›fifl‡·‚„‰ÂÊÁËÈÍÎÏÌÓÔ�ÒÚÛÙıˆ˜¯˘˙˚¸˝˛ˇ" + }, + "ascii": { + "type": "_sbcs", + "chars": "��������������������������������������������������������������������������������������������������������������������������������" + }, + "tis620": { + "type": "_sbcs", + "chars": "���������������������������������กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู����฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛����" } } -// https://fetch.spec.whatwg.org/#abort-fetch -function abortFetch (p, request, responseObject, error) { - // Note: AbortSignal.reason was added in node v17.2.0 - // which would give us an undefined error to reject with. - // Remove this once node v16 is no longer supported. - if (!error) { - error = new DOMException('The operation was aborted.', 'AbortError') - } +/***/ }), - // 1. Reject promise with error. - p.reject(error) +/***/ 41080: +/***/ ((module) => { - // 2. If request’s body is not null and is readable, then cancel request’s - // body with error. - if (request.body != null && isReadable(request.body?.stream)) { - request.body.stream.cancel(error).catch((err) => { - if (err.code === 'ERR_INVALID_STATE') { - // Node bug? - return - } - throw err - }) - } +"use strict"; - // 3. If responseObject is null, then return. - if (responseObject == null) { - return - } - // 4. Let response be responseObject’s response. - const response = responseObject[kState] +// Manually added data to be used by sbcs codec in addition to generated one. - // 5. If response’s body is not null and is readable, then error response’s - // body with error. - if (response.body != null && isReadable(response.body?.stream)) { - response.body.stream.cancel(error).catch((err) => { - if (err.code === 'ERR_INVALID_STATE') { - // Node bug? - return - } - throw err - }) - } -} +module.exports = { + // Not supported by iconv, not sure why. + "10029": "maccenteuro", + "maccenteuro": { + "type": "_sbcs", + "chars": "ÄĀāÉĄÖÜáąČäčĆć鏟ĎíďĒēĖóėôöõúĚěü†°Ę£§•¶ß®©™ę¨≠ģĮįĪ≤≥īĶ∂∑łĻļĽľĹĺŅņѬ√ńŇ∆«»… ňŐÕőŌ–—“”‘’÷◊ōŔŕŘ‹›řŖŗŠ‚„šŚśÁŤťÍŽžŪÓÔūŮÚůŰűŲųÝýķŻŁżĢˇ" + }, -// https://fetch.spec.whatwg.org/#fetching -function fetching ({ - request, - processRequestBodyChunkLength, - processRequestEndOfBody, - processResponse, - processResponseEndOfBody, - processResponseConsumeBody, - useParallelQueue = false, - dispatcher // undici -}) { - // 1. Let taskDestination be null. 
- let taskDestination = null + "808": "cp808", + "ibm808": "cp808", + "cp808": { + "type": "_sbcs", + "chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмноп░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀рстуфхцчшщъыьэюяЁёЄєЇїЎў°∙·√№€■ " + }, - // 2. Let crossOriginIsolatedCapability be false. - let crossOriginIsolatedCapability = false + "mik": { + "type": "_sbcs", + "chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя└┴┬├─┼╣║╚╔╩╦╠═╬┐░▒▓│┤№§╗╝┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ " + }, - // 3. If request’s client is non-null, then: - if (request.client != null) { - // 1. Set taskDestination to request’s client’s global object. - taskDestination = request.client.globalObject + // Aliases of generated encodings. + "ascii8bit": "ascii", + "usascii": "ascii", + "ansix34": "ascii", + "ansix341968": "ascii", + "ansix341986": "ascii", + "csascii": "ascii", + "cp367": "ascii", + "ibm367": "ascii", + "isoir6": "ascii", + "iso646us": "ascii", + "iso646irv": "ascii", + "us": "ascii", + + "latin1": "iso88591", + "latin2": "iso88592", + "latin3": "iso88593", + "latin4": "iso88594", + "latin5": "iso88599", + "latin6": "iso885910", + "latin7": "iso885913", + "latin8": "iso885914", + "latin9": "iso885915", + "latin10": "iso885916", + + "csisolatin1": "iso88591", + "csisolatin2": "iso88592", + "csisolatin3": "iso88593", + "csisolatin4": "iso88594", + "csisolatincyrillic": "iso88595", + "csisolatinarabic": "iso88596", + "csisolatingreek" : "iso88597", + "csisolatinhebrew": "iso88598", + "csisolatin5": "iso88599", + "csisolatin6": "iso885910", + + "l1": "iso88591", + "l2": "iso88592", + "l3": "iso88593", + "l4": "iso88594", + "l5": "iso88599", + "l6": "iso885910", + "l7": "iso885913", + "l8": "iso885914", + "l9": "iso885915", + "l10": "iso885916", + + "isoir14": "iso646jp", + "isoir57": "iso646cn", + "isoir100": "iso88591", + "isoir101": "iso88592", + "isoir109": "iso88593", + "isoir110": "iso88594", + "isoir144": "iso88595", + "isoir127": "iso88596", + "isoir126": "iso88597", + "isoir138": "iso88598", + "isoir148": "iso88599", + "isoir157": "iso885910", + "isoir166": "tis620", + "isoir179": "iso885913", + "isoir199": "iso885914", + "isoir203": "iso885915", + "isoir226": "iso885916", + + "cp819": "iso88591", + "ibm819": "iso88591", + + "cyrillic": "iso88595", + + "arabic": "iso88596", + "arabic8": "iso88596", + "ecma114": "iso88596", + "asmo708": "iso88596", + + "greek" : "iso88597", + "greek8" : "iso88597", + "ecma118" : "iso88597", + "elot928" : "iso88597", + + "hebrew": "iso88598", + "hebrew8": "iso88598", + + "turkish": "iso88599", + "turkish8": "iso88599", + + "thai": "iso885911", + "thai8": "iso885911", + + "celtic": "iso885914", + "celtic8": "iso885914", + "isoceltic": "iso885914", + + "tis6200": "tis620", + "tis62025291": "tis620", + "tis62025330": "tis620", + + "10000": "macroman", + "10006": "macgreek", + "10007": "maccyrillic", + "10079": "maciceland", + "10081": "macturkish", + + "cspc8codepage437": "cp437", + "cspc775baltic": "cp775", + "cspc850multilingual": "cp850", + "cspcp852": "cp852", + "cspc862latinhebrew": "cp862", + "cpgr": "cp869", + + "msee": "cp1250", + "mscyrl": "cp1251", + "msansi": "cp1252", + "msgreek": "cp1253", + "msturk": "cp1254", + "mshebr": "cp1255", + "msarab": "cp1256", + "winbaltrim": "cp1257", + + "cp20866": "koi8r", + "20866": "koi8r", + "ibm878": "koi8r", + "cskoi8r": "koi8r", + + "cp21866": "koi8u", + "21866": "koi8u", + "ibm1168": "koi8u", + + "strk10482002": "rk1048", + + "tcvn5712": "tcvn", + "tcvn57121": "tcvn", + + "gb198880": 
"iso646cn", + "cn": "iso646cn", + + "csiso14jisc6220ro": "iso646jp", + "jisc62201969ro": "iso646jp", + "jp": "iso646jp", + + "cshproman8": "hproman8", + "r8": "hproman8", + "roman8": "hproman8", + "xroman8": "hproman8", + "ibm1051": "hproman8", + + "mac": "macintosh", + "csmacintosh": "macintosh", +}; - // 2. Set crossOriginIsolatedCapability to request’s client’s cross-origin - // isolated capability. - crossOriginIsolatedCapability = - request.client.crossOriginIsolatedCapability - } - // 4. If useParallelQueue is true, then set taskDestination to the result of - // starting a new parallel queue. - // TODO - // 5. Let timingInfo be a new fetch timing info whose start time and - // post-redirect start time are the coarsened shared current time given - // crossOriginIsolatedCapability. - const currenTime = coarsenedSharedCurrentTime(crossOriginIsolatedCapability) - const timingInfo = createOpaqueTimingInfo({ - startTime: currenTime - }) +/***/ }), - // 6. Let fetchParams be a new fetch params whose - // request is request, - // timing info is timingInfo, - // process request body chunk length is processRequestBodyChunkLength, - // process request end-of-body is processRequestEndOfBody, - // process response is processResponse, - // process response consume body is processResponseConsumeBody, - // process response end-of-body is processResponseEndOfBody, - // task destination is taskDestination, - // and cross-origin isolated capability is crossOriginIsolatedCapability. - const fetchParams = { - controller: new Fetch(dispatcher), - request, - timingInfo, - processRequestBodyChunkLength, - processRequestEndOfBody, - processResponse, - processResponseConsumeBody, - processResponseEndOfBody, - taskDestination, - crossOriginIsolatedCapability - } +/***/ 11155: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // 7. If request’s body is a byte sequence, then set request’s body to - // request’s body as a body. - // NOTE: Since fetching is only called from fetch, body should already be - // extracted. - assert(!request.body || request.body.stream) +"use strict"; - // 8. If request’s window is "client", then set request’s window to request’s - // client, if request’s client’s global object is a Window object; otherwise - // "no-window". - if (request.window === 'client') { - // TODO: What if request.client is null? - request.window = - request.client?.globalObject?.constructor?.name === 'Window' - ? request.client - : 'no-window' - } +var Buffer = (__nccwpck_require__(15118).Buffer); - // 9. If request’s origin is "client", then set request’s origin to request’s - // client’s origin. - if (request.origin === 'client') { - // TODO: What if request.client is null? - request.origin = request.client?.origin - } +// Note: UTF16-LE (or UCS2) codec is Node.js native. See encodings/internal.js - // 10. If all of the following conditions are true: - // TODO +// == UTF16-BE codec. ========================================================== - // 11. If request’s policy container is "client", then: - if (request.policyContainer === 'client') { - // 1. If request’s client is non-null, then set request’s policy - // container to a clone of request’s client’s policy container. [HTML] - if (request.client != null) { - request.policyContainer = clonePolicyContainer( - request.client.policyContainer - ) - } else { - // 2. Otherwise, set request’s policy container to a new policy - // container. 
- request.policyContainer = makePolicyContainer() - } - } +exports.utf16be = Utf16BECodec; +function Utf16BECodec() { +} - // 12. If request’s header list does not contain `Accept`, then: - if (!request.headersList.contains('accept')) { - // 1. Let value be `*/*`. - const value = '*/*' +Utf16BECodec.prototype.encoder = Utf16BEEncoder; +Utf16BECodec.prototype.decoder = Utf16BEDecoder; +Utf16BECodec.prototype.bomAware = true; - // 2. A user agent should set value to the first matching statement, if - // any, switching on request’s destination: - // "document" - // "frame" - // "iframe" - // `text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8` - // "image" - // `image/png,image/svg+xml,image/*;q=0.8,*/*;q=0.5` - // "style" - // `text/css,*/*;q=0.1` - // TODO - // 3. Append `Accept`/value to request’s header list. - request.headersList.append('accept', value) - } +// -- Encoding - // 13. If request’s header list does not contain `Accept-Language`, then - // user agents should append `Accept-Language`/an appropriate value to - // request’s header list. - if (!request.headersList.contains('accept-language')) { - request.headersList.append('accept-language', '*') - } +function Utf16BEEncoder() { +} - // 14. If request’s priority is null, then use request’s initiator and - // destination appropriately in setting request’s priority to a - // user-agent-defined object. - if (request.priority === null) { - // TODO - } +Utf16BEEncoder.prototype.write = function(str) { + var buf = Buffer.from(str, 'ucs2'); + for (var i = 0; i < buf.length; i += 2) { + var tmp = buf[i]; buf[i] = buf[i+1]; buf[i+1] = tmp; + } + return buf; +} - // 15. If request is a subresource request, then: - if (subresourceSet.has(request.destination)) { - // TODO - } +Utf16BEEncoder.prototype.end = function() { +} - // 16. Run main fetch given fetchParams. - mainFetch(fetchParams) - .catch(err => { - fetchParams.controller.terminate(err) - }) - // 17. Return fetchParam's controller - return fetchParams.controller +// -- Decoding + +function Utf16BEDecoder() { + this.overflowByte = -1; } -// https://fetch.spec.whatwg.org/#concept-main-fetch -async function mainFetch (fetchParams, recursive = false) { - // 1. Let request be fetchParams’s request. - const request = fetchParams.request +Utf16BEDecoder.prototype.write = function(buf) { + if (buf.length == 0) + return ''; - // 2. Let response be null. - let response = null + var buf2 = Buffer.alloc(buf.length + 1), + i = 0, j = 0; - // 3. If request’s local-URLs-only flag is set and request’s current URL is - // not local, then set response to a network error. - if (request.localURLsOnly && !urlIsLocal(requestCurrentURL(request))) { - response = makeNetworkError('local URLs only') - } + if (this.overflowByte !== -1) { + buf2[0] = buf[0]; + buf2[1] = this.overflowByte; + i = 1; j = 2; + } - // 4. Run report Content Security Policy violations for request. - // TODO + for (; i < buf.length-1; i += 2, j+= 2) { + buf2[j] = buf[i+1]; + buf2[j+1] = buf[i]; + } - // 5. Upgrade request to a potentially trustworthy URL, if appropriate. - tryUpgradeRequestToAPotentiallyTrustworthyURL(request) + this.overflowByte = (i == buf.length-1) ? buf[buf.length-1] : -1; - // 6. If should request be blocked due to a bad port, should fetching request - // be blocked as mixed content, or should request be blocked by Content - // Security Policy returns blocked, then set response to a network error. 
- if (requestBadPort(request) === 'blocked') { - response = makeNetworkError('bad port') - } - // TODO: should fetching request be blocked as mixed content? - // TODO: should request be blocked by Content Security Policy? + return buf2.slice(0, j).toString('ucs2'); +} - // 7. If request’s referrer policy is the empty string, then set request’s - // referrer policy to request’s policy container’s referrer policy. - if (request.referrerPolicy === '') { - request.referrerPolicy = request.policyContainer.referrerPolicy - } +Utf16BEDecoder.prototype.end = function() { +} - // 8. If request’s referrer is not "no-referrer", then set request’s - // referrer to the result of invoking determine request’s referrer. - if (request.referrer !== 'no-referrer') { - request.referrer = determineRequestsReferrer(request) - } - // 9. Set request’s current URL’s scheme to "https" if all of the following - // conditions are true: - // - request’s current URL’s scheme is "http" - // - request’s current URL’s host is a domain - // - Matching request’s current URL’s host per Known HSTS Host Domain Name - // Matching results in either a superdomain match with an asserted - // includeSubDomains directive or a congruent match (with or without an - // asserted includeSubDomains directive). [HSTS] - // TODO +// == UTF-16 codec ============================================================= +// Decoder chooses automatically from UTF-16LE and UTF-16BE using BOM and space-based heuristic. +// Defaults to UTF-16LE, as it's prevalent and default in Node. +// http://en.wikipedia.org/wiki/UTF-16 and http://encoding.spec.whatwg.org/#utf-16le +// Decoder default can be changed: iconv.decode(buf, 'utf16', {defaultEncoding: 'utf-16be'}); - // 10. If recursive is false, then run the remaining steps in parallel. - // TODO +// Encoder uses UTF-16LE and prepends BOM (which can be overridden with addBOM: false). - // 11. If response is null, then set response to the result of running - // the steps corresponding to the first matching statement: - if (response === null) { - response = await (async () => { - const currentURL = requestCurrentURL(request) +exports.utf16 = Utf16Codec; +function Utf16Codec(codecOptions, iconv) { + this.iconv = iconv; +} - if ( - // - request’s current URL’s origin is same origin with request’s origin, - // and request’s response tainting is "basic" - (sameOrigin(currentURL, request.url) && request.responseTainting === 'basic') || - // request’s current URL’s scheme is "data" - (currentURL.protocol === 'data:') || - // - request’s mode is "navigate" or "websocket" - (request.mode === 'navigate' || request.mode === 'websocket') - ) { - // 1. Set request’s response tainting to "basic". - request.responseTainting = 'basic' +Utf16Codec.prototype.encoder = Utf16Encoder; +Utf16Codec.prototype.decoder = Utf16Decoder; - // 2. Return the result of running scheme fetch given fetchParams. - return await schemeFetch(fetchParams) - } - // request’s mode is "same-origin" - if (request.mode === 'same-origin') { - // 1. Return a network error. - return makeNetworkError('request mode cannot be "same-origin"') - } +// -- Encoding (pass-through) - // request’s mode is "no-cors" - if (request.mode === 'no-cors') { - // 1. If request’s redirect mode is not "follow", then return a network - // error. 
- if (request.redirect !== 'follow') { - return makeNetworkError( - 'redirect mode cannot be "follow" for "no-cors" request' - ) - } +function Utf16Encoder(options, codec) { + options = options || {}; + if (options.addBOM === undefined) + options.addBOM = true; + this.encoder = codec.iconv.getEncoder('utf-16le', options); +} - // 2. Set request’s response tainting to "opaque". - request.responseTainting = 'opaque' +Utf16Encoder.prototype.write = function(str) { + return this.encoder.write(str); +} - // 3. Return the result of running scheme fetch given fetchParams. - return await schemeFetch(fetchParams) - } +Utf16Encoder.prototype.end = function() { + return this.encoder.end(); +} - // request’s current URL’s scheme is not an HTTP(S) scheme - if (!urlIsHttpHttpsScheme(requestCurrentURL(request))) { - // Return a network error. - return makeNetworkError('URL scheme must be a HTTP(S) scheme') - } - // - request’s use-CORS-preflight flag is set - // - request’s unsafe-request flag is set and either request’s method is - // not a CORS-safelisted method or CORS-unsafe request-header names with - // request’s header list is not empty - // 1. Set request’s response tainting to "cors". - // 2. Let corsWithPreflightResponse be the result of running HTTP fetch - // given fetchParams and true. - // 3. If corsWithPreflightResponse is a network error, then clear cache - // entries using request. - // 4. Return corsWithPreflightResponse. - // TODO +// -- Decoding - // Otherwise - // 1. Set request’s response tainting to "cors". - request.responseTainting = 'cors' +function Utf16Decoder(options, codec) { + this.decoder = null; + this.initialBytes = []; + this.initialBytesLen = 0; - // 2. Return the result of running HTTP fetch given fetchParams. - return await httpFetch(fetchParams) - })() - } + this.options = options || {}; + this.iconv = codec.iconv; +} - // 12. If recursive is true, then return response. - if (recursive) { - return response - } +Utf16Decoder.prototype.write = function(buf) { + if (!this.decoder) { + // Codec is not chosen yet. Accumulate initial bytes. + this.initialBytes.push(buf); + this.initialBytesLen += buf.length; + + if (this.initialBytesLen < 16) // We need more bytes to use space heuristic (see below) + return ''; - // 13. If response is not a network error and response is not a filtered - // response, then: - if (response.status !== 0 && !response.internalResponse) { - // If request’s response tainting is "cors", then: - if (request.responseTainting === 'cors') { - // 1. Let headerNames be the result of extracting header list values - // given `Access-Control-Expose-Headers` and response’s header list. - // TODO - // 2. If request’s credentials mode is not "include" and headerNames - // contains `*`, then set response’s CORS-exposed header-name list to - // all unique header names in response’s header list. - // TODO - // 3. Otherwise, if headerNames is not null or failure, then set - // response’s CORS-exposed header-name list to headerNames. - // TODO + // We have enough bytes -> detect endianness. 
+ var buf = Buffer.concat(this.initialBytes), + encoding = detectEncoding(buf, this.options.defaultEncoding); + this.decoder = this.iconv.getDecoder(encoding, this.options); + this.initialBytes.length = this.initialBytesLen = 0; } - // Set response to the following filtered response with response as its - // internal response, depending on request’s response tainting: - if (request.responseTainting === 'basic') { - response = filterResponse(response, 'basic') - } else if (request.responseTainting === 'cors') { - response = filterResponse(response, 'cors') - } else if (request.responseTainting === 'opaque') { - response = filterResponse(response, 'opaque') - } else { - assert(false) + return this.decoder.write(buf); +} + +Utf16Decoder.prototype.end = function() { + if (!this.decoder) { + var buf = Buffer.concat(this.initialBytes), + encoding = detectEncoding(buf, this.options.defaultEncoding); + this.decoder = this.iconv.getDecoder(encoding, this.options); + + var res = this.decoder.write(buf), + trail = this.decoder.end(); + + return trail ? (res + trail) : res; } - } + return this.decoder.end(); +} - // 14. Let internalResponse be response, if response is a network error, - // and response’s internal response otherwise. - let internalResponse = - response.status === 0 ? response : response.internalResponse +function detectEncoding(buf, defaultEncoding) { + var enc = defaultEncoding || 'utf-16le'; - // 15. If internalResponse’s URL list is empty, then set it to a clone of - // request’s URL list. - if (internalResponse.urlList.length === 0) { - internalResponse.urlList.push(...request.urlList) - } + if (buf.length >= 2) { + // Check BOM. + if (buf[0] == 0xFE && buf[1] == 0xFF) // UTF-16BE BOM + enc = 'utf-16be'; + else if (buf[0] == 0xFF && buf[1] == 0xFE) // UTF-16LE BOM + enc = 'utf-16le'; + else { + // No BOM found. Try to deduce encoding from initial content. + // Most of the time, the content has ASCII chars (U+00**), but the opposite (U+**00) is uncommon. + // So, we count ASCII as if it was LE or BE, and decide from that. + var asciiCharsLE = 0, asciiCharsBE = 0, // Counts of chars in both positions + _len = Math.min(buf.length - (buf.length % 2), 64); // Len is always even. + + for (var i = 0; i < _len; i += 2) { + if (buf[i] === 0 && buf[i+1] !== 0) asciiCharsBE++; + if (buf[i] !== 0 && buf[i+1] === 0) asciiCharsLE++; + } - // 16. If request’s timing allow failed flag is unset, then set - // internalResponse’s timing allow passed flag. - if (!request.timingAllowFailed) { - response.timingAllowPassed = true - } + if (asciiCharsBE > asciiCharsLE) + enc = 'utf-16be'; + else if (asciiCharsBE < asciiCharsLE) + enc = 'utf-16le'; + } + } - // 17. If response is not a network error and any of the following returns - // blocked - // - should internalResponse to request be blocked as mixed content - // - should internalResponse to request be blocked by Content Security Policy - // - should internalResponse to request be blocked due to its MIME type - // - should internalResponse to request be blocked due to nosniff - // TODO + return enc; +} - // 18. If response’s type is "opaque", internalResponse’s status is 206, - // internalResponse’s range-requested flag is set, and request’s header - // list does not contain `Range`, then set response and internalResponse - // to a network error. 
- if ( - response.type === 'opaque' && - internalResponse.status === 206 && - internalResponse.rangeRequested && - !request.headers.contains('range') - ) { - response = internalResponse = makeNetworkError() - } - // 19. If response is not a network error and either request’s method is - // `HEAD` or `CONNECT`, or internalResponse’s status is a null body status, - // set internalResponse’s body to null and disregard any enqueuing toward - // it (if any). - if ( - response.status !== 0 && - (request.method === 'HEAD' || - request.method === 'CONNECT' || - nullBodyStatus.includes(internalResponse.status)) - ) { - internalResponse.body = null - fetchParams.controller.dump = true - } - // 20. If request’s integrity metadata is not the empty string, then: - if (request.integrity) { - // 1. Let processBodyError be this step: run fetch finale given fetchParams - // and a network error. - const processBodyError = (reason) => - fetchFinale(fetchParams, makeNetworkError(reason)) - // 2. If request’s response tainting is "opaque", or response’s body is null, - // then run processBodyError and abort these steps. - if (request.responseTainting === 'opaque' || response.body == null) { - processBodyError(response.error) - return - } +/***/ }), - // 3. Let processBody given bytes be these steps: - const processBody = (bytes) => { - // 1. If bytes do not match request’s integrity metadata, - // then run processBodyError and abort these steps. [SRI] - if (!bytesMatch(bytes, request.integrity)) { - processBodyError('integrity mismatch') - return - } +/***/ 51644: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // 2. Set response’s body to bytes as a body. - response.body = safelyExtractBody(bytes)[0] +"use strict"; - // 3. Run fetch finale given fetchParams and response. - fetchFinale(fetchParams, response) - } +var Buffer = (__nccwpck_require__(15118).Buffer); - // 4. Fully read response’s body given processBody and processBodyError. - await fullyReadBody(response.body, processBody, processBodyError) - } else { - // 21. Otherwise, run fetch finale given fetchParams and response. - fetchFinale(fetchParams, response) - } -} +// UTF-7 codec, according to https://tools.ietf.org/html/rfc2152 +// See also below a UTF-7-IMAP codec, according to http://tools.ietf.org/html/rfc3501#section-5.1.3 -// https://fetch.spec.whatwg.org/#concept-scheme-fetch -// given a fetch params fetchParams -function schemeFetch (fetchParams) { - // Note: since the connection is destroyed on redirect, which sets fetchParams to a - // cancelled state, we do not want this condition to trigger *unless* there have been - // no redirects. See https://github.com/nodejs/undici/issues/1776 - // 1. If fetchParams is canceled, then return the appropriate network error for fetchParams. - if (isCancelled(fetchParams) && fetchParams.request.redirectCount === 0) { - return Promise.resolve(makeAppropriateNetworkError(fetchParams)) - } +exports.utf7 = Utf7Codec; +exports.unicode11utf7 = 'utf7'; // Alias UNICODE-1-1-UTF-7 +function Utf7Codec(codecOptions, iconv) { + this.iconv = iconv; +}; - // 2. Let request be fetchParams’s request. - const { request } = fetchParams +Utf7Codec.prototype.encoder = Utf7Encoder; +Utf7Codec.prototype.decoder = Utf7Decoder; +Utf7Codec.prototype.bomAware = true; - const { protocol: scheme } = requestCurrentURL(request) - // 3. 
Switch on request’s current URL’s scheme and run the associated steps: - switch (scheme) { - case 'about:': { - // If request’s current URL’s path is the string "blank", then return a new response - // whose status message is `OK`, header list is « (`Content-Type`, `text/html;charset=utf-8`) », - // and body is the empty byte sequence as a body. +// -- Encoding - // Otherwise, return a network error. - return Promise.resolve(makeNetworkError('about scheme is not supported')) - } - case 'blob:': { - if (!resolveObjectURL) { - resolveObjectURL = (__nccwpck_require__(4300).resolveObjectURL) - } +var nonDirectChars = /[^A-Za-z0-9'\(\),-\.\/:\? \n\r\t]+/g; - // 1. Let blobURLEntry be request’s current URL’s blob URL entry. - const blobURLEntry = requestCurrentURL(request) +function Utf7Encoder(options, codec) { + this.iconv = codec.iconv; +} - // https://github.com/web-platform-tests/wpt/blob/7b0ebaccc62b566a1965396e5be7bb2bc06f841f/FileAPI/url/resources/fetch-tests.js#L52-L56 - // Buffer.resolveObjectURL does not ignore URL queries. - if (blobURLEntry.search.length !== 0) { - return Promise.resolve(makeNetworkError('NetworkError when attempting to fetch resource.')) - } +Utf7Encoder.prototype.write = function(str) { + // Naive implementation. + // Non-direct chars are encoded as "+-"; single "+" char is encoded as "+-". + return Buffer.from(str.replace(nonDirectChars, function(chunk) { + return "+" + (chunk === '+' ? '' : + this.iconv.encode(chunk, 'utf16-be').toString('base64').replace(/=+$/, '')) + + "-"; + }.bind(this))); +} - const blobURLEntryObject = resolveObjectURL(blobURLEntry.toString()) +Utf7Encoder.prototype.end = function() { +} - // 2. If request’s method is not `GET`, blobURLEntry is null, or blobURLEntry’s - // object is not a Blob object, then return a network error. - if (request.method !== 'GET' || !isBlobLike(blobURLEntryObject)) { - return Promise.resolve(makeNetworkError('invalid method')) - } - // 3. Let bodyWithType be the result of safely extracting blobURLEntry’s object. - const bodyWithType = safelyExtractBody(blobURLEntryObject) +// -- Decoding - // 4. Let body be bodyWithType’s body. - const body = bodyWithType[0] +function Utf7Decoder(options, codec) { + this.iconv = codec.iconv; + this.inBase64 = false; + this.base64Accum = ''; +} - // 5. Let length be body’s length, serialized and isomorphic encoded. - const length = isomorphicEncode(`${body.length}`) +var base64Regex = /[A-Za-z0-9\/+]/; +var base64Chars = []; +for (var i = 0; i < 256; i++) + base64Chars[i] = base64Regex.test(String.fromCharCode(i)); - // 6. Let type be bodyWithType’s type if it is non-null; otherwise the empty byte sequence. - const type = bodyWithType[1] ?? '' +var plusChar = '+'.charCodeAt(0), + minusChar = '-'.charCodeAt(0), + andChar = '&'.charCodeAt(0); - // 7. Return a new response whose status message is `OK`, header list is - // « (`Content-Length`, length), (`Content-Type`, type) », and body is body. - const response = makeResponse({ - statusText: 'OK', - headersList: [ - ['content-length', { name: 'Content-Length', value: length }], - ['content-type', { name: 'Content-Type', value: type }] - ] - }) +Utf7Decoder.prototype.write = function(buf) { + var res = "", lastI = 0, + inBase64 = this.inBase64, + base64Accum = this.base64Accum; - response.body = body + // The decoder is more involved as we must handle chunks in stream. - return Promise.resolve(response) + for (var i = 0; i < buf.length; i++) { + if (!inBase64) { // We're in direct mode. 
+ // Write direct chars until '+' + if (buf[i] == plusChar) { + res += this.iconv.decode(buf.slice(lastI, i), "ascii"); // Write direct chars. + lastI = i+1; + inBase64 = true; + } + } else { // We decode base64. + if (!base64Chars[buf[i]]) { // Base64 ended. + if (i == lastI && buf[i] == minusChar) {// "+-" -> "+" + res += "+"; + } else { + var b64str = base64Accum + buf.slice(lastI, i).toString(); + res += this.iconv.decode(Buffer.from(b64str, 'base64'), "utf16-be"); + } + + if (buf[i] != minusChar) // Minus is absorbed after base64. + i--; + + lastI = i+1; + inBase64 = false; + base64Accum = ''; + } + } } - case 'data:': { - // 1. Let dataURLStruct be the result of running the - // data: URL processor on request’s current URL. - const currentURL = requestCurrentURL(request) - const dataURLStruct = dataURLProcessor(currentURL) - // 2. If dataURLStruct is failure, then return a - // network error. - if (dataURLStruct === 'failure') { - return Promise.resolve(makeNetworkError('failed to fetch the data URL')) - } + if (!inBase64) { + res += this.iconv.decode(buf.slice(lastI), "ascii"); // Write direct chars. + } else { + var b64str = base64Accum + buf.slice(lastI).toString(); - // 3. Let mimeType be dataURLStruct’s MIME type, serialized. - const mimeType = serializeAMimeType(dataURLStruct.mimeType) + var canBeDecoded = b64str.length - (b64str.length % 8); // Minimal chunk: 2 quads -> 2x3 bytes -> 3 chars. + base64Accum = b64str.slice(canBeDecoded); // The rest will be decoded in future. + b64str = b64str.slice(0, canBeDecoded); - // 4. Return a response whose status message is `OK`, - // header list is « (`Content-Type`, mimeType) », - // and body is dataURLStruct’s body as a body. - return Promise.resolve(makeResponse({ - statusText: 'OK', - headersList: [ - ['content-type', { name: 'Content-Type', value: mimeType }] - ], - body: safelyExtractBody(dataURLStruct.body)[0] - })) - } - case 'file:': { - // For now, unfortunate as it is, file URLs are left as an exercise for the reader. - // When in doubt, return a network error. - return Promise.resolve(makeNetworkError('not implemented... yet...')) + res += this.iconv.decode(Buffer.from(b64str, 'base64'), "utf16-be"); } - case 'http:': - case 'https:': { - // Return the result of running HTTP fetch given fetchParams. - return httpFetch(fetchParams) - .catch((err) => makeNetworkError(err)) - } - default: { - return Promise.resolve(makeNetworkError('unknown scheme')) - } - } + this.inBase64 = inBase64; + this.base64Accum = base64Accum; + + return res; } -// https://fetch.spec.whatwg.org/#finalize-response -function finalizeResponse (fetchParams, response) { - // 1. Set fetchParams’s request’s done flag. - fetchParams.request.done = true +Utf7Decoder.prototype.end = function() { + var res = ""; + if (this.inBase64 && this.base64Accum.length > 0) + res = this.iconv.decode(Buffer.from(this.base64Accum, 'base64'), "utf16-be"); - // 2, If fetchParams’s process response done is not null, then queue a fetch - // task to run fetchParams’s process response done given response, with - // fetchParams’s task destination. - if (fetchParams.processResponseDone != null) { - queueMicrotask(() => fetchParams.processResponseDone(response)) - } + this.inBase64 = false; + this.base64Accum = ''; + return res; } -// https://fetch.spec.whatwg.org/#fetch-finale -function fetchFinale (fetchParams, response) { - // 1. If response is a network error, then: - if (response.type === 'error') { - // 1. Set response’s URL list to « fetchParams’s request’s URL list[0] ». 
- response.urlList = [fetchParams.request.urlList[0]] - // 2. Set response’s timing info to the result of creating an opaque timing - // info for fetchParams’s timing info. - response.timingInfo = createOpaqueTimingInfo({ - startTime: fetchParams.timingInfo.startTime - }) - } +// UTF-7-IMAP codec. +// RFC3501 Sec. 5.1.3 Modified UTF-7 (http://tools.ietf.org/html/rfc3501#section-5.1.3) +// Differences: +// * Base64 part is started by "&" instead of "+" +// * Direct characters are 0x20-0x7E, except "&" (0x26) +// * In Base64, "," is used instead of "/" +// * Base64 must not be used to represent direct characters. +// * No implicit shift back from Base64 (should always end with '-') +// * String must end in non-shifted position. +// * "-&" while in base64 is not allowed. - // 2. Let processResponseEndOfBody be the following steps: - const processResponseEndOfBody = () => { - // 1. Set fetchParams’s request’s done flag. - fetchParams.request.done = true - // If fetchParams’s process response end-of-body is not null, - // then queue a fetch task to run fetchParams’s process response - // end-of-body given response with fetchParams’s task destination. - if (fetchParams.processResponseEndOfBody != null) { - queueMicrotask(() => fetchParams.processResponseEndOfBody(response)) - } - } +exports.utf7imap = Utf7IMAPCodec; +function Utf7IMAPCodec(codecOptions, iconv) { + this.iconv = iconv; +}; - // 3. If fetchParams’s process response is non-null, then queue a fetch task - // to run fetchParams’s process response given response, with fetchParams’s - // task destination. - if (fetchParams.processResponse != null) { - queueMicrotask(() => fetchParams.processResponse(response)) - } +Utf7IMAPCodec.prototype.encoder = Utf7IMAPEncoder; +Utf7IMAPCodec.prototype.decoder = Utf7IMAPDecoder; +Utf7IMAPCodec.prototype.bomAware = true; - // 4. If response’s body is null, then run processResponseEndOfBody. - if (response.body == null) { - processResponseEndOfBody() - } else { - // 5. Otherwise: - // 1. Let transformStream be a new a TransformStream. +// -- Encoding - // 2. Let identityTransformAlgorithm be an algorithm which, given chunk, - // enqueues chunk in transformStream. - const identityTransformAlgorithm = (chunk, controller) => { - controller.enqueue(chunk) - } +function Utf7IMAPEncoder(options, codec) { + this.iconv = codec.iconv; + this.inBase64 = false; + this.base64Accum = Buffer.alloc(6); + this.base64AccumIdx = 0; +} - // 3. Set up transformStream with transformAlgorithm set to identityTransformAlgorithm - // and flushAlgorithm set to processResponseEndOfBody. - const transformStream = new TransformStream({ - start () {}, - transform: identityTransformAlgorithm, - flush: processResponseEndOfBody - }, { - size () { - return 1 - } - }, { - size () { - return 1 - } - }) +Utf7IMAPEncoder.prototype.write = function(str) { + var inBase64 = this.inBase64, + base64Accum = this.base64Accum, + base64AccumIdx = this.base64AccumIdx, + buf = Buffer.alloc(str.length*5 + 10), bufIdx = 0; - // 4. Set response’s body to the result of piping response’s body through transformStream. - response.body = { stream: response.body.stream.pipeThrough(transformStream) } - } + for (var i = 0; i < str.length; i++) { + var uChar = str.charCodeAt(i); + if (0x20 <= uChar && uChar <= 0x7E) { // Direct character or '&'. + if (inBase64) { + if (base64AccumIdx > 0) { + bufIdx += buf.write(base64Accum.slice(0, base64AccumIdx).toString('base64').replace(/\//g, ',').replace(/=+$/, ''), bufIdx); + base64AccumIdx = 0; + } - // 6. 
If fetchParams’s process response consume body is non-null, then: - if (fetchParams.processResponseConsumeBody != null) { - // 1. Let processBody given nullOrBytes be this step: run fetchParams’s - // process response consume body given response and nullOrBytes. - const processBody = (nullOrBytes) => fetchParams.processResponseConsumeBody(response, nullOrBytes) + buf[bufIdx++] = minusChar; // Write '-', then go to direct mode. + inBase64 = false; + } - // 2. Let processBodyError be this step: run fetchParams’s process - // response consume body given response and failure. - const processBodyError = (failure) => fetchParams.processResponseConsumeBody(response, failure) + if (!inBase64) { + buf[bufIdx++] = uChar; // Write direct character - // 3. If response’s body is null, then queue a fetch task to run processBody - // given null, with fetchParams’s task destination. - if (response.body == null) { - queueMicrotask(() => processBody(null)) - } else { - // 4. Otherwise, fully read response’s body given processBody, processBodyError, - // and fetchParams’s task destination. - return fullyReadBody(response.body, processBody, processBodyError) + if (uChar === andChar) // Ampersand -> '&-' + buf[bufIdx++] = minusChar; + } + + } else { // Non-direct character + if (!inBase64) { + buf[bufIdx++] = andChar; // Write '&', then go to base64 mode. + inBase64 = true; + } + if (inBase64) { + base64Accum[base64AccumIdx++] = uChar >> 8; + base64Accum[base64AccumIdx++] = uChar & 0xFF; + + if (base64AccumIdx == base64Accum.length) { + bufIdx += buf.write(base64Accum.toString('base64').replace(/\//g, ','), bufIdx); + base64AccumIdx = 0; + } + } + } } - return Promise.resolve() - } + + this.inBase64 = inBase64; + this.base64AccumIdx = base64AccumIdx; + + return buf.slice(0, bufIdx); } -// https://fetch.spec.whatwg.org/#http-fetch -async function httpFetch (fetchParams) { - // 1. Let request be fetchParams’s request. - const request = fetchParams.request +Utf7IMAPEncoder.prototype.end = function() { + var buf = Buffer.alloc(10), bufIdx = 0; + if (this.inBase64) { + if (this.base64AccumIdx > 0) { + bufIdx += buf.write(this.base64Accum.slice(0, this.base64AccumIdx).toString('base64').replace(/\//g, ',').replace(/=+$/, ''), bufIdx); + this.base64AccumIdx = 0; + } - // 2. Let response be null. - let response = null + buf[bufIdx++] = minusChar; // Write '-', then go to direct mode. + this.inBase64 = false; + } - // 3. Let actualResponse be null. - let actualResponse = null + return buf.slice(0, bufIdx); +} - // 4. Let timingInfo be fetchParams’s timing info. - const timingInfo = fetchParams.timingInfo - // 5. If request’s service-workers mode is "all", then: - if (request.serviceWorkers === 'all') { - // TODO - } +// -- Decoding - // 6. If response is null, then: - if (response === null) { - // 1. If makeCORSPreflight is true and one of these conditions is true: - // TODO +function Utf7IMAPDecoder(options, codec) { + this.iconv = codec.iconv; + this.inBase64 = false; + this.base64Accum = ''; +} - // 2. If request’s redirect mode is "follow", then set request’s - // service-workers mode to "none". - if (request.redirect === 'follow') { - request.serviceWorkers = 'none' - } +var base64IMAPChars = base64Chars.slice(); +base64IMAPChars[','.charCodeAt(0)] = true; - // 3. Set response and actualResponse to the result of running - // HTTP-network-or-cache fetch given fetchParams. 
- actualResponse = response = await httpNetworkOrCacheFetch(fetchParams) +Utf7IMAPDecoder.prototype.write = function(buf) { + var res = "", lastI = 0, + inBase64 = this.inBase64, + base64Accum = this.base64Accum; - // 4. If request’s response tainting is "cors" and a CORS check - // for request and response returns failure, then return a network error. - if ( - request.responseTainting === 'cors' && - corsCheck(request, response) === 'failure' - ) { - return makeNetworkError('cors failure') - } + // The decoder is more involved as we must handle chunks in stream. + // It is forgiving, closer to standard UTF-7 (for example, '-' is optional at the end). - // 5. If the TAO check for request and response returns failure, then set - // request’s timing allow failed flag. - if (TAOCheck(request, response) === 'failure') { - request.timingAllowFailed = true - } - } + for (var i = 0; i < buf.length; i++) { + if (!inBase64) { // We're in direct mode. + // Write direct chars until '&' + if (buf[i] == andChar) { + res += this.iconv.decode(buf.slice(lastI, i), "ascii"); // Write direct chars. + lastI = i+1; + inBase64 = true; + } + } else { // We decode base64. + if (!base64IMAPChars[buf[i]]) { // Base64 ended. + if (i == lastI && buf[i] == minusChar) { // "&-" -> "&" + res += "&"; + } else { + var b64str = base64Accum + buf.slice(lastI, i).toString().replace(/,/g, '/'); + res += this.iconv.decode(Buffer.from(b64str, 'base64'), "utf16-be"); + } - // 7. If either request’s response tainting or response’s type - // is "opaque", and the cross-origin resource policy check with - // request’s origin, request’s client, request’s destination, - // and actualResponse returns blocked, then return a network error. - if ( - (request.responseTainting === 'opaque' || response.type === 'opaque') && - crossOriginResourcePolicyCheck( - request.origin, - request.client, - request.destination, - actualResponse - ) === 'blocked' - ) { - return makeNetworkError('blocked') - } + if (buf[i] != minusChar) // Minus may be absorbed after base64. + i--; - // 8. If actualResponse’s status is a redirect status, then: - if (redirectStatusSet.has(actualResponse.status)) { - // 1. If actualResponse’s status is not 303, request’s body is not null, - // and the connection uses HTTP/2, then user agents may, and are even - // encouraged to, transmit an RST_STREAM frame. - // See, https://github.com/whatwg/fetch/issues/1288 - if (request.redirect !== 'manual') { - fetchParams.controller.connection.destroy() + lastI = i+1; + inBase64 = false; + base64Accum = ''; + } + } } - // 2. Switch on request’s redirect mode: - if (request.redirect === 'error') { - // Set response to a network error. - response = makeNetworkError('unexpected redirect') - } else if (request.redirect === 'manual') { - // Set response to an opaque-redirect filtered response whose internal - // response is actualResponse. - // NOTE(spec): On the web this would return an `opaqueredirect` response, - // but that doesn't make sense server side. - // See https://github.com/nodejs/undici/issues/1193. - response = actualResponse - } else if (request.redirect === 'follow') { - // Set response to the result of running HTTP-redirect fetch given - // fetchParams and response. - response = await httpRedirectFetch(fetchParams, response) + if (!inBase64) { + res += this.iconv.decode(buf.slice(lastI), "ascii"); // Write direct chars. 
} else { - assert(false) + var b64str = base64Accum + buf.slice(lastI).toString().replace(/,/g, '/'); + + var canBeDecoded = b64str.length - (b64str.length % 8); // Minimal chunk: 2 quads -> 2x3 bytes -> 3 chars. + base64Accum = b64str.slice(canBeDecoded); // The rest will be decoded in future. + b64str = b64str.slice(0, canBeDecoded); + + res += this.iconv.decode(Buffer.from(b64str, 'base64'), "utf16-be"); } - } - // 9. Set response’s timing info to timingInfo. - response.timingInfo = timingInfo + this.inBase64 = inBase64; + this.base64Accum = base64Accum; - // 10. Return response. - return response + return res; } -// https://fetch.spec.whatwg.org/#http-redirect-fetch -function httpRedirectFetch (fetchParams, response) { - // 1. Let request be fetchParams’s request. - const request = fetchParams.request - - // 2. Let actualResponse be response, if response is not a filtered response, - // and response’s internal response otherwise. - const actualResponse = response.internalResponse - ? response.internalResponse - : response +Utf7IMAPDecoder.prototype.end = function() { + var res = ""; + if (this.inBase64 && this.base64Accum.length > 0) + res = this.iconv.decode(Buffer.from(this.base64Accum, 'base64'), "utf16-be"); - // 3. Let locationURL be actualResponse’s location URL given request’s current - // URL’s fragment. - let locationURL + this.inBase64 = false; + this.base64Accum = ''; + return res; +} - try { - locationURL = responseLocationURL( - actualResponse, - requestCurrentURL(request).hash - ) - // 4. If locationURL is null, then return response. - if (locationURL == null) { - return response - } - } catch (err) { - // 5. If locationURL is failure, then return a network error. - return Promise.resolve(makeNetworkError(err)) - } - // 6. If locationURL’s scheme is not an HTTP(S) scheme, then return a network - // error. - if (!urlIsHttpHttpsScheme(locationURL)) { - return Promise.resolve(makeNetworkError('URL scheme must be a HTTP(S) scheme')) - } - // 7. If request’s redirect count is 20, then return a network error. - if (request.redirectCount === 20) { - return Promise.resolve(makeNetworkError('redirect count exceeded')) - } +/***/ }), - // 8. Increase request’s redirect count by 1. - request.redirectCount += 1 +/***/ 67961: +/***/ ((__unused_webpack_module, exports) => { - // 9. If request’s mode is "cors", locationURL includes credentials, and - // request’s origin is not same origin with locationURL’s origin, then return - // a network error. - if ( - request.mode === 'cors' && - (locationURL.username || locationURL.password) && - !sameOrigin(request, locationURL) - ) { - return Promise.resolve(makeNetworkError('cross origin not allowed for request mode "cors"')) - } +"use strict"; - // 10. If request’s response tainting is "cors" and locationURL includes - // credentials, then return a network error. - if ( - request.responseTainting === 'cors' && - (locationURL.username || locationURL.password) - ) { - return Promise.resolve(makeNetworkError( - 'URL cannot contain credentials for request mode "cors"' - )) - } - // 11. If actualResponse’s status is not 303, request’s body is non-null, - // and request’s body’s source is null, then return a network error. - if ( - actualResponse.status !== 303 && - request.body != null && - request.body.source == null - ) { - return Promise.resolve(makeNetworkError()) - } +var BOMChar = '\uFEFF'; - // 12. 
If one of the following is true - // - actualResponse’s status is 301 or 302 and request’s method is `POST` - // - actualResponse’s status is 303 and request’s method is not `GET` or `HEAD` - if ( - ([301, 302].includes(actualResponse.status) && request.method === 'POST') || - (actualResponse.status === 303 && - !GET_OR_HEAD.includes(request.method)) - ) { - // then: - // 1. Set request’s method to `GET` and request’s body to null. - request.method = 'GET' - request.body = null +exports.PrependBOM = PrependBOMWrapper +function PrependBOMWrapper(encoder, options) { + this.encoder = encoder; + this.addBOM = true; +} - // 2. For each headerName of request-body-header name, delete headerName from - // request’s header list. - for (const headerName of requestBodyHeader) { - request.headersList.delete(headerName) +PrependBOMWrapper.prototype.write = function(str) { + if (this.addBOM) { + str = BOMChar + str; + this.addBOM = false; } - } - - // 13. If request’s current URL’s origin is not same origin with locationURL’s - // origin, then for each headerName of CORS non-wildcard request-header name, - // delete headerName from request’s header list. - if (!sameOrigin(requestCurrentURL(request), locationURL)) { - // https://fetch.spec.whatwg.org/#cors-non-wildcard-request-header-name - request.headersList.delete('authorization') - // https://fetch.spec.whatwg.org/#authentication-entries - request.headersList.delete('proxy-authorization', true) + return this.encoder.write(str); +} - // "Cookie" and "Host" are forbidden request-headers, which undici doesn't implement. - request.headersList.delete('cookie') - request.headersList.delete('host') - } +PrependBOMWrapper.prototype.end = function() { + return this.encoder.end(); +} - // 14. If request’s body is non-null, then set request’s body to the first return - // value of safely extracting request’s body’s source. - if (request.body != null) { - assert(request.body.source != null) - request.body = safelyExtractBody(request.body.source)[0] - } - // 15. Let timingInfo be fetchParams’s timing info. - const timingInfo = fetchParams.timingInfo +//------------------------------------------------------------------------------ - // 16. Set timingInfo’s redirect end time and post-redirect start time to the - // coarsened shared current time given fetchParams’s cross-origin isolated - // capability. - timingInfo.redirectEndTime = timingInfo.postRedirectStartTime = - coarsenedSharedCurrentTime(fetchParams.crossOriginIsolatedCapability) +exports.StripBOM = StripBOMWrapper; +function StripBOMWrapper(decoder, options) { + this.decoder = decoder; + this.pass = false; + this.options = options || {}; +} - // 17. If timingInfo’s redirect start time is 0, then set timingInfo’s - // redirect start time to timingInfo’s start time. - if (timingInfo.redirectStartTime === 0) { - timingInfo.redirectStartTime = timingInfo.startTime - } +StripBOMWrapper.prototype.write = function(buf) { + var res = this.decoder.write(buf); + if (this.pass || !res) + return res; - // 18. Append locationURL to request’s URL list. - request.urlList.push(locationURL) + if (res[0] === BOMChar) { + res = res.slice(1); + if (typeof this.options.stripBOM === 'function') + this.options.stripBOM(); + } - // 19. Invoke set request’s referrer policy on redirect on request and - // actualResponse. - setRequestReferrerPolicyOnRedirect(request, actualResponse) + this.pass = true; + return res; +} - // 20. Return the result of running main fetch given fetchParams and true. 
- return mainFetch(fetchParams, true) +StripBOMWrapper.prototype.end = function() { + return this.decoder.end(); } -// https://fetch.spec.whatwg.org/#http-network-or-cache-fetch -async function httpNetworkOrCacheFetch ( - fetchParams, - isAuthenticationFetch = false, - isNewConnectionFetch = false -) { - // 1. Let request be fetchParams’s request. - const request = fetchParams.request - // 2. Let httpFetchParams be null. - let httpFetchParams = null - // 3. Let httpRequest be null. - let httpRequest = null +/***/ }), - // 4. Let response be null. - let response = null +/***/ 30393: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // 5. Let storedResponse be null. - // TODO: cache +"use strict"; - // 6. Let httpCache be null. - const httpCache = null +var Buffer = (__nccwpck_require__(14300).Buffer); +// Note: not polyfilled with safer-buffer on a purpose, as overrides Buffer - // 7. Let the revalidatingFlag be unset. - const revalidatingFlag = false +// == Extend Node primitives to use iconv-lite ================================= - // 8. Run these steps, but abort when the ongoing fetch is terminated: +module.exports = function (iconv) { + var original = undefined; // Place to keep original methods. - // 1. If request’s window is "no-window" and request’s redirect mode is - // "error", then set httpFetchParams to fetchParams and httpRequest to - // request. - if (request.window === 'no-window' && request.redirect === 'error') { - httpFetchParams = fetchParams - httpRequest = request - } else { - // Otherwise: + // Node authors rewrote Buffer internals to make it compatible with + // Uint8Array and we cannot patch key functions since then. + // Note: this does use older Buffer API on a purpose + iconv.supportsNodeEncodingsExtension = !(Buffer.from || new Buffer(0) instanceof Uint8Array); - // 1. Set httpRequest to a clone of request. - httpRequest = makeRequest(request) + iconv.extendNodeEncodings = function extendNodeEncodings() { + if (original) return; + original = {}; - // 2. Set httpFetchParams to a copy of fetchParams. - httpFetchParams = { ...fetchParams } + if (!iconv.supportsNodeEncodingsExtension) { + console.error("ACTION NEEDED: require('iconv-lite').extendNodeEncodings() is not supported in your version of Node"); + console.error("See more info at https://github.com/ashtuchkin/iconv-lite/wiki/Node-v4-compatibility"); + return; + } - // 3. Set httpFetchParams’s request to httpRequest. - httpFetchParams.request = httpRequest - } + var nodeNativeEncodings = { + 'hex': true, 'utf8': true, 'utf-8': true, 'ascii': true, 'binary': true, + 'base64': true, 'ucs2': true, 'ucs-2': true, 'utf16le': true, 'utf-16le': true, + }; - // 3. Let includeCredentials be true if one of - const includeCredentials = - request.credentials === 'include' || - (request.credentials === 'same-origin' && - request.responseTainting === 'basic') + Buffer.isNativeEncoding = function(enc) { + return enc && nodeNativeEncodings[enc.toLowerCase()]; + } - // 4. Let contentLength be httpRequest’s body’s length, if httpRequest’s - // body is non-null; otherwise null. - const contentLength = httpRequest.body ? httpRequest.body.length : null + // -- SlowBuffer ----------------------------------------------------------- + var SlowBuffer = (__nccwpck_require__(14300).SlowBuffer); - // 5. Let contentLengthHeaderValue be null. 
- let contentLengthHeaderValue = null + original.SlowBufferToString = SlowBuffer.prototype.toString; + SlowBuffer.prototype.toString = function(encoding, start, end) { + encoding = String(encoding || 'utf8').toLowerCase(); - // 6. If httpRequest’s body is null and httpRequest’s method is `POST` or - // `PUT`, then set contentLengthHeaderValue to `0`. - if ( - httpRequest.body == null && - ['POST', 'PUT'].includes(httpRequest.method) - ) { - contentLengthHeaderValue = '0' - } + // Use native conversion when possible + if (Buffer.isNativeEncoding(encoding)) + return original.SlowBufferToString.call(this, encoding, start, end); - // 7. If contentLength is non-null, then set contentLengthHeaderValue to - // contentLength, serialized and isomorphic encoded. - if (contentLength != null) { - contentLengthHeaderValue = isomorphicEncode(`${contentLength}`) - } + // Otherwise, use our decoding method. + if (typeof start == 'undefined') start = 0; + if (typeof end == 'undefined') end = this.length; + return iconv.decode(this.slice(start, end), encoding); + } - // 8. If contentLengthHeaderValue is non-null, then append - // `Content-Length`/contentLengthHeaderValue to httpRequest’s header - // list. - if (contentLengthHeaderValue != null) { - httpRequest.headersList.append('content-length', contentLengthHeaderValue) - } + original.SlowBufferWrite = SlowBuffer.prototype.write; + SlowBuffer.prototype.write = function(string, offset, length, encoding) { + // Support both (string, offset, length, encoding) + // and the legacy (string, encoding, offset, length) + if (isFinite(offset)) { + if (!isFinite(length)) { + encoding = length; + length = undefined; + } + } else { // legacy + var swap = encoding; + encoding = offset; + offset = length; + length = swap; + } - // 9. If contentLengthHeaderValue is non-null, then append (`Content-Length`, - // contentLengthHeaderValue) to httpRequest’s header list. + offset = +offset || 0; + var remaining = this.length - offset; + if (!length) { + length = remaining; + } else { + length = +length; + if (length > remaining) { + length = remaining; + } + } + encoding = String(encoding || 'utf8').toLowerCase(); - // 10. If contentLength is non-null and httpRequest’s keepalive is true, - // then: - if (contentLength != null && httpRequest.keepalive) { - // NOTE: keepalive is a noop outside of browser context. - } + // Use native conversion when possible + if (Buffer.isNativeEncoding(encoding)) + return original.SlowBufferWrite.call(this, string, offset, length, encoding); - // 11. If httpRequest’s referrer is a URL, then append - // `Referer`/httpRequest’s referrer, serialized and isomorphic encoded, - // to httpRequest’s header list. - if (httpRequest.referrer instanceof URL) { - httpRequest.headersList.append('referer', isomorphicEncode(httpRequest.referrer.href)) - } + if (string.length > 0 && (length < 0 || offset < 0)) + throw new RangeError('attempt to write beyond buffer bounds'); - // 12. Append a request `Origin` header for httpRequest. - appendRequestOriginHeader(httpRequest) + // Otherwise, use our encoding method. + var buf = iconv.encode(string, encoding); + if (buf.length < length) length = buf.length; + buf.copy(this, offset, 0, length); + return length; + } - // 13. Append the Fetch metadata headers for httpRequest. [FETCH-METADATA] - appendFetchMetadata(httpRequest) + // -- Buffer --------------------------------------------------------------- - // 14. 
If httpRequest’s header list does not contain `User-Agent`, then - // user agents should append `User-Agent`/default `User-Agent` value to - // httpRequest’s header list. - if (!httpRequest.headersList.contains('user-agent')) { - httpRequest.headersList.append('user-agent', typeof esbuildDetection === 'undefined' ? 'undici' : 'node') - } + original.BufferIsEncoding = Buffer.isEncoding; + Buffer.isEncoding = function(encoding) { + return Buffer.isNativeEncoding(encoding) || iconv.encodingExists(encoding); + } - // 15. If httpRequest’s cache mode is "default" and httpRequest’s header - // list contains `If-Modified-Since`, `If-None-Match`, - // `If-Unmodified-Since`, `If-Match`, or `If-Range`, then set - // httpRequest’s cache mode to "no-store". - if ( - httpRequest.cache === 'default' && - (httpRequest.headersList.contains('if-modified-since') || - httpRequest.headersList.contains('if-none-match') || - httpRequest.headersList.contains('if-unmodified-since') || - httpRequest.headersList.contains('if-match') || - httpRequest.headersList.contains('if-range')) - ) { - httpRequest.cache = 'no-store' - } + original.BufferByteLength = Buffer.byteLength; + Buffer.byteLength = SlowBuffer.byteLength = function(str, encoding) { + encoding = String(encoding || 'utf8').toLowerCase(); - // 16. If httpRequest’s cache mode is "no-cache", httpRequest’s prevent - // no-cache cache-control header modification flag is unset, and - // httpRequest’s header list does not contain `Cache-Control`, then append - // `Cache-Control`/`max-age=0` to httpRequest’s header list. - if ( - httpRequest.cache === 'no-cache' && - !httpRequest.preventNoCacheCacheControlHeaderModification && - !httpRequest.headersList.contains('cache-control') - ) { - httpRequest.headersList.append('cache-control', 'max-age=0') - } + // Use native conversion when possible + if (Buffer.isNativeEncoding(encoding)) + return original.BufferByteLength.call(this, str, encoding); - // 17. If httpRequest’s cache mode is "no-store" or "reload", then: - if (httpRequest.cache === 'no-store' || httpRequest.cache === 'reload') { - // 1. If httpRequest’s header list does not contain `Pragma`, then append - // `Pragma`/`no-cache` to httpRequest’s header list. - if (!httpRequest.headersList.contains('pragma')) { - httpRequest.headersList.append('pragma', 'no-cache') - } + // Slow, I know, but we don't have a better way yet. + return iconv.encode(str, encoding).length; + } - // 2. If httpRequest’s header list does not contain `Cache-Control`, - // then append `Cache-Control`/`no-cache` to httpRequest’s header list. - if (!httpRequest.headersList.contains('cache-control')) { - httpRequest.headersList.append('cache-control', 'no-cache') - } - } + original.BufferToString = Buffer.prototype.toString; + Buffer.prototype.toString = function(encoding, start, end) { + encoding = String(encoding || 'utf8').toLowerCase(); - // 18. If httpRequest’s header list contains `Range`, then append - // `Accept-Encoding`/`identity` to httpRequest’s header list. - if (httpRequest.headersList.contains('range')) { - httpRequest.headersList.append('accept-encoding', 'identity') - } + // Use native conversion when possible + if (Buffer.isNativeEncoding(encoding)) + return original.BufferToString.call(this, encoding, start, end); - // 19. Modify httpRequest’s header list per HTTP. Do not append a given - // header if httpRequest’s header list contains that header’s name. 
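// Sketch of the effect of the Buffer.isEncoding / Buffer.byteLength overrides
// above while the extension is active ('win1251' again stands in for any
// iconv-lite codec).
Buffer.isEncoding('win1251')             // true - falls back to iconv.encodingExists()
Buffer.byteLength('Привет', 'win1251')   // 6  - measured by actually encoding the string
Buffer.byteLength('Привет', 'utf8')      // 12 - native path, two bytes per Cyrillic character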
- // TODO: https://github.com/whatwg/fetch/issues/1285#issuecomment-896560129 - if (!httpRequest.headersList.contains('accept-encoding')) { - if (urlHasHttpsScheme(requestCurrentURL(httpRequest))) { - httpRequest.headersList.append('accept-encoding', 'br, gzip, deflate') - } else { - httpRequest.headersList.append('accept-encoding', 'gzip, deflate') - } - } + // Otherwise, use our decoding method. + if (typeof start == 'undefined') start = 0; + if (typeof end == 'undefined') end = this.length; + return iconv.decode(this.slice(start, end), encoding); + } - httpRequest.headersList.delete('host') + original.BufferWrite = Buffer.prototype.write; + Buffer.prototype.write = function(string, offset, length, encoding) { + var _offset = offset, _length = length, _encoding = encoding; + // Support both (string, offset, length, encoding) + // and the legacy (string, encoding, offset, length) + if (isFinite(offset)) { + if (!isFinite(length)) { + encoding = length; + length = undefined; + } + } else { // legacy + var swap = encoding; + encoding = offset; + offset = length; + length = swap; + } - // 20. If includeCredentials is true, then: - if (includeCredentials) { - // 1. If the user agent is not configured to block cookies for httpRequest - // (see section 7 of [COOKIES]), then: - // TODO: credentials - // 2. If httpRequest’s header list does not contain `Authorization`, then: - // TODO: credentials - } + encoding = String(encoding || 'utf8').toLowerCase(); - // 21. If there’s a proxy-authentication entry, use it as appropriate. - // TODO: proxy-authentication + // Use native conversion when possible + if (Buffer.isNativeEncoding(encoding)) + return original.BufferWrite.call(this, string, _offset, _length, _encoding); - // 22. Set httpCache to the result of determining the HTTP cache - // partition, given httpRequest. - // TODO: cache + offset = +offset || 0; + var remaining = this.length - offset; + if (!length) { + length = remaining; + } else { + length = +length; + if (length > remaining) { + length = remaining; + } + } - // 23. If httpCache is null, then set httpRequest’s cache mode to - // "no-store". - if (httpCache == null) { - httpRequest.cache = 'no-store' - } + if (string.length > 0 && (length < 0 || offset < 0)) + throw new RangeError('attempt to write beyond buffer bounds'); - // 24. If httpRequest’s cache mode is neither "no-store" nor "reload", - // then: - if (httpRequest.mode !== 'no-store' && httpRequest.mode !== 'reload') { - // TODO: cache - } + // Otherwise, use our encoding method. + var buf = iconv.encode(string, encoding); + if (buf.length < length) length = buf.length; + buf.copy(this, offset, 0, length); + return length; - // 9. If aborted, then return the appropriate network error for fetchParams. - // TODO + // TODO: Set _charsWritten. + } - // 10. If response is null, then: - if (response == null) { - // 1. If httpRequest’s cache mode is "only-if-cached", then return a - // network error. - if (httpRequest.mode === 'only-if-cached') { - return makeNetworkError('only if cached') - } - // 2. Let forwardResponse be the result of running HTTP-network fetch - // given httpFetchParams, includeCredentials, and isNewConnectionFetch. - const forwardResponse = await httpNetworkFetch( - httpFetchParams, - includeCredentials, - isNewConnectionFetch - ) + // -- Readable ------------------------------------------------------------- + if (iconv.supportsStreams) { + var Readable = (__nccwpck_require__(12781).Readable); - // 3. 
If httpRequest’s method is unsafe and forwardResponse’s status is - // in the range 200 to 399, inclusive, invalidate appropriate stored - // responses in httpCache, as per the "Invalidation" chapter of HTTP - // Caching, and set storedResponse to null. [HTTP-CACHING] - if ( - !safeMethodsSet.has(httpRequest.method) && - forwardResponse.status >= 200 && - forwardResponse.status <= 399 - ) { - // TODO: cache - } + original.ReadableSetEncoding = Readable.prototype.setEncoding; + Readable.prototype.setEncoding = function setEncoding(enc, options) { + // Use our own decoder, it has the same interface. + // We cannot use original function as it doesn't handle BOM-s. + this._readableState.decoder = iconv.getDecoder(enc, options); + this._readableState.encoding = enc; + } - // 4. If the revalidatingFlag is set and forwardResponse’s status is 304, - // then: - if (revalidatingFlag && forwardResponse.status === 304) { - // TODO: cache + Readable.prototype.collect = iconv._collect; + } } - // 5. If response is null, then: - if (response == null) { - // 1. Set response to forwardResponse. - response = forwardResponse + // Remove iconv-lite Node primitive extensions. + iconv.undoExtendNodeEncodings = function undoExtendNodeEncodings() { + if (!iconv.supportsNodeEncodingsExtension) + return; + if (!original) + throw new Error("require('iconv-lite').undoExtendNodeEncodings(): Nothing to undo; extendNodeEncodings() is not called.") - // 2. Store httpRequest and forwardResponse in httpCache, as per the - // "Storing Responses in Caches" chapter of HTTP Caching. [HTTP-CACHING] - // TODO: cache - } - } + delete Buffer.isNativeEncoding; - // 11. Set response’s URL list to a clone of httpRequest’s URL list. - response.urlList = [...httpRequest.urlList] + var SlowBuffer = (__nccwpck_require__(14300).SlowBuffer); - // 12. If httpRequest’s header list contains `Range`, then set response’s - // range-requested flag. - if (httpRequest.headersList.contains('range')) { - response.rangeRequested = true - } + SlowBuffer.prototype.toString = original.SlowBufferToString; + SlowBuffer.prototype.write = original.SlowBufferWrite; - // 13. Set response’s request-includes-credentials to includeCredentials. - response.requestIncludesCredentials = includeCredentials + Buffer.isEncoding = original.BufferIsEncoding; + Buffer.byteLength = original.BufferByteLength; + Buffer.prototype.toString = original.BufferToString; + Buffer.prototype.write = original.BufferWrite; - // 14. If response’s status is 401, httpRequest’s response tainting is not - // "cors", includeCredentials is true, and request’s window is an environment - // settings object, then: - // TODO + if (iconv.supportsStreams) { + var Readable = (__nccwpck_require__(12781).Readable); - // 15. If response’s status is 407, then: - if (response.status === 407) { - // 1. If request’s window is "no-window", then return a network error. - if (request.window === 'no-window') { - return makeNetworkError() + Readable.prototype.setEncoding = original.ReadableSetEncoding; + delete Readable.prototype.collect; + } + + original = undefined; } +} - // 2. ??? - // 3. If fetchParams is canceled, then return the appropriate network error for fetchParams. - if (isCancelled(fetchParams)) { - return makeAppropriateNetworkError(fetchParams) - } +/***/ }), - // 4. Prompt the end user as appropriate in request’s window and store - // the result as a proxy-authentication entry. [HTTP-AUTH] - // TODO: Invoke some kind of callback? 
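// The extend/undo pair above is symmetric: extendNodeEncodings() stashes the
// original Buffer/Readable methods in `original` and undoExtendNodeEncodings()
// restores them, so the patch can be scoped (sketch; only meaningful on Node
// versions where supportsNodeEncodingsExtension is true).
iconv.extendNodeEncodings()
try {
  // ... legacy code that expects Buffer to understand iconv-lite encodings ...
} finally {
  iconv.undoExtendNodeEncodings()
}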
+/***/ 19032: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // 5. Set response to the result of running HTTP-network-or-cache fetch given - // fetchParams. - // TODO - return makeNetworkError('proxy authentication required') - } +"use strict"; - // 16. If all of the following are true - if ( - // response’s status is 421 - response.status === 421 && - // isNewConnectionFetch is false - !isNewConnectionFetch && - // request’s body is null, or request’s body is non-null and request’s body’s source is non-null - (request.body == null || request.body.source != null) - ) { - // then: - // 1. If fetchParams is canceled, then return the appropriate network error for fetchParams. - if (isCancelled(fetchParams)) { - return makeAppropriateNetworkError(fetchParams) - } +// Some environments don't have global Buffer (e.g. React Native). +// Solution would be installing npm modules "buffer" and "stream" explicitly. +var Buffer = (__nccwpck_require__(15118).Buffer); - // 2. Set response to the result of running HTTP-network-or-cache - // fetch given fetchParams, isAuthenticationFetch, and true. +var bomHandling = __nccwpck_require__(67961), + iconv = module.exports; - // TODO (spec): The spec doesn't specify this but we need to cancel - // the active response before we can start a new one. - // https://github.com/whatwg/fetch/issues/1293 - fetchParams.controller.connection.destroy() +// All codecs and aliases are kept here, keyed by encoding name/alias. +// They are lazy loaded in `iconv.getCodec` from `encodings/index.js`. +iconv.encodings = null; - response = await httpNetworkOrCacheFetch( - fetchParams, - isAuthenticationFetch, - true - ) - } +// Characters emitted in case of error. +iconv.defaultCharUnicode = '�'; +iconv.defaultCharSingleByte = '?'; - // 17. If isAuthenticationFetch is true, then create an authentication entry - if (isAuthenticationFetch) { - // TODO - } +// Public API. +iconv.encode = function encode(str, encoding, options) { + str = "" + (str || ""); // Ensure string. - // 18. Return response. - return response + var encoder = iconv.getEncoder(encoding, options); + + var res = encoder.write(str); + var trail = encoder.end(); + + return (trail && trail.length > 0) ? Buffer.concat([res, trail]) : res; } -// https://fetch.spec.whatwg.org/#http-network-fetch -async function httpNetworkFetch ( - fetchParams, - includeCredentials = false, - forceNewConnection = false -) { - assert(!fetchParams.controller.connection || fetchParams.controller.connection.destroyed) +iconv.decode = function decode(buf, encoding, options) { + if (typeof buf === 'string') { + if (!iconv.skipDecodeWarning) { + console.error('Iconv-lite warning: decode()-ing strings is deprecated. Refer to https://github.com/ashtuchkin/iconv-lite/wiki/Use-Buffers-when-decoding'); + iconv.skipDecodeWarning = true; + } - fetchParams.controller.connection = { - abort: null, - destroyed: false, - destroy (err) { - if (!this.destroyed) { - this.destroyed = true - this.abort?.(err ?? new DOMException('The operation was aborted.', 'AbortError')) - } + buf = Buffer.from("" + (buf || ""), "binary"); // Ensure buffer. } - } - // 1. Let request be fetchParams’s request. - const request = fetchParams.request + var decoder = iconv.getDecoder(encoding, options); - // 2. Let response be null. - let response = null + var res = decoder.write(buf); + var trail = decoder.end(); - // 3. Let timingInfo be fetchParams’s timing info. - const timingInfo = fetchParams.timingInfo + return trail ? (res + trail) : res; +} - // 4. 
Let httpCache be the result of determining the HTTP cache partition, - // given request. - // TODO: cache - const httpCache = null +iconv.encodingExists = function encodingExists(enc) { + try { + iconv.getCodec(enc); + return true; + } catch (e) { + return false; + } +} - // 5. If httpCache is null, then set request’s cache mode to "no-store". - if (httpCache == null) { - request.cache = 'no-store' - } +// Legacy aliases to convert functions +iconv.toEncoding = iconv.encode; +iconv.fromEncoding = iconv.decode; - // 6. Let networkPartitionKey be the result of determining the network - // partition key given request. - // TODO +// Search for a codec in iconv.encodings. Cache codec data in iconv._codecDataCache. +iconv._codecDataCache = {}; +iconv.getCodec = function getCodec(encoding) { + if (!iconv.encodings) + iconv.encodings = __nccwpck_require__(82733); // Lazy load all encoding definitions. + + // Canonicalize encoding name: strip all non-alphanumeric chars and appended year. + var enc = iconv._canonicalizeEncoding(encoding); - // 7. Let newConnection be "yes" if forceNewConnection is true; otherwise - // "no". - const newConnection = forceNewConnection ? 'yes' : 'no' // eslint-disable-line no-unused-vars + // Traverse iconv.encodings to find actual codec. + var codecOptions = {}; + while (true) { + var codec = iconv._codecDataCache[enc]; + if (codec) + return codec; - // 8. Switch on request’s mode: - if (request.mode === 'websocket') { - // Let connection be the result of obtaining a WebSocket connection, - // given request’s current URL. - // TODO - } else { - // Let connection be the result of obtaining a connection, given - // networkPartitionKey, request’s current URL’s origin, - // includeCredentials, and forceNewConnection. - // TODO - } + var codecDef = iconv.encodings[enc]; - // 9. Run these steps, but abort when the ongoing fetch is terminated: + switch (typeof codecDef) { + case "string": // Direct alias to other encoding. + enc = codecDef; + break; - // 1. If connection is failure, then return a network error. + case "object": // Alias with options. Can be layered. + for (var key in codecDef) + codecOptions[key] = codecDef[key]; - // 2. Set timingInfo’s final connection timing info to the result of - // calling clamp and coarsen connection timing info with connection’s - // timing info, timingInfo’s post-redirect start time, and fetchParams’s - // cross-origin isolated capability. + if (!codecOptions.encodingName) + codecOptions.encodingName = enc; + + enc = codecDef.type; + break; - // 3. If connection is not an HTTP/2 connection, request’s body is non-null, - // and request’s body’s source is null, then append (`Transfer-Encoding`, - // `chunked`) to request’s header list. + case "function": // Codec itself. + if (!codecOptions.encodingName) + codecOptions.encodingName = enc; - // 4. Set timingInfo’s final network-request start time to the coarsened - // shared current time given fetchParams’s cross-origin isolated - // capability. + // The codec function must load all tables and return object with .encoder and .decoder methods. + // It'll be called only once (for each different options object). + codec = new codecDef(codecOptions, iconv); - // 5. Set response to the result of making an HTTP request over connection - // using request with the following caveats: + iconv._codecDataCache[codecOptions.encodingName] = codec; // Save it to be reused later. + return codec; - // - Follow the relevant requirements from HTTP. 
[HTTP] [HTTP-SEMANTICS] - // [HTTP-COND] [HTTP-CACHING] [HTTP-AUTH] + default: + throw new Error("Encoding not recognized: '" + encoding + "' (searched as: '"+enc+"')"); + } + } +} - // - If request’s body is non-null, and request’s body’s source is null, - // then the user agent may have a buffer of up to 64 kibibytes and store - // a part of request’s body in that buffer. If the user agent reads from - // request’s body beyond that buffer’s size and the user agent needs to - // resend request, then instead return a network error. +iconv._canonicalizeEncoding = function(encoding) { + // Canonicalize encoding name: strip all non-alphanumeric chars and appended year. + return (''+encoding).toLowerCase().replace(/:\d{4}$|[^0-9a-z]/g, ""); +} - // - Set timingInfo’s final network-response start time to the coarsened - // shared current time given fetchParams’s cross-origin isolated capability, - // immediately after the user agent’s HTTP parser receives the first byte - // of the response (e.g., frame header bytes for HTTP/2 or response status - // line for HTTP/1.x). +iconv.getEncoder = function getEncoder(encoding, options) { + var codec = iconv.getCodec(encoding), + encoder = new codec.encoder(options, codec); - // - Wait until all the headers are transmitted. + if (codec.bomAware && options && options.addBOM) + encoder = new bomHandling.PrependBOM(encoder, options); - // - Any responses whose status is in the range 100 to 199, inclusive, - // and is not 101, are to be ignored, except for the purposes of setting - // timingInfo’s final network-response start time above. + return encoder; +} - // - If request’s header list contains `Transfer-Encoding`/`chunked` and - // response is transferred via HTTP/1.0 or older, then return a network - // error. +iconv.getDecoder = function getDecoder(encoding, options) { + var codec = iconv.getCodec(encoding), + decoder = new codec.decoder(options, codec); - // - If the HTTP request results in a TLS client certificate dialog, then: + if (codec.bomAware && !(options && options.stripBOM === false)) + decoder = new bomHandling.StripBOM(decoder, options); - // 1. If request’s window is an environment settings object, make the - // dialog available in request’s window. + return decoder; +} - // 2. Otherwise, return a network error. - // To transmit request’s body body, run these steps: - let requestBody = null - // 1. If body is null and fetchParams’s process request end-of-body is - // non-null, then queue a fetch task given fetchParams’s process request - // end-of-body and fetchParams’s task destination. - if (request.body == null && fetchParams.processRequestEndOfBody) { - queueMicrotask(() => fetchParams.processRequestEndOfBody()) - } else if (request.body != null) { - // 2. Otherwise, if body is non-null: +// Load extensions in Node. All of them are omitted in Browserify build via 'browser' field in package.json. +var nodeVer = typeof process !== 'undefined' && process.versions && process.versions.node; +if (nodeVer) { - // 1. Let processBodyChunk given bytes be these steps: - const processBodyChunk = async function * (bytes) { - // 1. If the ongoing fetch is terminated, then abort these steps. - if (isCancelled(fetchParams)) { - return - } + // Load streaming support in Node v0.10+ + var nodeVerArr = nodeVer.split(".").map(Number); + if (nodeVerArr[0] > 0 || nodeVerArr[1] >= 10) { + __nccwpck_require__(76409)(iconv); + } - // 2. Run this step in parallel: transmit bytes. - yield bytes + // Load Node primitive extensions. 
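// A short usage sketch of the public API assembled above, roughly as the rest
// of this bundle would call it ('win1251' and the byte values are illustrative).
const body = iconv.encode('Hello, world', 'win1251')   // -> Buffer
const text = iconv.decode(body, 'win1251')             // -> 'Hello, world'
iconv.encodingExists('win1251')                        // true
iconv.encodingExists('klingon')                        // false - getCodec() threw

// With a BOM-aware codec the options wire up the wrappers from the
// bom-handling module shown earlier:
iconv.encode('hi', 'utf16-le', { addBOM: true })       // buffer starts with FF FE
iconv.decode(Buffer.from([0xff, 0xfe, 0x68, 0x00, 0x69, 0x00]), 'utf16-le') // 'hi' - BOM stripped by default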
+ __nccwpck_require__(30393)(iconv); +} - // 3. If fetchParams’s process request body is non-null, then run - // fetchParams’s process request body given bytes’s length. - fetchParams.processRequestBodyChunkLength?.(bytes.byteLength) - } +if (false) {} - // 2. Let processEndOfBody be these steps: - const processEndOfBody = () => { - // 1. If fetchParams is canceled, then abort these steps. - if (isCancelled(fetchParams)) { - return - } - // 2. If fetchParams’s process request end-of-body is non-null, - // then run fetchParams’s process request end-of-body. - if (fetchParams.processRequestEndOfBody) { - fetchParams.processRequestEndOfBody() - } - } +/***/ }), - // 3. Let processBodyError given e be these steps: - const processBodyError = (e) => { - // 1. If fetchParams is canceled, then abort these steps. - if (isCancelled(fetchParams)) { - return - } +/***/ 76409: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // 2. If e is an "AbortError" DOMException, then abort fetchParams’s controller. - if (e.name === 'AbortError') { - fetchParams.controller.abort() - } else { - fetchParams.controller.terminate(e) - } - } +"use strict"; - // 4. Incrementally read request’s body given processBodyChunk, processEndOfBody, - // processBodyError, and fetchParams’s task destination. - requestBody = (async function * () { - try { - for await (const bytes of request.body.stream) { - yield * processBodyChunk(bytes) - } - processEndOfBody() - } catch (err) { - processBodyError(err) - } - })() - } - try { - // socket is only provided for websockets - const { body, status, statusText, headersList, socket } = await dispatch({ body: requestBody }) +var Buffer = (__nccwpck_require__(14300).Buffer), + Transform = (__nccwpck_require__(12781).Transform); - if (socket) { - response = makeResponse({ status, statusText, headersList, socket }) - } else { - const iterator = body[Symbol.asyncIterator]() - fetchParams.controller.next = () => iterator.next() - response = makeResponse({ status, statusText, headersList }) +// == Exports ================================================================== +module.exports = function(iconv) { + + // Additional Public API. + iconv.encodeStream = function encodeStream(encoding, options) { + return new IconvLiteEncoderStream(iconv.getEncoder(encoding, options), options); } - } catch (err) { - // 10. If aborted, then: - if (err.name === 'AbortError') { - // 1. If connection uses HTTP/2, then transmit an RST_STREAM frame. - fetchParams.controller.connection.destroy() - // 2. Return the appropriate network error for fetchParams. - return makeAppropriateNetworkError(fetchParams, err) + iconv.decodeStream = function decodeStream(encoding, options) { + return new IconvLiteDecoderStream(iconv.getDecoder(encoding, options), options); } - return makeNetworkError(err) - } + iconv.supportsStreams = true; - // 11. Let pullAlgorithm be an action that resumes the ongoing fetch - // if it is suspended. - const pullAlgorithm = () => { - fetchParams.controller.resume() - } - // 12. Let cancelAlgorithm be an algorithm that aborts fetchParams’s - // controller with reason, given reason. - const cancelAlgorithm = (reason) => { - fetchParams.controller.abort(reason) - } + // Not published yet. + iconv.IconvLiteEncoderStream = IconvLiteEncoderStream; + iconv.IconvLiteDecoderStream = IconvLiteDecoderStream; + iconv._collect = IconvLiteDecoderStream.prototype.collect; +}; - // 13. Let highWaterMark be a non-negative, non-NaN number, chosen by - // the user agent. - // TODO - // 14. 
Let sizeAlgorithm be an algorithm that accepts a chunk object - // and returns a non-negative, non-NaN, non-infinite number, chosen by the user agent. - // TODO +// == Encoder stream ======================================================= +function IconvLiteEncoderStream(conv, options) { + this.conv = conv; + options = options || {}; + options.decodeStrings = false; // We accept only strings, so we don't need to decode them. + Transform.call(this, options); +} - // 15. Let stream be a new ReadableStream. - // 16. Set up stream with pullAlgorithm set to pullAlgorithm, - // cancelAlgorithm set to cancelAlgorithm, highWaterMark set to - // highWaterMark, and sizeAlgorithm set to sizeAlgorithm. - if (!ReadableStream) { - ReadableStream = (__nccwpck_require__(5356).ReadableStream) - } +IconvLiteEncoderStream.prototype = Object.create(Transform.prototype, { + constructor: { value: IconvLiteEncoderStream } +}); - const stream = new ReadableStream( - { - async start (controller) { - fetchParams.controller.controller = controller - }, - async pull (controller) { - await pullAlgorithm(controller) - }, - async cancel (reason) { - await cancelAlgorithm(reason) - } - }, - { - highWaterMark: 0, - size () { - return 1 - } +IconvLiteEncoderStream.prototype._transform = function(chunk, encoding, done) { + if (typeof chunk != 'string') + return done(new Error("Iconv encoding stream needs strings as its input.")); + try { + var res = this.conv.write(chunk); + if (res && res.length) this.push(res); + done(); } - ) + catch (e) { + done(e); + } +} - // 17. Run these steps, but abort when the ongoing fetch is terminated: +IconvLiteEncoderStream.prototype._flush = function(done) { + try { + var res = this.conv.end(); + if (res && res.length) this.push(res); + done(); + } + catch (e) { + done(e); + } +} - // 1. Set response’s body to a new body whose stream is stream. - response.body = { stream } +IconvLiteEncoderStream.prototype.collect = function(cb) { + var chunks = []; + this.on('error', cb); + this.on('data', function(chunk) { chunks.push(chunk); }); + this.on('end', function() { + cb(null, Buffer.concat(chunks)); + }); + return this; +} - // 2. If response is not a network error and request’s cache mode is - // not "no-store", then update response in httpCache for request. - // TODO - // 3. If includeCredentials is true and the user agent is not configured - // to block cookies for request (see section 7 of [COOKIES]), then run the - // "set-cookie-string" parsing algorithm (see section 5.2 of [COOKIES]) on - // the value of each header whose name is a byte-case-insensitive match for - // `Set-Cookie` in response’s header list, if any, and request’s current URL. - // TODO +// == Decoder stream ======================================================= +function IconvLiteDecoderStream(conv, options) { + this.conv = conv; + options = options || {}; + options.encoding = this.encoding = 'utf8'; // We output strings. + Transform.call(this, options); +} - // 18. If aborted, then: - // TODO +IconvLiteDecoderStream.prototype = Object.create(Transform.prototype, { + constructor: { value: IconvLiteDecoderStream } +}); - // 19. Run these steps in parallel: +IconvLiteDecoderStream.prototype._transform = function(chunk, encoding, done) { + if (!Buffer.isBuffer(chunk)) + return done(new Error("Iconv decoding stream needs buffers as its input.")); + try { + var res = this.conv.write(chunk); + if (res && res.length) this.push(res, this.encoding); + done(); + } + catch (e) { + done(e); + } +} - // 1. 
Run these steps, but abort when fetchParams is canceled: - fetchParams.controller.on('terminated', onAborted) - fetchParams.controller.resume = async () => { - // 1. While true - while (true) { - // 1-3. See onData... +IconvLiteDecoderStream.prototype._flush = function(done) { + try { + var res = this.conv.end(); + if (res && res.length) this.push(res, this.encoding); + done(); + } + catch (e) { + done(e); + } +} - // 4. Set bytes to the result of handling content codings given - // codings and bytes. - let bytes - let isFailure - try { - const { done, value } = await fetchParams.controller.next() +IconvLiteDecoderStream.prototype.collect = function(cb) { + var res = ''; + this.on('error', cb); + this.on('data', function(chunk) { res += chunk; }); + this.on('end', function() { + cb(null, res); + }); + return this; +} - if (isAborted(fetchParams)) { - break - } - bytes = done ? undefined : value - } catch (err) { - if (fetchParams.controller.ended && !timingInfo.encodedBodySize) { - // zlib doesn't like empty streams. - bytes = undefined - } else { - bytes = err - // err may be propagated from the result of calling readablestream.cancel, - // which might not be an error. https://github.com/nodejs/undici/issues/2009 - isFailure = true - } - } +/***/ }), - if (bytes === undefined) { - // 2. Otherwise, if the bytes transmission for response’s message - // body is done normally and stream is readable, then close - // stream, finalize response for fetchParams and response, and - // abort these in-parallel steps. - readableStreamClose(fetchParams.controller.controller) +/***/ 91230: +/***/ ((module) => { - finalizeResponse(fetchParams, response) +// A simple implementation of make-array +function makeArray (subject) { + return Array.isArray(subject) + ? subject + : [subject] +} + +const EMPTY = '' +const SPACE = ' ' +const ESCAPE = '\\' +const REGEX_TEST_BLANK_LINE = /^\s+$/ +const REGEX_INVALID_TRAILING_BACKSLASH = /(?:[^\\]|^)\\$/ +const REGEX_REPLACE_LEADING_EXCAPED_EXCLAMATION = /^\\!/ +const REGEX_REPLACE_LEADING_EXCAPED_HASH = /^\\#/ +const REGEX_SPLITALL_CRLF = /\r?\n/g +// /foo, +// ./foo, +// ../foo, +// . +// .. +const REGEX_TEST_INVALID_PATH = /^\.*\/|^\.+$/ + +const SLASH = '/' + +// Do not use ternary expression here, since "istanbul ignore next" is buggy +let TMP_KEY_IGNORE = 'node-ignore' +/* istanbul ignore else */ +if (typeof Symbol !== 'undefined') { + TMP_KEY_IGNORE = Symbol.for('node-ignore') +} +const KEY_IGNORE = TMP_KEY_IGNORE + +const define = (object, key, value) => + Object.defineProperty(object, key, {value}) + +const REGEX_REGEXP_RANGE = /([0-z])-([0-z])/g + +const RETURN_FALSE = () => false + +// Sanitize the range of a regular expression +// The cases are complicated, see test cases for details +const sanitizeRange = range => range.replace( + REGEX_REGEXP_RANGE, + (match, from, to) => from.charCodeAt(0) <= to.charCodeAt(0) + ? match + // Invalid range (out of order) which is ok for gitignore rules but + // fatal for JavaScript regular expression, so eliminate it. + : EMPTY +) - return - } +// See fixtures #59 +const cleanRangeBackSlash = slashes => { + const {length} = slashes + return slashes.slice(0, length - length % 2) +} + +// > If the pattern ends with a slash, +// > it is removed for the purpose of the following description, +// > but it would only find a match with a directory. 
+// > In other words, foo/ will match a directory foo and paths underneath it, +// > but will not match a regular file or a symbolic link foo +// > (this is consistent with the way how pathspec works in general in Git). +// '`foo/`' will not match regular file '`foo`' or symbolic link '`foo`' +// -> ignore-rules will not deal with it, because it costs extra `fs.stat` call +// you could use option `mark: true` with `glob` + +// '`foo/`' should not continue with the '`..`' +const REPLACERS = [ + + [ + // remove BOM + // TODO: + // Other similar zero-width characters? + /^\uFEFF/, + () => EMPTY + ], + + // > Trailing spaces are ignored unless they are quoted with backslash ("\") + [ + // (a\ ) -> (a ) + // (a ) -> (a) + // (a \ ) -> (a ) + /\\?\s+$/, + match => match.indexOf('\\') === 0 + ? SPACE + : EMPTY + ], + + // replace (\ ) with ' ' + [ + /\\\s/g, + () => SPACE + ], + + // Escape metacharacters + // which is written down by users but means special for regular expressions. + + // > There are 12 characters with special meanings: + // > - the backslash \, + // > - the caret ^, + // > - the dollar sign $, + // > - the period or dot ., + // > - the vertical bar or pipe symbol |, + // > - the question mark ?, + // > - the asterisk or star *, + // > - the plus sign +, + // > - the opening parenthesis (, + // > - the closing parenthesis ), + // > - and the opening square bracket [, + // > - the opening curly brace {, + // > These special characters are often called "metacharacters". + [ + /[\\$.|*+(){^]/g, + match => `\\${match}` + ], + + [ + // > a question mark (?) matches a single character + /(?!\\)\?/g, + () => '[^/]' + ], + + // leading slash + [ + + // > A leading slash matches the beginning of the pathname. + // > For example, "/*.c" matches "cat-file.c" but not "mozilla-sha1/sha1.c". + // A leading slash matches the beginning of the pathname + /^\//, + () => '^' + ], + + // replace special metacharacter slash after the leading slash + [ + /\//g, + () => '\\/' + ], + + [ + // > A leading "**" followed by a slash means match in all directories. + // > For example, "**/foo" matches file or directory "foo" anywhere, + // > the same as pattern "foo". + // > "**/foo/bar" matches file or directory "bar" anywhere that is directly + // > under directory "foo". + // Notice that the '*'s have been replaced as '\\*' + /^\^*\\\*\\\*\\\//, + + // '**/foo' <-> 'foo' + () => '^(?:.*\\/)?' + ], + + // starting + [ + // there will be no leading '/' + // (which has been replaced by section "leading slash") + // If starts with '**', adding a '^' to the regular expression also works + /^(?=[^^])/, + function startingReplacer () { + // If has a slash `/` at the beginning or middle + return !/\/(?!$)/.test(this) + // > Prior to 2.22.1 + // > If the pattern does not contain a slash /, + // > Git treats it as a shell glob pattern + // Actually, if there is only a trailing slash, + // git also treats it as a shell glob pattern + + // After 2.22.1 (compatible but clearer) + // > If there is a separator at the beginning or middle (or both) + // > of the pattern, then the pattern is relative to the directory + // > level of the particular .gitignore file itself. + // > Otherwise the pattern may also match at any level below + // > the .gitignore level. + ? 
'(?:^|\\/)' + + // > Otherwise, Git treats the pattern as a shell glob suitable for + // > consumption by fnmatch(3) + : '^' + } + ], + + // two globstars + [ + // Use lookahead assertions so that we could match more than one `'/**'` + /\\\/\\\*\\\*(?=\\\/|$)/g, + + // Zero, one or several directories + // should not use '*', or it will be replaced by the next replacer + + // Check if it is not the last `'/**'` + (_, index, str) => index + 6 < str.length + + // case: /**/ + // > A slash followed by two consecutive asterisks then a slash matches + // > zero or more directories. + // > For example, "a/**/b" matches "a/b", "a/x/b", "a/x/y/b" and so on. + // '/**/' + ? '(?:\\/[^\\/]+)*' + + // case: /** + // > A trailing `"/**"` matches everything inside. + + // #21: everything inside but it should not include the current folder + : '\\/.+' + ], + + // normal intermediate wildcards + [ + // Never replace escaped '*' + // ignore rule '\*' will match the path '*' + + // 'abc.*/' -> go + // 'abc.*' -> skip this rule, + // coz trailing single wildcard will be handed by [trailing wildcard] + /(^|[^\\]+)(\\\*)+(?=.+)/g, + + // '*.js' matches '.js' + // '*.js' doesn't match 'abc' + (_, p1, p2) => { + // 1. + // > An asterisk "*" matches anything except a slash. + // 2. + // > Other consecutive asterisks are considered regular asterisks + // > and will match according to the previous rules. + const unescaped = p2.replace(/\\\*/g, '[^\\/]*') + return p1 + unescaped + } + ], + + [ + // unescape, revert step 3 except for back slash + // For example, if a user escape a '\\*', + // after step 3, the result will be '\\\\\\*' + /\\\\\\(?=[$.|*+(){^])/g, + () => ESCAPE + ], + + [ + // '\\\\' -> '\\' + /\\\\/g, + () => ESCAPE + ], + + [ + // > The range notation, e.g. [a-zA-Z], + // > can be used to match one of the characters in a range. + + // `\` is escaped by step 3 + /(\\)?\[([^\]/]*?)(\\*)($|\])/g, + (match, leadEscape, range, endEscape, close) => leadEscape === ESCAPE + // '\\[bar]' -> '\\\\[bar\\]' + ? `\\[${range}${cleanRangeBackSlash(endEscape)}${close}` + : close === ']' + ? endEscape.length % 2 === 0 + // A normal case, and it is a range notation + // '[bar]' + // '[bar\\\\]' + ? `[${sanitizeRange(range)}${endEscape}]` + // Invalid range notaton + // '[bar\\]' -> '[bar\\\\]' + : '[]' + : '[]' + ], + + // ending + [ + // 'js' will not match 'js.' + // 'ab' will not match 'abc' + /(?:[^*])$/, + + // WTF! + // https://git-scm.com/docs/gitignore + // changes in [2.22.1](https://git-scm.com/docs/gitignore/2.22.1) + // which re-fixes #24, #38 + + // > If there is a separator at the end of the pattern then the pattern + // > will only match directories, otherwise the pattern can match both + // > files and directories. + + // 'js*' will not match 'a.js' + // 'js/' will not match 'a.js' + // 'js' will match 'a.js' and 'a.js/' + match => /\/$/.test(match) + // foo/ will not match 'foo' + ? `${match}$` + // foo matches 'foo' and 'foo/' + : `${match}(?=$|\\/$)` + ], + + // trailing wildcard + [ + /(\^|\\\/)?\\\*$/, + (_, p1) => { + const prefix = p1 + // '\^': + // '/*' does not match EMPTY + // '/*' does not match everything + + // '\\\/': + // 'abc/*' does not match 'abc/' + ? `${p1}[^/]+` + + // 'a*' matches 'a' + // 'a*' matches 'aa' + : '[^/]*' + + return `${prefix}(?=$|\\/$)` + } + ], +] - // 5. Increase timingInfo’s decoded body size by bytes’s length. - timingInfo.decodedBodySize += bytes?.byteLength ?? 
0 +// A simple cache, because an ignore rule only has only one certain meaning +const regexCache = Object.create(null) - // 6. If bytes is failure, then terminate fetchParams’s controller. - if (isFailure) { - fetchParams.controller.terminate(bytes) - return - } +// @param {pattern} +const makeRegex = (pattern, ignoreCase) => { + let source = regexCache[pattern] - // 7. Enqueue a Uint8Array wrapping an ArrayBuffer containing bytes - // into stream. - fetchParams.controller.controller.enqueue(new Uint8Array(bytes)) + if (!source) { + source = REPLACERS.reduce( + (prev, current) => prev.replace(current[0], current[1].bind(pattern)), + pattern + ) + regexCache[pattern] = source + } - // 8. If stream is errored, then terminate the ongoing fetch. - if (isErrored(stream)) { - fetchParams.controller.terminate() - return - } + return ignoreCase + ? new RegExp(source, 'i') + : new RegExp(source) +} - // 9. If stream doesn’t need more data ask the user agent to suspend - // the ongoing fetch. - if (!fetchParams.controller.controller.desiredSize) { - return - } - } - } +const isString = subject => typeof subject === 'string' - // 2. If aborted, then: - function onAborted (reason) { - // 2. If fetchParams is aborted, then: - if (isAborted(fetchParams)) { - // 1. Set response’s aborted flag. - response.aborted = true +// > A blank line matches no files, so it can serve as a separator for readability. +const checkPattern = pattern => pattern + && isString(pattern) + && !REGEX_TEST_BLANK_LINE.test(pattern) + && !REGEX_INVALID_TRAILING_BACKSLASH.test(pattern) - // 2. If stream is readable, then error stream with the result of - // deserialize a serialized abort reason given fetchParams’s - // controller’s serialized abort reason and an - // implementation-defined realm. - if (isReadable(stream)) { - fetchParams.controller.controller.error( - fetchParams.controller.serializedAbortReason - ) - } - } else { - // 3. Otherwise, if stream is readable, error stream with a TypeError. - if (isReadable(stream)) { - fetchParams.controller.controller.error(new TypeError('terminated', { - cause: isErrorLike(reason) ? reason : undefined - })) - } - } + // > A line starting with # serves as a comment. + && pattern.indexOf('#') !== 0 - // 4. If connection uses HTTP/2, then transmit an RST_STREAM frame. - // 5. Otherwise, the user agent should close connection unless it would be bad for performance to do so. - fetchParams.controller.connection.destroy() +const splitPattern = pattern => pattern.split(REGEX_SPLITALL_CRLF) + +class IgnoreRule { + constructor ( + origin, + pattern, + negative, + regex + ) { + this.origin = origin + this.pattern = pattern + this.negative = negative + this.regex = regex } +} - // 20. Return response. - return response +const createRule = (pattern, ignoreCase) => { + const origin = pattern + let negative = false - async function dispatch ({ body }) { - const url = requestCurrentURL(request) - /** @type {import('../..').Agent} */ - const agent = fetchParams.controller.dispatcher + // > An optional prefix "!" which negates the pattern; + if (pattern.indexOf('!') === 0) { + negative = true + pattern = pattern.substr(1) + } - return new Promise((resolve, reject) => agent.dispatch( - { - path: url.pathname + url.search, - origin: url.origin, - method: request.method, - body: fetchParams.controller.dispatcher.isMockActive ? request.body && (request.body.source || request.body.stream) : body, - headers: request.headersList.entries, - maxRedirections: 0, - upgrade: request.mode === 'websocket' ? 
'websocket' : undefined - }, - { - body: null, - abort: null, + pattern = pattern + // > Put a backslash ("\") in front of the first "!" for patterns that + // > begin with a literal "!", for example, `"\!important!.txt"`. + .replace(REGEX_REPLACE_LEADING_EXCAPED_EXCLAMATION, '!') + // > Put a backslash ("\") in front of the first hash for patterns that + // > begin with a hash. + .replace(REGEX_REPLACE_LEADING_EXCAPED_HASH, '#') - onConnect (abort) { - // TODO (fix): Do we need connection here? - const { connection } = fetchParams.controller + const regex = makeRegex(pattern, ignoreCase) - if (connection.destroyed) { - abort(new DOMException('The operation was aborted.', 'AbortError')) - } else { - fetchParams.controller.on('terminated', abort) - this.abort = connection.abort = abort - } - }, + return new IgnoreRule( + origin, + pattern, + negative, + regex + ) +} - onHeaders (status, headersList, resume, statusText) { - if (status < 200) { - return - } +const throwError = (message, Ctor) => { + throw new Ctor(message) +} - let codings = [] - let location = '' +const checkPath = (path, originalPath, doThrow) => { + if (!isString(path)) { + return doThrow( + `path must be a string, but got \`${originalPath}\``, + TypeError + ) + } - const headers = new Headers() + // We don't know if we should ignore EMPTY, so throw + if (!path) { + return doThrow(`path must not be empty`, TypeError) + } - // For H2, the headers are a plain JS object - // We distinguish between them and iterate accordingly - if (Array.isArray(headersList)) { - for (let n = 0; n < headersList.length; n += 2) { - const key = headersList[n + 0].toString('latin1') - const val = headersList[n + 1].toString('latin1') - if (key.toLowerCase() === 'content-encoding') { - // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1 - // "All content-coding values are case-insensitive..." - codings = val.toLowerCase().split(',').map((x) => x.trim()) - } else if (key.toLowerCase() === 'location') { - location = val - } + // Check if it is a relative path + if (checkPath.isNotRelative(path)) { + const r = '`path.relative()`d' + return doThrow( + `path should be a ${r} string, but got "${originalPath}"`, + RangeError + ) + } - headers[kHeadersList].append(key, val) - } - } else { - const keys = Object.keys(headersList) - for (const key of keys) { - const val = headersList[key] - if (key.toLowerCase() === 'content-encoding') { - // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1 - // "All content-coding values are case-insensitive..." 
- codings = val.toLowerCase().split(',').map((x) => x.trim()).reverse() - } else if (key.toLowerCase() === 'location') { - location = val - } + return true +} - headers[kHeadersList].append(key, val) - } - } +const isNotRelative = path => REGEX_TEST_INVALID_PATH.test(path) - this.body = new Readable({ read: resume }) +checkPath.isNotRelative = isNotRelative +checkPath.convert = p => p - const decoders = [] +class Ignore { + constructor ({ + ignorecase = true, + ignoreCase = ignorecase, + allowRelativePaths = false + } = {}) { + define(this, KEY_IGNORE, true) - const willFollow = request.redirect === 'follow' && - location && - redirectStatusSet.has(status) + this._rules = [] + this._ignoreCase = ignoreCase + this._allowRelativePaths = allowRelativePaths + this._initCache() + } - // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding - if (request.method !== 'HEAD' && request.method !== 'CONNECT' && !nullBodyStatus.includes(status) && !willFollow) { - for (const coding of codings) { - // https://www.rfc-editor.org/rfc/rfc9112.html#section-7.2 - if (coding === 'x-gzip' || coding === 'gzip') { - decoders.push(zlib.createGunzip({ - // Be less strict when decoding compressed responses, since sometimes - // servers send slightly invalid responses that are still accepted - // by common browsers. - // Always using Z_SYNC_FLUSH is what cURL does. - flush: zlib.constants.Z_SYNC_FLUSH, - finishFlush: zlib.constants.Z_SYNC_FLUSH - })) - } else if (coding === 'deflate') { - decoders.push(zlib.createInflate()) - } else if (coding === 'br') { - decoders.push(zlib.createBrotliDecompress()) - } else { - decoders.length = 0 - break - } - } - } + _initCache () { + this._ignoreCache = Object.create(null) + this._testCache = Object.create(null) + } - resolve({ - status, - statusText, - headersList: headers[kHeadersList], - body: decoders.length - ? pipeline(this.body, ...decoders, () => { }) - : this.body.on('error', () => {}) - }) + _addPattern (pattern) { + // #32 + if (pattern && pattern[KEY_IGNORE]) { + this._rules = this._rules.concat(pattern._rules) + this._added = true + return + } - return true - }, + if (checkPattern(pattern)) { + const rule = createRule(pattern, this._ignoreCase) + this._added = true + this._rules.push(rule) + } + } - onData (chunk) { - if (fetchParams.controller.dump) { - return - } + // @param {Array | string | Ignore} pattern + add (pattern) { + this._added = false - // 1. If one or more bytes have been transmitted from response’s - // message body, then: + makeArray( + isString(pattern) + ? splitPattern(pattern) + : pattern + ).forEach(this._addPattern, this) - // 1. Let bytes be the transmitted bytes. - const bytes = chunk + // Some rules have just added to the ignore, + // making the behavior changed. + if (this._added) { + this._initCache() + } - // 2. Let codings be the result of extracting header list values - // given `Content-Encoding` and response’s header list. - // See pullAlgorithm. + return this + } - // 3. Increase timingInfo’s encoded body size by bytes’s length. - timingInfo.encodedBodySize += bytes.byteLength + // legacy + addPattern (pattern) { + return this.add(pattern) + } - // 4. See pullAlgorithm... 
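// End-to-end sketch of the node-ignore module vendored here, via the ignore()
// factory exported further down in this same webpack module (the require()
// shows how the un-bundled dependency is normally consumed; patterns follow
// the gitignore semantics quoted in the comments above).
const ignore = require('ignore')
const ig = ignore().add(['*.log', '!important.log', 'build/'])

ig.ignores('debug.log')          // true  - matched by '*.log'
ig.ignores('important.log')      // false - re-included by the negated pattern
ig.ignores('build/out.js')       // true  - parent directory 'build/' is ignored
ig.filter(['a.log', 'src/x.ts']) // ['src/x.ts'] - keeps only non-ignored paths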
+ // | ignored : unignored + // negative | 0:0 | 0:1 | 1:0 | 1:1 + // -------- | ------- | ------- | ------- | -------- + // 0 | TEST | TEST | SKIP | X + // 1 | TESTIF | SKIP | TEST | X - return this.body.push(bytes) - }, + // - SKIP: always skip + // - TEST: always test + // - TESTIF: only test if checkUnignored + // - X: that never happen - onComplete () { - if (this.abort) { - fetchParams.controller.off('terminated', this.abort) - } + // @param {boolean} whether should check if the path is unignored, + // setting `checkUnignored` to `false` could reduce additional + // path matching. - fetchParams.controller.ended = true + // @returns {TestResult} true if a file is ignored + _testOne (path, checkUnignored) { + let ignored = false + let unignored = false - this.body.push(null) - }, + this._rules.forEach(rule => { + const {negative} = rule + if ( + unignored === negative && ignored !== unignored + || negative && !ignored && !unignored && !checkUnignored + ) { + return + } - onError (error) { - if (this.abort) { - fetchParams.controller.off('terminated', this.abort) - } + const matched = rule.regex.test(path) - this.body?.destroy(error) + if (matched) { + ignored = !negative + unignored = negative + } + }) - fetchParams.controller.terminate(error) + return { + ignored, + unignored + } + } - reject(error) - }, + // @returns {TestResult} + _test (originalPath, cache, checkUnignored, slices) { + const path = originalPath + // Supports nullable path + && checkPath.convert(originalPath) - onUpgrade (status, headersList, socket) { - if (status !== 101) { - return - } + checkPath( + path, + originalPath, + this._allowRelativePaths + ? RETURN_FALSE + : throwError + ) - const headers = new Headers() + return this._t(path, cache, checkUnignored, slices) + } - for (let n = 0; n < headersList.length; n += 2) { - const key = headersList[n + 0].toString('latin1') - const val = headersList[n + 1].toString('latin1') + _t (path, cache, checkUnignored, slices) { + if (path in cache) { + return cache[path] + } - headers[kHeadersList].append(key, val) - } + if (!slices) { + // path/to/a.js + // ['path', 'to', 'a.js'] + slices = path.split(SLASH) + } - resolve({ - status, - statusText: STATUS_CODES[status], - headersList: headers[kHeadersList], - socket - }) + slices.pop() - return true - } - } - )) + // If the path has no parent directory, just test it + if (!slices.length) { + return cache[path] = this._testOne(path, checkUnignored) + } + + const parent = this._t( + slices.join(SLASH) + SLASH, + cache, + checkUnignored, + slices + ) + + // If the path contains a parent directory, check the parent first + return cache[path] = parent.ignored + // > It is not possible to re-include a file if a parent directory of + // > that file is excluded. + ? 
parent + : this._testOne(path, checkUnignored) + } + + ignores (path) { + return this._test(path, this._ignoreCache, false).ignored + } + + createFilter () { + return path => !this.ignores(path) + } + + filter (paths) { + return makeArray(paths).filter(this.createFilter()) + } + + // @returns {TestResult} + test (path) { + return this._test(path, this._testCache, true) } } -module.exports = { - fetch, - Fetch, - fetching, - finalizeAndReportTiming +const factory = options => new Ignore(options) + +const isPathValid = path => + checkPath(path && checkPath.convert(path), path, RETURN_FALSE) + +factory.isPathValid = isPathValid + +// Fixes typescript +factory.default = factory + +module.exports = factory + +// Windows +// -------------------------------------------------------------- +/* istanbul ignore if */ +if ( + // Detect `process` so that it can run in browsers. + typeof process !== 'undefined' + && ( + process.env && process.env.IGNORE_TEST_WIN32 + || process.platform === 'win32' + ) +) { + /* eslint no-control-regex: "off" */ + const makePosix = str => /^\\\\\?\\/.test(str) + || /["<>|\u0000-\u001F]+/u.test(str) + ? str + : str.replace(/\\/g, '/') + + checkPath.convert = makePosix + + // 'C:\\foo' <- 'C:\\foo' has been converted to 'C:/' + // 'd:\\foo' + const REGIX_IS_WINDOWS_PATH_ABSOLUTE = /^[a-z]:\//i + checkPath.isNotRelative = path => + REGIX_IS_WINDOWS_PATH_ABSOLUTE.test(path) + || isNotRelative(path) } /***/ }), -/***/ 8359: +/***/ 44124: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; -/* globals AbortController */ - +try { + var util = __nccwpck_require__(73837); + /* istanbul ignore next */ + if (typeof util.inherits !== 'function') throw ''; + module.exports = util.inherits; +} catch (e) { + /* istanbul ignore next */ + module.exports = __nccwpck_require__(8544); +} -const { extractBody, mixinBody, cloneBody } = __nccwpck_require__(1472) -const { Headers, fill: fillHeaders, HeadersList } = __nccwpck_require__(554) -const { FinalizationRegistry } = __nccwpck_require__(6436)() -const util = __nccwpck_require__(3983) -const { - isValidHTTPToken, - sameOrigin, - normalizeMethod, - makePolicyContainer, - normalizeMethodRecord -} = __nccwpck_require__(2538) -const { - forbiddenMethodsSet, - corsSafeListedMethodsSet, - referrerPolicy, - requestRedirect, - requestMode, - requestCredentials, - requestCache, - requestDuplex -} = __nccwpck_require__(1037) -const { kEnumerableProperty } = util -const { kHeaders, kSignal, kState, kGuard, kRealm } = __nccwpck_require__(5861) -const { webidl } = __nccwpck_require__(1744) -const { getGlobalOrigin } = __nccwpck_require__(1246) -const { URLSerializer } = __nccwpck_require__(685) -const { kHeadersList, kConstruct } = __nccwpck_require__(2785) -const assert = __nccwpck_require__(9491) -const { getMaxListeners, setMaxListeners, getEventListeners, defaultMaxListeners } = __nccwpck_require__(2361) +/***/ }), -let TransformStream = globalThis.TransformStream +/***/ 8544: +/***/ ((module) => { -const kAbortController = Symbol('abortController') +if (typeof Object.create === 'function') { + // implementation from standard node.js 'util' module + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + ctor.prototype = Object.create(superCtor.prototype, { + constructor: { + value: ctor, + enumerable: false, + writable: true, + configurable: true + } + }) + } + }; +} else { + // old school shim for old browsers + module.exports = function inherits(ctor, superCtor) { + if 
(superCtor) { + ctor.super_ = superCtor + var TempCtor = function () {} + TempCtor.prototype = superCtor.prototype + ctor.prototype = new TempCtor() + ctor.prototype.constructor = ctor + } + } +} -const requestFinalizer = new FinalizationRegistry(({ signal, abort }) => { - signal.removeEventListener('abort', abort) -}) -// https://fetch.spec.whatwg.org/#request-class -class Request { - // https://fetch.spec.whatwg.org/#dom-request - constructor (input, init = {}) { - if (input === kConstruct) { - return - } +/***/ }), - webidl.argumentLengthCheck(arguments, 1, { header: 'Request constructor' }) +/***/ 87547: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - input = webidl.converters.RequestInfo(input) - init = webidl.converters.RequestInit(init) +var ip = exports; +var { Buffer } = __nccwpck_require__(14300); +var os = __nccwpck_require__(22037); - // https://html.spec.whatwg.org/multipage/webappapis.html#environment-settings-object - this[kRealm] = { - settingsObject: { - baseUrl: getGlobalOrigin(), - get origin () { - return this.baseUrl?.origin - }, - policyContainer: makePolicyContainer() - } - } +ip.toBuffer = function (ip, buff, offset) { + offset = ~~offset; - // 1. Let request be null. - let request = null + var result; - // 2. Let fallbackMode be null. - let fallbackMode = null + if (this.isV4Format(ip)) { + result = buff || new Buffer(offset + 4); + ip.split(/\./g).map((byte) => { + result[offset++] = parseInt(byte, 10) & 0xff; + }); + } else if (this.isV6Format(ip)) { + var sections = ip.split(':', 8); - // 3. Let baseURL be this’s relevant settings object’s API base URL. - const baseUrl = this[kRealm].settingsObject.baseUrl + var i; + for (i = 0; i < sections.length; i++) { + var isv4 = this.isV4Format(sections[i]); + var v4Buffer; - // 4. Let signal be null. - let signal = null + if (isv4) { + v4Buffer = this.toBuffer(sections[i]); + sections[i] = v4Buffer.slice(0, 2).toString('hex'); + } - // 5. If input is a string, then: - if (typeof input === 'string') { - // 1. Let parsedURL be the result of parsing input with baseURL. - // 2. If parsedURL is failure, then throw a TypeError. - let parsedURL - try { - parsedURL = new URL(input, baseUrl) - } catch (err) { - throw new TypeError('Failed to parse URL from ' + input, { cause: err }) + if (v4Buffer && ++i < 8) { + sections.splice(i, 0, v4Buffer.slice(2, 4).toString('hex')); } + } - // 3. If parsedURL includes credentials, then throw a TypeError. - if (parsedURL.username || parsedURL.password) { - throw new TypeError( - 'Request cannot be constructed from a URL that includes credentials: ' + - input - ) + if (sections[0] === '') { + while (sections.length < 8) sections.unshift('0'); + } else if (sections[sections.length - 1] === '') { + while (sections.length < 8) sections.push('0'); + } else if (sections.length < 8) { + for (i = 0; i < sections.length && sections[i] !== ''; i++); + var argv = [i, 1]; + for (i = 9 - sections.length; i > 0; i--) { + argv.push('0'); } + sections.splice.apply(sections, argv); + } - // 4. Set request to a new request whose URL is parsedURL. - request = makeRequest({ urlList: [parsedURL] }) + result = buff || new Buffer(offset + 16); + for (i = 0; i < sections.length; i++) { + var word = parseInt(sections[i], 16); + result[offset++] = (word >> 8) & 0xff; + result[offset++] = word & 0xff; + } + } - // 5. Set fallbackMode to "cors". - fallbackMode = 'cors' - } else { - // 6. Otherwise: + if (!result) { + throw Error(`Invalid ip address: ${ip}`); + } - // 7. 
Assert: input is a Request object. - assert(input instanceof Request) + return result; +}; - // 8. Set request to input’s request. - request = input[kState] +ip.toString = function (buff, offset, length) { + offset = ~~offset; + length = length || (buff.length - offset); - // 9. Set signal to input’s signal. - signal = input[kSignal] + var result = []; + var i; + if (length === 4) { + // IPv4 + for (i = 0; i < length; i++) { + result.push(buff[offset + i]); } + result = result.join('.'); + } else if (length === 16) { + // IPv6 + for (i = 0; i < length; i += 2) { + result.push(buff.readUInt16BE(offset + i).toString(16)); + } + result = result.join(':'); + result = result.replace(/(^|:)0(:0)*:0(:|$)/, '$1::$3'); + result = result.replace(/:{3,4}/, '::'); + } - // 7. Let origin be this’s relevant settings object’s origin. - const origin = this[kRealm].settingsObject.origin + return result; +}; - // 8. Let window be "client". - let window = 'client' +var ipv4Regex = /^(\d{1,3}\.){3,3}\d{1,3}$/; +var ipv6Regex = /^(::)?(((\d{1,3}\.){3}(\d{1,3}){1})?([0-9a-f]){0,4}:{0,2}){1,8}(::)?$/i; - // 9. If request’s window is an environment settings object and its origin - // is same origin with origin, then set window to request’s window. - if ( - request.window?.constructor?.name === 'EnvironmentSettingsObject' && - sameOrigin(request.window, origin) - ) { - window = request.window +ip.isV4Format = function (ip) { + return ipv4Regex.test(ip); +}; + +ip.isV6Format = function (ip) { + return ipv6Regex.test(ip); +}; + +function _normalizeFamily(family) { + if (family === 4) { + return 'ipv4'; + } + if (family === 6) { + return 'ipv6'; + } + return family ? family.toLowerCase() : 'ipv4'; +} + +ip.fromPrefixLen = function (prefixlen, family) { + if (prefixlen > 32) { + family = 'ipv6'; + } else { + family = _normalizeFamily(family); + } + + var len = 4; + if (family === 'ipv6') { + len = 16; + } + var buff = new Buffer(len); + + for (var i = 0, n = buff.length; i < n; ++i) { + var bits = 8; + if (prefixlen < 8) { + bits = prefixlen; } + prefixlen -= bits; - // 10. If init["window"] exists and is non-null, then throw a TypeError. - if (init.window != null) { - throw new TypeError(`'window' option '${window}' must be null`) + buff[i] = ~(0xff >> bits) & 0xff; + } + + return ip.toString(buff); +}; + +ip.mask = function (addr, mask) { + addr = ip.toBuffer(addr); + mask = ip.toBuffer(mask); + + var result = new Buffer(Math.max(addr.length, mask.length)); + + // Same protocol - do bitwise and + var i; + if (addr.length === mask.length) { + for (i = 0; i < addr.length; i++) { + result[i] = addr[i] & mask[i]; + } + } else if (mask.length === 4) { + // IPv6 address and IPv4 mask + // (Mask low bits) + for (i = 0; i < mask.length; i++) { + result[i] = addr[addr.length - 4 + i] & mask[i]; + } + } else { + // IPv6 mask and IPv4 addr + for (i = 0; i < result.length - 6; i++) { + result[i] = 0; } - // 11. If init["window"] exists, then set window to "no-window". - if ('window' in init) { - window = 'no-window' + // ::ffff:ipv4 + result[10] = 0xff; + result[11] = 0xff; + for (i = 0; i < addr.length; i++) { + result[i + 12] = addr[i] & mask[i + 12]; } + i += 12; + } + for (; i < result.length; i++) { + result[i] = 0; + } - // 12. Set request to a new request with the following properties: - request = makeRequest({ - // URL request’s URL. - // undici implementation note: this is set as the first item in request's urlList in makeRequest - // method request’s method. 
- method: request.method, - // header list A copy of request’s header list. - // undici implementation note: headersList is cloned in makeRequest - headersList: request.headersList, - // unsafe-request flag Set. - unsafeRequest: request.unsafeRequest, - // client This’s relevant settings object. - client: this[kRealm].settingsObject, - // window window. - window, - // priority request’s priority. - priority: request.priority, - // origin request’s origin. The propagation of the origin is only significant for navigation requests - // being handled by a service worker. In this scenario a request can have an origin that is different - // from the current client. - origin: request.origin, - // referrer request’s referrer. - referrer: request.referrer, - // referrer policy request’s referrer policy. - referrerPolicy: request.referrerPolicy, - // mode request’s mode. - mode: request.mode, - // credentials mode request’s credentials mode. - credentials: request.credentials, - // cache mode request’s cache mode. - cache: request.cache, - // redirect mode request’s redirect mode. - redirect: request.redirect, - // integrity metadata request’s integrity metadata. - integrity: request.integrity, - // keepalive request’s keepalive. - keepalive: request.keepalive, - // reload-navigation flag request’s reload-navigation flag. - reloadNavigation: request.reloadNavigation, - // history-navigation flag request’s history-navigation flag. - historyNavigation: request.historyNavigation, - // URL list A clone of request’s URL list. - urlList: [...request.urlList] - }) + return ip.toString(result); +}; - const initHasKey = Object.keys(init).length !== 0 +ip.cidr = function (cidrString) { + var cidrParts = cidrString.split('/'); - // 13. If init is not empty, then: - if (initHasKey) { - // 1. If request’s mode is "navigate", then set it to "same-origin". - if (request.mode === 'navigate') { - request.mode = 'same-origin' - } + var addr = cidrParts[0]; + if (cidrParts.length !== 2) { + throw new Error(`invalid CIDR subnet: ${addr}`); + } - // 2. Unset request’s reload-navigation flag. - request.reloadNavigation = false + var mask = ip.fromPrefixLen(parseInt(cidrParts[1], 10)); - // 3. Unset request’s history-navigation flag. - request.historyNavigation = false + return ip.mask(addr, mask); +}; - // 4. Set request’s origin to "client". - request.origin = 'client' +ip.subnet = function (addr, mask) { + var networkAddress = ip.toLong(ip.mask(addr, mask)); - // 5. Set request’s referrer to "client" - request.referrer = 'client' + // Calculate the mask's length. + var maskBuffer = ip.toBuffer(mask); + var maskLength = 0; - // 6. Set request’s referrer policy to the empty string. - request.referrerPolicy = '' + for (var i = 0; i < maskBuffer.length; i++) { + if (maskBuffer[i] === 0xff) { + maskLength += 8; + } else { + var octet = maskBuffer[i] & 0xff; + while (octet) { + octet = (octet << 1) & 0xff; + maskLength++; + } + } + } - // 7. Set request’s URL to request’s current URL. - request.url = request.urlList[request.urlList.length - 1] + var numberOfAddresses = Math.pow(2, 32 - maskLength); - // 8. Set request’s URL list to « request’s URL ». - request.urlList = [request.url] - } + return { + networkAddress: ip.fromLong(networkAddress), + firstAddress: numberOfAddresses <= 2 + ? ip.fromLong(networkAddress) + : ip.fromLong(networkAddress + 1), + lastAddress: numberOfAddresses <= 2 + ? 
ip.fromLong(networkAddress + numberOfAddresses - 1) + : ip.fromLong(networkAddress + numberOfAddresses - 2), + broadcastAddress: ip.fromLong(networkAddress + numberOfAddresses - 1), + subnetMask: mask, + subnetMaskLength: maskLength, + numHosts: numberOfAddresses <= 2 + ? numberOfAddresses : numberOfAddresses - 2, + length: numberOfAddresses, + contains(other) { + return networkAddress === ip.toLong(ip.mask(other, mask)); + }, + }; +}; - // 14. If init["referrer"] exists, then: - if (init.referrer !== undefined) { - // 1. Let referrer be init["referrer"]. - const referrer = init.referrer +ip.cidrSubnet = function (cidrString) { + var cidrParts = cidrString.split('/'); - // 2. If referrer is the empty string, then set request’s referrer to "no-referrer". - if (referrer === '') { - request.referrer = 'no-referrer' - } else { - // 1. Let parsedReferrer be the result of parsing referrer with - // baseURL. - // 2. If parsedReferrer is failure, then throw a TypeError. - let parsedReferrer - try { - parsedReferrer = new URL(referrer, baseUrl) - } catch (err) { - throw new TypeError(`Referrer "${referrer}" is not a valid URL.`, { cause: err }) - } + var addr = cidrParts[0]; + if (cidrParts.length !== 2) { + throw new Error(`invalid CIDR subnet: ${addr}`); + } - // 3. If one of the following is true - // - parsedReferrer’s scheme is "about" and path is the string "client" - // - parsedReferrer’s origin is not same origin with origin - // then set request’s referrer to "client". - if ( - (parsedReferrer.protocol === 'about:' && parsedReferrer.hostname === 'client') || - (origin && !sameOrigin(parsedReferrer, this[kRealm].settingsObject.baseUrl)) - ) { - request.referrer = 'client' - } else { - // 4. Otherwise, set request’s referrer to parsedReferrer. - request.referrer = parsedReferrer - } - } - } + var mask = ip.fromPrefixLen(parseInt(cidrParts[1], 10)); - // 15. If init["referrerPolicy"] exists, then set request’s referrer policy - // to it. - if (init.referrerPolicy !== undefined) { - request.referrerPolicy = init.referrerPolicy - } + return ip.subnet(addr, mask); +}; - // 16. Let mode be init["mode"] if it exists, and fallbackMode otherwise. - let mode - if (init.mode !== undefined) { - mode = init.mode - } else { - mode = fallbackMode - } +ip.not = function (addr) { + var buff = ip.toBuffer(addr); + for (var i = 0; i < buff.length; i++) { + buff[i] = 0xff ^ buff[i]; + } + return ip.toString(buff); +}; - // 17. If mode is "navigate", then throw a TypeError. - if (mode === 'navigate') { - throw webidl.errors.exception({ - header: 'Request constructor', - message: 'invalid request mode navigate.' - }) - } +ip.or = function (a, b) { + var i; - // 18. If mode is non-null, set request’s mode to mode. - if (mode != null) { - request.mode = mode - } + a = ip.toBuffer(a); + b = ip.toBuffer(b); - // 19. If init["credentials"] exists, then set request’s credentials mode - // to it. - if (init.credentials !== undefined) { - request.credentials = init.credentials + // same protocol + if (a.length === b.length) { + for (i = 0; i < a.length; ++i) { + a[i] |= b[i]; } + return ip.toString(a); - // 18. If init["cache"] exists, then set request’s cache mode to it. - if (init.cache !== undefined) { - request.cache = init.cache - } + // mixed protocols + } + var buff = a; + var other = b; + if (b.length > a.length) { + buff = b; + other = a; + } - // 21. If request’s cache mode is "only-if-cached" and request’s mode is - // not "same-origin", then throw a TypeError. 
- if (request.cache === 'only-if-cached' && request.mode !== 'same-origin') { - throw new TypeError( - "'only-if-cached' can be set only with 'same-origin' mode" - ) - } + var offset = buff.length - other.length; + for (i = offset; i < buff.length; ++i) { + buff[i] |= other[i - offset]; + } - // 22. If init["redirect"] exists, then set request’s redirect mode to it. - if (init.redirect !== undefined) { - request.redirect = init.redirect - } + return ip.toString(buff); +}; - // 23. If init["integrity"] exists, then set request’s integrity metadata to it. - if (init.integrity != null) { - request.integrity = String(init.integrity) - } +ip.isEqual = function (a, b) { + var i; - // 24. If init["keepalive"] exists, then set request’s keepalive to it. - if (init.keepalive !== undefined) { - request.keepalive = Boolean(init.keepalive) + a = ip.toBuffer(a); + b = ip.toBuffer(b); + + // Same protocol + if (a.length === b.length) { + for (i = 0; i < a.length; i++) { + if (a[i] !== b[i]) return false; } + return true; + } - // 25. If init["method"] exists, then: - if (init.method !== undefined) { - // 1. Let method be init["method"]. - let method = init.method + // Swap + if (b.length === 4) { + var t = b; + b = a; + a = t; + } - // 2. If method is not a method or method is a forbidden method, then - // throw a TypeError. - if (!isValidHTTPToken(method)) { - throw new TypeError(`'${method}' is not a valid HTTP method.`) - } + // a - IPv4, b - IPv6 + for (i = 0; i < 10; i++) { + if (b[i] !== 0) return false; + } - if (forbiddenMethodsSet.has(method.toUpperCase())) { - throw new TypeError(`'${method}' HTTP method is unsupported.`) - } + var word = b.readUInt16BE(10); + if (word !== 0 && word !== 0xffff) return false; - // 3. Normalize method. - method = normalizeMethodRecord[method] ?? normalizeMethod(method) + for (i = 0; i < 4; i++) { + if (a[i] !== b[i + 12]) return false; + } - // 4. Set request’s method to method. - request.method = method - } + return true; +}; - // 26. If init["signal"] exists, then set signal to it. - if (init.signal !== undefined) { - signal = init.signal +ip.isPrivate = function (addr) { + // check loopback addresses first + if (ip.isLoopback(addr)) { + return true; + } + + // ensure the ipv4 address is valid + if (!ip.isV6Format(addr)) { + const ipl = ip.normalizeToLong(addr); + if (ipl < 0) { + throw new Error('invalid ipv4 address'); } + // normalize the address for the private range checks that follow + addr = ip.fromLong(ipl); + } - // 27. Set this’s request to request. - this[kState] = request + // check private ranges + return /^(::f{4}:)?10\.([0-9]{1,3})\.([0-9]{1,3})\.([0-9]{1,3})$/i.test(addr) + || /^(::f{4}:)?192\.168\.([0-9]{1,3})\.([0-9]{1,3})$/i.test(addr) + || /^(::f{4}:)?172\.(1[6-9]|2\d|30|31)\.([0-9]{1,3})\.([0-9]{1,3})$/i + .test(addr) + || /^(::f{4}:)?169\.254\.([0-9]{1,3})\.([0-9]{1,3})$/i.test(addr) + || /^f[cd][0-9a-f]{2}:/i.test(addr) + || /^fe80:/i.test(addr) + || /^::1$/.test(addr) + || /^::$/.test(addr); +}; - // 28. Set this’s signal to a new AbortSignal object with this’s relevant - // Realm. - // TODO: could this be simplified with AbortSignal.any - // (https://dom.spec.whatwg.org/#dom-abortsignal-any) - const ac = new AbortController() - this[kSignal] = ac.signal - this[kSignal][kRealm] = this[kRealm] +ip.isPublic = function (addr) { + return !ip.isPrivate(addr); +}; - // 29. If signal is not null, then make this’s signal follow signal. 
- if (signal != null) { - if ( - !signal || - typeof signal.aborted !== 'boolean' || - typeof signal.addEventListener !== 'function' - ) { - throw new TypeError( - "Failed to construct 'Request': member signal is not of type AbortSignal." - ) - } +ip.isLoopback = function (addr) { + // If addr is an IPv4 address in long integer form (no dots and no colons), convert it + if (!/\./.test(addr) && !/:/.test(addr)) { + addr = ip.fromLong(Number(addr)); + } - if (signal.aborted) { - ac.abort(signal.reason) - } else { - // Keep a strong ref to ac while request object - // is alive. This is needed to prevent AbortController - // from being prematurely garbage collected. - // See, https://github.com/nodejs/undici/issues/1926. - this[kAbortController] = ac + return /^(::f{4}:)?127\.([0-9]{1,3})\.([0-9]{1,3})\.([0-9]{1,3})/ + .test(addr) + || /^0177\./.test(addr) + || /^0x7f\./i.test(addr) + || /^fe80::1$/i.test(addr) + || /^::1$/.test(addr) + || /^::$/.test(addr); +}; - const acRef = new WeakRef(ac) - const abort = function () { - const ac = acRef.deref() - if (ac !== undefined) { - ac.abort(this.reason) - } - } +ip.loopback = function (family) { + // + // Default to `ipv4` + // + family = _normalizeFamily(family); - // Third-party AbortControllers may not work with these. - // See, https://github.com/nodejs/undici/pull/1910#issuecomment-1464495619. - try { - // If the max amount of listeners is equal to the default, increase it - // This is only available in node >= v19.9.0 - if (typeof getMaxListeners === 'function' && getMaxListeners(signal) === defaultMaxListeners) { - setMaxListeners(100, signal) - } else if (getEventListeners(signal, 'abort').length >= defaultMaxListeners) { - setMaxListeners(100, signal) - } - } catch {} + if (family !== 'ipv4' && family !== 'ipv6') { + throw new Error('family must be ipv4 or ipv6'); + } - util.addAbortListener(signal, abort) - requestFinalizer.register(ac, { signal, abort }) - } - } + return family === 'ipv4' ? '127.0.0.1' : 'fe80::1'; +}; - // 30. Set this’s headers to a new Headers object with this’s relevant - // Realm, whose header list is request’s header list and guard is - // "request". - this[kHeaders] = new Headers(kConstruct) - this[kHeaders][kHeadersList] = request.headersList - this[kHeaders][kGuard] = 'request' - this[kHeaders][kRealm] = this[kRealm] +// +// ### function address (name, family) +// #### @name {string|'public'|'private'} **Optional** Name or security +// of the network interface. +// #### @family {ipv4|ipv6} **Optional** IP family of the address (defaults +// to ipv4). +// +// Returns the address for the network interface on the current system with +// the specified `name`: +// * String: First `family` address of the interface. +// If not found see `undefined`. +// * 'public': the first public ip address of family. +// * 'private': the first private ip address of family. +// * undefined: First address with `ipv4` or loopback address `127.0.0.1`. +// +ip.address = function (name, family) { + var interfaces = os.networkInterfaces(); + + // + // Default to `ipv4` + // + family = _normalizeFamily(family); + + // + // If a specific network interface has been named, + // return the address. + // + if (name && name !== 'private' && name !== 'public') { + var res = interfaces[name].filter((details) => { + var itemFamily = _normalizeFamily(details.family); + return itemFamily === family; + }); + if (res.length === 0) { + return undefined; + } + return res[0].address; + } - // 31. 
If this’s request’s mode is "no-cors", then: - if (mode === 'no-cors') { - // 1. If this’s request’s method is not a CORS-safelisted method, - // then throw a TypeError. - if (!corsSafeListedMethodsSet.has(request.method)) { - throw new TypeError( - `'${request.method} is unsupported in no-cors mode.` - ) + var all = Object.keys(interfaces).map((nic) => { + // + // Note: name will only be `public` or `private` + // when this is called. + // + var addresses = interfaces[nic].filter((details) => { + details.family = _normalizeFamily(details.family); + if (details.family !== family || ip.isLoopback(details.address)) { + return false; + } if (!name) { + return true; } - // 2. Set this’s headers’s guard to "request-no-cors". - this[kHeaders][kGuard] = 'request-no-cors' - } + return name === 'public' ? ip.isPrivate(details.address) + : ip.isPublic(details.address); + }); - // 32. If init is not empty, then: - if (initHasKey) { - /** @type {HeadersList} */ - const headersList = this[kHeaders][kHeadersList] - // 1. Let headers be a copy of this’s headers and its associated header - // list. - // 2. If init["headers"] exists, then set headers to init["headers"]. - const headers = init.headers !== undefined ? init.headers : new HeadersList(headersList) + return addresses.length ? addresses[0].address : undefined; + }).filter(Boolean); - // 3. Empty this’s headers’s header list. - headersList.clear() + return !all.length ? ip.loopback(family) : all[0]; +}; - // 4. If headers is a Headers object, then for each header in its header - // list, append header’s name/header’s value to this’s headers. - if (headers instanceof HeadersList) { - for (const [key, val] of headers) { - headersList.append(key, val) - } - // Note: Copy the `set-cookie` meta-data. - headersList.cookies = headers.cookies - } else { - // 5. Otherwise, fill this’s headers with headers. - fillHeaders(this[kHeaders], headers) - } - } +ip.toLong = function (ip) { + var ipl = 0; + ip.split('.').forEach((octet) => { + ipl <<= 8; + ipl += parseInt(octet); + }); + return (ipl >>> 0); +}; - // 33. Let inputBody be input’s request’s body if input is a Request - // object; otherwise null. - const inputBody = input instanceof Request ? input[kState].body : null +ip.fromLong = function (ipl) { + return (`${ipl >>> 24}.${ + ipl >> 16 & 255}.${ + ipl >> 8 & 255}.${ + ipl & 255}`); +}; - // 34. If either init["body"] exists and is non-null or inputBody is - // non-null, and request’s method is `GET` or `HEAD`, then throw a - // TypeError. - if ( - (init.body != null || inputBody != null) && - (request.method === 'GET' || request.method === 'HEAD') - ) { - throw new TypeError('Request with GET/HEAD method cannot have body.') +ip.normalizeToLong = function (addr) { + const parts = addr.split('.').map(part => { + // Handle hexadecimal format + if (part.startsWith('0x') || part.startsWith('0X')) { + return parseInt(part, 16); + } + // Handle octal format (strictly digits 0-7 after a leading zero) + else if (part.startsWith('0') && part !== '0' && /^[0-7]+$/.test(part)) { + return parseInt(part, 8); + } + // Handle decimal format, reject invalid leading zeros + else if (/^[1-9]\d*$/.test(part) || part === '0') { + return parseInt(part, 10); + } + // Return NaN for invalid formats to indicate parsing failure + else { + return NaN; } + }); - // 35. Let initBody be null. 
- let initBody = null + if (parts.some(isNaN)) return -1; // Indicate error with -1 + + let val = 0; + const n = parts.length; + + switch (n) { + case 1: + val = parts[0]; + break; + case 2: + if (parts[0] > 0xff || parts[1] > 0xffffff) return -1; + val = (parts[0] << 24) | (parts[1] & 0xffffff); + break; + case 3: + if (parts[0] > 0xff || parts[1] > 0xff || parts[2] > 0xffff) return -1; + val = (parts[0] << 24) | (parts[1] << 16) | (parts[2] & 0xffff); + break; + case 4: + if (parts.some(part => part > 0xff)) return -1; + val = (parts[0] << 24) | (parts[1] << 16) | (parts[2] << 8) | parts[3]; + break; + default: + return -1; // Error case + } + + return val >>> 0; +}; - // 36. If init["body"] exists and is non-null, then: - if (init.body != null) { - // 1. Let Content-Type be null. - // 2. Set initBody and Content-Type to the result of extracting - // init["body"], with keepalive set to request’s keepalive. - const [extractedBody, contentType] = extractBody( - init.body, - request.keepalive - ) - initBody = extractedBody - // 3, If Content-Type is non-null and this’s headers’s header list does - // not contain `Content-Type`, then append `Content-Type`/Content-Type to - // this’s headers. - if (contentType && !this[kHeaders][kHeadersList].contains('content-type')) { - this[kHeaders].append('content-type', contentType) - } - } +/***/ }), - // 37. Let inputOrInitBody be initBody if it is non-null; otherwise - // inputBody. - const inputOrInitBody = initBody ?? inputBody +/***/ 24202: +/***/ (function(module, exports) { - // 38. If inputOrInitBody is non-null and inputOrInitBody’s source is - // null, then: - if (inputOrInitBody != null && inputOrInitBody.source == null) { - // 1. If initBody is non-null and init["duplex"] does not exist, - // then throw a TypeError. - if (initBody != null && init.duplex == null) { - throw new TypeError('RequestInit: duplex option is required when sending a body.') - } +(function(root) { + var toString = Function.prototype.toString; - // 2. If this’s request’s mode is neither "same-origin" nor "cors", - // then throw a TypeError. - if (request.mode !== 'same-origin' && request.mode !== 'cors') { - throw new TypeError( - 'If request is made from ReadableStream, mode should be "same-origin" or "cors"' - ) - } + function fnBody(fn) { + return toString.call(fn).replace(/^[^{]*{\s*/,'').replace(/\s*}[^}]*$/,''); + } - // 3. Set this’s request’s use-CORS-preflight flag. - request.useCORSPreflightFlag = true + function isClass(fn) { + return (typeof fn === 'function' && + (/^class(?:\s|{)/.test(toString.call(fn)) || + (/^.*classCallCheck\(/.test(fnBody(fn)))) // babel.js + ); + } + + if (true) { + if ( true && module.exports) { + exports = module.exports = isClass; } + exports.isClass = isClass; + } else {} - // 39. Let finalBody be inputOrInitBody. - let finalBody = inputOrInitBody +})(this); - // 40. If initBody is null and inputBody is non-null, then: - if (initBody == null && inputBody != null) { - // 1. If input is unusable, then throw a TypeError. - if (util.isDisturbed(inputBody.stream) || inputBody.stream.locked) { - throw new TypeError( - 'Cannot construct a Request with a Request object that has already been used.' - ) - } - // 2. Set finalBody to the result of creating a proxy for inputBody. 
- if (!TransformStream) { - TransformStream = (__nccwpck_require__(5356).TransformStream) - } +/***/ }), - // https://streams.spec.whatwg.org/#readablestream-create-a-proxy - const identityTransform = new TransformStream() - inputBody.stream.pipeThrough(identityTransform) - finalBody = { - source: inputBody.source, - length: inputBody.length, - stream: identityTransform.readable - } - } +/***/ 70429: +/***/ ((module) => { - // 41. Set this’s request’s body to finalBody. - this[kState].body = finalBody - } +"use strict"; +/*! + * is-extendable + * + * Copyright (c) 2015, Jon Schlinkert. + * Licensed under the MIT License. + */ - // Returns request’s HTTP method, which is "GET" by default. - get method () { - webidl.brandCheck(this, Request) - // The method getter steps are to return this’s request’s method. - return this[kState].method - } - // Returns the URL of request as a string. - get url () { - webidl.brandCheck(this, Request) +module.exports = function isExtendable(val) { + return typeof val !== 'undefined' && val !== null + && (typeof val === 'object' || typeof val === 'function'); +}; - // The url getter steps are to return this’s request’s URL, serialized. - return URLSerializer(this[kState].url) - } - // Returns a Headers object consisting of the headers associated with request. - // Note that headers added in the network layer by the user agent will not - // be accounted for in this object, e.g., the "Host" header. - get headers () { - webidl.brandCheck(this, Request) +/***/ }), - // The headers getter steps are to return this’s headers. - return this[kHeaders] - } +/***/ 65565: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // Returns the kind of resource requested by request, e.g., "document" - // or "script". - get destination () { - webidl.brandCheck(this, Request) +"use strict"; - // The destination getter are to return this’s request’s destination. - return this[kState].destination - } - // Returns the referrer of request. Its value can be a same-origin URL if - // explicitly set in init, the empty string to indicate no referrer, and - // "about:client" when defaulting to the global’s default. This is used - // during fetching to determine the value of the `Referer` header of the - // request being made. - get referrer () { - webidl.brandCheck(this, Request) +var utils = __nccwpck_require__(95898); +var isStearm = __nccwpck_require__(83362); +// wait for https://github.com/miguelmota/is-class/pull/6 merge +var isClass = __nccwpck_require__(24202); - // 1. If this’s request’s referrer is "no-referrer", then return the - // empty string. - if (this[kState].referrer === 'no-referrer') { - return '' - } +/** + * Expose all methods in core-util-is + */ - // 2. If this’s request’s referrer is "client", then return - // "about:client". - if (this[kState].referrer === 'client') { - return 'about:client' - } +Object.keys(utils).map(function (name) { + exports[transform(name)] = utils[name]; +}); - // Return this’s request’s referrer, serialized. - return this[kState].referrer.toString() - } +/** + * Stream detected by isstream + */ - // Returns the referrer policy associated with request. - // This is used during fetching to compute the value of the request’s - // referrer. 
- get referrerPolicy () { - webidl.brandCheck(this, Request) +exports.stream = isStearm; +exports.readableStream = isStearm.isReadable; +exports.writableStream = isStearm.isWritable; +exports.duplexStream = isStearm.isDuplex; - // The referrerPolicy getter steps are to return this’s request’s referrer policy. - return this[kState].referrerPolicy - } +/** + * Class detected by is-class + */ + exports["class"] = isClass; - // Returns the mode associated with request, which is a string indicating - // whether the request will use CORS, or will be restricted to same-origin - // URLs. - get mode () { - webidl.brandCheck(this, Request) +/** + * Extend method + */ - // The mode getter steps are to return this’s request’s mode. - return this[kState].mode - } +exports.finite = Number.isFinite; - // Returns the credentials mode associated with request, - // which is a string indicating whether credentials will be sent with the - // request always, never, or only when sent to a same-origin URL. - get credentials () { - // The credentials getter steps are to return this’s request’s credentials mode. - return this[kState].credentials - } +exports[NaN] = Number.isNaN; - // Returns the cache mode associated with request, - // which is a string indicating how the request will - // interact with the browser’s cache when fetching. - get cache () { - webidl.brandCheck(this, Request) +exports.generator = function (obj) { + return obj + && 'function' === typeof obj.next + && 'function' === typeof obj.throw; +}; - // The cache getter steps are to return this’s request’s cache mode. - return this[kState].cache - } +exports.generatorFunction = function (obj) { + return obj + && obj.constructor + && 'GeneratorFunction' === obj.constructor.name; +}; - // Returns the redirect mode associated with request, - // which is a string indicating how redirects for the - // request will be handled during fetching. A request - // will follow redirects by default. - get redirect () { - webidl.brandCheck(this, Request) +exports.asyncFunction = function (obj) { + return obj + && obj.constructor + && 'AsyncFunction' === obj.constructor.name; +}; - // The redirect getter steps are to return this’s request’s redirect mode. - return this[kState].redirect - } +exports.promise = function (obj) { + return obj + && 'function' === typeof obj.then; +}; - // Returns request’s subresource integrity metadata, which is a - // cryptographic hash of the resource being fetched. Its value - // consists of multiple hashes separated by whitespace. [SRI] - get integrity () { - webidl.brandCheck(this, Request) +var MAX_INT_31 = Math.pow(2, 31); - // The integrity getter steps are to return this’s request’s integrity - // metadata. - return this[kState].integrity - } +exports.int = function (obj) { + return utils.isNumber(obj) + && obj % 1 === 0; +}; - // Returns a boolean indicating whether or not request can outlive the - // global in which it was created. - get keepalive () { - webidl.brandCheck(this, Request) +exports.int32 = function (obj) { + return exports.int(obj) + && obj < MAX_INT_31 + && obj >= -MAX_INT_31; +}; - // The keepalive getter steps are to return this’s request’s keepalive. - return this[kState].keepalive - } +exports.long = function (obj) { + return exports.int(obj) + && (obj >= MAX_INT_31 || obj < -MAX_INT_31); +}; - // Returns a boolean indicating whether or not request is for a reload - // navigation. 
- get isReloadNavigation () { - webidl.brandCheck(this, Request) +exports.Long = function (obj) { + return exports.object(obj) + && exports.number(obj.high) + && exports.number(obj.low); +}; - // The isReloadNavigation getter steps are to return true if this’s - // request’s reload-navigation flag is set; otherwise false. - return this[kState].reloadNavigation - } +exports.double = function (obj) { + return utils.isNumber(obj) + && !isNaN(obj) + && obj % 1 !== 0; +}; - // Returns a boolean indicating whether or not request is for a history - // navigation (a.k.a. back-foward navigation). - get isHistoryNavigation () { - webidl.brandCheck(this, Request) +exports.bigInt = function (obj) { + return 'bigint' === typeof obj; +}; +exports.bigint = exports.bigInt; - // The isHistoryNavigation getter steps are to return true if this’s request’s - // history-navigation flag is set; otherwise false. - return this[kState].historyNavigation - } +/** + * override core-util-is + */ - // Returns the signal associated with request, which is an AbortSignal - // object indicating whether or not request has been aborted, and its - // abort event handler. - get signal () { - webidl.brandCheck(this, Request) +exports.date = function isDate(obj) { + return obj instanceof Date; +}; - // The signal getter steps are to return this’s signal. - return this[kSignal] - } +exports.regExp = function isRegExp(obj) { + return obj instanceof RegExp; +}; +exports.regexp = exports.regExp; - get body () { - webidl.brandCheck(this, Request) +exports.error = function isError(obj) { + return obj instanceof Error; +}; - return this[kState].body ? this[kState].body.stream : null - } +exports.array = Array.isArray; - get bodyUsed () { - webidl.brandCheck(this, Request) +/** + * transform isNull type to null + * @param {[type]} m [description] + * @return {[type]} [description] + */ - return !!this[kState].body && util.isDisturbed(this[kState].body.stream) - } +function transform(m) { + var name = m.slice(2); + name = name[0].toLowerCase() + name.slice(1); + return name; +} - get duplex () { - webidl.brandCheck(this, Request) - return 'half' - } +/***/ }), - // Returns a clone of request. - clone () { - webidl.brandCheck(this, Request) +/***/ 83362: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // 1. If this is unusable, then throw a TypeError. - if (this.bodyUsed || this.body?.locked) { - throw new TypeError('unusable') - } +var stream = __nccwpck_require__(12781) - // 2. Let clonedRequest be the result of cloning this’s request. - const clonedRequest = cloneRequest(this[kState]) - // 3. Let clonedRequestObject be the result of creating a Request object, - // given clonedRequest, this’s headers’s guard, and this’s relevant Realm. - const clonedRequestObject = new Request(kConstruct) - clonedRequestObject[kState] = clonedRequest - clonedRequestObject[kRealm] = this[kRealm] - clonedRequestObject[kHeaders] = new Headers(kConstruct) - clonedRequestObject[kHeaders][kHeadersList] = clonedRequest.headersList - clonedRequestObject[kHeaders][kGuard] = this[kHeaders][kGuard] - clonedRequestObject[kHeaders][kRealm] = this[kHeaders][kRealm] +function isStream (obj) { + return obj instanceof stream.Stream +} - // 4. Make clonedRequestObject’s signal follow this’s signal. - const ac = new AbortController() - if (this.signal.aborted) { - ac.abort(this.signal.reason) - } else { - util.addAbortListener( - this.signal, - () => { - ac.abort(this.signal.reason) - } - ) - } - clonedRequestObject[kSignal] = ac.signal - // 4. 
Return clonedRequestObject. - return clonedRequestObject - } +function isReadable (obj) { + return isStream(obj) && typeof obj._read == 'function' && typeof obj._readableState == 'object' } -mixinBody(Request) -function makeRequest (init) { - // https://fetch.spec.whatwg.org/#requests - const request = { - method: 'GET', - localURLsOnly: false, - unsafeRequest: false, - body: null, - client: null, - reservedClient: null, - replacesClientId: '', - window: 'client', - keepalive: false, - serviceWorkers: 'all', - initiator: '', - destination: '', - priority: null, - origin: 'client', - policyContainer: 'client', - referrer: 'client', - referrerPolicy: '', - mode: 'no-cors', - useCORSPreflightFlag: false, - credentials: 'same-origin', - useCredentials: false, - cache: 'default', - redirect: 'follow', - integrity: '', - cryptoGraphicsNonceMetadata: '', - parserMetadata: '', - reloadNavigation: false, - historyNavigation: false, - userActivation: false, - taintedOrigin: false, - redirectCount: 0, - responseTainting: 'basic', - preventNoCacheCacheControlHeaderModification: false, - done: false, - timingAllowFailed: false, - ...init, - headersList: init.headersList - ? new HeadersList(init.headersList) - : new HeadersList() - } - request.url = request.urlList[0] - return request +function isWritable (obj) { + return isStream(obj) && typeof obj._write == 'function' && typeof obj._writableState == 'object' } -// https://fetch.spec.whatwg.org/#concept-request-clone -function cloneRequest (request) { - // To clone a request request, run these steps: - // 1. Let newRequest be a copy of request, except for its body. - const newRequest = makeRequest({ ...request, body: null }) +function isDuplex (obj) { + return isReadable(obj) && isWritable(obj) +} - // 2. If request’s body is non-null, set newRequest’s body to the - // result of cloning request’s body. - if (request.body != null) { - newRequest.body = cloneBody(request.body) - } - // 3. Return newRequest. - return newRequest -} +module.exports = isStream +module.exports.isReadable = isReadable +module.exports.isWritable = isWritable +module.exports.isDuplex = isDuplex -Object.defineProperties(Request.prototype, { - method: kEnumerableProperty, - url: kEnumerableProperty, - headers: kEnumerableProperty, - redirect: kEnumerableProperty, - clone: kEnumerableProperty, - signal: kEnumerableProperty, - duplex: kEnumerableProperty, - destination: kEnumerableProperty, - body: kEnumerableProperty, - bodyUsed: kEnumerableProperty, - isHistoryNavigation: kEnumerableProperty, - isReloadNavigation: kEnumerableProperty, - keepalive: kEnumerableProperty, - integrity: kEnumerableProperty, - cache: kEnumerableProperty, - credentials: kEnumerableProperty, - attribute: kEnumerableProperty, - referrerPolicy: kEnumerableProperty, - referrer: kEnumerableProperty, - mode: kEnumerableProperty, - [Symbol.toStringTag]: { - value: 'Request', - configurable: true - } -}) -webidl.converters.Request = webidl.interfaceConverter( - Request -) +/***/ }), -// https://fetch.spec.whatwg.org/#requestinfo -webidl.converters.RequestInfo = function (V) { - if (typeof V === 'string') { - return webidl.converters.USVString(V) - } +/***/ 84139: +/***/ (function(module) { - if (V instanceof Request) { - return webidl.converters.Request(V) - } +/* + * base64.js + * + * Licensed under the BSD 3-Clause License. + * http://opensource.org/licenses/BSD-3-Clause + * + * References: + * http://en.wikipedia.org/wiki/Base64 + */ +;(function (global, factory) { + true + ? 
module.exports = factory(global) + : 0 +}(( + typeof self !== 'undefined' ? self + : typeof window !== 'undefined' ? window + : typeof global !== 'undefined' ? global +: this +), function(global) { + 'use strict'; + // existing version for noConflict() + global = global || {}; + var _Base64 = global.Base64; + var version = "2.6.4"; + // constants + var b64chars + = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'; + var b64tab = function(bin) { + var t = {}; + for (var i = 0, l = bin.length; i < l; i++) t[bin.charAt(i)] = i; + return t; + }(b64chars); + var fromCharCode = String.fromCharCode; + // encoder stuff + var cb_utob = function(c) { + if (c.length < 2) { + var cc = c.charCodeAt(0); + return cc < 0x80 ? c + : cc < 0x800 ? (fromCharCode(0xc0 | (cc >>> 6)) + + fromCharCode(0x80 | (cc & 0x3f))) + : (fromCharCode(0xe0 | ((cc >>> 12) & 0x0f)) + + fromCharCode(0x80 | ((cc >>> 6) & 0x3f)) + + fromCharCode(0x80 | ( cc & 0x3f))); + } else { + var cc = 0x10000 + + (c.charCodeAt(0) - 0xD800) * 0x400 + + (c.charCodeAt(1) - 0xDC00); + return (fromCharCode(0xf0 | ((cc >>> 18) & 0x07)) + + fromCharCode(0x80 | ((cc >>> 12) & 0x3f)) + + fromCharCode(0x80 | ((cc >>> 6) & 0x3f)) + + fromCharCode(0x80 | ( cc & 0x3f))); + } + }; + var re_utob = /[\uD800-\uDBFF][\uDC00-\uDFFFF]|[^\x00-\x7F]/g; + var utob = function(u) { + return u.replace(re_utob, cb_utob); + }; + var cb_encode = function(ccc) { + var padlen = [0, 2, 1][ccc.length % 3], + ord = ccc.charCodeAt(0) << 16 + | ((ccc.length > 1 ? ccc.charCodeAt(1) : 0) << 8) + | ((ccc.length > 2 ? ccc.charCodeAt(2) : 0)), + chars = [ + b64chars.charAt( ord >>> 18), + b64chars.charAt((ord >>> 12) & 63), + padlen >= 2 ? '=' : b64chars.charAt((ord >>> 6) & 63), + padlen >= 1 ? '=' : b64chars.charAt(ord & 63) + ]; + return chars.join(''); + }; + var btoa = global.btoa && typeof global.btoa == 'function' + ? function(b){ return global.btoa(b) } : function(b) { + if (b.match(/[^\x00-\xFF]/)) throw new RangeError( + 'The string contains invalid characters.' + ); + return b.replace(/[\s\S]{1,3}/g, cb_encode); + }; + var _encode = function(u) { + return btoa(utob(String(u))); + }; + var mkUriSafe = function (b64) { + return b64.replace(/[+\/]/g, function(m0) { + return m0 == '+' ? '-' : '_'; + }).replace(/=/g, ''); + }; + var encode = function(u, urisafe) { + return urisafe ? mkUriSafe(_encode(u)) : _encode(u); + }; + var encodeURI = function(u) { return encode(u, true) }; + var fromUint8Array; + if (global.Uint8Array) fromUint8Array = function(a, urisafe) { + // return btoa(fromCharCode.apply(null, a)); + var b64 = ''; + for (var i = 0, l = a.length; i < l; i += 3) { + var a0 = a[i], a1 = a[i+1], a2 = a[i+2]; + var ord = a0 << 16 | a1 << 8 | a2; + b64 += b64chars.charAt( ord >>> 18) + + b64chars.charAt((ord >>> 12) & 63) + + ( typeof a1 != 'undefined' + ? b64chars.charAt((ord >>> 6) & 63) : '=') + + ( typeof a2 != 'undefined' + ? b64chars.charAt( ord & 63) : '='); + } + return urisafe ? 
mkUriSafe(b64) : b64; + }; + // decoder stuff + var re_btou = /[\xC0-\xDF][\x80-\xBF]|[\xE0-\xEF][\x80-\xBF]{2}|[\xF0-\xF7][\x80-\xBF]{3}/g; + var cb_btou = function(cccc) { + switch(cccc.length) { + case 4: + var cp = ((0x07 & cccc.charCodeAt(0)) << 18) + | ((0x3f & cccc.charCodeAt(1)) << 12) + | ((0x3f & cccc.charCodeAt(2)) << 6) + | (0x3f & cccc.charCodeAt(3)), + offset = cp - 0x10000; + return (fromCharCode((offset >>> 10) + 0xD800) + + fromCharCode((offset & 0x3FF) + 0xDC00)); + case 3: + return fromCharCode( + ((0x0f & cccc.charCodeAt(0)) << 12) + | ((0x3f & cccc.charCodeAt(1)) << 6) + | (0x3f & cccc.charCodeAt(2)) + ); + default: + return fromCharCode( + ((0x1f & cccc.charCodeAt(0)) << 6) + | (0x3f & cccc.charCodeAt(1)) + ); + } + }; + var btou = function(b) { + return b.replace(re_btou, cb_btou); + }; + var cb_decode = function(cccc) { + var len = cccc.length, + padlen = len % 4, + n = (len > 0 ? b64tab[cccc.charAt(0)] << 18 : 0) + | (len > 1 ? b64tab[cccc.charAt(1)] << 12 : 0) + | (len > 2 ? b64tab[cccc.charAt(2)] << 6 : 0) + | (len > 3 ? b64tab[cccc.charAt(3)] : 0), + chars = [ + fromCharCode( n >>> 16), + fromCharCode((n >>> 8) & 0xff), + fromCharCode( n & 0xff) + ]; + chars.length -= [0, 0, 2, 1][padlen]; + return chars.join(''); + }; + var _atob = global.atob && typeof global.atob == 'function' + ? function(a){ return global.atob(a) } : function(a){ + return a.replace(/\S{1,4}/g, cb_decode); + }; + var atob = function(a) { + return _atob(String(a).replace(/[^A-Za-z0-9\+\/]/g, '')); + }; + var _decode = function(a) { return btou(_atob(a)) }; + var _fromURI = function(a) { + return String(a).replace(/[-_]/g, function(m0) { + return m0 == '-' ? '+' : '/' + }).replace(/[^A-Za-z0-9\+\/]/g, ''); + }; + var decode = function(a){ + return _decode(_fromURI(a)); + }; + var toUint8Array; + if (global.Uint8Array) toUint8Array = function(a) { + return Uint8Array.from(atob(_fromURI(a)), function(c) { + return c.charCodeAt(0); + }); + }; + var noConflict = function() { + var Base64 = global.Base64; + global.Base64 = _Base64; + return Base64; + }; + // export Base64 + global.Base64 = { + VERSION: version, + atob: atob, + btoa: btoa, + fromBase64: decode, + toBase64: encode, + utob: utob, + encode: encode, + encodeURI: encodeURI, + btou: btou, + decode: decode, + noConflict: noConflict, + fromUint8Array: fromUint8Array, + toUint8Array: toUint8Array + }; + // if ES5 is available, make Base64.extendString() available + if (typeof Object.defineProperty === 'function') { + var noEnum = function(v){ + return {value:v,enumerable:false,writable:true,configurable:true}; + }; + global.Base64.extendString = function () { + Object.defineProperty( + String.prototype, 'fromBase64', noEnum(function () { + return decode(this) + })); + Object.defineProperty( + String.prototype, 'toBase64', noEnum(function (urisafe) { + return encode(this, urisafe) + })); + Object.defineProperty( + String.prototype, 'toBase64URI', noEnum(function () { + return encode(this, true) + })); + }; + } + // + // export Base64 to the namespace + // + if (global['Meteor']) { // Meteor.js + Base64 = global.Base64; + } + // module.exports and AMD are mutually exclusive. + // module.exports has precedence. + if ( true && module.exports) { + module.exports.Base64 = global.Base64; + } + else if (typeof define === 'function' && define.amd) { + // AMD. Register as an anonymous module. + define([], function(){ return global.Base64 }); + } + // that's it! 
+ return {Base64: global.Base64} +})); - return webidl.converters.USVString(V) -} -webidl.converters.AbortSignal = webidl.interfaceConverter( - AbortSignal -) +/***/ }), -// https://fetch.spec.whatwg.org/#requestinit -webidl.converters.RequestInit = webidl.dictionaryConverter([ - { - key: 'method', - converter: webidl.converters.ByteString - }, - { - key: 'headers', - converter: webidl.converters.HeadersInit - }, - { - key: 'body', - converter: webidl.nullableConverter( - webidl.converters.BodyInit - ) - }, - { - key: 'referrer', - converter: webidl.converters.USVString - }, - { - key: 'referrerPolicy', - converter: webidl.converters.DOMString, - // https://w3c.github.io/webappsec-referrer-policy/#referrer-policy - allowedValues: referrerPolicy - }, - { - key: 'mode', - converter: webidl.converters.DOMString, - // https://fetch.spec.whatwg.org/#concept-request-mode - allowedValues: requestMode - }, - { - key: 'credentials', - converter: webidl.converters.DOMString, - // https://fetch.spec.whatwg.org/#requestcredentials - allowedValues: requestCredentials - }, - { - key: 'cache', - converter: webidl.converters.DOMString, - // https://fetch.spec.whatwg.org/#requestcache - allowedValues: requestCache - }, - { - key: 'redirect', - converter: webidl.converters.DOMString, - // https://fetch.spec.whatwg.org/#requestredirect - allowedValues: requestRedirect - }, - { - key: 'integrity', - converter: webidl.converters.DOMString - }, - { - key: 'keepalive', - converter: webidl.converters.boolean - }, - { - key: 'signal', - converter: webidl.nullableConverter( - (signal) => webidl.converters.AbortSignal( - signal, - { strict: false } - ) - ) - }, - { - key: 'window', - converter: webidl.converters.any - }, - { - key: 'duplex', - converter: webidl.converters.DOMString, - allowedValues: requestDuplex - } -]) +/***/ 53094: +/***/ (function(__unused_webpack_module, exports) { -module.exports = { Request, makeRequest } +(function (global, factory) { + if (typeof define === "function" && define.amd) { + define(["exports"], factory); + } else if (true) { + factory(exports); + } else { var mod; } +})(typeof globalThis !== "undefined" ? globalThis : typeof self !== "undefined" ? 
self : this, function (_exports) { + "use strict"; + + Object.defineProperty(_exports, "__esModule", { + value: true + }); + _exports.toXML = _exports.default = void 0; + function _toConsumableArray(arr) { return _arrayWithoutHoles(arr) || _iterableToArray(arr) || _unsupportedIterableToArray(arr) || _nonIterableSpread(); } -/***/ }), + function _nonIterableSpread() { throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } -/***/ 7823: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); } -"use strict"; + function _iterableToArray(iter) { if (typeof Symbol !== "undefined" && iter[Symbol.iterator] != null || iter["@@iterator"] != null) return Array.from(iter); } + function _arrayWithoutHoles(arr) { if (Array.isArray(arr)) return _arrayLikeToArray(arr); } -const { Headers, HeadersList, fill } = __nccwpck_require__(554) -const { extractBody, cloneBody, mixinBody } = __nccwpck_require__(1472) -const util = __nccwpck_require__(3983) -const { kEnumerableProperty } = util -const { - isValidReasonPhrase, - isCancelled, - isAborted, - isBlobLike, - serializeJavascriptValueToJSONString, - isErrorLike, - isomorphicEncode -} = __nccwpck_require__(2538) -const { - redirectStatusSet, - nullBodyStatus, - DOMException -} = __nccwpck_require__(1037) -const { kState, kHeaders, kGuard, kRealm } = __nccwpck_require__(5861) -const { webidl } = __nccwpck_require__(1744) -const { FormData } = __nccwpck_require__(2015) -const { getGlobalOrigin } = __nccwpck_require__(1246) -const { URLSerializer } = __nccwpck_require__(685) -const { kHeadersList, kConstruct } = __nccwpck_require__(2785) -const assert = __nccwpck_require__(9491) -const { types } = __nccwpck_require__(3837) + function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; } -const ReadableStream = globalThis.ReadableStream || (__nccwpck_require__(5356).ReadableStream) -const textEncoder = new TextEncoder('utf-8') + function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } -// https://fetch.spec.whatwg.org/#response-class -class Response { - // Creates network error Response. - static error () { - // TODO - const relevantRealm = { settingsObject: {} } + function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? 
Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } - // The static error() method steps are to return the result of creating a - // Response object, given a new network error, "immutable", and this’s - // relevant Realm. - const responseObject = new Response() - responseObject[kState] = makeNetworkError() - responseObject[kRealm] = relevantRealm - responseObject[kHeaders][kHeadersList] = responseObject[kState].headersList - responseObject[kHeaders][kGuard] = 'immutable' - responseObject[kHeaders][kRealm] = relevantRealm - return responseObject - } + function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } - // https://fetch.spec.whatwg.org/#dom-response-json - static json (data, init = {}) { - webidl.argumentLengthCheck(arguments, 1, { header: 'Response.json' }) + function _typeof(obj) { "@babel/helpers - typeof"; return _typeof = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function (obj) { return typeof obj; } : function (obj) { return obj && "function" == typeof Symbol && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }, _typeof(obj); } - if (init !== null) { - init = webidl.converters.ResponseInit(init) - } + var ARRAY = "array"; + var BOOLEAN = "boolean"; + var DATE = "date"; + var NULL = "null"; + var NUMBER = "number"; + var OBJECT = "object"; + var SPECIAL_OBJECT = "special-object"; + var STRING = "string"; + var PRIVATE_VARS = ["_selfCloseTag", "_attrs"]; + var PRIVATE_VARS_REGEXP = new RegExp(PRIVATE_VARS.join("|"), "g"); + /** + * Determines the indent string based on current tree depth. + */ - // 1. Let bytes the result of running serialize a JavaScript value to JSON bytes on data. - const bytes = textEncoder.encode( - serializeJavascriptValueToJSONString(data) - ) + var getIndentStr = function getIndentStr() { + var indent = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : ""; + var depth = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0; + return indent.repeat(depth); + }; + /** + * Sugar function supplementing JS's quirky typeof operator, plus some extra help to detect + * "special objects" expected by jstoxml. + * Example: + * getType(new Date()); + * -> 'date' + */ - // 2. Let body be the result of extracting bytes. - const body = extractBody(bytes) - // 3. Let responseObject be the result of creating a Response object, given a new response, - // "response", and this’s relevant Realm. - const relevantRealm = { settingsObject: {} } - const responseObject = new Response() - responseObject[kRealm] = relevantRealm - responseObject[kHeaders][kGuard] = 'response' - responseObject[kHeaders][kRealm] = relevantRealm + var getType = function getType(val) { + return Array.isArray(val) && ARRAY || _typeof(val) === OBJECT && val !== null && val._name && SPECIAL_OBJECT || val instanceof Date && DATE || val === null && NULL || _typeof(val); + }; + /** + * Replaces matching values in a string with a new value. + * Example: + * filterStr('foo&bar', { '&': '&' }); + * -> 'foo&bar' + */ - // 4. Perform initialize a response given responseObject, init, and (body, "application/json"). 
- initializeResponse(responseObject, init, { body: body[0], type: 'application/json' }) - // 5. Return responseObject. - return responseObject - } + var filterStr = function filterStr() { + var inputStr = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : ""; + var filter = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; - // Creates a redirect Response that redirects to url with status status. - static redirect (url, status = 302) { - const relevantRealm = { settingsObject: {} } + // Passthrough/no-op for nonstrings (e.g. number, boolean). + if (typeof inputStr !== "string") { + return inputStr; + } - webidl.argumentLengthCheck(arguments, 1, { header: 'Response.redirect' }) + var regexp = new RegExp("(".concat(Object.keys(filter).join("|"), ")(?!(\\w|#)*;)"), "g"); + return String(inputStr).replace(regexp, function (str, entity) { + return filter[entity] || ""; + }); + }; + /** + * Maps an object or array of arribute keyval pairs to a string. + * Examples: + * { foo: 'bar', baz: 'g' } -> 'foo="bar" baz="g"' + * [ { ⚡: true }, { foo: 'bar' } ] -> '⚡ foo="bar"' + */ - url = webidl.converters.USVString(url) - status = webidl.converters['unsigned short'](status) - // 1. Let parsedURL be the result of parsing url with current settings - // object’s API base URL. - // 2. If parsedURL is failure, then throw a TypeError. - // TODO: base-URL? - let parsedURL - try { - parsedURL = new URL(url, getGlobalOrigin()) - } catch (err) { - throw Object.assign(new TypeError('Failed to parse URL from ' + url), { - cause: err - }) - } + var getAttributeKeyVals = function getAttributeKeyVals() { + var attributes = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; + var filter = arguments.length > 1 ? arguments[1] : undefined; + var keyVals = []; + + if (Array.isArray(attributes)) { + // Array containing complex objects and potentially duplicate attributes. + keyVals = attributes.map(function (attr) { + var key = Object.keys(attr)[0]; + var val = attr[key]; + var filteredVal = filter ? filterStr(val, filter) : val; + var valStr = filteredVal === true ? "" : "=\"".concat(filteredVal, "\""); + return "".concat(key).concat(valStr); + }); + } else { + var keys = Object.keys(attributes); + keyVals = keys.map(function (key) { + // Simple object - keyval pairs. + // For boolean true, simply output the key. + var filteredVal = filter ? filterStr(attributes[key], filter) : attributes[key]; + var valStr = attributes[key] === true ? "" : "=\"".concat(filteredVal, "\""); + return "".concat(key).concat(valStr); + }); + } + + return keyVals; + }; + /** + * Converts an attributes object/array to a string of keyval pairs. + * Example: + * formatAttributes({ a: 1, b: 2 }) + * -> 'a="1" b="2"' + */ - // 3. If status is not a redirect status, then throw a RangeError. - if (!redirectStatusSet.has(status)) { - throw new RangeError('Invalid status code ' + status) - } - // 4. Let responseObject be the result of creating a Response object, - // given a new response, "immutable", and this’s relevant Realm. - const responseObject = new Response() - responseObject[kRealm] = relevantRealm - responseObject[kHeaders][kGuard] = 'immutable' - responseObject[kHeaders][kRealm] = relevantRealm + var formatAttributes = function formatAttributes() { + var attributes = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; + var filter = arguments.length > 1 ? 
arguments[1] : undefined; + var keyVals = getAttributeKeyVals(attributes, filter); + if (keyVals.length === 0) return ""; + var keysValsJoined = keyVals.join(" "); + return " ".concat(keysValsJoined); + }; + /** + * Converts an object to a jstoxml array. + * Example: + * objToArray({ foo: 'bar', baz: 2 }); + * -> + * [ + * { + * _name: 'foo', + * _content: 'bar' + * }, + * { + * _name: 'baz', + * _content: 2 + * } + * ] + */ - // 5. Set responseObject’s response’s status to status. - responseObject[kState].status = status - // 6. Let value be parsedURL, serialized and isomorphic encoded. - const value = isomorphicEncode(URLSerializer(parsedURL)) + var objToArray = function objToArray() { + var obj = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; + return Object.keys(obj).map(function (key) { + return { + _name: key, + _content: obj[key] + }; + }); + }; + /** + * Determines if a value is a primitive JavaScript value (not including Symbol). + * Example: + * isPrimitive(4); + * -> true + */ - // 7. Append `Location`/value to responseObject’s response’s header list. - responseObject[kState].headersList.append('location', value) - // 8. Return responseObject. - return responseObject - } + var PRIMITIVE_TYPES = [STRING, NUMBER, BOOLEAN]; - // https://fetch.spec.whatwg.org/#dom-response - constructor (body = null, init = {}) { - if (body !== null) { - body = webidl.converters.BodyInit(body) - } + var isPrimitive = function isPrimitive(val) { + return PRIMITIVE_TYPES.includes(getType(val)); + }; + /** + * Determines if a value is a simple primitive type that can fit onto one line. Needed for + * determining any needed indenting and line breaks. + * Example: + * isSimpleType(new Date()); + * -> true + */ - init = webidl.converters.ResponseInit(init) - // TODO - this[kRealm] = { settingsObject: {} } + var SIMPLE_TYPES = [].concat(PRIMITIVE_TYPES, [DATE, SPECIAL_OBJECT]); - // 1. Set this’s response to a new response. - this[kState] = makeResponse({}) + var isSimpleType = function isSimpleType(val) { + return SIMPLE_TYPES.includes(getType(val)); + }; + /** + * Determines if an XML string is a simple primitive, or contains nested data. + * Example: + * isSimpleXML(''); + * -> false + */ - // 2. Set this’s headers to a new Headers object with this’s relevant - // Realm, whose header list is this’s response’s header list and guard - // is "response". - this[kHeaders] = new Headers(kConstruct) - this[kHeaders][kGuard] = 'response' - this[kHeaders][kHeadersList] = this[kState].headersList - this[kHeaders][kRealm] = this[kRealm] - // 3. Let bodyWithType be null. - let bodyWithType = null + var isSimpleXML = function isSimpleXML(xmlStr) { + return !xmlStr.match("<"); + }; + /** + * Assembles an XML header as defined by the config. + */ - // 4. If body is non-null, then set bodyWithType to the result of extracting body. - if (body != null) { - const [extractedBody, type] = extractBody(body) - bodyWithType = { body: extractedBody, type } - } - // 5. Perform initialize a response given this, init, and bodyWithType. - initializeResponse(this, init, bodyWithType) - } + var DEFAULT_XML_HEADER = ''; - // Returns response’s type, e.g., "cors". 
- get type () { - webidl.brandCheck(this, Response) + var getHeaderString = function getHeaderString(_ref) { + var header = _ref.header, + indent = _ref.indent, + isOutputStart = _ref.isOutputStart; + var shouldOutputHeader = header && isOutputStart; + if (!shouldOutputHeader) return ""; + var shouldUseDefaultHeader = _typeof(header) === BOOLEAN; // return `${shouldUseDefaultHeader ? DEFAULT_XML_HEADER : header}${indent ? "\n" : "" + // }`; - // The type getter steps are to return this’s response’s type. - return this[kState].type - } + return shouldUseDefaultHeader ? DEFAULT_XML_HEADER : header; + }; + /** + * Recursively traverses an object tree and converts the output to an XML string. + * Example: + * toXML({ foo: 'bar' }); + * -> bar + */ - // Returns response’s URL, if it has one; otherwise the empty string. - get url () { - webidl.brandCheck(this, Response) - const urlList = this[kState].urlList + var defaultEntityFilter = { + "<": "<", + ">": ">", + "&": "&" + }; + + var toXML = function toXML() { + var obj = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; + var config = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; + + var _config$depth = config.depth, + depth = _config$depth === void 0 ? 0 : _config$depth, + indent = config.indent, + _isFirstItem = config._isFirstItem, + _config$_isOutputStar = config._isOutputStart, + _isOutputStart = _config$_isOutputStar === void 0 ? true : _config$_isOutputStar, + header = config.header, + _config$attributesFil = config.attributesFilter, + rawAttributesFilter = _config$attributesFil === void 0 ? {} : _config$attributesFil, + _config$filter = config.filter, + rawFilter = _config$filter === void 0 ? {} : _config$filter; + + var shouldTurnOffAttributesFilter = typeof rawAttributesFilter === 'boolean' && !rawAttributesFilter; + var attributesFilter = shouldTurnOffAttributesFilter ? {} : _objectSpread(_objectSpread(_objectSpread({}, defaultEntityFilter), { + '"': """ + }), rawAttributesFilter); + var shouldTurnOffFilter = typeof rawFilter === 'boolean' && !rawFilter; + var filter = shouldTurnOffFilter ? {} : _objectSpread(_objectSpread({}, defaultEntityFilter), rawFilter); // Determine indent string based on depth. + + var indentStr = getIndentStr(indent, depth); // For branching based on value type. + + var valType = getType(obj); + var headerStr = getHeaderString({ + header: header, + indent: indent, + depth: depth, + isOutputStart: _isOutputStart + }); + var isOutputStart = _isOutputStart && !headerStr && _isFirstItem && depth === 0; + var outputStr = ""; + + switch (valType) { + case "special-object": + { + // Processes a specially-formatted object used by jstoxml. + var _name = obj._name, + _content = obj._content; // Output text content without a tag wrapper. + + if (_content === null) { + outputStr = _name; + break; + } // Handles arrays of primitive values. (#33) + + + var isArrayOfPrimitives = Array.isArray(_content) && _content.every(isPrimitive); + + if (isArrayOfPrimitives) { + var primitives = _content.map(function (a) { + return toXML({ + _name: _name, + _content: a + }, _objectSpread(_objectSpread({}, config), {}, { + depth: depth, + _isOutputStart: false + })); + }); - // The url getter steps are to return the empty string if this’s - // response’s URL is null; otherwise this’s response’s URL, - // serialized with exclude fragment set to true. - const url = urlList[urlList.length - 1] ?? null + return primitives.join(''); + } // Don't output private vars (such as _attrs). 
- if (url === null) { - return '' - } - return URLSerializer(url, true) - } + if (_name.match(PRIVATE_VARS_REGEXP)) break; // Process the nested new value and create new config. - // Returns whether response was obtained through a redirect. - get redirected () { - webidl.brandCheck(this, Response) + var newVal = toXML(_content, _objectSpread(_objectSpread({}, config), {}, { + depth: depth + 1, + _isOutputStart: isOutputStart + })); + var newValType = getType(newVal); + var isNewValSimple = isSimpleXML(newVal); // Pre-tag output (indent and line breaks). - // The redirected getter steps are to return true if this’s response’s URL - // list has more than one item; otherwise false. - return this[kState].urlList.length > 1 - } + var preIndentStr = indent && !isOutputStart ? "\n" : ""; + var preTag = "".concat(preIndentStr).concat(indentStr); // Special handling for comments, preserving preceding line breaks/indents. - // Returns response’s status. - get status () { - webidl.brandCheck(this, Response) + if (_name === '_comment') { + outputStr += "".concat(preTag, ""); + break; + } // Tag output. - // The status getter steps are to return this’s response’s status. - return this[kState].status - } - // Returns whether response’s status is an ok status. - get ok () { - webidl.brandCheck(this, Response) + var valIsEmpty = newValType === "undefined" || newVal === ""; + var shouldSelfClose = _typeof(obj._selfCloseTag) === BOOLEAN ? valIsEmpty && obj._selfCloseTag : valIsEmpty; + var selfCloseStr = shouldSelfClose ? "/" : ""; + var attributesString = formatAttributes(obj._attrs, attributesFilter); + var tag = "<".concat(_name).concat(attributesString).concat(selfCloseStr, ">"); // Post-tag output (closing tag, indent, line breaks). - // The ok getter steps are to return true if this’s response’s status is an - // ok status; otherwise false. - return this[kState].status >= 200 && this[kState].status <= 299 - } + var preTagCloseStr = indent && !isNewValSimple ? "\n".concat(indentStr) : ""; + var postTag = !shouldSelfClose ? "".concat(newVal).concat(preTagCloseStr, "") : ""; + outputStr += "".concat(preTag).concat(tag).concat(postTag); + break; + } - // Returns response’s status message. - get statusText () { - webidl.brandCheck(this, Response) + case "object": + { + // Iterates over keyval pairs in an object, converting each item to a special-object. + var keys = Object.keys(obj); + var outputArr = keys.map(function (key, index) { + var newConfig = _objectSpread(_objectSpread({}, config), {}, { + _isFirstItem: index === 0, + _isLastItem: index + 1 === keys.length, + _isOutputStart: isOutputStart + }); - // The statusText getter steps are to return this’s response’s status - // message. - return this[kState].statusText - } + var outputObj = { + _name: key + }; - // Returns response’s headers as Headers. - get headers () { - webidl.brandCheck(this, Response) + if (getType(obj[key]) === "object") { + // Sub-object contains an object. + // Move private vars up as needed. Needed to support certain types of objects + // E.g. { foo: { _attrs: { a: 1 } } } -> + PRIVATE_VARS.forEach(function (privateVar) { + var val = obj[key][privateVar]; - // The headers getter steps are to return this’s headers. - return this[kHeaders] - } + if (typeof val !== "undefined") { + outputObj[privateVar] = val; + delete obj[key][privateVar]; + } + }); + var hasContent = typeof obj[key]._content !== "undefined"; + + if (hasContent) { + // _content has sibling keys, so pass as an array (edge case). + // E.g. 
{ foo: 'bar', _content: { baz: 2 } } -> bar2 + if (Object.keys(obj[key]).length > 1) { + var newContentObj = Object.assign({}, obj[key]); + delete newContentObj._content; + outputObj._content = [].concat(_toConsumableArray(objToArray(newContentObj)), [obj[key]._content]); + } + } + } // Fallthrough: just pass the key as the content for the new special-object. - get body () { - webidl.brandCheck(this, Response) - return this[kState].body ? this[kState].body.stream : null - } + if (typeof outputObj._content === "undefined") outputObj._content = obj[key]; + var xml = toXML(outputObj, newConfig, key); + return xml; + }, config); + outputStr = outputArr.join(''); + break; + } - get bodyUsed () { - webidl.brandCheck(this, Response) + case "function": + { + // Executes a user-defined function and returns output. + var fnResult = obj(config); + outputStr = toXML(fnResult, config); + break; + } - return !!this[kState].body && util.isDisturbed(this[kState].body.stream) - } + case "array": + { + // Iterates and converts each value in an array. + var _outputArr = obj.map(function (singleVal, index) { + var newConfig = _objectSpread(_objectSpread({}, config), {}, { + _isFirstItem: index === 0, + _isLastItem: index + 1 === obj.length, + _isOutputStart: isOutputStart + }); - // Returns a clone of response. - clone () { - webidl.brandCheck(this, Response) + return toXML(singleVal, newConfig); + }); - // 1. If this is unusable, then throw a TypeError. - if (this.bodyUsed || (this.body && this.body.locked)) { - throw webidl.errors.exception({ - header: 'Response.clone', - message: 'Body has already been consumed.' - }) + outputStr = _outputArr.join(''); + break; + } + // number, string, boolean, date, null, etc + + default: + { + outputStr = filterStr(obj, filter); + break; + } } - // 2. Let clonedResponse be the result of cloning this’s response. - const clonedResponse = cloneResponse(this[kState]) + return "".concat(headerStr).concat(outputStr); + }; - // 3. Return the result of creating a Response object, given - // clonedResponse, this’s headers’s guard, and this’s relevant Realm. - const clonedResponseObject = new Response() - clonedResponseObject[kState] = clonedResponse - clonedResponseObject[kRealm] = this[kRealm] - clonedResponseObject[kHeaders][kHeadersList] = clonedResponse.headersList - clonedResponseObject[kHeaders][kGuard] = this[kHeaders][kGuard] - clonedResponseObject[kHeaders][kRealm] = this[kHeaders][kRealm] + _exports.toXML = toXML; + var _default = { + toXML: toXML + }; + _exports.default = _default; +}); - return clonedResponseObject - } -} -mixinBody(Response) +/***/ }), -Object.defineProperties(Response.prototype, { - type: kEnumerableProperty, - url: kEnumerableProperty, - status: kEnumerableProperty, - ok: kEnumerableProperty, - redirected: kEnumerableProperty, - statusText: kEnumerableProperty, - headers: kEnumerableProperty, - clone: kEnumerableProperty, - body: kEnumerableProperty, - bodyUsed: kEnumerableProperty, - [Symbol.toStringTag]: { - value: 'Response', - configurable: true - } -}) +/***/ 19213: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -Object.defineProperties(Response, { - json: kEnumerableProperty, - redirect: kEnumerableProperty, - error: kEnumerableProperty -}) +var root = __nccwpck_require__(89882); -// https://fetch.spec.whatwg.org/#concept-response-clone -function cloneResponse (response) { - // To clone a response response, run these steps: +/** Built-in value references. */ +var Symbol = root.Symbol; - // 1. 
If response is a filtered response, then return a new identical - // filtered response whose internal response is a clone of response’s - // internal response. - if (response.internalResponse) { - return filterResponse( - cloneResponse(response.internalResponse), - response.type - ) - } +module.exports = Symbol; - // 2. Let newResponse be a copy of response, except for its body. - const newResponse = makeResponse({ ...response, body: null }) - // 3. If response’s body is non-null, then set newResponse’s body to the - // result of cloning response’s body. - if (response.body != null) { - newResponse.body = cloneBody(response.body) - } +/***/ }), - // 4. Return newResponse. - return newResponse -} +/***/ 94356: +/***/ ((module) => { -function makeResponse (init) { - return { - aborted: false, - rangeRequested: false, - timingAllowPassed: false, - requestIncludesCredentials: false, - type: 'default', - status: 200, - timingInfo: null, - cacheState: '', - statusText: '', - ...init, - headersList: init.headersList - ? new HeadersList(init.headersList) - : new HeadersList(), - urlList: init.urlList ? [...init.urlList] : [] +/** + * A specialized version of `_.map` for arrays without support for iteratee + * shorthands. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Array} Returns the new mapped array. + */ +function arrayMap(array, iteratee) { + var index = -1, + length = array == null ? 0 : array.length, + result = Array(length); + + while (++index < length) { + result[index] = iteratee(array[index], index, array); } + return result; } -function makeNetworkError (reason) { - const isError = isErrorLike(reason) - return makeResponse({ - type: 'error', - status: 0, - error: isError - ? reason - : new Error(reason ? String(reason) : reason), - aborted: reason && reason.name === 'AbortError' - }) -} +module.exports = arrayMap; -function makeFilteredResponse (response, state) { - state = { - internalResponse: response, - ...state - } - return new Proxy(response, { - get (target, p) { - return p in state ? state[p] : target[p] - }, - set (target, p, value) { - assert(!(p in state)) - target[p] = value - return true - } - }) -} +/***/ }), -// https://fetch.spec.whatwg.org/#concept-filtered-response -function filterResponse (response, type) { - // Set response to the following filtered response with response as its - // internal response, depending on request’s response tainting: - if (type === 'basic') { - // A basic filtered response is a filtered response whose type is "basic" - // and header list excludes any headers in internal response’s header list - // whose name is a forbidden response-header name. +/***/ 97497: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // Note: undici does not implement forbidden response-header names - return makeFilteredResponse(response, { - type: 'basic', - headersList: response.headersList - }) - } else if (type === 'cors') { - // A CORS filtered response is a filtered response whose type is "cors" - // and header list excludes any headers in internal response’s header - // list whose name is not a CORS-safelisted response-header name, given - // internal response’s CORS-exposed header-name list. 
+var Symbol = __nccwpck_require__(19213), + getRawTag = __nccwpck_require__(80923), + objectToString = __nccwpck_require__(14200); - // Note: undici does not implement CORS-safelisted response-header names - return makeFilteredResponse(response, { - type: 'cors', - headersList: response.headersList - }) - } else if (type === 'opaque') { - // An opaque filtered response is a filtered response whose type is - // "opaque", URL list is the empty list, status is 0, status message - // is the empty byte sequence, header list is empty, and body is null. +/** `Object#toString` result references. */ +var nullTag = '[object Null]', + undefinedTag = '[object Undefined]'; - return makeFilteredResponse(response, { - type: 'opaque', - urlList: Object.freeze([]), - status: 0, - statusText: '', - body: null - }) - } else if (type === 'opaqueredirect') { - // An opaque-redirect filtered response is a filtered response whose type - // is "opaqueredirect", status is 0, status message is the empty byte - // sequence, header list is empty, and body is null. +/** Built-in value references. */ +var symToStringTag = Symbol ? Symbol.toStringTag : undefined; - return makeFilteredResponse(response, { - type: 'opaqueredirect', - status: 0, - statusText: '', - headersList: [], - body: null - }) - } else { - assert(false) +/** + * The base implementation of `getTag` without fallbacks for buggy environments. + * + * @private + * @param {*} value The value to query. + * @returns {string} Returns the `toStringTag`. + */ +function baseGetTag(value) { + if (value == null) { + return value === undefined ? undefinedTag : nullTag; } + return (symToStringTag && symToStringTag in Object(value)) + ? getRawTag(value) + : objectToString(value); } -// https://fetch.spec.whatwg.org/#appropriate-network-error -function makeAppropriateNetworkError (fetchParams, err = null) { - // 1. Assert: fetchParams is canceled. - assert(isCancelled(fetchParams)) +module.exports = baseGetTag; - // 2. Return an aborted network error if fetchParams is aborted; - // otherwise return a network error. - return isAborted(fetchParams) - ? makeNetworkError(Object.assign(new DOMException('The operation was aborted.', 'AbortError'), { cause: err })) - : makeNetworkError(Object.assign(new DOMException('Request was cancelled.'), { cause: err })) -} -// https://whatpr.org/fetch/1392.html#initialize-a-response -function initializeResponse (response, init, body) { - // 1. If init["status"] is not in the range 200 to 599, inclusive, then - // throw a RangeError. - if (init.status !== null && (init.status < 200 || init.status > 599)) { - throw new RangeError('init["status"] must be in the range of 200 to 599, inclusive.') - } +/***/ }), - // 2. If init["statusText"] does not match the reason-phrase token production, - // then throw a TypeError. - if ('statusText' in init && init.statusText != null) { - // See, https://datatracker.ietf.org/doc/html/rfc7230#section-3.1.2: - // reason-phrase = *( HTAB / SP / VCHAR / obs-text ) - if (!isValidReasonPhrase(String(init.statusText))) { - throw new TypeError('Invalid statusText') - } - } +/***/ 96792: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // 3. Set response’s response’s status to init["status"]. - if ('status' in init && init.status != null) { - response[kState].status = init.status - } +var Symbol = __nccwpck_require__(19213), + arrayMap = __nccwpck_require__(94356), + isArray = __nccwpck_require__(44869), + isSymbol = __nccwpck_require__(66403); - // 4. 
Set response’s response’s status message to init["statusText"]. - if ('statusText' in init && init.statusText != null) { - response[kState].statusText = init.statusText - } +/** Used as references for various `Number` constants. */ +var INFINITY = 1 / 0; - // 5. If init["headers"] exists, then fill response’s headers with init["headers"]. - if ('headers' in init && init.headers != null) { - fill(response[kHeaders], init.headers) +/** Used to convert symbols to primitives and strings. */ +var symbolProto = Symbol ? Symbol.prototype : undefined, + symbolToString = symbolProto ? symbolProto.toString : undefined; + +/** + * The base implementation of `_.toString` which doesn't convert nullish + * values to empty strings. + * + * @private + * @param {*} value The value to process. + * @returns {string} Returns the string. + */ +function baseToString(value) { + // Exit early for strings to avoid a performance hit in some environments. + if (typeof value == 'string') { + return value; + } + if (isArray(value)) { + // Recursively convert values (susceptible to call stack limits). + return arrayMap(value, baseToString) + ''; + } + if (isSymbol(value)) { + return symbolToString ? symbolToString.call(value) : ''; } + var result = (value + ''); + return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result; +} - // 6. If body was given, then: - if (body) { - // 1. If response's status is a null body status, then throw a TypeError. - if (nullBodyStatus.includes(response.status)) { - throw webidl.errors.exception({ - header: 'Response constructor', - message: 'Invalid response status code ' + response.status - }) - } +module.exports = baseToString; - // 2. Set response's body to body's body. - response[kState].body = body.body - // 3. If body's type is non-null and response's header list does not contain - // `Content-Type`, then append (`Content-Type`, body's type) to response's header list. - if (body.type != null && !response[kState].headersList.contains('Content-Type')) { - response[kState].headersList.append('content-type', body.type) - } - } -} +/***/ }), -webidl.converters.ReadableStream = webidl.interfaceConverter( - ReadableStream -) +/***/ 52085: +/***/ ((module) => { -webidl.converters.FormData = webidl.interfaceConverter( - FormData -) +/** Detect free variable `global` from Node.js. */ +var freeGlobal = typeof global == 'object' && global && global.Object === Object && global; -webidl.converters.URLSearchParams = webidl.interfaceConverter( - URLSearchParams -) +module.exports = freeGlobal; -// https://fetch.spec.whatwg.org/#typedefdef-xmlhttprequestbodyinit -webidl.converters.XMLHttpRequestBodyInit = function (V) { - if (typeof V === 'string') { - return webidl.converters.USVString(V) - } - if (isBlobLike(V)) { - return webidl.converters.Blob(V, { strict: false }) - } +/***/ }), - if (types.isArrayBuffer(V) || types.isTypedArray(V) || types.isDataView(V)) { - return webidl.converters.BufferSource(V) - } +/***/ 80923: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (util.isFormDataLike(V)) { - return webidl.converters.FormData(V, { strict: false }) - } +var Symbol = __nccwpck_require__(19213); - if (V instanceof URLSearchParams) { - return webidl.converters.URLSearchParams(V) - } +/** Used for built-in method references. */ +var objectProto = Object.prototype; - return webidl.converters.DOMString(V) -} +/** Used to check objects for own properties. 
*/ +var hasOwnProperty = objectProto.hasOwnProperty; -// https://fetch.spec.whatwg.org/#bodyinit -webidl.converters.BodyInit = function (V) { - if (V instanceof ReadableStream) { - return webidl.converters.ReadableStream(V) - } +/** + * Used to resolve the + * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) + * of values. + */ +var nativeObjectToString = objectProto.toString; - // Note: the spec doesn't include async iterables, - // this is an undici extension. - if (V?.[Symbol.asyncIterator]) { - return V - } +/** Built-in value references. */ +var symToStringTag = Symbol ? Symbol.toStringTag : undefined; - return webidl.converters.XMLHttpRequestBodyInit(V) -} +/** + * A specialized version of `baseGetTag` which ignores `Symbol.toStringTag` values. + * + * @private + * @param {*} value The value to query. + * @returns {string} Returns the raw `toStringTag`. + */ +function getRawTag(value) { + var isOwn = hasOwnProperty.call(value, symToStringTag), + tag = value[symToStringTag]; -webidl.converters.ResponseInit = webidl.dictionaryConverter([ - { - key: 'status', - converter: webidl.converters['unsigned short'], - defaultValue: 200 - }, - { - key: 'statusText', - converter: webidl.converters.ByteString, - defaultValue: '' - }, - { - key: 'headers', - converter: webidl.converters.HeadersInit + try { + value[symToStringTag] = undefined; + var unmasked = true; + } catch (e) {} + + var result = nativeObjectToString.call(value); + if (unmasked) { + if (isOwn) { + value[symToStringTag] = tag; + } else { + delete value[symToStringTag]; + } } -]) - -module.exports = { - makeNetworkError, - makeResponse, - makeAppropriateNetworkError, - filterResponse, - Response, - cloneResponse + return result; } +module.exports = getRawTag; + /***/ }), -/***/ 5861: +/***/ 14200: /***/ ((module) => { -"use strict"; +/** Used for built-in method references. */ +var objectProto = Object.prototype; +/** + * Used to resolve the + * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) + * of values. + */ +var nativeObjectToString = objectProto.toString; -module.exports = { - kUrl: Symbol('url'), - kHeaders: Symbol('headers'), - kSignal: Symbol('signal'), - kState: Symbol('state'), - kGuard: Symbol('guard'), - kRealm: Symbol('realm') +/** + * Converts `value` to a string using `Object.prototype.toString`. + * + * @private + * @param {*} value The value to convert. + * @returns {string} Returns the converted string. + */ +function objectToString(value) { + return nativeObjectToString.call(value); } +module.exports = objectToString; + /***/ }), -/***/ 2538: +/***/ 89882: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +var freeGlobal = __nccwpck_require__(52085); +/** Detect free variable `self`. */ +var freeSelf = typeof self == 'object' && self && self.Object === Object && self; -const { redirectStatusSet, referrerPolicySet: referrerPolicyTokens, badPortsSet } = __nccwpck_require__(1037) -const { getGlobalOrigin } = __nccwpck_require__(1246) -const { performance } = __nccwpck_require__(4074) -const { isBlobLike, toUSVString, ReadableStreamFrom } = __nccwpck_require__(3983) -const assert = __nccwpck_require__(9491) -const { isUint8Array } = __nccwpck_require__(9830) +/** Used as a reference to the global object. 
*/ +var root = freeGlobal || freeSelf || Function('return this')(); -let supportedHashes = [] +module.exports = root; -// https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable -/** @type {import('crypto')|undefined} */ -let crypto -try { - crypto = __nccwpck_require__(6113) - const possibleRelevantHashes = ['sha256', 'sha384', 'sha512'] - supportedHashes = crypto.getHashes().filter((hash) => possibleRelevantHashes.includes(hash)) -/* c8 ignore next 3 */ -} catch { -} +/***/ }), -function responseURL (response) { - // https://fetch.spec.whatwg.org/#responses - // A response has an associated URL. It is a pointer to the last URL - // in response’s URL list and null if response’s URL list is empty. - const urlList = response.urlList - const length = urlList.length - return length === 0 ? null : urlList[length - 1].toString() -} +/***/ 44869: +/***/ ((module) => { -// https://fetch.spec.whatwg.org/#concept-response-location-url -function responseLocationURL (response, requestFragment) { - // 1. If response’s status is not a redirect status, then return null. - if (!redirectStatusSet.has(response.status)) { - return null - } +/** + * Checks if `value` is classified as an `Array` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an array, else `false`. + * @example + * + * _.isArray([1, 2, 3]); + * // => true + * + * _.isArray(document.body.children); + * // => false + * + * _.isArray('abc'); + * // => false + * + * _.isArray(_.noop); + * // => false + */ +var isArray = Array.isArray; - // 2. Let location be the result of extracting header list values given - // `Location` and response’s header list. - let location = response.headersList.get('location') +module.exports = isArray; - // 3. If location is a header value, then set location to the result of - // parsing location with response’s URL. - if (location !== null && isValidHeaderValue(location)) { - location = new URL(location, responseURL(response)) - } - // 4. If location is a URL whose fragment is null, then set location’s - // fragment to requestFragment. - if (location && !location.hash) { - location.hash = requestFragment - } +/***/ }), - // 5. Return location. - return location -} +/***/ 33334: +/***/ ((module) => { -/** @returns {URL} */ -function requestCurrentURL (request) { - return request.urlList[request.urlList.length - 1] +/** + * Checks if `value` is the + * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types) + * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`) + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an object, else `false`. + * @example + * + * _.isObject({}); + * // => true + * + * _.isObject([1, 2, 3]); + * // => true + * + * _.isObject(_.noop); + * // => true + * + * _.isObject(null); + * // => false + */ +function isObject(value) { + var type = typeof value; + return value != null && (type == 'object' || type == 'function'); } -function requestBadPort (request) { - // 1. Let url be request’s current URL. - const url = requestCurrentURL(request) +module.exports = isObject; - // 2. If url’s scheme is an HTTP(S) scheme and url’s port is a bad port, - // then return blocked. - if (urlIsHttpHttpsScheme(url) && badPortsSet.has(url.port)) { - return 'blocked' - } - // 3. 
Return allowed. - return 'allowed' -} +/***/ }), -function isErrorLike (object) { - return object instanceof Error || ( - object?.constructor?.name === 'Error' || - object?.constructor?.name === 'DOMException' - ) -} +/***/ 85926: +/***/ ((module) => { -// Check whether |statusText| is a ByteString and -// matches the Reason-Phrase token production. -// RFC 2616: https://tools.ietf.org/html/rfc2616 -// RFC 7230: https://tools.ietf.org/html/rfc7230 -// "reason-phrase = *( HTAB / SP / VCHAR / obs-text )" -// https://github.com/chromium/chromium/blob/94.0.4604.1/third_party/blink/renderer/core/fetch/response.cc#L116 -function isValidReasonPhrase (statusText) { - for (let i = 0; i < statusText.length; ++i) { - const c = statusText.charCodeAt(i) - if ( - !( - ( - c === 0x09 || // HTAB - (c >= 0x20 && c <= 0x7e) || // SP / VCHAR - (c >= 0x80 && c <= 0xff) - ) // obs-text - ) - ) { - return false - } - } - return true +/** + * Checks if `value` is object-like. A value is object-like if it's not `null` + * and has a `typeof` result of "object". + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is object-like, else `false`. + * @example + * + * _.isObjectLike({}); + * // => true + * + * _.isObjectLike([1, 2, 3]); + * // => true + * + * _.isObjectLike(_.noop); + * // => false + * + * _.isObjectLike(null); + * // => false + */ +function isObjectLike(value) { + return value != null && typeof value == 'object'; } +module.exports = isObjectLike; + + +/***/ }), + +/***/ 65704: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var baseGetTag = __nccwpck_require__(97497), + isArray = __nccwpck_require__(44869), + isObjectLike = __nccwpck_require__(85926); + +/** `Object#toString` result references. */ +var stringTag = '[object String]'; + /** - * @see https://tools.ietf.org/html/rfc7230#section-3.2.6 - * @param {number} c + * Checks if `value` is classified as a `String` primitive or object. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a string, else `false`. + * @example + * + * _.isString('abc'); + * // => true + * + * _.isString(1); + * // => false */ -function isTokenCharCode (c) { - switch (c) { - case 0x22: - case 0x28: - case 0x29: - case 0x2c: - case 0x2f: - case 0x3a: - case 0x3b: - case 0x3c: - case 0x3d: - case 0x3e: - case 0x3f: - case 0x40: - case 0x5b: - case 0x5c: - case 0x5d: - case 0x7b: - case 0x7d: - // DQUOTE and "(),/:;<=>?@[\]{}" - return false - default: - // VCHAR %x21-7E - return c >= 0x21 && c <= 0x7e - } +function isString(value) { + return typeof value == 'string' || + (!isArray(value) && isObjectLike(value) && baseGetTag(value) == stringTag); } +module.exports = isString; + + +/***/ }), + +/***/ 66403: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var baseGetTag = __nccwpck_require__(97497), + isObjectLike = __nccwpck_require__(85926); + +/** `Object#toString` result references. */ +var symbolTag = '[object Symbol]'; + /** - * @param {string} characters + * Checks if `value` is classified as a `Symbol` primitive or object. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a symbol, else `false`. 
+ * @example + * + * _.isSymbol(Symbol.iterator); + * // => true + * + * _.isSymbol('abc'); + * // => false */ -function isValidHTTPToken (characters) { - if (characters.length === 0) { - return false - } - for (let i = 0; i < characters.length; ++i) { - if (!isTokenCharCode(characters.charCodeAt(i))) { - return false - } - } - return true +function isSymbol(value) { + return typeof value == 'symbol' || + (isObjectLike(value) && baseGetTag(value) == symbolTag); } +module.exports = isSymbol; + + +/***/ }), + +/***/ 32931: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var baseToString = __nccwpck_require__(96792); + /** - * @see https://fetch.spec.whatwg.org/#header-name - * @param {string} potentialValue + * Converts `value` to a string. An empty string is returned for `null` + * and `undefined` values. The sign of `-0` is preserved. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to convert. + * @returns {string} Returns the converted string. + * @example + * + * _.toString(null); + * // => '' + * + * _.toString(-0); + * // => '-0' + * + * _.toString([1, 2, 3]); + * // => '1,2,3' */ -function isValidHeaderName (potentialValue) { - return isValidHTTPToken(potentialValue) +function toString(value) { + return value == null ? '' : baseToString(value); } +module.exports = toString; + + +/***/ }), + +/***/ 11149: +/***/ ((module) => { + +"use strict"; +/*! + * merge-descriptors + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + + + /** - * @see https://fetch.spec.whatwg.org/#header-value - * @param {string} potentialValue + * Module exports. + * @public */ -function isValidHeaderValue (potentialValue) { - // - Has no leading or trailing HTTP tab or space bytes. - // - Contains no 0x00 (NUL) or HTTP newline bytes. - if ( - potentialValue.startsWith('\t') || - potentialValue.startsWith(' ') || - potentialValue.endsWith('\t') || - potentialValue.endsWith(' ') - ) { - return false + +module.exports = merge + +/** + * Module variables. 
+ * @private + */ + +var hasOwnProperty = Object.prototype.hasOwnProperty + +/** + * Merge the property descriptors of `src` into `dest` + * + * @param {object} dest Object to add descriptors to + * @param {object} src Object to clone descriptors from + * @param {boolean} [redefine=true] Redefine `dest` properties with `src` properties + * @returns {object} Reference to dest + * @public + */ + +function merge (dest, src, redefine) { + if (!dest) { + throw new TypeError('argument dest is required') } - if ( - potentialValue.includes('\0') || - potentialValue.includes('\r') || - potentialValue.includes('\n') - ) { - return false + if (!src) { + throw new TypeError('argument src is required') } - return true + if (redefine === undefined) { + // Default to true + redefine = true + } + + Object.getOwnPropertyNames(src).forEach(function forEachOwnPropertyName (name) { + if (!redefine && hasOwnProperty.call(dest, name)) { + // Skip descriptor + return + } + + // Copy descriptor + var descriptor = Object.getOwnPropertyDescriptor(src, name) + Object.defineProperty(dest, name, descriptor) + }) + + return dest } -// https://w3c.github.io/webappsec-referrer-policy/#set-requests-referrer-policy-on-redirect -function setRequestReferrerPolicyOnRedirect (request, actualResponse) { - // Given a request request and a response actualResponse, this algorithm - // updates request’s referrer policy according to the Referrer-Policy - // header (if any) in actualResponse. - // 1. Let policy be the result of executing § 8.1 Parse a referrer policy - // from a Referrer-Policy header on actualResponse. +/***/ }), - // 8.1 Parse a referrer policy from a Referrer-Policy header - // 1. Let policy-tokens be the result of extracting header list values given `Referrer-Policy` and response’s header list. - const { headersList } = actualResponse - // 2. Let policy be the empty string. - // 3. For each token in policy-tokens, if token is a referrer policy and token is not the empty string, then set policy to token. - // 4. Return policy. - const policyHeader = (headersList.get('referrer-policy') ?? '').split(',') +/***/ 47426: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // Note: As the referrer-policy can contain multiple policies - // separated by comma, we need to loop through all of them - // and pick the first valid one. - // Ref: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Referrer-Policy#specify_a_fallback_policy - let policy = '' - if (policyHeader.length > 0) { - // The right-most policy takes precedence. - // The left-most policy is the fallback. - for (let i = policyHeader.length; i !== 0; i--) { - const token = policyHeader[i - 1].trim() - if (referrerPolicyTokens.has(token)) { - policy = token - break - } - } - } +/*! + * mime-db + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2015-2022 Douglas Christopher Wilson + * MIT Licensed + */ - // 2. If policy is not the empty string, then set request’s referrer policy to policy. - if (policy !== '') { - request.referrerPolicy = policy - } -} +/** + * Module exports. 
+ */ -// https://fetch.spec.whatwg.org/#cross-origin-resource-policy-check -function crossOriginResourcePolicyCheck () { - // TODO - return 'allowed' -} +module.exports = __nccwpck_require__(53765) -// https://fetch.spec.whatwg.org/#concept-cors-check -function corsCheck () { - // TODO - return 'success' -} -// https://fetch.spec.whatwg.org/#concept-tao-check -function TAOCheck () { - // TODO - return 'success' -} +/***/ }), -function appendFetchMetadata (httpRequest) { - // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-dest-header - // TODO +/***/ 43583: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-mode-header +"use strict"; +/*! + * mime-types + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ - // 1. Assert: r’s url is a potentially trustworthy URL. - // TODO - // 2. Let header be a Structured Header whose value is a token. - let header = null - // 3. Set header’s value to r’s mode. - header = httpRequest.mode +/** + * Module dependencies. + * @private + */ - // 4. Set a structured field value `Sec-Fetch-Mode`/header in r’s header list. - httpRequest.headersList.set('sec-fetch-mode', header) +var db = __nccwpck_require__(47426) +var extname = (__nccwpck_require__(71017).extname) - // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-site-header - // TODO +/** + * Module variables. + * @private + */ - // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-user-header - // TODO -} +var EXTRACT_TYPE_REGEXP = /^\s*([^;\s]*)(?:;|\s|$)/ +var TEXT_TYPE_REGEXP = /^text\//i -// https://fetch.spec.whatwg.org/#append-a-request-origin-header -function appendRequestOriginHeader (request) { - // 1. Let serializedOrigin be the result of byte-serializing a request origin with request. - let serializedOrigin = request.origin +/** + * Module exports. + * @public + */ - // 2. If request’s response tainting is "cors" or request’s mode is "websocket", then append (`Origin`, serializedOrigin) to request’s header list. - if (request.responseTainting === 'cors' || request.mode === 'websocket') { - if (serializedOrigin) { - request.headersList.append('origin', serializedOrigin) - } +exports.charset = charset +exports.charsets = { lookup: charset } +exports.contentType = contentType +exports.extension = extension +exports.extensions = Object.create(null) +exports.lookup = lookup +exports.types = Object.create(null) - // 3. Otherwise, if request’s method is neither `GET` nor `HEAD`, then: - } else if (request.method !== 'GET' && request.method !== 'HEAD') { - // 1. Switch on request’s referrer policy: - switch (request.referrerPolicy) { - case 'no-referrer': - // Set serializedOrigin to `null`. - serializedOrigin = null - break - case 'no-referrer-when-downgrade': - case 'strict-origin': - case 'strict-origin-when-cross-origin': - // If request’s origin is a tuple origin, its scheme is "https", and request’s current URL’s scheme is not "https", then set serializedOrigin to `null`. - if (request.origin && urlHasHttpsScheme(request.origin) && !urlHasHttpsScheme(requestCurrentURL(request))) { - serializedOrigin = null - } - break - case 'same-origin': - // If request’s origin is not same origin with request’s current URL’s origin, then set serializedOrigin to `null`. - if (!sameOrigin(request, requestCurrentURL(request))) { - serializedOrigin = null - } - break - default: - // Do nothing. 
- } +// Populate the extensions/types maps +populateMaps(exports.extensions, exports.types) - if (serializedOrigin) { - // 2. Append (`Origin`, serializedOrigin) to request’s header list. - request.headersList.append('origin', serializedOrigin) - } +/** + * Get the default charset for a MIME type. + * + * @param {string} type + * @return {boolean|string} + */ + +function charset (type) { + if (!type || typeof type !== 'string') { + return false } -} -function coarsenedSharedCurrentTime (crossOriginIsolatedCapability) { - // TODO - return performance.now() -} + // TODO: use media-typer + var match = EXTRACT_TYPE_REGEXP.exec(type) + var mime = match && db[match[1].toLowerCase()] -// https://fetch.spec.whatwg.org/#create-an-opaque-timing-info -function createOpaqueTimingInfo (timingInfo) { - return { - startTime: timingInfo.startTime ?? 0, - redirectStartTime: 0, - redirectEndTime: 0, - postRedirectStartTime: timingInfo.startTime ?? 0, - finalServiceWorkerStartTime: 0, - finalNetworkResponseStartTime: 0, - finalNetworkRequestStartTime: 0, - endTime: 0, - encodedBodySize: 0, - decodedBodySize: 0, - finalConnectionTimingInfo: null + if (mime && mime.charset) { + return mime.charset } -} -// https://html.spec.whatwg.org/multipage/origin.html#policy-container -function makePolicyContainer () { - // Note: the fetch spec doesn't make use of embedder policy or CSP list - return { - referrerPolicy: 'strict-origin-when-cross-origin' + // default text/* to utf-8 + if (match && TEXT_TYPE_REGEXP.test(match[1])) { + return 'UTF-8' } -} -// https://html.spec.whatwg.org/multipage/origin.html#clone-a-policy-container -function clonePolicyContainer (policyContainer) { - return { - referrerPolicy: policyContainer.referrerPolicy - } + return false } -// https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer -function determineRequestsReferrer (request) { - // 1. Let policy be request's referrer policy. - const policy = request.referrerPolicy +/** + * Create a full Content-Type header given a MIME type or extension. + * + * @param {string} str + * @return {boolean|string} + */ - // Note: policy cannot (shouldn't) be null or an empty string. - assert(policy) +function contentType (str) { + // TODO: should this even be in this module? + if (!str || typeof str !== 'string') { + return false + } - // 2. Let environment be request’s client. + var mime = str.indexOf('/') === -1 + ? exports.lookup(str) + : str - let referrerSource = null + if (!mime) { + return false + } - // 3. Switch on request’s referrer: - if (request.referrer === 'client') { - // Note: node isn't a browser and doesn't implement document/iframes, - // so we bypass this step and replace it with our own. + // TODO: use content-type or other module + if (mime.indexOf('charset') === -1) { + var charset = exports.charset(mime) + if (charset) mime += '; charset=' + charset.toLowerCase() + } - const globalOrigin = getGlobalOrigin() + return mime +} - if (!globalOrigin || globalOrigin.origin === 'null') { - return 'no-referrer' - } +/** + * Get the default extension for a MIME type. + * + * @param {string} type + * @return {boolean|string} + */ - // note: we need to clone it as it's mutated - referrerSource = new URL(globalOrigin) - } else if (request.referrer instanceof URL) { - // Let referrerSource be request’s referrer. - referrerSource = request.referrer +function extension (type) { + if (!type || typeof type !== 'string') { + return false } - // 4. 
Let request’s referrerURL be the result of stripping referrerSource for - // use as a referrer. - let referrerURL = stripURLForReferrer(referrerSource) + // TODO: use media-typer + var match = EXTRACT_TYPE_REGEXP.exec(type) - // 5. Let referrerOrigin be the result of stripping referrerSource for use as - // a referrer, with the origin-only flag set to true. - const referrerOrigin = stripURLForReferrer(referrerSource, true) + // get extensions + var exts = match && exports.extensions[match[1].toLowerCase()] - // 6. If the result of serializing referrerURL is a string whose length is - // greater than 4096, set referrerURL to referrerOrigin. - if (referrerURL.toString().length > 4096) { - referrerURL = referrerOrigin + if (!exts || !exts.length) { + return false } - const areSameOrigin = sameOrigin(request, referrerURL) - const isNonPotentiallyTrustWorthy = isURLPotentiallyTrustworthy(referrerURL) && - !isURLPotentiallyTrustworthy(request.url) - - // 8. Execute the switch statements corresponding to the value of policy: - switch (policy) { - case 'origin': return referrerOrigin != null ? referrerOrigin : stripURLForReferrer(referrerSource, true) - case 'unsafe-url': return referrerURL - case 'same-origin': - return areSameOrigin ? referrerOrigin : 'no-referrer' - case 'origin-when-cross-origin': - return areSameOrigin ? referrerURL : referrerOrigin - case 'strict-origin-when-cross-origin': { - const currentURL = requestCurrentURL(request) + return exts[0] +} - // 1. If the origin of referrerURL and the origin of request’s current - // URL are the same, then return referrerURL. - if (sameOrigin(referrerURL, currentURL)) { - return referrerURL - } +/** + * Lookup the MIME type for a file path/extension. + * + * @param {string} path + * @return {boolean|string} + */ - // 2. If referrerURL is a potentially trustworthy URL and request’s - // current URL is not a potentially trustworthy URL, then return no - // referrer. - if (isURLPotentiallyTrustworthy(referrerURL) && !isURLPotentiallyTrustworthy(currentURL)) { - return 'no-referrer' - } +function lookup (path) { + if (!path || typeof path !== 'string') { + return false + } - // 3. Return referrerOrigin. - return referrerOrigin - } - case 'strict-origin': // eslint-disable-line - /** - * 1. If referrerURL is a potentially trustworthy URL and - * request’s current URL is not a potentially trustworthy URL, - * then return no referrer. - * 2. Return referrerOrigin - */ - case 'no-referrer-when-downgrade': // eslint-disable-line - /** - * 1. If referrerURL is a potentially trustworthy URL and - * request’s current URL is not a potentially trustworthy URL, - * then return no referrer. - * 2. Return referrerOrigin - */ + // get the extension ("ext" or ".ext" or full path) + var extension = extname('x.' + path) + .toLowerCase() + .substr(1) - default: // eslint-disable-line - return isNonPotentiallyTrustWorthy ? 'no-referrer' : referrerOrigin + if (!extension) { + return false } + + return exports.types[extension] || false } /** - * @see https://w3c.github.io/webappsec-referrer-policy/#strip-url - * @param {URL} url - * @param {boolean|undefined} originOnly + * Populate the extensions and types maps. + * @private */ -function stripURLForReferrer (url, originOnly) { - // 1. Assert: url is a URL. - assert(url instanceof URL) - // 2. If url’s scheme is a local scheme, then return no referrer. 
- if (url.protocol === 'file:' || url.protocol === 'about:' || url.protocol === 'blank:') { - return 'no-referrer' - } +function populateMaps (extensions, types) { + // source preference (least -> most) + var preference = ['nginx', 'apache', undefined, 'iana'] - // 3. Set url’s username to the empty string. - url.username = '' + Object.keys(db).forEach(function forEachMimeType (type) { + var mime = db[type] + var exts = mime.extensions - // 4. Set url’s password to the empty string. - url.password = '' + if (!exts || !exts.length) { + return + } - // 5. Set url’s fragment to null. - url.hash = '' + // mime -> extensions + extensions[type] = exts - // 6. If the origin-only flag is true, then: - if (originOnly) { - // 1. Set url’s path to « the empty string ». - url.pathname = '' + // extension -> mime + for (var i = 0; i < exts.length; i++) { + var extension = exts[i] - // 2. Set url’s query to null. - url.search = '' - } + if (types[extension]) { + var from = preference.indexOf(db[types[extension]].source) + var to = preference.indexOf(mime.source) - // 7. Return url. - return url + if (types[extension] !== 'application/octet-stream' && + (from > to || (from === to && types[extension].substr(0, 12) === 'application/'))) { + // skip the remapping + continue + } + } + + // set the extension -> mime + types[extension] = type + } + }) } -function isURLPotentiallyTrustworthy (url) { - if (!(url instanceof URL)) { - return false - } - // If child of about, return true - if (url.href === 'about:blank' || url.href === 'about:srcdoc') { - return true +/***/ }), + +/***/ 46038: +/***/ ((module) => { + +"use strict"; + + +/** + * @param typeMap [Object] Map of MIME type -> Array[extensions] + * @param ... + */ +function Mime() { + this._types = Object.create(null); + this._extensions = Object.create(null); + + for (let i = 0; i < arguments.length; i++) { + this.define(arguments[i]); } - // If scheme is data, return true - if (url.protocol === 'data:') return true + this.define = this.define.bind(this); + this.getType = this.getType.bind(this); + this.getExtension = this.getExtension.bind(this); +} - // If file, return true - if (url.protocol === 'file:') return true +/** + * Define mimetype -> extension mappings. Each key is a mime-type that maps + * to an array of extensions associated with the type. The first extension is + * used as the default extension for the type. + * + * e.g. mime.define({'audio/ogg', ['oga', 'ogg', 'spx']}); + * + * If a type declares an extension that has already been defined, an error will + * be thrown. To suppress this error and force the extension to be associated + * with the new type, pass `force`=true. Alternatively, you may prefix the + * extension with "*" to map the type to extension, without mapping the + * extension to the type. + * + * e.g. 
mime.define({'audio/wav', ['wav']}, {'audio/x-wav', ['*wav']}); + * + * + * @param map (Object) type definitions + * @param force (Boolean) if true, force overriding of existing definitions + */ +Mime.prototype.define = function(typeMap, force) { + for (let type in typeMap) { + let extensions = typeMap[type].map(function(t) { + return t.toLowerCase(); + }); + type = type.toLowerCase(); - return isOriginPotentiallyTrustworthy(url.origin) + for (let i = 0; i < extensions.length; i++) { + const ext = extensions[i]; - function isOriginPotentiallyTrustworthy (origin) { - // If origin is explicitly null, return false - if (origin == null || origin === 'null') return false + // '*' prefix = not the preferred type for this extension. So fixup the + // extension, and skip it. + if (ext[0] === '*') { + continue; + } - const originAsURL = new URL(origin) + if (!force && (ext in this._types)) { + throw new Error( + 'Attempt to change mapping for "' + ext + + '" extension from "' + this._types[ext] + '" to "' + type + + '". Pass `force=true` to allow this, otherwise remove "' + ext + + '" from the list of extensions for "' + type + '".' + ); + } - // If secure, return true - if (originAsURL.protocol === 'https:' || originAsURL.protocol === 'wss:') { - return true + this._types[ext] = type; } - // If localhost or variants, return true - if (/^127(?:\.[0-9]+){0,2}\.[0-9]+$|^\[(?:0*:)*?:?0*1\]$/.test(originAsURL.hostname) || - (originAsURL.hostname === 'localhost' || originAsURL.hostname.includes('localhost.')) || - (originAsURL.hostname.endsWith('.localhost'))) { - return true + // Use first extension as default + if (force || !this._extensions[type]) { + const ext = extensions[0]; + this._extensions[type] = (ext[0] !== '*') ? ext : ext.substr(1); } - - // If any other, return false - return false } -} +}; /** - * @see https://w3c.github.io/webappsec-subresource-integrity/#does-response-match-metadatalist - * @param {Uint8Array} bytes - * @param {string} metadataList + * Lookup a mime type based on extension */ -function bytesMatch (bytes, metadataList) { - // If node is not built with OpenSSL support, we cannot check - // a request's integrity, so allow it by default (the spec will - // allow requests if an invalid hash is given, as precedence). - /* istanbul ignore if: only if node is built with --without-ssl */ - if (crypto === undefined) { - return true - } +Mime.prototype.getType = function(path) { + path = String(path); + let last = path.replace(/^.*[/\\]/, '').toLowerCase(); + let ext = last.replace(/^.*\./, '').toLowerCase(); - // 1. Let parsedMetadata be the result of parsing metadataList. - const parsedMetadata = parseMetadata(metadataList) + let hasPath = last.length < path.length; + let hasDot = ext.length < last.length - 1; - // 2. If parsedMetadata is no metadata, return true. - if (parsedMetadata === 'no metadata') { - return true - } + return (hasDot || !hasPath) && this._types[ext] || null; +}; - // 3. If response is not eligible for integrity validation, return false. - // TODO +/** + * Return file extension associated with a mime type + */ +Mime.prototype.getExtension = function(type) { + type = /^\s*([^;\s]*)/.test(type) && RegExp.$1; + return type && this._extensions[type.toLowerCase()] || null; +}; - // 4. If parsedMetadata is the empty set, return true. - if (parsedMetadata.length === 0) { - return true - } +module.exports = Mime; - // 5. Let metadata be the result of getting the strongest - // metadata from parsedMetadata. 
- const strongest = getStrongestMetadata(parsedMetadata) - const metadata = filterMetadataListByAlgorithm(parsedMetadata, strongest) - // 6. For each item in metadata: - for (const item of metadata) { - // 1. Let algorithm be the alg component of item. - const algorithm = item.algo +/***/ }), - // 2. Let expectedValue be the val component of item. - const expectedValue = item.hash +/***/ 29994: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e - // "be liberal with padding". This is annoying, and it's not even in the spec. +"use strict"; - // 3. Let actualValue be the result of applying algorithm to bytes. - let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64') - if (actualValue[actualValue.length - 1] === '=') { - if (actualValue[actualValue.length - 2] === '=') { - actualValue = actualValue.slice(0, -2) - } else { - actualValue = actualValue.slice(0, -1) - } - } +let Mime = __nccwpck_require__(46038); +module.exports = new Mime(__nccwpck_require__(13114), __nccwpck_require__(38809)); - // 4. If actualValue is a case-sensitive match for expectedValue, - // return true. - if (compareBase64Mixed(actualValue, expectedValue)) { - return true - } - } - // 7. Return false. - return false -} +/***/ }), -// https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options -// https://www.w3.org/TR/CSP2/#source-list-syntax -// https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1 -const parseHashWithOptions = /(?sha256|sha384|sha512)-((?[A-Za-z0-9+/]+|[A-Za-z0-9_-]+)={0,2}(?:\s|$)( +[!-~]*)?)?/i +/***/ 38809: +/***/ ((module) => { -/** - * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata - * @param {string} metadata - */ -function parseMetadata (metadata) { - // 1. Let result be the empty set. 
- /** @type {{ algo: string, hash: string }[]} */ - const result = [] +module.exports = {"application/prs.cww":["cww"],"application/vnd.1000minds.decision-model+xml":["1km"],"application/vnd.3gpp.pic-bw-large":["plb"],"application/vnd.3gpp.pic-bw-small":["psb"],"application/vnd.3gpp.pic-bw-var":["pvb"],"application/vnd.3gpp2.tcap":["tcap"],"application/vnd.3m.post-it-notes":["pwn"],"application/vnd.accpac.simply.aso":["aso"],"application/vnd.accpac.simply.imp":["imp"],"application/vnd.acucobol":["acu"],"application/vnd.acucorp":["atc","acutc"],"application/vnd.adobe.air-application-installer-package+zip":["air"],"application/vnd.adobe.formscentral.fcdt":["fcdt"],"application/vnd.adobe.fxp":["fxp","fxpl"],"application/vnd.adobe.xdp+xml":["xdp"],"application/vnd.adobe.xfdf":["xfdf"],"application/vnd.ahead.space":["ahead"],"application/vnd.airzip.filesecure.azf":["azf"],"application/vnd.airzip.filesecure.azs":["azs"],"application/vnd.amazon.ebook":["azw"],"application/vnd.americandynamics.acc":["acc"],"application/vnd.amiga.ami":["ami"],"application/vnd.android.package-archive":["apk"],"application/vnd.anser-web-certificate-issue-initiation":["cii"],"application/vnd.anser-web-funds-transfer-initiation":["fti"],"application/vnd.antix.game-component":["atx"],"application/vnd.apple.installer+xml":["mpkg"],"application/vnd.apple.keynote":["key"],"application/vnd.apple.mpegurl":["m3u8"],"application/vnd.apple.numbers":["numbers"],"application/vnd.apple.pages":["pages"],"application/vnd.apple.pkpass":["pkpass"],"application/vnd.aristanetworks.swi":["swi"],"application/vnd.astraea-software.iota":["iota"],"application/vnd.audiograph":["aep"],"application/vnd.balsamiq.bmml+xml":["bmml"],"application/vnd.blueice.multipass":["mpm"],"application/vnd.bmi":["bmi"],"application/vnd.businessobjects":["rep"],"application/vnd.chemdraw+xml":["cdxml"],"application/vnd.chipnuts.karaoke-mmd":["mmd"],"application/vnd.cinderella":["cdy"],"application/vnd.citationstyles.style+xml":["csl"],"application/vnd.claymore":["cla"],"application/vnd.cloanto.rp9":["rp9"],"application/vnd.clonk.c4group":["c4g","c4d","c4f","c4p","c4u"],"application/vnd.cluetrust.cartomobile-config":["c11amc"],"application/vnd.cluetrust.cartomobile-config-pkg":["c11amz"],"application/vnd.commonspace":["csp"],"application/vnd.contact.cmsg":["cdbcmsg"],"application/vnd.cosmocaller":["cmc"],"application/vnd.crick.clicker":["clkx"],"application/vnd.crick.clicker.keyboard":["clkk"],"application/vnd.crick.clicker.palette":["clkp"],"application/vnd.crick.clicker.template":["clkt"],"application/vnd.crick.clicker.wordbank":["clkw"],"application/vnd.criticaltools.wbs+xml":["wbs"],"application/vnd.ctc-posml":["pml"],"application/vnd.cups-ppd":["ppd"],"application/vnd.curl.car":["car"],"application/vnd.curl.pcurl":["pcurl"],"application/vnd.dart":["dart"],"application/vnd.data-vision.rdz":["rdz"],"application/vnd.dbf":["dbf"],"application/vnd.dece.data":["uvf","uvvf","uvd","uvvd"],"application/vnd.dece.ttml+xml":["uvt","uvvt"],"application/vnd.dece.unspecified":["uvx","uvvx"],"application/vnd.dece.zip":["uvz","uvvz"],"application/vnd.denovo.fcselayout-link":["fe_launch"],"application/vnd.dna":["dna"],"application/vnd.dolby.mlp":["mlp"],"application/vnd.dpgraph":["dpg"],"application/vnd.dreamfactory":["dfac"],"application/vnd.ds-keypoint":["kpxx"],"application/vnd.dvb.ait":["ait"],"application/vnd.dvb.service":["svc"],"application/vnd.dynageo":["geo"],"application/vnd.ecowin.chart":["mag"],"application/vnd.enliven":["nml"],"application/vnd.epson.esf":["esf"],"a
pplication/vnd.epson.msf":["msf"],"application/vnd.epson.quickanime":["qam"],"application/vnd.epson.salt":["slt"],"application/vnd.epson.ssf":["ssf"],"application/vnd.eszigno3+xml":["es3","et3"],"application/vnd.ezpix-album":["ez2"],"application/vnd.ezpix-package":["ez3"],"application/vnd.fdf":["fdf"],"application/vnd.fdsn.mseed":["mseed"],"application/vnd.fdsn.seed":["seed","dataless"],"application/vnd.flographit":["gph"],"application/vnd.fluxtime.clip":["ftc"],"application/vnd.framemaker":["fm","frame","maker","book"],"application/vnd.frogans.fnc":["fnc"],"application/vnd.frogans.ltf":["ltf"],"application/vnd.fsc.weblaunch":["fsc"],"application/vnd.fujitsu.oasys":["oas"],"application/vnd.fujitsu.oasys2":["oa2"],"application/vnd.fujitsu.oasys3":["oa3"],"application/vnd.fujitsu.oasysgp":["fg5"],"application/vnd.fujitsu.oasysprs":["bh2"],"application/vnd.fujixerox.ddd":["ddd"],"application/vnd.fujixerox.docuworks":["xdw"],"application/vnd.fujixerox.docuworks.binder":["xbd"],"application/vnd.fuzzysheet":["fzs"],"application/vnd.genomatix.tuxedo":["txd"],"application/vnd.geogebra.file":["ggb"],"application/vnd.geogebra.tool":["ggt"],"application/vnd.geometry-explorer":["gex","gre"],"application/vnd.geonext":["gxt"],"application/vnd.geoplan":["g2w"],"application/vnd.geospace":["g3w"],"application/vnd.gmx":["gmx"],"application/vnd.google-apps.document":["gdoc"],"application/vnd.google-apps.presentation":["gslides"],"application/vnd.google-apps.spreadsheet":["gsheet"],"application/vnd.google-earth.kml+xml":["kml"],"application/vnd.google-earth.kmz":["kmz"],"application/vnd.grafeq":["gqf","gqs"],"application/vnd.groove-account":["gac"],"application/vnd.groove-help":["ghf"],"application/vnd.groove-identity-message":["gim"],"application/vnd.groove-injector":["grv"],"application/vnd.groove-tool-message":["gtm"],"application/vnd.groove-tool-template":["tpl"],"application/vnd.groove-vcard":["vcg"],"application/vnd.hal+xml":["hal"],"application/vnd.handheld-entertainment+xml":["zmm"],"application/vnd.hbci":["hbci"],"application/vnd.hhe.lesson-player":["les"],"application/vnd.hp-hpgl":["hpgl"],"application/vnd.hp-hpid":["hpid"],"application/vnd.hp-hps":["hps"],"application/vnd.hp-jlyt":["jlt"],"application/vnd.hp-pcl":["pcl"],"application/vnd.hp-pclxl":["pclxl"],"application/vnd.hydrostatix.sof-data":["sfd-hdstx"],"application/vnd.ibm.minipay":["mpy"],"application/vnd.ibm.modcap":["afp","listafp","list3820"],"application/vnd.ibm.rights-management":["irm"],"application/vnd.ibm.secure-container":["sc"],"application/vnd.iccprofile":["icc","icm"],"application/vnd.igloader":["igl"],"application/vnd.immervision-ivp":["ivp"],"application/vnd.immervision-ivu":["ivu"],"application/vnd.insors.igm":["igm"],"application/vnd.intercon.formnet":["xpw","xpx"],"application/vnd.intergeo":["i2g"],"application/vnd.intu.qbo":["qbo"],"application/vnd.intu.qfx":["qfx"],"application/vnd.ipunplugged.rcprofile":["rcprofile"],"application/vnd.irepository.package+xml":["irp"],"application/vnd.is-xpr":["xpr"],"application/vnd.isac.fcs":["fcs"],"application/vnd.jam":["jam"],"application/vnd.jcp.javame.midlet-rms":["rms"],"application/vnd.jisp":["jisp"],"application/vnd.joost.joda-archive":["joda"],"application/vnd.kahootz":["ktz","ktr"],"application/vnd.kde.karbon":["karbon"],"application/vnd.kde.kchart":["chrt"],"application/vnd.kde.kformula":["kfo"],"application/vnd.kde.kivio":["flw"],"application/vnd.kde.kontour":["kon"],"application/vnd.kde.kpresenter":["kpr","kpt"],"application/vnd.kde.kspread":["ksp"],"application/vnd.kde.kword
":["kwd","kwt"],"application/vnd.kenameaapp":["htke"],"application/vnd.kidspiration":["kia"],"application/vnd.kinar":["kne","knp"],"application/vnd.koan":["skp","skd","skt","skm"],"application/vnd.kodak-descriptor":["sse"],"application/vnd.las.las+xml":["lasxml"],"application/vnd.llamagraphics.life-balance.desktop":["lbd"],"application/vnd.llamagraphics.life-balance.exchange+xml":["lbe"],"application/vnd.lotus-1-2-3":["123"],"application/vnd.lotus-approach":["apr"],"application/vnd.lotus-freelance":["pre"],"application/vnd.lotus-notes":["nsf"],"application/vnd.lotus-organizer":["org"],"application/vnd.lotus-screencam":["scm"],"application/vnd.lotus-wordpro":["lwp"],"application/vnd.macports.portpkg":["portpkg"],"application/vnd.mapbox-vector-tile":["mvt"],"application/vnd.mcd":["mcd"],"application/vnd.medcalcdata":["mc1"],"application/vnd.mediastation.cdkey":["cdkey"],"application/vnd.mfer":["mwf"],"application/vnd.mfmp":["mfm"],"application/vnd.micrografx.flo":["flo"],"application/vnd.micrografx.igx":["igx"],"application/vnd.mif":["mif"],"application/vnd.mobius.daf":["daf"],"application/vnd.mobius.dis":["dis"],"application/vnd.mobius.mbk":["mbk"],"application/vnd.mobius.mqy":["mqy"],"application/vnd.mobius.msl":["msl"],"application/vnd.mobius.plc":["plc"],"application/vnd.mobius.txf":["txf"],"application/vnd.mophun.application":["mpn"],"application/vnd.mophun.certificate":["mpc"],"application/vnd.mozilla.xul+xml":["xul"],"application/vnd.ms-artgalry":["cil"],"application/vnd.ms-cab-compressed":["cab"],"application/vnd.ms-excel":["xls","xlm","xla","xlc","xlt","xlw"],"application/vnd.ms-excel.addin.macroenabled.12":["xlam"],"application/vnd.ms-excel.sheet.binary.macroenabled.12":["xlsb"],"application/vnd.ms-excel.sheet.macroenabled.12":["xlsm"],"application/vnd.ms-excel.template.macroenabled.12":["xltm"],"application/vnd.ms-fontobject":["eot"],"application/vnd.ms-htmlhelp":["chm"],"application/vnd.ms-ims":["ims"],"application/vnd.ms-lrm":["lrm"],"application/vnd.ms-officetheme":["thmx"],"application/vnd.ms-outlook":["msg"],"application/vnd.ms-pki.seccat":["cat"],"application/vnd.ms-pki.stl":["*stl"],"application/vnd.ms-powerpoint":["ppt","pps","pot"],"application/vnd.ms-powerpoint.addin.macroenabled.12":["ppam"],"application/vnd.ms-powerpoint.presentation.macroenabled.12":["pptm"],"application/vnd.ms-powerpoint.slide.macroenabled.12":["sldm"],"application/vnd.ms-powerpoint.slideshow.macroenabled.12":["ppsm"],"application/vnd.ms-powerpoint.template.macroenabled.12":["potm"],"application/vnd.ms-project":["mpp","mpt"],"application/vnd.ms-word.document.macroenabled.12":["docm"],"application/vnd.ms-word.template.macroenabled.12":["dotm"],"application/vnd.ms-works":["wps","wks","wcm","wdb"],"application/vnd.ms-wpl":["wpl"],"application/vnd.ms-xpsdocument":["xps"],"application/vnd.mseq":["mseq"],"application/vnd.musician":["mus"],"application/vnd.muvee.style":["msty"],"application/vnd.mynfc":["taglet"],"application/vnd.neurolanguage.nlu":["nlu"],"application/vnd.nitf":["ntf","nitf"],"application/vnd.noblenet-directory":["nnd"],"application/vnd.noblenet-sealer":["nns"],"application/vnd.noblenet-web":["nnw"],"application/vnd.nokia.n-gage.ac+xml":["*ac"],"application/vnd.nokia.n-gage.data":["ngdat"],"application/vnd.nokia.n-gage.symbian.install":["n-gage"],"application/vnd.nokia.radio-preset":["rpst"],"application/vnd.nokia.radio-presets":["rpss"],"application/vnd.novadigm.edm":["edm"],"application/vnd.novadigm.edx":["edx"],"application/vnd.novadigm.ext":["ext"],"application/vnd.oasis.opendocument.cha
rt":["odc"],"application/vnd.oasis.opendocument.chart-template":["otc"],"application/vnd.oasis.opendocument.database":["odb"],"application/vnd.oasis.opendocument.formula":["odf"],"application/vnd.oasis.opendocument.formula-template":["odft"],"application/vnd.oasis.opendocument.graphics":["odg"],"application/vnd.oasis.opendocument.graphics-template":["otg"],"application/vnd.oasis.opendocument.image":["odi"],"application/vnd.oasis.opendocument.image-template":["oti"],"application/vnd.oasis.opendocument.presentation":["odp"],"application/vnd.oasis.opendocument.presentation-template":["otp"],"application/vnd.oasis.opendocument.spreadsheet":["ods"],"application/vnd.oasis.opendocument.spreadsheet-template":["ots"],"application/vnd.oasis.opendocument.text":["odt"],"application/vnd.oasis.opendocument.text-master":["odm"],"application/vnd.oasis.opendocument.text-template":["ott"],"application/vnd.oasis.opendocument.text-web":["oth"],"application/vnd.olpc-sugar":["xo"],"application/vnd.oma.dd2+xml":["dd2"],"application/vnd.openblox.game+xml":["obgx"],"application/vnd.openofficeorg.extension":["oxt"],"application/vnd.openstreetmap.data+xml":["osm"],"application/vnd.openxmlformats-officedocument.presentationml.presentation":["pptx"],"application/vnd.openxmlformats-officedocument.presentationml.slide":["sldx"],"application/vnd.openxmlformats-officedocument.presentationml.slideshow":["ppsx"],"application/vnd.openxmlformats-officedocument.presentationml.template":["potx"],"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet":["xlsx"],"application/vnd.openxmlformats-officedocument.spreadsheetml.template":["xltx"],"application/vnd.openxmlformats-officedocument.wordprocessingml.document":["docx"],"application/vnd.openxmlformats-officedocument.wordprocessingml.template":["dotx"],"application/vnd.osgeo.mapguide.package":["mgp"],"application/vnd.osgi.dp":["dp"],"application/vnd.osgi.subsystem":["esa"],"application/vnd.palm":["pdb","pqa","oprc"],"application/vnd.pawaafile":["paw"],"application/vnd.pg.format":["str"],"application/vnd.pg.osasli":["ei6"],"application/vnd.picsel":["efif"],"application/vnd.pmi.widget":["wg"],"application/vnd.pocketlearn":["plf"],"application/vnd.powerbuilder6":["pbd"],"application/vnd.previewsystems.box":["box"],"application/vnd.proteus.magazine":["mgz"],"application/vnd.publishare-delta-tree":["qps"],"application/vnd.pvi.ptid1":["ptid"],"application/vnd.quark.quarkxpress":["qxd","qxt","qwd","qwt","qxl","qxb"],"application/vnd.rar":["rar"],"application/vnd.realvnc.bed":["bed"],"application/vnd.recordare.musicxml":["mxl"],"application/vnd.recordare.musicxml+xml":["musicxml"],"application/vnd.rig.cryptonote":["cryptonote"],"application/vnd.rim.cod":["cod"],"application/vnd.rn-realmedia":["rm"],"application/vnd.rn-realmedia-vbr":["rmvb"],"application/vnd.route66.link66+xml":["link66"],"application/vnd.sailingtracker.track":["st"],"application/vnd.seemail":["see"],"application/vnd.sema":["sema"],"application/vnd.semd":["semd"],"application/vnd.semf":["semf"],"application/vnd.shana.informed.formdata":["ifm"],"application/vnd.shana.informed.formtemplate":["itp"],"application/vnd.shana.informed.interchange":["iif"],"application/vnd.shana.informed.package":["ipk"],"application/vnd.simtech-mindmapper":["twd","twds"],"application/vnd.smaf":["mmf"],"application/vnd.smart.teacher":["teacher"],"application/vnd.software602.filler.form+xml":["fo"],"application/vnd.solent.sdkm+xml":["sdkm","sdkd"],"application/vnd.spotfire.dxp":["dxp"],"application/vnd.spotfire.sfs":["sfs"],"applicatio
n/vnd.stardivision.calc":["sdc"],"application/vnd.stardivision.draw":["sda"],"application/vnd.stardivision.impress":["sdd"],"application/vnd.stardivision.math":["smf"],"application/vnd.stardivision.writer":["sdw","vor"],"application/vnd.stardivision.writer-global":["sgl"],"application/vnd.stepmania.package":["smzip"],"application/vnd.stepmania.stepchart":["sm"],"application/vnd.sun.wadl+xml":["wadl"],"application/vnd.sun.xml.calc":["sxc"],"application/vnd.sun.xml.calc.template":["stc"],"application/vnd.sun.xml.draw":["sxd"],"application/vnd.sun.xml.draw.template":["std"],"application/vnd.sun.xml.impress":["sxi"],"application/vnd.sun.xml.impress.template":["sti"],"application/vnd.sun.xml.math":["sxm"],"application/vnd.sun.xml.writer":["sxw"],"application/vnd.sun.xml.writer.global":["sxg"],"application/vnd.sun.xml.writer.template":["stw"],"application/vnd.sus-calendar":["sus","susp"],"application/vnd.svd":["svd"],"application/vnd.symbian.install":["sis","sisx"],"application/vnd.syncml+xml":["xsm"],"application/vnd.syncml.dm+wbxml":["bdm"],"application/vnd.syncml.dm+xml":["xdm"],"application/vnd.syncml.dmddf+xml":["ddf"],"application/vnd.tao.intent-module-archive":["tao"],"application/vnd.tcpdump.pcap":["pcap","cap","dmp"],"application/vnd.tmobile-livetv":["tmo"],"application/vnd.trid.tpt":["tpt"],"application/vnd.triscape.mxs":["mxs"],"application/vnd.trueapp":["tra"],"application/vnd.ufdl":["ufd","ufdl"],"application/vnd.uiq.theme":["utz"],"application/vnd.umajin":["umj"],"application/vnd.unity":["unityweb"],"application/vnd.uoml+xml":["uoml"],"application/vnd.vcx":["vcx"],"application/vnd.visio":["vsd","vst","vss","vsw"],"application/vnd.visionary":["vis"],"application/vnd.vsf":["vsf"],"application/vnd.wap.wbxml":["wbxml"],"application/vnd.wap.wmlc":["wmlc"],"application/vnd.wap.wmlscriptc":["wmlsc"],"application/vnd.webturbo":["wtb"],"application/vnd.wolfram.player":["nbp"],"application/vnd.wordperfect":["wpd"],"application/vnd.wqd":["wqd"],"application/vnd.wt.stf":["stf"],"application/vnd.xara":["xar"],"application/vnd.xfdl":["xfdl"],"application/vnd.yamaha.hv-dic":["hvd"],"application/vnd.yamaha.hv-script":["hvs"],"application/vnd.yamaha.hv-voice":["hvp"],"application/vnd.yamaha.openscoreformat":["osf"],"application/vnd.yamaha.openscoreformat.osfpvg+xml":["osfpvg"],"application/vnd.yamaha.smaf-audio":["saf"],"application/vnd.yamaha.smaf-phrase":["spf"],"application/vnd.yellowriver-custom-menu":["cmp"],"application/vnd.zul":["zir","zirz"],"application/vnd.zzazz.deck+xml":["zaz"],"application/x-7z-compressed":["7z"],"application/x-abiword":["abw"],"application/x-ace-compressed":["ace"],"application/x-apple-diskimage":["*dmg"],"application/x-arj":["arj"],"application/x-authorware-bin":["aab","x32","u32","vox"],"application/x-authorware-map":["aam"],"application/x-authorware-seg":["aas"],"application/x-bcpio":["bcpio"],"application/x-bdoc":["*bdoc"],"application/x-bittorrent":["torrent"],"application/x-blorb":["blb","blorb"],"application/x-bzip":["bz"],"application/x-bzip2":["bz2","boz"],"application/x-cbr":["cbr","cba","cbt","cbz","cb7"],"application/x-cdlink":["vcd"],"application/x-cfs-compressed":["cfs"],"application/x-chat":["chat"],"application/x-chess-pgn":["pgn"],"application/x-chrome-extension":["crx"],"application/x-cocoa":["cco"],"application/x-conference":["nsc"],"application/x-cpio":["cpio"],"application/x-csh":["csh"],"application/x-debian-package":["*deb","udeb"],"application/x-dgc-compressed":["dgc"],"application/x-director":["dir","dcr","dxr","cst","cct","cxt","w3d","fgd","sw
a"],"application/x-doom":["wad"],"application/x-dtbncx+xml":["ncx"],"application/x-dtbook+xml":["dtb"],"application/x-dtbresource+xml":["res"],"application/x-dvi":["dvi"],"application/x-envoy":["evy"],"application/x-eva":["eva"],"application/x-font-bdf":["bdf"],"application/x-font-ghostscript":["gsf"],"application/x-font-linux-psf":["psf"],"application/x-font-pcf":["pcf"],"application/x-font-snf":["snf"],"application/x-font-type1":["pfa","pfb","pfm","afm"],"application/x-freearc":["arc"],"application/x-futuresplash":["spl"],"application/x-gca-compressed":["gca"],"application/x-glulx":["ulx"],"application/x-gnumeric":["gnumeric"],"application/x-gramps-xml":["gramps"],"application/x-gtar":["gtar"],"application/x-hdf":["hdf"],"application/x-httpd-php":["php"],"application/x-install-instructions":["install"],"application/x-iso9660-image":["*iso"],"application/x-iwork-keynote-sffkey":["*key"],"application/x-iwork-numbers-sffnumbers":["*numbers"],"application/x-iwork-pages-sffpages":["*pages"],"application/x-java-archive-diff":["jardiff"],"application/x-java-jnlp-file":["jnlp"],"application/x-keepass2":["kdbx"],"application/x-latex":["latex"],"application/x-lua-bytecode":["luac"],"application/x-lzh-compressed":["lzh","lha"],"application/x-makeself":["run"],"application/x-mie":["mie"],"application/x-mobipocket-ebook":["prc","mobi"],"application/x-ms-application":["application"],"application/x-ms-shortcut":["lnk"],"application/x-ms-wmd":["wmd"],"application/x-ms-wmz":["wmz"],"application/x-ms-xbap":["xbap"],"application/x-msaccess":["mdb"],"application/x-msbinder":["obd"],"application/x-mscardfile":["crd"],"application/x-msclip":["clp"],"application/x-msdos-program":["*exe"],"application/x-msdownload":["*exe","*dll","com","bat","*msi"],"application/x-msmediaview":["mvb","m13","m14"],"application/x-msmetafile":["*wmf","*wmz","*emf","emz"],"application/x-msmoney":["mny"],"application/x-mspublisher":["pub"],"application/x-msschedule":["scd"],"application/x-msterminal":["trm"],"application/x-mswrite":["wri"],"application/x-netcdf":["nc","cdf"],"application/x-ns-proxy-autoconfig":["pac"],"application/x-nzb":["nzb"],"application/x-perl":["pl","pm"],"application/x-pilot":["*prc","*pdb"],"application/x-pkcs12":["p12","pfx"],"application/x-pkcs7-certificates":["p7b","spc"],"application/x-pkcs7-certreqresp":["p7r"],"application/x-rar-compressed":["*rar"],"application/x-redhat-package-manager":["rpm"],"application/x-research-info-systems":["ris"],"application/x-sea":["sea"],"application/x-sh":["sh"],"application/x-shar":["shar"],"application/x-shockwave-flash":["swf"],"application/x-silverlight-app":["xap"],"application/x-sql":["sql"],"application/x-stuffit":["sit"],"application/x-stuffitx":["sitx"],"application/x-subrip":["srt"],"application/x-sv4cpio":["sv4cpio"],"application/x-sv4crc":["sv4crc"],"application/x-t3vm-image":["t3"],"application/x-tads":["gam"],"application/x-tar":["tar"],"application/x-tcl":["tcl","tk"],"application/x-tex":["tex"],"application/x-tex-tfm":["tfm"],"application/x-texinfo":["texinfo","texi"],"application/x-tgif":["*obj"],"application/x-ustar":["ustar"],"application/x-virtualbox-hdd":["hdd"],"application/x-virtualbox-ova":["ova"],"application/x-virtualbox-ovf":["ovf"],"application/x-virtualbox-vbox":["vbox"],"application/x-virtualbox-vbox-extpack":["vbox-extpack"],"application/x-virtualbox-vdi":["vdi"],"application/x-virtualbox-vhd":["vhd"],"application/x-virtualbox-vmdk":["vmdk"],"application/x-wais-source":["src"],"application/x-web-app-manifest+json":["webapp"],"application/x-x
509-ca-cert":["der","crt","pem"],"application/x-xfig":["fig"],"application/x-xliff+xml":["*xlf"],"application/x-xpinstall":["xpi"],"application/x-xz":["xz"],"application/x-zmachine":["z1","z2","z3","z4","z5","z6","z7","z8"],"audio/vnd.dece.audio":["uva","uvva"],"audio/vnd.digital-winds":["eol"],"audio/vnd.dra":["dra"],"audio/vnd.dts":["dts"],"audio/vnd.dts.hd":["dtshd"],"audio/vnd.lucent.voice":["lvp"],"audio/vnd.ms-playready.media.pya":["pya"],"audio/vnd.nuera.ecelp4800":["ecelp4800"],"audio/vnd.nuera.ecelp7470":["ecelp7470"],"audio/vnd.nuera.ecelp9600":["ecelp9600"],"audio/vnd.rip":["rip"],"audio/x-aac":["aac"],"audio/x-aiff":["aif","aiff","aifc"],"audio/x-caf":["caf"],"audio/x-flac":["flac"],"audio/x-m4a":["*m4a"],"audio/x-matroska":["mka"],"audio/x-mpegurl":["m3u"],"audio/x-ms-wax":["wax"],"audio/x-ms-wma":["wma"],"audio/x-pn-realaudio":["ram","ra"],"audio/x-pn-realaudio-plugin":["rmp"],"audio/x-realaudio":["*ra"],"audio/x-wav":["*wav"],"chemical/x-cdx":["cdx"],"chemical/x-cif":["cif"],"chemical/x-cmdf":["cmdf"],"chemical/x-cml":["cml"],"chemical/x-csml":["csml"],"chemical/x-xyz":["xyz"],"image/prs.btif":["btif"],"image/prs.pti":["pti"],"image/vnd.adobe.photoshop":["psd"],"image/vnd.airzip.accelerator.azv":["azv"],"image/vnd.dece.graphic":["uvi","uvvi","uvg","uvvg"],"image/vnd.djvu":["djvu","djv"],"image/vnd.dvb.subtitle":["*sub"],"image/vnd.dwg":["dwg"],"image/vnd.dxf":["dxf"],"image/vnd.fastbidsheet":["fbs"],"image/vnd.fpx":["fpx"],"image/vnd.fst":["fst"],"image/vnd.fujixerox.edmics-mmr":["mmr"],"image/vnd.fujixerox.edmics-rlc":["rlc"],"image/vnd.microsoft.icon":["ico"],"image/vnd.ms-dds":["dds"],"image/vnd.ms-modi":["mdi"],"image/vnd.ms-photo":["wdp"],"image/vnd.net-fpx":["npx"],"image/vnd.pco.b16":["b16"],"image/vnd.tencent.tap":["tap"],"image/vnd.valve.source.texture":["vtf"],"image/vnd.wap.wbmp":["wbmp"],"image/vnd.xiff":["xif"],"image/vnd.zbrush.pcx":["pcx"],"image/x-3ds":["3ds"],"image/x-cmu-raster":["ras"],"image/x-cmx":["cmx"],"image/x-freehand":["fh","fhc","fh4","fh5","fh7"],"image/x-icon":["*ico"],"image/x-jng":["jng"],"image/x-mrsid-image":["sid"],"image/x-ms-bmp":["*bmp"],"image/x-pcx":["*pcx"],"image/x-pict":["pic","pct"],"image/x-portable-anymap":["pnm"],"image/x-portable-bitmap":["pbm"],"image/x-portable-graymap":["pgm"],"image/x-portable-pixmap":["ppm"],"image/x-rgb":["rgb"],"image/x-tga":["tga"],"image/x-xbitmap":["xbm"],"image/x-xpixmap":["xpm"],"image/x-xwindowdump":["xwd"],"message/vnd.wfa.wsc":["wsc"],"model/vnd.collada+xml":["dae"],"model/vnd.dwf":["dwf"],"model/vnd.gdl":["gdl"],"model/vnd.gtw":["gtw"],"model/vnd.mts":["mts"],"model/vnd.opengex":["ogex"],"model/vnd.parasolid.transmit.binary":["x_b"],"model/vnd.parasolid.transmit.text":["x_t"],"model/vnd.sap.vds":["vds"],"model/vnd.usdz+zip":["usdz"],"model/vnd.valve.source.compiled-map":["bsp"],"model/vnd.vtu":["vtu"],"text/prs.lines.tag":["dsc"],"text/vnd.curl":["curl"],"text/vnd.curl.dcurl":["dcurl"],"text/vnd.curl.mcurl":["mcurl"],"text/vnd.curl.scurl":["scurl"],"text/vnd.dvb.subtitle":["sub"],"text/vnd.fly":["fly"],"text/vnd.fmi.flexstor":["flx"],"text/vnd.graphviz":["gv"],"text/vnd.in3d.3dml":["3dml"],"text/vnd.in3d.spot":["spot"],"text/vnd.sun.j2me.app-descriptor":["jad"],"text/vnd.wap.wml":["wml"],"text/vnd.wap.wmlscript":["wmls"],"text/x-asm":["s","asm"],"text/x-c":["c","cc","cxx","cpp","h","hh","dic"],"text/x-component":["htc"],"text/x-fortran":["f","for","f77","f90"],"text/x-handlebars-template":["hbs"],"text/x-java-source":["java"],"text/x-lua":["lua"],"text/x-markdown":["mkd"],"text/x-nfo":["nfo"],"te
xt/x-opml":["opml"],"text/x-org":["*org"],"text/x-pascal":["p","pas"],"text/x-processing":["pde"],"text/x-sass":["sass"],"text/x-scss":["scss"],"text/x-setext":["etx"],"text/x-sfv":["sfv"],"text/x-suse-ymp":["ymp"],"text/x-uuencode":["uu"],"text/x-vcalendar":["vcs"],"text/x-vcard":["vcf"],"video/vnd.dece.hd":["uvh","uvvh"],"video/vnd.dece.mobile":["uvm","uvvm"],"video/vnd.dece.pd":["uvp","uvvp"],"video/vnd.dece.sd":["uvs","uvvs"],"video/vnd.dece.video":["uvv","uvvv"],"video/vnd.dvb.file":["dvb"],"video/vnd.fvt":["fvt"],"video/vnd.mpegurl":["mxu","m4u"],"video/vnd.ms-playready.media.pyv":["pyv"],"video/vnd.uvvu.mp4":["uvu","uvvu"],"video/vnd.vivo":["viv"],"video/x-f4v":["f4v"],"video/x-fli":["fli"],"video/x-flv":["flv"],"video/x-m4v":["m4v"],"video/x-matroska":["mkv","mk3d","mks"],"video/x-mng":["mng"],"video/x-ms-asf":["asf","asx"],"video/x-ms-vob":["vob"],"video/x-ms-wm":["wm"],"video/x-ms-wmv":["wmv"],"video/x-ms-wmx":["wmx"],"video/x-ms-wvx":["wvx"],"video/x-msvideo":["avi"],"video/x-sgi-movie":["movie"],"video/x-smv":["smv"],"x-conference/x-cooltalk":["ice"]}; - // 2. Let empty be equal to true. - let empty = true +/***/ }), - // 3. For each token returned by splitting metadata on spaces: - for (const token of metadata.split(' ')) { - // 1. Set empty to false. - empty = false +/***/ 13114: +/***/ ((module) => { - // 2. Parse token as a hash-with-options. - const parsedToken = parseHashWithOptions.exec(token) +module.exports = {"application/andrew-inset":["ez"],"application/applixware":["aw"],"application/atom+xml":["atom"],"application/atomcat+xml":["atomcat"],"application/atomdeleted+xml":["atomdeleted"],"application/atomsvc+xml":["atomsvc"],"application/atsc-dwd+xml":["dwd"],"application/atsc-held+xml":["held"],"application/atsc-rsat+xml":["rsat"],"application/bdoc":["bdoc"],"application/calendar+xml":["xcs"],"application/ccxml+xml":["ccxml"],"application/cdfx+xml":["cdfx"],"application/cdmi-capability":["cdmia"],"application/cdmi-container":["cdmic"],"application/cdmi-domain":["cdmid"],"application/cdmi-object":["cdmio"],"application/cdmi-queue":["cdmiq"],"application/cu-seeme":["cu"],"application/dash+xml":["mpd"],"application/davmount+xml":["davmount"],"application/docbook+xml":["dbk"],"application/dssc+der":["dssc"],"application/dssc+xml":["xdssc"],"application/ecmascript":["es","ecma"],"application/emma+xml":["emma"],"application/emotionml+xml":["emotionml"],"application/epub+zip":["epub"],"application/exi":["exi"],"application/express":["exp"],"application/fdt+xml":["fdt"],"application/font-tdpfr":["pfr"],"application/geo+json":["geojson"],"application/gml+xml":["gml"],"application/gpx+xml":["gpx"],"application/gxf":["gxf"],"application/gzip":["gz"],"application/hjson":["hjson"],"application/hyperstudio":["stk"],"application/inkml+xml":["ink","inkml"],"application/ipfix":["ipfix"],"application/its+xml":["its"],"application/java-archive":["jar","war","ear"],"application/java-serialized-object":["ser"],"application/java-vm":["class"],"application/javascript":["js","mjs"],"application/json":["json","map"],"application/json5":["json5"],"application/jsonml+json":["jsonml"],"application/ld+json":["jsonld"],"application/lgr+xml":["lgr"],"application/lost+xml":["lostxml"],"application/mac-binhex40":["hqx"],"application/mac-compactpro":["cpt"],"application/mads+xml":["mads"],"application/manifest+json":["webmanifest"],"application/marc":["mrc"],"application/marcxml+xml":["mrcx"],"application/mathematica":["ma","nb","mb"],"application/mathml+xml":["mathml"],"application/mbox":["mbox"],"ap
plication/mediaservercontrol+xml":["mscml"],"application/metalink+xml":["metalink"],"application/metalink4+xml":["meta4"],"application/mets+xml":["mets"],"application/mmt-aei+xml":["maei"],"application/mmt-usd+xml":["musd"],"application/mods+xml":["mods"],"application/mp21":["m21","mp21"],"application/mp4":["mp4s","m4p"],"application/msword":["doc","dot"],"application/mxf":["mxf"],"application/n-quads":["nq"],"application/n-triples":["nt"],"application/node":["cjs"],"application/octet-stream":["bin","dms","lrf","mar","so","dist","distz","pkg","bpk","dump","elc","deploy","exe","dll","deb","dmg","iso","img","msi","msp","msm","buffer"],"application/oda":["oda"],"application/oebps-package+xml":["opf"],"application/ogg":["ogx"],"application/omdoc+xml":["omdoc"],"application/onenote":["onetoc","onetoc2","onetmp","onepkg"],"application/oxps":["oxps"],"application/p2p-overlay+xml":["relo"],"application/patch-ops-error+xml":["xer"],"application/pdf":["pdf"],"application/pgp-encrypted":["pgp"],"application/pgp-signature":["asc","sig"],"application/pics-rules":["prf"],"application/pkcs10":["p10"],"application/pkcs7-mime":["p7m","p7c"],"application/pkcs7-signature":["p7s"],"application/pkcs8":["p8"],"application/pkix-attr-cert":["ac"],"application/pkix-cert":["cer"],"application/pkix-crl":["crl"],"application/pkix-pkipath":["pkipath"],"application/pkixcmp":["pki"],"application/pls+xml":["pls"],"application/postscript":["ai","eps","ps"],"application/provenance+xml":["provx"],"application/pskc+xml":["pskcxml"],"application/raml+yaml":["raml"],"application/rdf+xml":["rdf","owl"],"application/reginfo+xml":["rif"],"application/relax-ng-compact-syntax":["rnc"],"application/resource-lists+xml":["rl"],"application/resource-lists-diff+xml":["rld"],"application/rls-services+xml":["rs"],"application/route-apd+xml":["rapd"],"application/route-s-tsid+xml":["sls"],"application/route-usd+xml":["rusd"],"application/rpki-ghostbusters":["gbr"],"application/rpki-manifest":["mft"],"application/rpki-roa":["roa"],"application/rsd+xml":["rsd"],"application/rss+xml":["rss"],"application/rtf":["rtf"],"application/sbml+xml":["sbml"],"application/scvp-cv-request":["scq"],"application/scvp-cv-response":["scs"],"application/scvp-vp-request":["spq"],"application/scvp-vp-response":["spp"],"application/sdp":["sdp"],"application/senml+xml":["senmlx"],"application/sensml+xml":["sensmlx"],"application/set-payment-initiation":["setpay"],"application/set-registration-initiation":["setreg"],"application/shf+xml":["shf"],"application/sieve":["siv","sieve"],"application/smil+xml":["smi","smil"],"application/sparql-query":["rq"],"application/sparql-results+xml":["srx"],"application/srgs":["gram"],"application/srgs+xml":["grxml"],"application/sru+xml":["sru"],"application/ssdl+xml":["ssdl"],"application/ssml+xml":["ssml"],"application/swid+xml":["swidtag"],"application/tei+xml":["tei","teicorpus"],"application/thraud+xml":["tfi"],"application/timestamped-data":["tsd"],"application/toml":["toml"],"application/trig":["trig"],"application/ttml+xml":["ttml"],"application/ubjson":["ubj"],"application/urc-ressheet+xml":["rsheet"],"application/urc-targetdesc+xml":["td"],"application/voicexml+xml":["vxml"],"application/wasm":["wasm"],"application/widget":["wgt"],"application/winhlp":["hlp"],"application/wsdl+xml":["wsdl"],"application/wspolicy+xml":["wspolicy"],"application/xaml+xml":["xaml"],"application/xcap-att+xml":["xav"],"application/xcap-caps+xml":["xca"],"application/xcap-diff+xml":["xdf"],"application/xcap-el+xml":["xel"],"application/xcap-ns
+xml":["xns"],"application/xenc+xml":["xenc"],"application/xhtml+xml":["xhtml","xht"],"application/xliff+xml":["xlf"],"application/xml":["xml","xsl","xsd","rng"],"application/xml-dtd":["dtd"],"application/xop+xml":["xop"],"application/xproc+xml":["xpl"],"application/xslt+xml":["*xsl","xslt"],"application/xspf+xml":["xspf"],"application/xv+xml":["mxml","xhvml","xvml","xvm"],"application/yang":["yang"],"application/yin+xml":["yin"],"application/zip":["zip"],"audio/3gpp":["*3gpp"],"audio/adpcm":["adp"],"audio/amr":["amr"],"audio/basic":["au","snd"],"audio/midi":["mid","midi","kar","rmi"],"audio/mobile-xmf":["mxmf"],"audio/mp3":["*mp3"],"audio/mp4":["m4a","mp4a"],"audio/mpeg":["mpga","mp2","mp2a","mp3","m2a","m3a"],"audio/ogg":["oga","ogg","spx","opus"],"audio/s3m":["s3m"],"audio/silk":["sil"],"audio/wav":["wav"],"audio/wave":["*wav"],"audio/webm":["weba"],"audio/xm":["xm"],"font/collection":["ttc"],"font/otf":["otf"],"font/ttf":["ttf"],"font/woff":["woff"],"font/woff2":["woff2"],"image/aces":["exr"],"image/apng":["apng"],"image/avif":["avif"],"image/bmp":["bmp"],"image/cgm":["cgm"],"image/dicom-rle":["drle"],"image/emf":["emf"],"image/fits":["fits"],"image/g3fax":["g3"],"image/gif":["gif"],"image/heic":["heic"],"image/heic-sequence":["heics"],"image/heif":["heif"],"image/heif-sequence":["heifs"],"image/hej2k":["hej2"],"image/hsj2":["hsj2"],"image/ief":["ief"],"image/jls":["jls"],"image/jp2":["jp2","jpg2"],"image/jpeg":["jpeg","jpg","jpe"],"image/jph":["jph"],"image/jphc":["jhc"],"image/jpm":["jpm"],"image/jpx":["jpx","jpf"],"image/jxr":["jxr"],"image/jxra":["jxra"],"image/jxrs":["jxrs"],"image/jxs":["jxs"],"image/jxsc":["jxsc"],"image/jxsi":["jxsi"],"image/jxss":["jxss"],"image/ktx":["ktx"],"image/ktx2":["ktx2"],"image/png":["png"],"image/sgi":["sgi"],"image/svg+xml":["svg","svgz"],"image/t38":["t38"],"image/tiff":["tif","tiff"],"image/tiff-fx":["tfx"],"image/webp":["webp"],"image/wmf":["wmf"],"message/disposition-notification":["disposition-notification"],"message/global":["u8msg"],"message/global-delivery-status":["u8dsn"],"message/global-disposition-notification":["u8mdn"],"message/global-headers":["u8hdr"],"message/rfc822":["eml","mime"],"model/3mf":["3mf"],"model/gltf+json":["gltf"],"model/gltf-binary":["glb"],"model/iges":["igs","iges"],"model/mesh":["msh","mesh","silo"],"model/mtl":["mtl"],"model/obj":["obj"],"model/step+xml":["stpx"],"model/step+zip":["stpz"],"model/step-xml+zip":["stpxz"],"model/stl":["stl"],"model/vrml":["wrl","vrml"],"model/x3d+binary":["*x3db","x3dbz"],"model/x3d+fastinfoset":["x3db"],"model/x3d+vrml":["*x3dv","x3dvz"],"model/x3d+xml":["x3d","x3dz"],"model/x3d-vrml":["x3dv"],"text/cache-manifest":["appcache","manifest"],"text/calendar":["ics","ifb"],"text/coffeescript":["coffee","litcoffee"],"text/css":["css"],"text/csv":["csv"],"text/html":["html","htm","shtml"],"text/jade":["jade"],"text/jsx":["jsx"],"text/less":["less"],"text/markdown":["markdown","md"],"text/mathml":["mml"],"text/mdx":["mdx"],"text/n3":["n3"],"text/plain":["txt","text","conf","def","list","log","in","ini"],"text/richtext":["rtx"],"text/rtf":["*rtf"],"text/sgml":["sgml","sgm"],"text/shex":["shex"],"text/slim":["slim","slm"],"text/spdx":["spdx"],"text/stylus":["stylus","styl"],"text/tab-separated-values":["tsv"],"text/troff":["t","tr","roff","man","me","ms"],"text/turtle":["ttl"],"text/uri-list":["uri","uris","urls"],"text/vcard":["vcard"],"text/vtt":["vtt"],"text/xml":["*xml"],"text/yaml":["yaml","yml"],"video/3gpp":["3gp","3gpp"],"video/3gpp2":["3g2"],"video/h261":["h261"],"video/h263":["h263"]
,"video/h264":["h264"],"video/iso.segment":["m4s"],"video/jpeg":["jpgv"],"video/jpm":["*jpm","jpgm"],"video/mj2":["mj2","mjp2"],"video/mp2t":["ts"],"video/mp4":["mp4","mp4v","mpg4"],"video/mpeg":["mpeg","mpg","mpe","m1v","m2v"],"video/ogg":["ogv"],"video/quicktime":["qt","mov"],"video/webm":["webm"]}; - // 3. If token does not parse, continue to the next token. - if ( - parsedToken === null || - parsedToken.groups === undefined || - parsedToken.groups.algo === undefined - ) { - // Note: Chromium blocks the request at this point, but Firefox - // gives a warning that an invalid integrity was given. The - // correct behavior is to ignore these, and subsequently not - // check the integrity of the resource. - continue - } +/***/ }), - // 4. Let algorithm be the hash-algo component of token. - const algorithm = parsedToken.groups.algo.toLowerCase() +/***/ 66186: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // 5. If algorithm is a hash function recognized by the user - // agent, add the parsed token to result. - if (supportedHashes.includes(algorithm)) { - result.push(parsedToken.groups) - } - } +var path = __nccwpck_require__(71017); +var fs = __nccwpck_require__(57147); +var _0777 = parseInt('0777', 8); - // 4. Return no metadata if empty is true, otherwise return result. - if (empty === true) { - return 'no metadata' - } +module.exports = mkdirP.mkdirp = mkdirP.mkdirP = mkdirP; - return result +function mkdirP (p, opts, f, made) { + if (typeof opts === 'function') { + f = opts; + opts = {}; + } + else if (!opts || typeof opts !== 'object') { + opts = { mode: opts }; + } + + var mode = opts.mode; + var xfs = opts.fs || fs; + + if (mode === undefined) { + mode = _0777 + } + if (!made) made = null; + + var cb = f || /* istanbul ignore next */ function () {}; + p = path.resolve(p); + + xfs.mkdir(p, mode, function (er) { + if (!er) { + made = made || p; + return cb(null, made); + } + switch (er.code) { + case 'ENOENT': + /* istanbul ignore if */ + if (path.dirname(p) === p) return cb(er); + mkdirP(path.dirname(p), opts, function (er, made) { + /* istanbul ignore if */ + if (er) cb(er, made); + else mkdirP(p, opts, cb, made); + }); + break; + + // In the case of any other error, just see if there's a dir + // there already. If so, then hooray! If not, then something + // is borked. + default: + xfs.stat(p, function (er2, stat) { + // if the stat fails, then that's super weird. + // let the original error be the failure reason. + if (er2 || !stat.isDirectory()) cb(er, made) + else cb(null, made); + }); + break; + } + }); } -/** - * @param {{ algo: 'sha256' | 'sha384' | 'sha512' }[]} metadataList - */ -function getStrongestMetadata (metadataList) { - // Let algorithm be the algo component of the first item in metadataList. 
- // Can be sha256 - let algorithm = metadataList[0].algo - // If the algorithm is sha512, then it is the strongest - // and we can return immediately - if (algorithm[3] === '5') { - return algorithm - } - - for (let i = 1; i < metadataList.length; ++i) { - const metadata = metadataList[i] - // If the algorithm is sha512, then it is the strongest - // and we can break the loop immediately - if (metadata.algo[3] === '5') { - algorithm = 'sha512' - break - // If the algorithm is sha384, then a potential sha256 or sha384 is ignored - } else if (algorithm[3] === '3') { - continue - // algorithm is sha256, check if algorithm is sha384 and if so, set it as - // the strongest - } else if (metadata.algo[3] === '3') { - algorithm = 'sha384' +mkdirP.sync = function sync (p, opts, made) { + if (!opts || typeof opts !== 'object') { + opts = { mode: opts }; } - } - return algorithm -} + + var mode = opts.mode; + var xfs = opts.fs || fs; + + if (mode === undefined) { + mode = _0777 + } + if (!made) made = null; -function filterMetadataListByAlgorithm (metadataList, algorithm) { - if (metadataList.length === 1) { - return metadataList - } + p = path.resolve(p); - let pos = 0 - for (let i = 0; i < metadataList.length; ++i) { - if (metadataList[i].algo === algorithm) { - metadataList[pos++] = metadataList[i] + try { + xfs.mkdirSync(p, mode); + made = made || p; + } + catch (err0) { + switch (err0.code) { + case 'ENOENT' : + made = sync(path.dirname(p), opts, made); + sync(p, opts, made); + break; + + // In the case of any other error, just see if there's a dir + // there already. If so, then hooray! If not, then something + // is borked. + default: + var stat; + try { + stat = xfs.statSync(p); + } + catch (err1) /* istanbul ignore next */ { + throw err0; + } + /* istanbul ignore if */ + if (!stat.isDirectory()) throw err0; + break; + } } - } - metadataList.length = pos + return made; +}; - return metadataList -} + +/***/ }), + +/***/ 80900: +/***/ ((module) => { /** - * Compares two base64 strings, allowing for base64url - * in the second string. + * Helpers. + */ + +var s = 1000; +var m = s * 60; +var h = m * 60; +var d = h * 24; +var w = d * 7; +var y = d * 365.25; + +/** + * Parse or format the given `val`. * -* @param {string} actualValue always base64 - * @param {string} expectedValue base64 or base64url - * @returns {boolean} + * Options: + * + * - `long` verbose formatting [false] + * + * @param {String|Number} val + * @param {Object} [options] + * @throws {Error} throw an error if val is not a non-empty string or a number + * @return {String|Number} + * @api public */ -function compareBase64Mixed (actualValue, expectedValue) { - if (actualValue.length !== expectedValue.length) { - return false + +module.exports = function(val, options) { + options = options || {}; + var type = typeof val; + if (type === 'string' && val.length > 0) { + return parse(val); + } else if (type === 'number' && isFinite(val)) { + return options.long ? fmtLong(val) : fmtShort(val); + } + throw new Error( + 'val is not a non-empty string or a valid number. val=' + + JSON.stringify(val) + ); +}; + +/** + * Parse the given `str` and return milliseconds. 
+ * + * @param {String} str + * @return {Number} + * @api private + */ + +function parse(str) { + str = String(str); + if (str.length > 100) { + return; } - for (let i = 0; i < actualValue.length; ++i) { - if (actualValue[i] !== expectedValue[i]) { - if ( - (actualValue[i] === '+' && expectedValue[i] === '-') || - (actualValue[i] === '/' && expectedValue[i] === '_') - ) { - continue - } - return false - } + var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( + str + ); + if (!match) { + return; + } + var n = parseFloat(match[1]); + var type = (match[2] || 'ms').toLowerCase(); + switch (type) { + case 'years': + case 'year': + case 'yrs': + case 'yr': + case 'y': + return n * y; + case 'weeks': + case 'week': + case 'w': + return n * w; + case 'days': + case 'day': + case 'd': + return n * d; + case 'hours': + case 'hour': + case 'hrs': + case 'hr': + case 'h': + return n * h; + case 'minutes': + case 'minute': + case 'mins': + case 'min': + case 'm': + return n * m; + case 'seconds': + case 'second': + case 'secs': + case 'sec': + case 's': + return n * s; + case 'milliseconds': + case 'millisecond': + case 'msecs': + case 'msec': + case 'ms': + return n; + default: + return undefined; } - - return true } -// https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request -function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) { - // TODO +/** + * Short format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtShort(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return Math.round(ms / d) + 'd'; + } + if (msAbs >= h) { + return Math.round(ms / h) + 'h'; + } + if (msAbs >= m) { + return Math.round(ms / m) + 'm'; + } + if (msAbs >= s) { + return Math.round(ms / s) + 's'; + } + return ms + 'ms'; } /** - * @link {https://html.spec.whatwg.org/multipage/origin.html#same-origin} - * @param {URL} A - * @param {URL} B + * Long format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private */ -function sameOrigin (A, B) { - // 1. If A and B are the same opaque origin, then return true. - if (A.origin === B.origin && A.origin === 'null') { - return true - } - // 2. If A and B are both tuple origins and their schemes, - // hosts, and port are identical, then return true. - if (A.protocol === B.protocol && A.hostname === B.hostname && A.port === B.port) { - return true +function fmtLong(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return plural(ms, msAbs, d, 'day'); } - - // 3. Return false. - return false + if (msAbs >= h) { + return plural(ms, msAbs, h, 'hour'); + } + if (msAbs >= m) { + return plural(ms, msAbs, m, 'minute'); + } + if (msAbs >= s) { + return plural(ms, msAbs, s, 'second'); + } + return ms + ' ms'; } -function createDeferredPromise () { - let res - let rej - const promise = new Promise((resolve, reject) => { - res = resolve - rej = reject - }) +/** + * Pluralization helper. + */ - return { promise, resolve: res, reject: rej } +function plural(ms, msAbs, n, name) { + var isPlural = msAbs >= n * 1.5; + return Math.round(ms / n) + ' ' + name + (isPlural ? 
's' : ''); } -function isAborted (fetchParams) { - return fetchParams.controller.state === 'aborted' -} -function isCancelled (fetchParams) { - return fetchParams.controller.state === 'aborted' || - fetchParams.controller.state === 'terminated' -} +/***/ }), -const normalizeMethodRecord = { - delete: 'DELETE', - DELETE: 'DELETE', - get: 'GET', - GET: 'GET', - head: 'HEAD', - HEAD: 'HEAD', - options: 'OPTIONS', - OPTIONS: 'OPTIONS', - post: 'POST', - POST: 'POST', - put: 'PUT', - PUT: 'PUT' -} +/***/ 35573: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`. -Object.setPrototypeOf(normalizeMethodRecord, null) -/** - * @see https://fetch.spec.whatwg.org/#concept-method-normalize - * @param {string} method - */ -function normalizeMethod (method) { - return normalizeMethodRecord[method.toLowerCase()] ?? method -} +var Promise = __nccwpck_require__(55768) +var fs +try { + fs = __nccwpck_require__(77758) +} catch(err) { + fs = __nccwpck_require__(57147) +} + +var api = [ + 'appendFile', + 'chmod', + 'chown', + 'close', + 'fchmod', + 'fchown', + 'fdatasync', + 'fstat', + 'fsync', + 'ftruncate', + 'futimes', + 'lchown', + 'link', + 'lstat', + 'mkdir', + 'open', + 'read', + 'readFile', + 'readdir', + 'readlink', + 'realpath', + 'rename', + 'rmdir', + 'stat', + 'symlink', + 'truncate', + 'unlink', + 'utimes', + 'write', + 'writeFile' +] -// https://infra.spec.whatwg.org/#serialize-a-javascript-value-to-a-json-string -function serializeJavascriptValueToJSONString (value) { - // 1. Let result be ? Call(%JSON.stringify%, undefined, « value »). - const result = JSON.stringify(value) +typeof fs.access === 'function' && api.push('access') +typeof fs.copyFile === 'function' && api.push('copyFile') +typeof fs.mkdtemp === 'function' && api.push('mkdtemp') - // 2. If result is undefined, then throw a TypeError. - if (result === undefined) { - throw new TypeError('Value is not JSON serializable') +__nccwpck_require__(38690).withCallback(fs, exports, api) + +exports.exists = function (filename, callback) { + // callback + if (typeof callback === 'function') { + return fs.stat(filename, function (err) { + callback(null, !err); + }) } + // or promise + return new Promise(function (resolve) { + fs.stat(filename, function (err) { + resolve(!err) + }) + }) +} - // 3. Assert: result is a string. - assert(typeof result === 'string') - // 4. Return result. - return result -} +/***/ }), -// https://tc39.es/ecma262/#sec-%25iteratorprototype%25-object -const esIteratorPrototype = Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())) +/***/ 26002: +/***/ ((module) => { -/** - * @see https://webidl.spec.whatwg.org/#dfn-iterator-prototype-object - * @param {() => unknown[]} iterator - * @param {string} name name of the instance - * @param {'key'|'value'|'key+value'} kind - */ -function makeIterator (iterator, name, kind) { - const object = { - index: 0, - kind, - target: iterator - } +"use strict"; - const i = { - next () { - // 1. Let interface be the interface for which the iterator prototype object exists. - // 2. Let thisValue be the this value. +var zero = function (n, max) { + n = n.toString(16).toUpperCase(); + while (n.length < max) { + n = '0' + n; + } + return n; +}; - // 3. Let object be ? ToObject(thisValue). 
+module.exports = function (buffer) { + var rows = Math.ceil(buffer.length / 16); + var last = buffer.length % 16 || 16; + var offsetLength = buffer.length.toString(16).length; + if (offsetLength < 6) offsetLength = 6; - // 4. If object is a platform object, then perform a security - // check, passing: + var i; + var str = ''; - // 5. If object is not a default iterator object for interface, - // then throw a TypeError. - if (Object.getPrototypeOf(this) !== i) { - throw new TypeError( - `'next' called on an object that does not implement interface ${name} Iterator.` - ) - } + var b = 0; + var lastBytes; + var lastSpaces; + var v; - // 6. Let index be object’s index. - // 7. Let kind be object’s kind. - // 8. Let values be object’s target's value pairs to iterate over. - const { index, kind, target } = object - const values = target() + for (i = 0; i < rows; i++) { + str += '\u001b[36m' + zero(b, offsetLength) + '\u001b[0m '; + lastBytes = i === rows - 1 ? last : 16; + lastSpaces = 16 - lastBytes; - // 9. Let len be the length of values. - const len = values.length + var j; + for (j = 0; j < lastBytes; j++) { + str += ' ' + zero(buffer[b], 2); + b++; + } - // 10. If index is greater than or equal to len, then return - // CreateIterResultObject(undefined, true). - if (index >= len) { - return { value: undefined, done: true } - } + for (j = 0; j < lastSpaces; j++) { + str += ' '; + } - // 11. Let pair be the entry in values at index index. - const pair = values[index] + b -= lastBytes; + str += ' '; - // 12. Set object’s index to index + 1. - object.index = index + 1 + for (j = 0; j < lastBytes; j++) { + v = buffer[b]; + str += (v > 31 && v < 127) || v > 159 ? String.fromCharCode(v) : '.'; + b++; + } - // 13. Return the iterator result for pair and kind. - return iteratorResult(pair, kind) - }, - // The class string of an iterator prototype object for a given interface is the - // result of concatenating the identifier of the interface and the string " Iterator". - [Symbol.toStringTag]: `${name} Iterator` + str += '\n'; } - // The [[Prototype]] internal slot of an iterator prototype object must be %IteratorPrototype%. - Object.setPrototypeOf(i, esIteratorPrototype) - // esIteratorPrototype needs to be the prototype of i - // which is the prototype of an empty object. Yes, it's confusing. - return Object.setPrototypeOf({}, i) + return str; +}; + + +/***/ }), + +/***/ 20504: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var hasMap = typeof Map === 'function' && Map.prototype; +var mapSizeDescriptor = Object.getOwnPropertyDescriptor && hasMap ? Object.getOwnPropertyDescriptor(Map.prototype, 'size') : null; +var mapSize = hasMap && mapSizeDescriptor && typeof mapSizeDescriptor.get === 'function' ? mapSizeDescriptor.get : null; +var mapForEach = hasMap && Map.prototype.forEach; +var hasSet = typeof Set === 'function' && Set.prototype; +var setSizeDescriptor = Object.getOwnPropertyDescriptor && hasSet ? Object.getOwnPropertyDescriptor(Set.prototype, 'size') : null; +var setSize = hasSet && setSizeDescriptor && typeof setSizeDescriptor.get === 'function' ? setSizeDescriptor.get : null; +var setForEach = hasSet && Set.prototype.forEach; +var hasWeakMap = typeof WeakMap === 'function' && WeakMap.prototype; +var weakMapHas = hasWeakMap ? WeakMap.prototype.has : null; +var hasWeakSet = typeof WeakSet === 'function' && WeakSet.prototype; +var weakSetHas = hasWeakSet ? 
WeakSet.prototype.has : null; +var hasWeakRef = typeof WeakRef === 'function' && WeakRef.prototype; +var weakRefDeref = hasWeakRef ? WeakRef.prototype.deref : null; +var booleanValueOf = Boolean.prototype.valueOf; +var objectToString = Object.prototype.toString; +var functionToString = Function.prototype.toString; +var $match = String.prototype.match; +var $slice = String.prototype.slice; +var $replace = String.prototype.replace; +var $toUpperCase = String.prototype.toUpperCase; +var $toLowerCase = String.prototype.toLowerCase; +var $test = RegExp.prototype.test; +var $concat = Array.prototype.concat; +var $join = Array.prototype.join; +var $arrSlice = Array.prototype.slice; +var $floor = Math.floor; +var bigIntValueOf = typeof BigInt === 'function' ? BigInt.prototype.valueOf : null; +var gOPS = Object.getOwnPropertySymbols; +var symToString = typeof Symbol === 'function' && typeof Symbol.iterator === 'symbol' ? Symbol.prototype.toString : null; +var hasShammedSymbols = typeof Symbol === 'function' && typeof Symbol.iterator === 'object'; +// ie, `has-tostringtag/shams +var toStringTag = typeof Symbol === 'function' && Symbol.toStringTag && (typeof Symbol.toStringTag === hasShammedSymbols ? 'object' : 'symbol') + ? Symbol.toStringTag + : null; +var isEnumerable = Object.prototype.propertyIsEnumerable; + +var gPO = (typeof Reflect === 'function' ? Reflect.getPrototypeOf : Object.getPrototypeOf) || ( + [].__proto__ === Array.prototype // eslint-disable-line no-proto + ? function (O) { + return O.__proto__; // eslint-disable-line no-proto + } + : null +); + +function addNumericSeparator(num, str) { + if ( + num === Infinity + || num === -Infinity + || num !== num + || (num && num > -1000 && num < 1000) + || $test.call(/e/, str) + ) { + return str; + } + var sepRegex = /[0-9](?=(?:[0-9]{3})+(?![0-9]))/g; + if (typeof num === 'number') { + var int = num < 0 ? -$floor(-num) : $floor(num); // trunc(num) + if (int !== num) { + var intStr = String(int); + var dec = $slice.call(str, intStr.length + 1); + return $replace.call(intStr, sepRegex, '$&_') + '.' + $replace.call($replace.call(dec, /([0-9]{3})/g, '$&_'), /_$/, ''); + } + } + return $replace.call(str, sepRegex, '$&_'); } -// https://webidl.spec.whatwg.org/#iterator-result -function iteratorResult (pair, kind) { - let result +var utilInspect = __nccwpck_require__(37265); +var inspectCustom = utilInspect.custom; +var inspectSymbol = isSymbol(inspectCustom) ? inspectCustom : null; - // 1. Let result be a value determined by the value of kind: - switch (kind) { - case 'key': { - // 1. Let idlKey be pair’s key. - // 2. Let key be the result of converting idlKey to an - // ECMAScript value. - // 3. result is key. - result = pair[0] - break +module.exports = function inspect_(obj, options, depth, seen) { + var opts = options || {}; + + if (has(opts, 'quoteStyle') && (opts.quoteStyle !== 'single' && opts.quoteStyle !== 'double')) { + throw new TypeError('option "quoteStyle" must be "single" or "double"'); } - case 'value': { - // 1. Let idlValue be pair’s value. - // 2. Let value be the result of converting idlValue to - // an ECMAScript value. - // 3. result is value. - result = pair[1] - break + if ( + has(opts, 'maxStringLength') && (typeof opts.maxStringLength === 'number' + ? opts.maxStringLength < 0 && opts.maxStringLength !== Infinity + : opts.maxStringLength !== null + ) + ) { + throw new TypeError('option "maxStringLength", if provided, must be a positive integer, Infinity, or `null`'); } - case 'key+value': { - // 1. 
Let idlKey be pair’s key. - // 2. Let idlValue be pair’s value. - // 3. Let key be the result of converting idlKey to an - // ECMAScript value. - // 4. Let value be the result of converting idlValue to - // an ECMAScript value. - // 5. Let array be ! ArrayCreate(2). - // 6. Call ! CreateDataProperty(array, "0", key). - // 7. Call ! CreateDataProperty(array, "1", value). - // 8. result is array. - result = pair - break + var customInspect = has(opts, 'customInspect') ? opts.customInspect : true; + if (typeof customInspect !== 'boolean' && customInspect !== 'symbol') { + throw new TypeError('option "customInspect", if provided, must be `true`, `false`, or `\'symbol\'`'); } - } - // 2. Return CreateIterResultObject(result, false). - return { value: result, done: false } -} + if ( + has(opts, 'indent') + && opts.indent !== null + && opts.indent !== '\t' + && !(parseInt(opts.indent, 10) === opts.indent && opts.indent > 0) + ) { + throw new TypeError('option "indent" must be "\\t", an integer > 0, or `null`'); + } + if (has(opts, 'numericSeparator') && typeof opts.numericSeparator !== 'boolean') { + throw new TypeError('option "numericSeparator", if provided, must be `true` or `false`'); + } + var numericSeparator = opts.numericSeparator; -/** - * @see https://fetch.spec.whatwg.org/#body-fully-read - */ -async function fullyReadBody (body, processBody, processBodyError) { - // 1. If taskDestination is null, then set taskDestination to - // the result of starting a new parallel queue. + if (typeof obj === 'undefined') { + return 'undefined'; + } + if (obj === null) { + return 'null'; + } + if (typeof obj === 'boolean') { + return obj ? 'true' : 'false'; + } - // 2. Let successSteps given a byte sequence bytes be to queue a - // fetch task to run processBody given bytes, with taskDestination. - const successSteps = processBody + if (typeof obj === 'string') { + return inspectString(obj, opts); + } + if (typeof obj === 'number') { + if (obj === 0) { + return Infinity / obj > 0 ? '0' : '-0'; + } + var str = String(obj); + return numericSeparator ? addNumericSeparator(obj, str) : str; + } + if (typeof obj === 'bigint') { + var bigIntStr = String(obj) + 'n'; + return numericSeparator ? addNumericSeparator(obj, bigIntStr) : bigIntStr; + } - // 3. Let errorSteps be to queue a fetch task to run processBodyError, - // with taskDestination. - const errorSteps = processBodyError + var maxDepth = typeof opts.depth === 'undefined' ? 5 : opts.depth; + if (typeof depth === 'undefined') { depth = 0; } + if (depth >= maxDepth && maxDepth > 0 && typeof obj === 'object') { + return isArray(obj) ? '[Array]' : '[Object]'; + } - // 4. Let reader be the result of getting a reader for body’s stream. - // If that threw an exception, then run errorSteps with that - // exception and return. - let reader + var indent = getIndent(opts, depth); - try { - reader = body.stream.getReader() - } catch (e) { - errorSteps(e) - return - } + if (typeof seen === 'undefined') { + seen = []; + } else if (indexOf(seen, obj) >= 0) { + return '[Circular]'; + } - // 5. Read all bytes from reader, given successSteps and errorSteps. 
- try { - const result = await readAllBytes(reader) - successSteps(result) - } catch (e) { - errorSteps(e) - } + function inspect(value, from, noIndent) { + if (from) { + seen = $arrSlice.call(seen); + seen.push(from); + } + if (noIndent) { + var newOpts = { + depth: opts.depth + }; + if (has(opts, 'quoteStyle')) { + newOpts.quoteStyle = opts.quoteStyle; + } + return inspect_(value, newOpts, depth + 1, seen); + } + return inspect_(value, opts, depth + 1, seen); + } + + if (typeof obj === 'function' && !isRegExp(obj)) { // in older engines, regexes are callable + var name = nameOf(obj); + var keys = arrObjKeys(obj, inspect); + return '[Function' + (name ? ': ' + name : ' (anonymous)') + ']' + (keys.length > 0 ? ' { ' + $join.call(keys, ', ') + ' }' : ''); + } + if (isSymbol(obj)) { + var symString = hasShammedSymbols ? $replace.call(String(obj), /^(Symbol\(.*\))_[^)]*$/, '$1') : symToString.call(obj); + return typeof obj === 'object' && !hasShammedSymbols ? markBoxed(symString) : symString; + } + if (isElement(obj)) { + var s = '<' + $toLowerCase.call(String(obj.nodeName)); + var attrs = obj.attributes || []; + for (var i = 0; i < attrs.length; i++) { + s += ' ' + attrs[i].name + '=' + wrapQuotes(quote(attrs[i].value), 'double', opts); + } + s += '>'; + if (obj.childNodes && obj.childNodes.length) { s += '...'; } + s += ''; + return s; + } + if (isArray(obj)) { + if (obj.length === 0) { return '[]'; } + var xs = arrObjKeys(obj, inspect); + if (indent && !singleLineValues(xs)) { + return '[' + indentedJoin(xs, indent) + ']'; + } + return '[ ' + $join.call(xs, ', ') + ' ]'; + } + if (isError(obj)) { + var parts = arrObjKeys(obj, inspect); + if (!('cause' in Error.prototype) && 'cause' in obj && !isEnumerable.call(obj, 'cause')) { + return '{ [' + String(obj) + '] ' + $join.call($concat.call('[cause]: ' + inspect(obj.cause), parts), ', ') + ' }'; + } + if (parts.length === 0) { return '[' + String(obj) + ']'; } + return '{ [' + String(obj) + '] ' + $join.call(parts, ', ') + ' }'; + } + if (typeof obj === 'object' && customInspect) { + if (inspectSymbol && typeof obj[inspectSymbol] === 'function' && utilInspect) { + return utilInspect(obj, { depth: maxDepth - depth }); + } else if (customInspect !== 'symbol' && typeof obj.inspect === 'function') { + return obj.inspect(); + } + } + if (isMap(obj)) { + var mapParts = []; + if (mapForEach) { + mapForEach.call(obj, function (value, key) { + mapParts.push(inspect(key, obj, true) + ' => ' + inspect(value, obj)); + }); + } + return collectionOf('Map', mapSize.call(obj), mapParts, indent); + } + if (isSet(obj)) { + var setParts = []; + if (setForEach) { + setForEach.call(obj, function (value) { + setParts.push(inspect(value, obj)); + }); + } + return collectionOf('Set', setSize.call(obj), setParts, indent); + } + if (isWeakMap(obj)) { + return weakCollectionOf('WeakMap'); + } + if (isWeakSet(obj)) { + return weakCollectionOf('WeakSet'); + } + if (isWeakRef(obj)) { + return weakCollectionOf('WeakRef'); + } + if (isNumber(obj)) { + return markBoxed(inspect(Number(obj))); + } + if (isBigInt(obj)) { + return markBoxed(inspect(bigIntValueOf.call(obj))); + } + if (isBoolean(obj)) { + return markBoxed(booleanValueOf.call(obj)); + } + if (isString(obj)) { + return markBoxed(inspect(String(obj))); + } + // note: in IE 8, sometimes `global !== window` but both are the prototypes of each other + /* eslint-env browser */ + if (typeof window !== 'undefined' && obj === window) { + return '{ [object Window] }'; + } + if (obj === global) { + return '{ [object 
globalThis] }'; + } + if (!isDate(obj) && !isRegExp(obj)) { + var ys = arrObjKeys(obj, inspect); + var isPlainObject = gPO ? gPO(obj) === Object.prototype : obj instanceof Object || obj.constructor === Object; + var protoTag = obj instanceof Object ? '' : 'null prototype'; + var stringTag = !isPlainObject && toStringTag && Object(obj) === obj && toStringTag in obj ? $slice.call(toStr(obj), 8, -1) : protoTag ? 'Object' : ''; + var constructorTag = isPlainObject || typeof obj.constructor !== 'function' ? '' : obj.constructor.name ? obj.constructor.name + ' ' : ''; + var tag = constructorTag + (stringTag || protoTag ? '[' + $join.call($concat.call([], stringTag || [], protoTag || []), ': ') + '] ' : ''); + if (ys.length === 0) { return tag + '{}'; } + if (indent) { + return tag + '{' + indentedJoin(ys, indent) + '}'; + } + return tag + '{ ' + $join.call(ys, ', ') + ' }'; + } + return String(obj); +}; + +function wrapQuotes(s, defaultStyle, opts) { + var quoteChar = (opts.quoteStyle || defaultStyle) === 'double' ? '"' : "'"; + return quoteChar + s + quoteChar; } -/** @type {ReadableStream} */ -let ReadableStream = globalThis.ReadableStream +function quote(s) { + return $replace.call(String(s), /"/g, '"'); +} -function isReadableStreamLike (stream) { - if (!ReadableStream) { - ReadableStream = (__nccwpck_require__(5356).ReadableStream) - } +function isArray(obj) { return toStr(obj) === '[object Array]' && (!toStringTag || !(typeof obj === 'object' && toStringTag in obj)); } +function isDate(obj) { return toStr(obj) === '[object Date]' && (!toStringTag || !(typeof obj === 'object' && toStringTag in obj)); } +function isRegExp(obj) { return toStr(obj) === '[object RegExp]' && (!toStringTag || !(typeof obj === 'object' && toStringTag in obj)); } +function isError(obj) { return toStr(obj) === '[object Error]' && (!toStringTag || !(typeof obj === 'object' && toStringTag in obj)); } +function isString(obj) { return toStr(obj) === '[object String]' && (!toStringTag || !(typeof obj === 'object' && toStringTag in obj)); } +function isNumber(obj) { return toStr(obj) === '[object Number]' && (!toStringTag || !(typeof obj === 'object' && toStringTag in obj)); } +function isBoolean(obj) { return toStr(obj) === '[object Boolean]' && (!toStringTag || !(typeof obj === 'object' && toStringTag in obj)); } - return stream instanceof ReadableStream || ( - stream[Symbol.toStringTag] === 'ReadableStream' && - typeof stream.tee === 'function' - ) +// Symbol and BigInt do have Symbol.toStringTag by spec, so that can't be used to eliminate false positives +function isSymbol(obj) { + if (hasShammedSymbols) { + return obj && typeof obj === 'object' && obj instanceof Symbol; + } + if (typeof obj === 'symbol') { + return true; + } + if (!obj || typeof obj !== 'object' || !symToString) { + return false; + } + try { + symToString.call(obj); + return true; + } catch (e) {} + return false; } -const MAXIMUM_ARGUMENT_LENGTH = 65535 +function isBigInt(obj) { + if (!obj || typeof obj !== 'object' || !bigIntValueOf) { + return false; + } + try { + bigIntValueOf.call(obj); + return true; + } catch (e) {} + return false; +} -/** - * @see https://infra.spec.whatwg.org/#isomorphic-decode - * @param {number[]|Uint8Array} input - */ -function isomorphicDecode (input) { - // 1. To isomorphic decode a byte sequence input, return a string whose code point - // length is equal to input’s length and whose code points have the same values - // as the values of input’s bytes, in the same order. 
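The large added block here is what appears to be the vendored `object-inspect` package, pulled into `dist/index.js` by the rebuilt bundle. As a hedged illustration of the options its entry point validates (`quoteStyle`, `maxStringLength`, `customInspect`, `indent`, `numericSeparator`, `depth`), a minimal usage sketch against the standalone package rather than the bundle:

```js
// Minimal sketch, assuming the standalone `object-inspect` package
// (the bundle reaches the same inspect_ function via __nccwpck_require__).
var inspect = require('object-inspect');

// Nested objects are cut off once `depth` is reached.
console.log(inspect({ a: 1, b: { c: { d: [2, 3] } } }, { depth: 2 }));
// -> roughly "{ a: 1, b: { c: [Object] } }"

// numericSeparator routes numbers through addNumericSeparator() above.
console.log(inspect(1234567, { numericSeparator: true })); // "1_234_567"

// quoteStyle must be 'single' or 'double'; anything else throws a TypeError.
console.log(inspect('hi', { quoteStyle: 'double' })); // the string wrapped in double quotes
```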
+var hasOwn = Object.prototype.hasOwnProperty || function (key) { return key in this; }; +function has(obj, key) { + return hasOwn.call(obj, key); +} - if (input.length < MAXIMUM_ARGUMENT_LENGTH) { - return String.fromCharCode(...input) - } +function toStr(obj) { + return objectToString.call(obj); +} - return input.reduce((previous, current) => previous + String.fromCharCode(current), '') +function nameOf(f) { + if (f.name) { return f.name; } + var m = $match.call(functionToString.call(f), /^function\s*([\w$]+)/); + if (m) { return m[1]; } + return null; } -/** - * @param {ReadableStreamController} controller - */ -function readableStreamClose (controller) { - try { - controller.close() - } catch (err) { - // TODO: add comment explaining why this error occurs. - if (!err.message.includes('Controller is already closed')) { - throw err +function indexOf(xs, x) { + if (xs.indexOf) { return xs.indexOf(x); } + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) { return i; } } - } + return -1; } -/** - * @see https://infra.spec.whatwg.org/#isomorphic-encode - * @param {string} input - */ -function isomorphicEncode (input) { - // 1. Assert: input contains no code points greater than U+00FF. - for (let i = 0; i < input.length; i++) { - assert(input.charCodeAt(i) <= 0xFF) - } +function isMap(x) { + if (!mapSize || !x || typeof x !== 'object') { + return false; + } + try { + mapSize.call(x); + try { + setSize.call(x); + } catch (s) { + return true; + } + return x instanceof Map; // core-js workaround, pre-v2.5.0 + } catch (e) {} + return false; +} - // 2. Return a byte sequence whose length is equal to input’s code - // point length and whose bytes have the same values as the - // values of input’s code points, in the same order - return input +function isWeakMap(x) { + if (!weakMapHas || !x || typeof x !== 'object') { + return false; + } + try { + weakMapHas.call(x, weakMapHas); + try { + weakSetHas.call(x, weakSetHas); + } catch (s) { + return true; + } + return x instanceof WeakMap; // core-js workaround, pre-v2.5.0 + } catch (e) {} + return false; } -/** - * @see https://streams.spec.whatwg.org/#readablestreamdefaultreader-read-all-bytes - * @see https://streams.spec.whatwg.org/#read-loop - * @param {ReadableStreamDefaultReader} reader - */ -async function readAllBytes (reader) { - const bytes = [] - let byteLength = 0 +function isWeakRef(x) { + if (!weakRefDeref || !x || typeof x !== 'object') { + return false; + } + try { + weakRefDeref.call(x); + return true; + } catch (e) {} + return false; +} - while (true) { - const { done, value: chunk } = await reader.read() +function isSet(x) { + if (!setSize || !x || typeof x !== 'object') { + return false; + } + try { + setSize.call(x); + try { + mapSize.call(x); + } catch (m) { + return true; + } + return x instanceof Set; // core-js workaround, pre-v2.5.0 + } catch (e) {} + return false; +} - if (done) { - // 1. Call successSteps with bytes. - return Buffer.concat(bytes, byteLength) +function isWeakSet(x) { + if (!weakSetHas || !x || typeof x !== 'object') { + return false; } + try { + weakSetHas.call(x, weakSetHas); + try { + weakMapHas.call(x, weakMapHas); + } catch (s) { + return true; + } + return x instanceof WeakSet; // core-js workaround, pre-v2.5.0 + } catch (e) {} + return false; +} - // 1. If chunk is not a Uint8Array object, call failureSteps - // with a TypeError and abort these steps. 
- if (!isUint8Array(chunk)) { - throw new TypeError('Received non-Uint8Array chunk') +function isElement(x) { + if (!x || typeof x !== 'object') { return false; } + if (typeof HTMLElement !== 'undefined' && x instanceof HTMLElement) { + return true; } + return typeof x.nodeName === 'string' && typeof x.getAttribute === 'function'; +} - // 2. Append the bytes represented by chunk to bytes. - bytes.push(chunk) - byteLength += chunk.length +function inspectString(str, opts) { + if (str.length > opts.maxStringLength) { + var remaining = str.length - opts.maxStringLength; + var trailer = '... ' + remaining + ' more character' + (remaining > 1 ? 's' : ''); + return inspectString($slice.call(str, 0, opts.maxStringLength), opts) + trailer; + } + // eslint-disable-next-line no-control-regex + var s = $replace.call($replace.call(str, /(['\\])/g, '\\$1'), /[\x00-\x1f]/g, lowbyte); + return wrapQuotes(s, 'single', opts); +} - // 3. Read-loop given reader, bytes, successSteps, and failureSteps. - } +function lowbyte(c) { + var n = c.charCodeAt(0); + var x = { + 8: 'b', + 9: 't', + 10: 'n', + 12: 'f', + 13: 'r' + }[n]; + if (x) { return '\\' + x; } + return '\\x' + (n < 0x10 ? '0' : '') + $toUpperCase.call(n.toString(16)); } -/** - * @see https://fetch.spec.whatwg.org/#is-local - * @param {URL} url - */ -function urlIsLocal (url) { - assert('protocol' in url) // ensure it's a url object +function markBoxed(str) { + return 'Object(' + str + ')'; +} - const protocol = url.protocol +function weakCollectionOf(type) { + return type + ' { ? }'; +} - return protocol === 'about:' || protocol === 'blob:' || protocol === 'data:' +function collectionOf(type, size, entries, indent) { + var joinedEntries = indent ? indentedJoin(entries, indent) : $join.call(entries, ', '); + return type + ' (' + size + ') {' + joinedEntries + '}'; } -/** - * @param {string|URL} url - */ -function urlHasHttpsScheme (url) { - if (typeof url === 'string') { - return url.startsWith('https:') - } +function singleLineValues(xs) { + for (var i = 0; i < xs.length; i++) { + if (indexOf(xs[i], '\n') >= 0) { + return false; + } + } + return true; +} - return url.protocol === 'https:' +function getIndent(opts, depth) { + var baseIndent; + if (opts.indent === '\t') { + baseIndent = '\t'; + } else if (typeof opts.indent === 'number' && opts.indent > 0) { + baseIndent = $join.call(Array(opts.indent + 1), ' '); + } else { + return null; + } + return { + base: baseIndent, + prev: $join.call(Array(depth + 1), baseIndent) + }; } -/** - * @see https://fetch.spec.whatwg.org/#http-scheme - * @param {URL} url - */ -function urlIsHttpHttpsScheme (url) { - assert('protocol' in url) // ensure it's a url object +function indentedJoin(xs, indent) { + if (xs.length === 0) { return ''; } + var lineJoiner = '\n' + indent.prev + indent.base; + return lineJoiner + $join.call(xs, ',' + lineJoiner) + '\n' + indent.prev; +} - const protocol = url.protocol +function arrObjKeys(obj, inspect) { + var isArr = isArray(obj); + var xs = []; + if (isArr) { + xs.length = obj.length; + for (var i = 0; i < obj.length; i++) { + xs[i] = has(obj, i) ? inspect(obj[i], obj) : ''; + } + } + var syms = typeof gOPS === 'function' ? gOPS(obj) : []; + var symMap; + if (hasShammedSymbols) { + symMap = {}; + for (var k = 0; k < syms.length; k++) { + symMap['$' + syms[k]] = syms[k]; + } + } - return protocol === 'http:' || protocol === 'https:' -} - -/** - * Fetch supports node >= 16.8.0, but Object.hasOwn was added in v16.9.0. 
- */ -const hasOwn = Object.hasOwn || ((dict, key) => Object.prototype.hasOwnProperty.call(dict, key)) - -module.exports = { - isAborted, - isCancelled, - createDeferredPromise, - ReadableStreamFrom, - toUSVString, - tryUpgradeRequestToAPotentiallyTrustworthyURL, - coarsenedSharedCurrentTime, - determineRequestsReferrer, - makePolicyContainer, - clonePolicyContainer, - appendFetchMetadata, - appendRequestOriginHeader, - TAOCheck, - corsCheck, - crossOriginResourcePolicyCheck, - createOpaqueTimingInfo, - setRequestReferrerPolicyOnRedirect, - isValidHTTPToken, - requestBadPort, - requestCurrentURL, - responseURL, - responseLocationURL, - isBlobLike, - isURLPotentiallyTrustworthy, - isValidReasonPhrase, - sameOrigin, - normalizeMethod, - serializeJavascriptValueToJSONString, - makeIterator, - isValidHeaderName, - isValidHeaderValue, - hasOwn, - isErrorLike, - fullyReadBody, - bytesMatch, - isReadableStreamLike, - readableStreamClose, - isomorphicEncode, - isomorphicDecode, - urlIsLocal, - urlHasHttpsScheme, - urlIsHttpHttpsScheme, - readAllBytes, - normalizeMethodRecord, - parseMetadata + for (var key in obj) { // eslint-disable-line no-restricted-syntax + if (!has(obj, key)) { continue; } // eslint-disable-line no-restricted-syntax, no-continue + if (isArr && String(Number(key)) === key && key < obj.length) { continue; } // eslint-disable-line no-restricted-syntax, no-continue + if (hasShammedSymbols && symMap['$' + key] instanceof Symbol) { + // this is to prevent shammed Symbols, which are stored as strings, from being included in the string key section + continue; // eslint-disable-line no-restricted-syntax, no-continue + } else if ($test.call(/[^\w$]/, key)) { + xs.push(inspect(key, obj) + ': ' + inspect(obj[key], obj)); + } else { + xs.push(key + ': ' + inspect(obj[key], obj)); + } + } + if (typeof gOPS === 'function') { + for (var j = 0; j < syms.length; j++) { + if (isEnumerable.call(obj, syms[j])) { + xs.push('[' + inspect(syms[j]) + ']: ' + inspect(obj[syms[j]], obj)); + } + } + } + return xs; } /***/ }), -/***/ 1744: +/***/ 37265: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; - +module.exports = __nccwpck_require__(73837).inspect; -const { types } = __nccwpck_require__(3837) -const { hasOwn, toUSVString } = __nccwpck_require__(2538) -/** @type {import('../../types/webidl').Webidl} */ -const webidl = {} -webidl.converters = {} -webidl.util = {} -webidl.errors = {} +/***/ }), -webidl.errors.exception = function (message) { - return new TypeError(`${message.header}: ${message.message}`) -} +/***/ 1223: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -webidl.errors.conversionFailed = function (context) { - const plural = context.types.length === 1 ? 
'' : ' one of' - const message = - `${context.argument} could not be converted to` + - `${plural}: ${context.types.join(', ')}.` +var wrappy = __nccwpck_require__(62940) +module.exports = wrappy(once) +module.exports.strict = wrappy(onceStrict) - return webidl.errors.exception({ - header: context.prefix, - message +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true }) -} -webidl.errors.invalidArgument = function (context) { - return webidl.errors.exception({ - header: context.prefix, - message: `"${context.value}" is an invalid ${context.type}.` + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true }) -} +}) -// https://webidl.spec.whatwg.org/#implements -webidl.brandCheck = function (V, I, opts = undefined) { - if (opts?.strict !== false && !(V instanceof I)) { - throw new TypeError('Illegal invocation') - } else { - return V?.[Symbol.toStringTag] === I.prototype[Symbol.toStringTag] +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) } + f.called = false + return f } -webidl.argumentLengthCheck = function ({ length }, min, ctx) { - if (length < min) { - throw webidl.errors.exception({ - message: `${min} argument${min !== 1 ? 's' : ''} required, ` + - `but${length ? ' only' : ''} ${length} found.`, - ...ctx - }) +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) } + var name = fn.name || 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f } -webidl.illegalConstructor = function () { - throw webidl.errors.exception({ - header: 'TypeError', - message: 'Illegal constructor' - }) -} -// https://tc39.es/ecma262/#sec-ecmascript-data-types-and-values -webidl.util.Type = function (V) { - switch (typeof V) { - case 'undefined': return 'Undefined' - case 'boolean': return 'Boolean' - case 'string': return 'String' - case 'symbol': return 'Symbol' - case 'number': return 'Number' - case 'bigint': return 'BigInt' - case 'function': - case 'object': { - if (V === null) { - return 'Null' - } +/***/ }), - return 'Object' - } - } -} +/***/ 54824: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -// https://webidl.spec.whatwg.org/#abstract-opdef-converttoint -webidl.util.ConvertToInt = function (V, bitLength, signedness, opts = {}) { - let upperBound - let lowerBound +"use strict"; - // 1. If bitLength is 64, then: - if (bitLength === 64) { - // 1. Let upperBound be 2^53 − 1. - upperBound = Math.pow(2, 53) - 1 +var os = __nccwpck_require__(22037); +var osxRelease = __nccwpck_require__(18344); +var winRelease = __nccwpck_require__(49513); - // 2. If signedness is "unsigned", then let lowerBound be 0. - if (signedness === 'unsigned') { - lowerBound = 0 - } else { - // 3. Otherwise let lowerBound be −2^53 + 1. - lowerBound = Math.pow(-2, 53) + 1 - } - } else if (signedness === 'unsigned') { - // 2. Otherwise, if signedness is "unsigned", then: +module.exports = function (platform, release) { + if (!platform && release) { + throw new Error('You can\'t specify a `release` without specfying `platform`'); + } - // 1. Let lowerBound be 0. - lowerBound = 0 + platform = platform || os.platform(); + release = release || os.release(); - // 2. 
Let upperBound be 2^bitLength − 1. - upperBound = Math.pow(2, bitLength) - 1 - } else { - // 3. Otherwise: + var id; - // 1. Let lowerBound be -2^bitLength − 1. - lowerBound = Math.pow(-2, bitLength) - 1 + if (platform === 'darwin') { + id = osxRelease(release).name; + return 'OS X' + (id ? ' ' + id : ''); + } - // 2. Let upperBound be 2^bitLength − 1 − 1. - upperBound = Math.pow(2, bitLength - 1) - 1 - } + if (platform === 'linux') { + id = release.replace(/^(\d+\.\d+).*/, '$1'); + return 'Linux' + (id ? ' ' + id : ''); + } - // 4. Let x be ? ToNumber(V). - let x = Number(V) + if (platform === 'win32') { + id = winRelease(release); + return 'Windows' + (id ? ' ' + id : ''); + } - // 5. If x is −0, then set x to +0. - if (x === 0) { - x = 0 - } + return platform; +}; - // 6. If the conversion is to an IDL type associated - // with the [EnforceRange] extended attribute, then: - if (opts.enforceRange === true) { - // 1. If x is NaN, +∞, or −∞, then throw a TypeError. - if ( - Number.isNaN(x) || - x === Number.POSITIVE_INFINITY || - x === Number.NEGATIVE_INFINITY - ) { - throw webidl.errors.exception({ - header: 'Integer conversion', - message: `Could not convert ${V} to an integer.` - }) - } - // 2. Set x to IntegerPart(x). - x = webidl.util.IntegerPart(x) +/***/ }), - // 3. If x < lowerBound or x > upperBound, then - // throw a TypeError. - if (x < lowerBound || x > upperBound) { - throw webidl.errors.exception({ - header: 'Integer conversion', - message: `Value must be between ${lowerBound}-${upperBound}, got ${x}.` - }) - } +/***/ 18344: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // 4. Return x. - return x - } +"use strict"; - // 7. If x is not NaN and the conversion is to an IDL - // type associated with the [Clamp] extended - // attribute, then: - if (!Number.isNaN(x) && opts.clamp === true) { - // 1. Set x to min(max(x, lowerBound), upperBound). - x = Math.min(Math.max(x, lowerBound), upperBound) +var os = __nccwpck_require__(22037); + +var nameMap = { + '15': 'El Capitan', + '14': 'Yosemite', + '13': 'Mavericks', + '12': 'Mountain Lion', + '11': 'Lion', + '10': 'Snow Leopard', + '9': 'Leopard', + '8': 'Tiger', + '7': 'Panther', + '6': 'Jaguar', + '5': 'Puma' +}; - // 2. Round x to the nearest integer, choosing the - // even integer if it lies halfway between two, - // and choosing +0 rather than −0. - if (Math.floor(x) % 2 === 0) { - x = Math.floor(x) - } else { - x = Math.ceil(x) - } +module.exports = function (release) { + release = (release || os.release()).split('.')[0]; + return { + name: nameMap[release], + version: '10.' + (Number(release) - 4) + }; +}; - // 3. Return x. - return x - } - // 8. If x is NaN, +0, +∞, or −∞, then return +0. - if ( - Number.isNaN(x) || - (x === 0 && Object.is(0, x)) || - x === Number.POSITIVE_INFINITY || - x === Number.NEGATIVE_INFINITY - ) { - return 0 - } +/***/ }), - // 9. Set x to IntegerPart(x). - x = webidl.util.IntegerPart(x) +/***/ 31726: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // 10. Set x to x modulo 2^bitLength. - x = x % Math.pow(2, bitLength) +"use strict"; +// Top level file is just a mixin of submodules & constants - // 11. If signedness is "signed" and x ≥ 2^bitLength − 1, - // then return x − 2^bitLength. - if (signedness === 'signed' && x >= Math.pow(2, bitLength) - 1) { - return x - Math.pow(2, bitLength) - } - // 12. Otherwise, return x. 
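Modules 54824 and 18344 above look like the vendored `os-name` and `osx-release` helpers. A hedged sketch of how that API behaves, assuming the standalone `os-name` package (the mapped release names come straight from the `nameMap` shown above):

```js
// Minimal sketch, assuming the standalone `os-name` package.
var osName = require('os-name');

// No arguments: uses os.platform() / os.release() of the current machine,
// e.g. "Linux 5.15" on an ubuntu-latest runner.
console.log(osName());

// Explicit platform + release: 'darwin' release 14.x maps to nameMap['14'].
console.log(osName('darwin', '14.0.0')); // "OS X Yosemite"

// Passing a release without a platform throws, per the guard above.
// osName(undefined, '14.0.0'); // -> Error
```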
- return x -} +var assign = (__nccwpck_require__(5483).assign); -// https://webidl.spec.whatwg.org/#abstract-opdef-integerpart -webidl.util.IntegerPart = function (n) { - // 1. Let r be floor(abs(n)). - const r = Math.floor(Math.abs(n)) +var deflate = __nccwpck_require__(17265); +var inflate = __nccwpck_require__(96522); +var constants = __nccwpck_require__(58282); - // 2. If n < 0, then return -1 × r. - if (n < 0) { - return -1 * r - } +var pako = {}; - // 3. Otherwise, return r. - return r -} +assign(pako, deflate, inflate, constants); -// https://webidl.spec.whatwg.org/#es-sequence -webidl.sequenceConverter = function (converter) { - return (V) => { - // 1. If Type(V) is not Object, throw a TypeError. - if (webidl.util.Type(V) !== 'Object') { - throw webidl.errors.exception({ - header: 'Sequence', - message: `Value of type ${webidl.util.Type(V)} is not an Object.` - }) - } +module.exports = pako; - // 2. Let method be ? GetMethod(V, @@iterator). - /** @type {Generator} */ - const method = V?.[Symbol.iterator]?.() - const seq = [] - // 3. If method is undefined, throw a TypeError. - if ( - method === undefined || - typeof method.next !== 'function' - ) { - throw webidl.errors.exception({ - header: 'Sequence', - message: 'Object is not an iterator.' - }) - } +/***/ }), - // https://webidl.spec.whatwg.org/#create-sequence-from-iterable - while (true) { - const { done, value } = method.next() +/***/ 17265: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - if (done) { - break - } +"use strict"; - seq.push(converter(value)) - } - return seq - } -} -// https://webidl.spec.whatwg.org/#es-to-record -webidl.recordConverter = function (keyConverter, valueConverter) { - return (O) => { - // 1. If Type(O) is not Object, throw a TypeError. - if (webidl.util.Type(O) !== 'Object') { - throw webidl.errors.exception({ - header: 'Record', - message: `Value of type ${webidl.util.Type(O)} is not an Object.` - }) - } +var zlib_deflate = __nccwpck_require__(70978); +var utils = __nccwpck_require__(5483); +var strings = __nccwpck_require__(42380); +var msg = __nccwpck_require__(1890); +var ZStream = __nccwpck_require__(86442); - // 2. Let result be a new empty instance of record. - const result = {} +var toString = Object.prototype.toString; - if (!types.isProxy(O)) { - // Object.keys only returns enumerable properties - const keys = Object.keys(O) +/* Public constants ==========================================================*/ +/* ===========================================================================*/ - for (const key of keys) { - // 1. Let typedKey be key converted to an IDL value of type K. - const typedKey = keyConverter(key) +var Z_NO_FLUSH = 0; +var Z_FINISH = 4; - // 2. Let value be ? Get(O, key). - // 3. Let typedValue be value converted to an IDL value of type V. - const typedValue = valueConverter(O[key]) +var Z_OK = 0; +var Z_STREAM_END = 1; +var Z_SYNC_FLUSH = 2; - // 4. Set result[typedKey] to typedValue. - result[typedKey] = typedValue - } +var Z_DEFAULT_COMPRESSION = -1; - // 5. Return result. - return result - } +var Z_DEFAULT_STRATEGY = 0; - // 3. Let keys be ? O.[[OwnPropertyKeys]](). - const keys = Reflect.ownKeys(O) +var Z_DEFLATED = 8; - // 4. For each key of keys. - for (const key of keys) { - // 1. Let desc be ? O.[[GetOwnProperty]](key). - const desc = Reflect.getOwnPropertyDescriptor(O, key) +/* ===========================================================================*/ - // 2. 
If desc is not undefined and desc.[[Enumerable]] is true: - if (desc?.enumerable) { - // 1. Let typedKey be key converted to an IDL value of type K. - const typedKey = keyConverter(key) - // 2. Let value be ? Get(O, key). - // 3. Let typedValue be value converted to an IDL value of type V. - const typedValue = valueConverter(O[key]) +/** + * class Deflate + * + * Generic JS-style wrapper for zlib calls. If you don't need + * streaming behaviour - use more simple functions: [[deflate]], + * [[deflateRaw]] and [[gzip]]. + **/ - // 4. Set result[typedKey] to typedValue. - result[typedKey] = typedValue - } - } +/* internal + * Deflate.chunks -> Array + * + * Chunks of output data, if [[Deflate#onData]] not overridden. + **/ - // 5. Return result. - return result - } -} +/** + * Deflate.result -> Uint8Array|Array + * + * Compressed result, generated by default [[Deflate#onData]] + * and [[Deflate#onEnd]] handlers. Filled after you push last chunk + * (call [[Deflate#push]] with `Z_FINISH` / `true` param) or if you + * push a chunk with explicit flush (call [[Deflate#push]] with + * `Z_SYNC_FLUSH` param). + **/ -webidl.interfaceConverter = function (i) { - return (V, opts = {}) => { - if (opts.strict !== false && !(V instanceof i)) { - throw webidl.errors.exception({ - header: i.name, - message: `Expected ${V} to be an instance of ${i.name}.` - }) - } +/** + * Deflate.err -> Number + * + * Error code after deflate finished. 0 (Z_OK) on success. + * You will not need it in real life, because deflate errors + * are possible only on wrong options or bad `onData` / `onEnd` + * custom handlers. + **/ - return V +/** + * Deflate.msg -> String + * + * Error message, if [[Deflate.err]] != 0 + **/ + + +/** + * new Deflate(options) + * - options (Object): zlib deflate options. + * + * Creates new deflator instance with specified params. Throws exception + * on bad params. Supported options: + * + * - `level` + * - `windowBits` + * - `memLevel` + * - `strategy` + * - `dictionary` + * + * [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced) + * for more information on these. 
+ * + * Additional options, for internal needs: + * + * - `chunkSize` - size of generated data chunks (16K by default) + * - `raw` (Boolean) - do raw deflate + * - `gzip` (Boolean) - create gzip wrapper + * - `to` (String) - if equal to 'string', then result will be "binary string" + * (each char code [0..255]) + * - `header` (Object) - custom header for gzip + * - `text` (Boolean) - true if compressed data believed to be text + * - `time` (Number) - modification time, unix timestamp + * - `os` (Number) - operation system code + * - `extra` (Array) - array of bytes with extra data (max 65536) + * - `name` (String) - file name (binary string) + * - `comment` (String) - comment (binary string) + * - `hcrc` (Boolean) - true if header crc should be added + * + * ##### Example: + * + * ```javascript + * var pako = require('pako') + * , chunk1 = Uint8Array([1,2,3,4,5,6,7,8,9]) + * , chunk2 = Uint8Array([10,11,12,13,14,15,16,17,18,19]); + * + * var deflate = new pako.Deflate({ level: 3}); + * + * deflate.push(chunk1, false); + * deflate.push(chunk2, true); // true -> last chunk + * + * if (deflate.err) { throw new Error(deflate.err); } + * + * console.log(deflate.result); + * ``` + **/ +function Deflate(options) { + if (!(this instanceof Deflate)) return new Deflate(options); + + this.options = utils.assign({ + level: Z_DEFAULT_COMPRESSION, + method: Z_DEFLATED, + chunkSize: 16384, + windowBits: 15, + memLevel: 8, + strategy: Z_DEFAULT_STRATEGY, + to: '' + }, options || {}); + + var opt = this.options; + + if (opt.raw && (opt.windowBits > 0)) { + opt.windowBits = -opt.windowBits; } -} -webidl.dictionaryConverter = function (converters) { - return (dictionary) => { - const type = webidl.util.Type(dictionary) - const dict = {} + else if (opt.gzip && (opt.windowBits > 0) && (opt.windowBits < 16)) { + opt.windowBits += 16; + } - if (type === 'Null' || type === 'Undefined') { - return dict - } else if (type !== 'Object') { - throw webidl.errors.exception({ - header: 'Dictionary', - message: `Expected ${dictionary} to be one of: Null, Undefined, Object.` - }) - } + this.err = 0; // error code, if happens (0 = Z_OK) + this.msg = ''; // error message + this.ended = false; // used to avoid multiple onEnd() calls + this.chunks = []; // chunks of compressed data - for (const options of converters) { - const { key, defaultValue, required, converter } = options + this.strm = new ZStream(); + this.strm.avail_out = 0; - if (required === true) { - if (!hasOwn(dictionary, key)) { - throw webidl.errors.exception({ - header: 'Dictionary', - message: `Missing required key "${key}".` - }) - } - } + var status = zlib_deflate.deflateInit2( + this.strm, + opt.level, + opt.method, + opt.windowBits, + opt.memLevel, + opt.strategy + ); - let value = dictionary[key] - const hasDefault = hasOwn(options, 'defaultValue') + if (status !== Z_OK) { + throw new Error(msg[status]); + } - // Only use defaultValue if value is undefined and - // a defaultValue options was provided. - if (hasDefault && value !== null) { - value = value ?? defaultValue - } + if (opt.header) { + zlib_deflate.deflateSetHeader(this.strm, opt.header); + } - // A key can be optional and have no default value. - // When this happens, do not perform a conversion, - // and do not assign the key a value. - if (required || hasDefault || value !== undefined) { - value = converter(value) + if (opt.dictionary) { + var dict; + // Convert data if needed + if (typeof opt.dictionary === 'string') { + // If we need to compress text, change encoding to utf8. 
+ dict = strings.string2buf(opt.dictionary); + } else if (toString.call(opt.dictionary) === '[object ArrayBuffer]') { + dict = new Uint8Array(opt.dictionary); + } else { + dict = opt.dictionary; + } - if ( - options.allowedValues && - !options.allowedValues.includes(value) - ) { - throw webidl.errors.exception({ - header: 'Dictionary', - message: `${value} is not an accepted type. Expected one of ${options.allowedValues.join(', ')}.` - }) - } + status = zlib_deflate.deflateSetDictionary(this.strm, dict); - dict[key] = value - } + if (status !== Z_OK) { + throw new Error(msg[status]); } - return dict + this._dict_set = true; } } -webidl.nullableConverter = function (converter) { - return (V) => { - if (V === null) { - return V +/** + * Deflate#push(data[, mode]) -> Boolean + * - data (Uint8Array|Array|ArrayBuffer|String): input data. Strings will be + * converted to utf8 byte sequence. + * - mode (Number|Boolean): 0..6 for corresponding Z_NO_FLUSH..Z_TREE modes. + * See constants. Skipped or `false` means Z_NO_FLUSH, `true` means Z_FINISH. + * + * Sends input data to deflate pipe, generating [[Deflate#onData]] calls with + * new compressed chunks. Returns `true` on success. The last data block must have + * mode Z_FINISH (or `true`). That will flush internal pending buffers and call + * [[Deflate#onEnd]]. For interim explicit flushes (without ending the stream) you + * can use mode Z_SYNC_FLUSH, keeping the compression context. + * + * On fail call [[Deflate#onEnd]] with error code and return false. + * + * We strongly recommend to use `Uint8Array` on input for best speed (output + * array format is detected automatically). Also, don't skip last param and always + * use the same type in your code (boolean or number). That will improve JS speed. + * + * For regular `Array`-s make sure all elements are [0..255]. + * + * ##### Example + * + * ```javascript + * push(chunk, false); // push one of data chunks + * ... + * push(chunk, true); // push last chunk + * ``` + **/ +Deflate.prototype.push = function (data, mode) { + var strm = this.strm; + var chunkSize = this.options.chunkSize; + var status, _mode; + + if (this.ended) { return false; } + + _mode = (mode === ~~mode) ? mode : ((mode === true) ? Z_FINISH : Z_NO_FLUSH); + + // Convert data if needed + if (typeof data === 'string') { + // If we need to compress text, change encoding to utf8. + strm.input = strings.string2buf(data); + } else if (toString.call(data) === '[object ArrayBuffer]') { + strm.input = new Uint8Array(data); + } else { + strm.input = data; + } + + strm.next_in = 0; + strm.avail_in = strm.input.length; + + do { + if (strm.avail_out === 0) { + strm.output = new utils.Buf8(chunkSize); + strm.next_out = 0; + strm.avail_out = chunkSize; } + status = zlib_deflate.deflate(strm, _mode); /* no bad return value */ - return converter(V) - } -} + if (status !== Z_STREAM_END && status !== Z_OK) { + this.onEnd(status); + this.ended = true; + return false; + } + if (strm.avail_out === 0 || (strm.avail_in === 0 && (_mode === Z_FINISH || _mode === Z_SYNC_FLUSH))) { + if (this.options.to === 'string') { + this.onData(strings.buf2binstring(utils.shrinkBuf(strm.output, strm.next_out))); + } else { + this.onData(utils.shrinkBuf(strm.output, strm.next_out)); + } + } + } while ((strm.avail_in > 0 || strm.avail_out === 0) && status !== Z_STREAM_END); -// https://webidl.spec.whatwg.org/#es-DOMString -webidl.converters.DOMString = function (V, opts = {}) { - // 1. 
If V is null and the conversion is to an IDL type - // associated with the [LegacyNullToEmptyString] - // extended attribute, then return the DOMString value - // that represents the empty string. - if (V === null && opts.legacyNullToEmptyString) { - return '' + // Finalize on the last chunk. + if (_mode === Z_FINISH) { + status = zlib_deflate.deflateEnd(this.strm); + this.onEnd(status); + this.ended = true; + return status === Z_OK; } - // 2. Let x be ? ToString(V). - if (typeof V === 'symbol') { - throw new TypeError('Could not convert argument of type symbol to string.') + // callback interim results if Z_SYNC_FLUSH. + if (_mode === Z_SYNC_FLUSH) { + this.onEnd(Z_OK); + strm.avail_out = 0; + return true; } - // 3. Return the IDL DOMString value that represents the - // same sequence of code units as the one the - // ECMAScript String value x represents. - return String(V) -} + return true; +}; -// https://webidl.spec.whatwg.org/#es-ByteString -webidl.converters.ByteString = function (V) { - // 1. Let x be ? ToString(V). - // Note: DOMString converter perform ? ToString(V) - const x = webidl.converters.DOMString(V) - // 2. If the value of any element of x is greater than - // 255, then throw a TypeError. - for (let index = 0; index < x.length; index++) { - if (x.charCodeAt(index) > 255) { - throw new TypeError( - 'Cannot convert argument to a ByteString because the character at ' + - `index ${index} has a value of ${x.charCodeAt(index)} which is greater than 255.` - ) +/** + * Deflate#onData(chunk) -> Void + * - chunk (Uint8Array|Array|String): output data. Type of array depends + * on js engine support. When string output requested, each chunk + * will be string. + * + * By default, stores data blocks in `chunks[]` property and glue + * those in `onEnd`. Override this handler, if you need another behaviour. + **/ +Deflate.prototype.onData = function (chunk) { + this.chunks.push(chunk); +}; + + +/** + * Deflate#onEnd(status) -> Void + * - status (Number): deflate status. 0 (Z_OK) on success, + * other if not. + * + * Called once after you tell deflate that the input stream is + * complete (Z_FINISH) or should be flushed (Z_SYNC_FLUSH) + * or if an error happened. By default - join collected chunks, + * free memory and fill `results` / `err` properties. + **/ +Deflate.prototype.onEnd = function (status) { + // On success - join + if (status === Z_OK) { + if (this.options.to === 'string') { + this.result = this.chunks.join(''); + } else { + this.result = utils.flattenChunks(this.chunks); } } + this.chunks = []; + this.err = status; + this.msg = this.strm.msg; +}; - // 3. Return an IDL ByteString value whose length is the - // length of x, and where the value of each element is - // the value of the corresponding element of x. - return x -} -// https://webidl.spec.whatwg.org/#es-USVString -webidl.converters.USVString = toUSVString +/** + * deflate(data[, options]) -> Uint8Array|Array|String + * - data (Uint8Array|Array|String): input data to compress. + * - options (Object): zlib deflate options. + * + * Compress `data` with deflate algorithm and `options`. + * + * Supported options are: + * + * - level + * - windowBits + * - memLevel + * - strategy + * - dictionary + * + * [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced) + * for more information on these. + * + * Sugar (options): + * + * - `raw` (Boolean) - say that we work with raw stream, if you don't wish to specify + * negative windowBits implicitly. 
+ * - `to` (String) - if equal to 'string', then result will be "binary string" + * (each char code [0..255]) + * + * ##### Example: + * + * ```javascript + * var pako = require('pako') + * , data = Uint8Array([1,2,3,4,5,6,7,8,9]); + * + * console.log(pako.deflate(data)); + * ``` + **/ +function deflate(input, options) { + var deflator = new Deflate(options); -// https://webidl.spec.whatwg.org/#es-boolean -webidl.converters.boolean = function (V) { - // 1. Let x be the result of computing ToBoolean(V). - const x = Boolean(V) + deflator.push(input, true); - // 2. Return the IDL boolean value that is the one that represents - // the same truth value as the ECMAScript Boolean value x. - return x -} + // That will never happens, if you don't cheat with options :) + if (deflator.err) { throw deflator.msg || msg[deflator.err]; } -// https://webidl.spec.whatwg.org/#es-any -webidl.converters.any = function (V) { - return V + return deflator.result; } -// https://webidl.spec.whatwg.org/#es-long-long -webidl.converters['long long'] = function (V) { - // 1. Let x be ? ConvertToInt(V, 64, "signed"). - const x = webidl.util.ConvertToInt(V, 64, 'signed') - // 2. Return the IDL long long value that represents - // the same numeric value as x. - return x +/** + * deflateRaw(data[, options]) -> Uint8Array|Array|String + * - data (Uint8Array|Array|String): input data to compress. + * - options (Object): zlib deflate options. + * + * The same as [[deflate]], but creates raw data, without wrapper + * (header and adler32 crc). + **/ +function deflateRaw(input, options) { + options = options || {}; + options.raw = true; + return deflate(input, options); } -// https://webidl.spec.whatwg.org/#es-unsigned-long-long -webidl.converters['unsigned long long'] = function (V) { - // 1. Let x be ? ConvertToInt(V, 64, "unsigned"). - const x = webidl.util.ConvertToInt(V, 64, 'unsigned') - // 2. Return the IDL unsigned long long value that - // represents the same numeric value as x. - return x +/** + * gzip(data[, options]) -> Uint8Array|Array|String + * - data (Uint8Array|Array|String): input data to compress. + * - options (Object): zlib deflate options. + * + * The same as [[deflate]], but create gzip wrapper instead of + * deflate one. + **/ +function gzip(input, options) { + options = options || {}; + options.gzip = true; + return deflate(input, options); } -// https://webidl.spec.whatwg.org/#es-unsigned-long -webidl.converters['unsigned long'] = function (V) { - // 1. Let x be ? ConvertToInt(V, 32, "unsigned"). - const x = webidl.util.ConvertToInt(V, 32, 'unsigned') - // 2. Return the IDL unsigned long value that - // represents the same numeric value as x. - return x -} +exports.Deflate = Deflate; +exports.deflate = deflate; +exports.deflateRaw = deflateRaw; +exports.gzip = gzip; -// https://webidl.spec.whatwg.org/#es-unsigned-short -webidl.converters['unsigned short'] = function (V, opts) { - // 1. Let x be ? ConvertToInt(V, 16, "unsigned"). - const x = webidl.util.ConvertToInt(V, 16, 'unsigned', opts) - // 2. Return the IDL unsigned short value that represents - // the same numeric value as x. 
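The `Deflate` wrapper plus the `deflate`, `deflateRaw`, and `gzip` one-shot helpers added here are the compression half of the vendored pako library. A short sketch of the two usage styles the doc comments describe, assuming the standalone `pako` package:

```js
// Minimal sketch, assuming the standalone `pako` package.
var pako = require('pako');

// One-shot: strings are converted to utf8 internally (string2buf).
var zipped = pako.deflate('hello hello hello', { level: 6 });

// Streaming: push chunks and mark the last one with `true` (Z_FINISH).
var deflator = new pako.Deflate({ gzip: true });
deflator.push(new Uint8Array([1, 2, 3, 4]), false);
deflator.push(new Uint8Array([5, 6, 7, 8]), true);
if (deflator.err) { throw new Error(deflator.msg); }
console.log(deflator.result); // Uint8Array holding the gzip-wrapped stream
```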
- return x +/***/ }), + +/***/ 96522: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + + +var zlib_inflate = __nccwpck_require__(90409); +var utils = __nccwpck_require__(5483); +var strings = __nccwpck_require__(42380); +var c = __nccwpck_require__(58282); +var msg = __nccwpck_require__(1890); +var ZStream = __nccwpck_require__(86442); +var GZheader = __nccwpck_require__(35105); + +var toString = Object.prototype.toString; + +/** + * class Inflate + * + * Generic JS-style wrapper for zlib calls. If you don't need + * streaming behaviour - use more simple functions: [[inflate]] + * and [[inflateRaw]]. + **/ + +/* internal + * inflate.chunks -> Array + * + * Chunks of output data, if [[Inflate#onData]] not overridden. + **/ + +/** + * Inflate.result -> Uint8Array|Array|String + * + * Uncompressed result, generated by default [[Inflate#onData]] + * and [[Inflate#onEnd]] handlers. Filled after you push last chunk + * (call [[Inflate#push]] with `Z_FINISH` / `true` param) or if you + * push a chunk with explicit flush (call [[Inflate#push]] with + * `Z_SYNC_FLUSH` param). + **/ + +/** + * Inflate.err -> Number + * + * Error code after inflate finished. 0 (Z_OK) on success. + * Should be checked if broken data possible. + **/ + +/** + * Inflate.msg -> String + * + * Error message, if [[Inflate.err]] != 0 + **/ + + +/** + * new Inflate(options) + * - options (Object): zlib inflate options. + * + * Creates new inflator instance with specified params. Throws exception + * on bad params. Supported options: + * + * - `windowBits` + * - `dictionary` + * + * [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced) + * for more information on these. + * + * Additional options, for internal needs: + * + * - `chunkSize` - size of generated data chunks (16K by default) + * - `raw` (Boolean) - do raw inflate + * - `to` (String) - if equal to 'string', then result will be converted + * from utf8 to utf16 (javascript) string. When string output requested, + * chunk length can differ from `chunkSize`, depending on content. + * + * By default, when no options set, autodetect deflate/gzip data format via + * wrapper header. + * + * ##### Example: + * + * ```javascript + * var pako = require('pako') + * , chunk1 = Uint8Array([1,2,3,4,5,6,7,8,9]) + * , chunk2 = Uint8Array([10,11,12,13,14,15,16,17,18,19]); + * + * var inflate = new pako.Inflate({ level: 3}); + * + * inflate.push(chunk1, false); + * inflate.push(chunk2, true); // true -> last chunk + * + * if (inflate.err) { throw new Error(inflate.err); } + * + * console.log(inflate.result); + * ``` + **/ +function Inflate(options) { + if (!(this instanceof Inflate)) return new Inflate(options); + + this.options = utils.assign({ + chunkSize: 16384, + windowBits: 0, + to: '' + }, options || {}); + + var opt = this.options; + + // Force window size for `raw` data, if not set directly, + // because we have no header for autodetect. + if (opt.raw && (opt.windowBits >= 0) && (opt.windowBits < 16)) { + opt.windowBits = -opt.windowBits; + if (opt.windowBits === 0) { opt.windowBits = -15; } + } + + // If `windowBits` not defined (and mode not raw) - set autodetect flag for gzip/deflate + if ((opt.windowBits >= 0) && (opt.windowBits < 16) && + !(options && options.windowBits)) { + opt.windowBits += 32; + } + + // Gzip header has no info about windows size, we can do autodetect only + // for deflate. 
So, if window size not set, force it to max when gzip possible + if ((opt.windowBits > 15) && (opt.windowBits < 48)) { + // bit 3 (16) -> gzipped data + // bit 4 (32) -> autodetect gzip/deflate + if ((opt.windowBits & 15) === 0) { + opt.windowBits |= 15; + } + } + + this.err = 0; // error code, if happens (0 = Z_OK) + this.msg = ''; // error message + this.ended = false; // used to avoid multiple onEnd() calls + this.chunks = []; // chunks of compressed data + + this.strm = new ZStream(); + this.strm.avail_out = 0; + + var status = zlib_inflate.inflateInit2( + this.strm, + opt.windowBits + ); + + if (status !== c.Z_OK) { + throw new Error(msg[status]); + } + + this.header = new GZheader(); + + zlib_inflate.inflateGetHeader(this.strm, this.header); + + // Setup dictionary + if (opt.dictionary) { + // Convert data if needed + if (typeof opt.dictionary === 'string') { + opt.dictionary = strings.string2buf(opt.dictionary); + } else if (toString.call(opt.dictionary) === '[object ArrayBuffer]') { + opt.dictionary = new Uint8Array(opt.dictionary); + } + if (opt.raw) { //In raw mode we need to set the dictionary early + status = zlib_inflate.inflateSetDictionary(this.strm, opt.dictionary); + if (status !== c.Z_OK) { + throw new Error(msg[status]); + } + } + } } -// https://webidl.spec.whatwg.org/#idl-ArrayBuffer -webidl.converters.ArrayBuffer = function (V, opts = {}) { - // 1. If Type(V) is not Object, or V does not have an - // [[ArrayBufferData]] internal slot, then throw a - // TypeError. - // see: https://tc39.es/ecma262/#sec-properties-of-the-arraybuffer-instances - // see: https://tc39.es/ecma262/#sec-properties-of-the-sharedarraybuffer-instances - if ( - webidl.util.Type(V) !== 'Object' || - !types.isAnyArrayBuffer(V) - ) { - throw webidl.errors.conversionFailed({ - prefix: `${V}`, - argument: `${V}`, - types: ['ArrayBuffer'] - }) +/** + * Inflate#push(data[, mode]) -> Boolean + * - data (Uint8Array|Array|ArrayBuffer|String): input data + * - mode (Number|Boolean): 0..6 for corresponding Z_NO_FLUSH..Z_TREE modes. + * See constants. Skipped or `false` means Z_NO_FLUSH, `true` means Z_FINISH. + * + * Sends input data to inflate pipe, generating [[Inflate#onData]] calls with + * new output chunks. Returns `true` on success. The last data block must have + * mode Z_FINISH (or `true`). That will flush internal pending buffers and call + * [[Inflate#onEnd]]. For interim explicit flushes (without ending the stream) you + * can use mode Z_SYNC_FLUSH, keeping the decompression context. + * + * On fail call [[Inflate#onEnd]] with error code and return false. + * + * We strongly recommend to use `Uint8Array` on input for best speed (output + * format is detected automatically). Also, don't skip last param and always + * use the same type in your code (boolean or number). That will improve JS speed. + * + * For regular `Array`-s make sure all elements are [0..255]. + * + * ##### Example + * + * ```javascript + * push(chunk, false); // push one of data chunks + * ... + * push(chunk, true); // push last chunk + * ``` + **/ +Inflate.prototype.push = function (data, mode) { + var strm = this.strm; + var chunkSize = this.options.chunkSize; + var dictionary = this.options.dictionary; + var status, _mode; + var next_out_utf8, tail, utf8str; + + // Flag to properly process Z_BUF_ERROR on testing inflate call + // when we check that all output data was flushed. + var allowBufError = false; + + if (this.ended) { return false; } + _mode = (mode === ~~mode) ? mode : ((mode === true) ? 
c.Z_FINISH : c.Z_NO_FLUSH); + + // Convert data if needed + if (typeof data === 'string') { + // Only binary strings can be decompressed on practice + strm.input = strings.binstring2buf(data); + } else if (toString.call(data) === '[object ArrayBuffer]') { + strm.input = new Uint8Array(data); + } else { + strm.input = data; } - // 2. If the conversion is not to an IDL type associated - // with the [AllowShared] extended attribute, and - // IsSharedArrayBuffer(V) is true, then throw a - // TypeError. - if (opts.allowShared === false && types.isSharedArrayBuffer(V)) { - throw webidl.errors.exception({ - header: 'ArrayBuffer', - message: 'SharedArrayBuffer is not allowed.' - }) + strm.next_in = 0; + strm.avail_in = strm.input.length; + + do { + if (strm.avail_out === 0) { + strm.output = new utils.Buf8(chunkSize); + strm.next_out = 0; + strm.avail_out = chunkSize; + } + + status = zlib_inflate.inflate(strm, c.Z_NO_FLUSH); /* no bad return value */ + + if (status === c.Z_NEED_DICT && dictionary) { + status = zlib_inflate.inflateSetDictionary(this.strm, dictionary); + } + + if (status === c.Z_BUF_ERROR && allowBufError === true) { + status = c.Z_OK; + allowBufError = false; + } + + if (status !== c.Z_STREAM_END && status !== c.Z_OK) { + this.onEnd(status); + this.ended = true; + return false; + } + + if (strm.next_out) { + if (strm.avail_out === 0 || status === c.Z_STREAM_END || (strm.avail_in === 0 && (_mode === c.Z_FINISH || _mode === c.Z_SYNC_FLUSH))) { + + if (this.options.to === 'string') { + + next_out_utf8 = strings.utf8border(strm.output, strm.next_out); + + tail = strm.next_out - next_out_utf8; + utf8str = strings.buf2string(strm.output, next_out_utf8); + + // move tail + strm.next_out = tail; + strm.avail_out = chunkSize - tail; + if (tail) { utils.arraySet(strm.output, strm.output, next_out_utf8, tail, 0); } + + this.onData(utf8str); + + } else { + this.onData(utils.shrinkBuf(strm.output, strm.next_out)); + } + } + } + + // When no more input data, we should check that internal inflate buffers + // are flushed. The only way to do it when avail_out = 0 - run one more + // inflate pass. But if output data not exists, inflate return Z_BUF_ERROR. + // Here we set flag to process this error properly. + // + // NOTE. Deflate does not return error in this case and does not needs such + // logic. + if (strm.avail_in === 0 && strm.avail_out === 0) { + allowBufError = true; + } + + } while ((strm.avail_in > 0 || strm.avail_out === 0) && status !== c.Z_STREAM_END); + + if (status === c.Z_STREAM_END) { + _mode = c.Z_FINISH; } - // 3. If the conversion is not to an IDL type associated - // with the [AllowResizable] extended attribute, and - // IsResizableArrayBuffer(V) is true, then throw a - // TypeError. - // Note: resizable ArrayBuffers are currently a proposal. + // Finalize on the last chunk. + if (_mode === c.Z_FINISH) { + status = zlib_inflate.inflateEnd(this.strm); + this.onEnd(status); + this.ended = true; + return status === c.Z_OK; + } - // 4. Return the IDL ArrayBuffer value that is a - // reference to the same object as V. - return V + // callback interim results if Z_SYNC_FLUSH. + if (_mode === c.Z_SYNC_FLUSH) { + this.onEnd(c.Z_OK); + strm.avail_out = 0; + return true; + } + + return true; +}; + + +/** + * Inflate#onData(chunk) -> Void + * - chunk (Uint8Array|Array|String): output data. Type of array depends + * on js engine support. When string output requested, each chunk + * will be string. 
+ * + * By default, stores data blocks in `chunks[]` property and glue + * those in `onEnd`. Override this handler, if you need another behaviour. + **/ +Inflate.prototype.onData = function (chunk) { + this.chunks.push(chunk); +}; + + +/** + * Inflate#onEnd(status) -> Void + * - status (Number): inflate status. 0 (Z_OK) on success, + * other if not. + * + * Called either after you tell inflate that the input stream is + * complete (Z_FINISH) or should be flushed (Z_SYNC_FLUSH) + * or if an error happened. By default - join collected chunks, + * free memory and fill `results` / `err` properties. + **/ +Inflate.prototype.onEnd = function (status) { + // On success - join + if (status === c.Z_OK) { + if (this.options.to === 'string') { + // Glue & convert here, until we teach pako to send + // utf8 aligned strings to onData + this.result = this.chunks.join(''); + } else { + this.result = utils.flattenChunks(this.chunks); + } + } + this.chunks = []; + this.err = status; + this.msg = this.strm.msg; +}; + + +/** + * inflate(data[, options]) -> Uint8Array|Array|String + * - data (Uint8Array|Array|String): input data to decompress. + * - options (Object): zlib inflate options. + * + * Decompress `data` with inflate/ungzip and `options`. Autodetect + * format via wrapper header by default. That's why we don't provide + * separate `ungzip` method. + * + * Supported options are: + * + * - windowBits + * + * [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced) + * for more information. + * + * Sugar (options): + * + * - `raw` (Boolean) - say that we work with raw stream, if you don't wish to specify + * negative windowBits implicitly. + * - `to` (String) - if equal to 'string', then result will be converted + * from utf8 to utf16 (javascript) string. When string output requested, + * chunk length can differ from `chunkSize`, depending on content. + * + * + * ##### Example: + * + * ```javascript + * var pako = require('pako') + * , input = pako.deflate([1,2,3,4,5,6,7,8,9]) + * , output; + * + * try { + * output = pako.inflate(input); + * } catch (err) + * console.log(err); + * } + * ``` + **/ +function inflate(input, options) { + var inflator = new Inflate(options); + + inflator.push(input, true); + + // That will never happens, if you don't cheat with options :) + if (inflator.err) { throw inflator.msg || msg[inflator.err]; } + + return inflator.result; } -webidl.converters.TypedArray = function (V, T, opts = {}) { - // 1. Let T be the IDL type V is being converted to. - // 2. If Type(V) is not Object, or V does not have a - // [[TypedArrayName]] internal slot with a value - // equal to T’s name, then throw a TypeError. - if ( - webidl.util.Type(V) !== 'Object' || - !types.isTypedArray(V) || - V.constructor.name !== T.name - ) { - throw webidl.errors.conversionFailed({ - prefix: `${T.name}`, - argument: `${V}`, - types: [T.name] - }) +/** + * inflateRaw(data[, options]) -> Uint8Array|Array|String + * - data (Uint8Array|Array|String): input data to decompress. + * - options (Object): zlib inflate options. + * + * The same as [[inflate]], but creates raw data, without wrapper + * (header and adler32 crc). + **/ +function inflateRaw(input, options) { + options = options || {}; + options.raw = true; + return inflate(input, options); +} + + +/** + * ungzip(data[, options]) -> Uint8Array|Array|String + * - data (Uint8Array|Array|String): input data to decompress. + * - options (Object): zlib inflate options. 
+ * + * Just shortcut to [[inflate]], because it autodetects format + * by header.content. Done for convenience. + **/ + + +exports.Inflate = Inflate; +exports.inflate = inflate; +exports.inflateRaw = inflateRaw; +exports.ungzip = inflate; + + +/***/ }), + +/***/ 5483: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + + +var TYPED_OK = (typeof Uint8Array !== 'undefined') && + (typeof Uint16Array !== 'undefined') && + (typeof Int32Array !== 'undefined'); + +function _has(obj, key) { + return Object.prototype.hasOwnProperty.call(obj, key); +} + +exports.assign = function (obj /*from1, from2, from3, ...*/) { + var sources = Array.prototype.slice.call(arguments, 1); + while (sources.length) { + var source = sources.shift(); + if (!source) { continue; } + + if (typeof source !== 'object') { + throw new TypeError(source + 'must be non-object'); + } + + for (var p in source) { + if (_has(source, p)) { + obj[p] = source[p]; + } + } } - // 3. If the conversion is not to an IDL type associated - // with the [AllowShared] extended attribute, and - // IsSharedArrayBuffer(V.[[ViewedArrayBuffer]]) is - // true, then throw a TypeError. - if (opts.allowShared === false && types.isSharedArrayBuffer(V.buffer)) { - throw webidl.errors.exception({ - header: 'ArrayBuffer', - message: 'SharedArrayBuffer is not allowed.' - }) + return obj; +}; + + +// reduce buffer size, avoiding mem copy +exports.shrinkBuf = function (buf, size) { + if (buf.length === size) { return buf; } + if (buf.subarray) { return buf.subarray(0, size); } + buf.length = size; + return buf; +}; + + +var fnTyped = { + arraySet: function (dest, src, src_offs, len, dest_offs) { + if (src.subarray && dest.subarray) { + dest.set(src.subarray(src_offs, src_offs + len), dest_offs); + return; + } + // Fallback to ordinary array + for (var i = 0; i < len; i++) { + dest[dest_offs + i] = src[src_offs + i]; + } + }, + // Join array of chunks to single array. + flattenChunks: function (chunks) { + var i, l, len, pos, chunk, result; + + // calculate data length + len = 0; + for (i = 0, l = chunks.length; i < l; i++) { + len += chunks[i].length; + } + + // join chunks + result = new Uint8Array(len); + pos = 0; + for (i = 0, l = chunks.length; i < l; i++) { + chunk = chunks[i]; + result.set(chunk, pos); + pos += chunk.length; + } + + return result; } +}; - // 4. If the conversion is not to an IDL type associated - // with the [AllowResizable] extended attribute, and - // IsResizableArrayBuffer(V.[[ViewedArrayBuffer]]) is - // true, then throw a TypeError. - // Note: resizable array buffers are currently a proposal +var fnUntyped = { + arraySet: function (dest, src, src_offs, len, dest_offs) { + for (var i = 0; i < len; i++) { + dest[dest_offs + i] = src[src_offs + i]; + } + }, + // Join array of chunks to single array. + flattenChunks: function (chunks) { + return [].concat.apply([], chunks); + } +}; - // 5. Return the IDL value of type T that is a reference - // to the same object as V. 
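`Inflate` autodetects the zlib/gzip wrapper from the stream header, which is why `ungzip` is exported as a plain alias of `inflate`. A hedged round-trip sketch under the same assumption (standalone `pako`):

```js
// Minimal sketch, assuming the standalone `pako` package.
var pako = require('pako');

var original = 'sync this text to OSS';   // illustrative payload
var packed = pako.deflate(original);      // zlib-wrapped Uint8Array

// The wrapper is autodetected; `to: 'string'` converts the utf8 output
// back to a JS string using the strings helpers that follow below.
var unpacked = pako.inflate(packed, { to: 'string' });
console.log(unpacked === original);       // true
```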
- return V + +// Enable/Disable typed arrays use, for testing +// +exports.setTyped = function (on) { + if (on) { + exports.Buf8 = Uint8Array; + exports.Buf16 = Uint16Array; + exports.Buf32 = Int32Array; + exports.assign(exports, fnTyped); + } else { + exports.Buf8 = Array; + exports.Buf16 = Array; + exports.Buf32 = Array; + exports.assign(exports, fnUntyped); + } +}; + +exports.setTyped(TYPED_OK); + + +/***/ }), + +/***/ 42380: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; +// String encode/decode helpers + + + +var utils = __nccwpck_require__(5483); + + +// Quick check if we can use fast array to bin string conversion +// +// - apply(Array) can fail on Android 2.2 +// - apply(Uint8Array) can fail on iOS 5.1 Safari +// +var STR_APPLY_OK = true; +var STR_APPLY_UIA_OK = true; + +try { String.fromCharCode.apply(null, [ 0 ]); } catch (__) { STR_APPLY_OK = false; } +try { String.fromCharCode.apply(null, new Uint8Array(1)); } catch (__) { STR_APPLY_UIA_OK = false; } + + +// Table with utf8 lengths (calculated by first byte of sequence) +// Note, that 5 & 6-byte values and some 4-byte values can not be represented in JS, +// because max possible codepoint is 0x10ffff +var _utf8len = new utils.Buf8(256); +for (var q = 0; q < 256; q++) { + _utf8len[q] = (q >= 252 ? 6 : q >= 248 ? 5 : q >= 240 ? 4 : q >= 224 ? 3 : q >= 192 ? 2 : 1); } +_utf8len[254] = _utf8len[254] = 1; // Invalid sequence start -webidl.converters.DataView = function (V, opts = {}) { - // 1. If Type(V) is not Object, or V does not have a - // [[DataView]] internal slot, then throw a TypeError. - if (webidl.util.Type(V) !== 'Object' || !types.isDataView(V)) { - throw webidl.errors.exception({ - header: 'DataView', - message: 'Object is not a DataView.' - }) + +// convert string to array (typed, when possible) +exports.string2buf = function (str) { + var buf, c, c2, m_pos, i, str_len = str.length, buf_len = 0; + + // count binary size + for (m_pos = 0; m_pos < str_len; m_pos++) { + c = str.charCodeAt(m_pos); + if ((c & 0xfc00) === 0xd800 && (m_pos + 1 < str_len)) { + c2 = str.charCodeAt(m_pos + 1); + if ((c2 & 0xfc00) === 0xdc00) { + c = 0x10000 + ((c - 0xd800) << 10) + (c2 - 0xdc00); + m_pos++; + } + } + buf_len += c < 0x80 ? 1 : c < 0x800 ? 2 : c < 0x10000 ? 3 : 4; } - // 2. If the conversion is not to an IDL type associated - // with the [AllowShared] extended attribute, and - // IsSharedArrayBuffer(V.[[ViewedArrayBuffer]]) is true, - // then throw a TypeError. - if (opts.allowShared === false && types.isSharedArrayBuffer(V.buffer)) { - throw webidl.errors.exception({ - header: 'ArrayBuffer', - message: 'SharedArrayBuffer is not allowed.' - }) + // allocate buffer + buf = new utils.Buf8(buf_len); + + // convert + for (i = 0, m_pos = 0; i < buf_len; m_pos++) { + c = str.charCodeAt(m_pos); + if ((c & 0xfc00) === 0xd800 && (m_pos + 1 < str_len)) { + c2 = str.charCodeAt(m_pos + 1); + if ((c2 & 0xfc00) === 0xdc00) { + c = 0x10000 + ((c - 0xd800) << 10) + (c2 - 0xdc00); + m_pos++; + } + } + if (c < 0x80) { + /* one byte */ + buf[i++] = c; + } else if (c < 0x800) { + /* two bytes */ + buf[i++] = 0xC0 | (c >>> 6); + buf[i++] = 0x80 | (c & 0x3f); + } else if (c < 0x10000) { + /* three bytes */ + buf[i++] = 0xE0 | (c >>> 12); + buf[i++] = 0x80 | (c >>> 6 & 0x3f); + buf[i++] = 0x80 | (c & 0x3f); + } else { + /* four bytes */ + buf[i++] = 0xf0 | (c >>> 18); + buf[i++] = 0x80 | (c >>> 12 & 0x3f); + buf[i++] = 0x80 | (c >>> 6 & 0x3f); + buf[i++] = 0x80 | (c & 0x3f); + } } - // 3. 
If the conversion is not to an IDL type associated - // with the [AllowResizable] extended attribute, and - // IsResizableArrayBuffer(V.[[ViewedArrayBuffer]]) is - // true, then throw a TypeError. - // Note: resizable ArrayBuffers are currently a proposal + return buf; +}; - // 4. Return the IDL DataView value that is a reference - // to the same object as V. - return V +// Helper (used in 2 places) +function buf2binstring(buf, len) { + // On Chrome, the arguments in a function call that are allowed is `65534`. + // If the length of the buffer is smaller than that, we can use this optimization, + // otherwise we will take a slower path. + if (len < 65534) { + if ((buf.subarray && STR_APPLY_UIA_OK) || (!buf.subarray && STR_APPLY_OK)) { + return String.fromCharCode.apply(null, utils.shrinkBuf(buf, len)); + } + } + + var result = ''; + for (var i = 0; i < len; i++) { + result += String.fromCharCode(buf[i]); + } + return result; } -// https://webidl.spec.whatwg.org/#BufferSource -webidl.converters.BufferSource = function (V, opts = {}) { - if (types.isAnyArrayBuffer(V)) { - return webidl.converters.ArrayBuffer(V, opts) + +// Convert byte array to binary string +exports.buf2binstring = function (buf) { + return buf2binstring(buf, buf.length); +}; + + +// Convert binary string (typed, when possible) +exports.binstring2buf = function (str) { + var buf = new utils.Buf8(str.length); + for (var i = 0, len = buf.length; i < len; i++) { + buf[i] = str.charCodeAt(i); } + return buf; +}; - if (types.isTypedArray(V)) { - return webidl.converters.TypedArray(V, V.constructor) + +// convert array to string +exports.buf2string = function (buf, max) { + var i, out, c, c_len; + var len = max || buf.length; + + // Reserve max possible length (2 words per char) + // NB: by unknown reasons, Array is significantly faster for + // String.fromCharCode.apply than Uint16Array. + var utf16buf = new Array(len * 2); + + for (out = 0, i = 0; i < len;) { + c = buf[i++]; + // quick process ascii + if (c < 0x80) { utf16buf[out++] = c; continue; } + + c_len = _utf8len[c]; + // skip 5 & 6 byte codes + if (c_len > 4) { utf16buf[out++] = 0xfffd; i += c_len - 1; continue; } + + // apply mask on first byte + c &= c_len === 2 ? 0x1f : c_len === 3 ? 0x0f : 0x07; + // join the rest + while (c_len > 1 && i < len) { + c = (c << 6) | (buf[i++] & 0x3f); + c_len--; + } + + // terminated by end of string? + if (c_len > 1) { utf16buf[out++] = 0xfffd; continue; } + + if (c < 0x10000) { + utf16buf[out++] = c; + } else { + c -= 0x10000; + utf16buf[out++] = 0xd800 | ((c >> 10) & 0x3ff); + utf16buf[out++] = 0xdc00 | (c & 0x3ff); + } } - if (types.isDataView(V)) { - return webidl.converters.DataView(V, opts) + return buf2binstring(utf16buf, out); +}; + + +// Calculate max possible position in utf8 buffer, +// that will not break sequence. If that's not possible +// - (very small limits) return max size as is. +// +// buf[] - utf8 bytes array +// max - length limit (mandatory); +exports.utf8border = function (buf, max) { + var pos; + + max = max || buf.length; + if (max > buf.length) { max = buf.length; } + + // go back from last position, until start of sequence found + pos = max - 1; + while (pos >= 0 && (buf[pos] & 0xC0) === 0x80) { pos--; } + + // Very small and broken sequence, + // return max, because we should return something anyway. + if (pos < 0) { return max; } + + // If we came to start of buffer - that means buffer is too small, + // return max too. + if (pos === 0) { return max; } + + return (pos + _utf8len[buf[pos]] > max) ? 
pos : max; +}; + + +/***/ }), + +/***/ 86924: +/***/ ((module) => { + +"use strict"; + + +// Note: adler32 takes 12% for level 0 and 2% for level 6. +// It isn't worth it to make additional optimizations as in original. +// Small size is preferable. + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source distribution. + +function adler32(adler, buf, len, pos) { + var s1 = (adler & 0xffff) |0, + s2 = ((adler >>> 16) & 0xffff) |0, + n = 0; + + while (len !== 0) { + // Set limit ~ twice less than 5552, to keep + // s2 in 31-bits, because we force signed ints. + // in other case %= will fail. + n = len > 2000 ? 2000 : len; + len -= n; + + do { + s1 = (s1 + buf[pos++]) |0; + s2 = (s2 + s1) |0; + } while (--n); + + s1 %= 65521; + s2 %= 65521; } - throw new TypeError(`Could not convert ${V} to a BufferSource.`) + return (s1 | (s2 << 16)) |0; } -webidl.converters['sequence'] = webidl.sequenceConverter( - webidl.converters.ByteString -) -webidl.converters['sequence>'] = webidl.sequenceConverter( - webidl.converters['sequence'] -) +module.exports = adler32; -webidl.converters['record'] = webidl.recordConverter( - webidl.converters.ByteString, - webidl.converters.ByteString -) + +/***/ }), + +/***/ 58282: +/***/ ((module) => { + +"use strict"; + + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source distribution. module.exports = { - webidl -} + + /* Allowed flush values; see deflate() and inflate() below for details */ + Z_NO_FLUSH: 0, + Z_PARTIAL_FLUSH: 1, + Z_SYNC_FLUSH: 2, + Z_FULL_FLUSH: 3, + Z_FINISH: 4, + Z_BLOCK: 5, + Z_TREES: 6, + + /* Return codes for the compression/decompression functions. Negative values + * are errors, positive values are used for special but normal events. 
+ */ + Z_OK: 0, + Z_STREAM_END: 1, + Z_NEED_DICT: 2, + Z_ERRNO: -1, + Z_STREAM_ERROR: -2, + Z_DATA_ERROR: -3, + //Z_MEM_ERROR: -4, + Z_BUF_ERROR: -5, + //Z_VERSION_ERROR: -6, + + /* compression levels */ + Z_NO_COMPRESSION: 0, + Z_BEST_SPEED: 1, + Z_BEST_COMPRESSION: 9, + Z_DEFAULT_COMPRESSION: -1, + + + Z_FILTERED: 1, + Z_HUFFMAN_ONLY: 2, + Z_RLE: 3, + Z_FIXED: 4, + Z_DEFAULT_STRATEGY: 0, + + /* Possible values of the data_type field (though see inflate()) */ + Z_BINARY: 0, + Z_TEXT: 1, + //Z_ASCII: 1, // = Z_TEXT (deprecated) + Z_UNKNOWN: 2, + + /* The deflate compression method */ + Z_DEFLATED: 8 + //Z_NULL: null // Use -1 or null inline, depending on var type +}; /***/ }), -/***/ 4854: +/***/ 87242: /***/ ((module) => { "use strict"; -/** - * @see https://encoding.spec.whatwg.org/#concept-encoding-get - * @param {string|undefined} label +// Note: we can't get significant speed boost here. +// So write code to minimize size - no pregenerated tables +// and array tools dependencies. + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source distribution. + +// Use ordinary array, since untyped makes no boost here +function makeTable() { + var c, table = []; + + for (var n = 0; n < 256; n++) { + c = n; + for (var k = 0; k < 8; k++) { + c = ((c & 1) ? (0xEDB88320 ^ (c >>> 1)) : (c >>> 1)); + } + table[n] = c; + } + + return table; +} + +// Create table on load. Just 255 signed longs. Not a problem. +var crcTable = makeTable(); + + +function crc32(crc, buf, len, pos) { + var t = crcTable, + end = pos + len; + + crc ^= -1; + + for (var i = pos; i < end; i++) { + crc = (crc >>> 8) ^ t[(crc ^ buf[i]) & 0xFF]; + } + + return (crc ^ (-1)); // >>> 0; +} + + +module.exports = crc32; + + +/***/ }), + +/***/ 70978: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. 
Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source distribution. + +var utils = __nccwpck_require__(5483); +var trees = __nccwpck_require__(78754); +var adler32 = __nccwpck_require__(86924); +var crc32 = __nccwpck_require__(87242); +var msg = __nccwpck_require__(1890); + +/* Public constants ==========================================================*/ +/* ===========================================================================*/ + + +/* Allowed flush values; see deflate() and inflate() below for details */ +var Z_NO_FLUSH = 0; +var Z_PARTIAL_FLUSH = 1; +//var Z_SYNC_FLUSH = 2; +var Z_FULL_FLUSH = 3; +var Z_FINISH = 4; +var Z_BLOCK = 5; +//var Z_TREES = 6; + + +/* Return codes for the compression/decompression functions. Negative values + * are errors, positive values are used for special but normal events. */ -function getEncoding (label) { - if (!label) { - return 'failure' +var Z_OK = 0; +var Z_STREAM_END = 1; +//var Z_NEED_DICT = 2; +//var Z_ERRNO = -1; +var Z_STREAM_ERROR = -2; +var Z_DATA_ERROR = -3; +//var Z_MEM_ERROR = -4; +var Z_BUF_ERROR = -5; +//var Z_VERSION_ERROR = -6; + + +/* compression levels */ +//var Z_NO_COMPRESSION = 0; +//var Z_BEST_SPEED = 1; +//var Z_BEST_COMPRESSION = 9; +var Z_DEFAULT_COMPRESSION = -1; + + +var Z_FILTERED = 1; +var Z_HUFFMAN_ONLY = 2; +var Z_RLE = 3; +var Z_FIXED = 4; +var Z_DEFAULT_STRATEGY = 0; + +/* Possible values of the data_type field (though see inflate()) */ +//var Z_BINARY = 0; +//var Z_TEXT = 1; +//var Z_ASCII = 1; // = Z_TEXT +var Z_UNKNOWN = 2; + + +/* The deflate compression method */ +var Z_DEFLATED = 8; + +/*============================================================================*/ + + +var MAX_MEM_LEVEL = 9; +/* Maximum value for memLevel in deflateInit2 */ +var MAX_WBITS = 15; +/* 32K LZ77 window */ +var DEF_MEM_LEVEL = 8; + + +var LENGTH_CODES = 29; +/* number of length codes, not counting the special END_BLOCK code */ +var LITERALS = 256; +/* number of literal bytes 0..255 */ +var L_CODES = LITERALS + 1 + LENGTH_CODES; +/* number of Literal or Length codes, including the END_BLOCK code */ +var D_CODES = 30; +/* number of distance codes */ +var BL_CODES = 19; +/* number of codes used to transfer the bit lengths */ +var HEAP_SIZE = 2 * L_CODES + 1; +/* maximum heap size */ +var MAX_BITS = 15; +/* All codes must not exceed MAX_BITS bits */ + +var MIN_MATCH = 3; +var MAX_MATCH = 258; +var MIN_LOOKAHEAD = (MAX_MATCH + MIN_MATCH + 1); + +var PRESET_DICT = 0x20; + +var INIT_STATE = 42; +var EXTRA_STATE = 69; +var NAME_STATE = 73; +var COMMENT_STATE = 91; +var HCRC_STATE = 103; +var BUSY_STATE = 113; +var FINISH_STATE = 666; + +var BS_NEED_MORE = 1; /* block not completed, need more input or more output */ +var BS_BLOCK_DONE = 2; /* block flush performed */ +var BS_FINISH_STARTED = 3; /* finish started, need only more output at next deflate */ +var BS_FINISH_DONE = 4; /* finish done, accept no more input or output */ + +var OS_CODE = 0x03; // Unix :) . Don't detect, use this default. + +function err(strm, errorCode) { + strm.msg = msg[errorCode]; + return errorCode; +} + +function rank(f) { + return ((f) << 1) - ((f) > 4 ? 9 : 0); +} + +function zero(buf) { var len = buf.length; while (--len >= 0) { buf[len] = 0; } } + + +/* ========================================================================= + * Flush as much pending output as possible. 
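The adler32 and crc32 helpers above are both incremental: they take the running checksum plus a (buffer, length, offset) window and return the updated value, which is how the deflate module folds each input chunk into `strm.adler` further down. A small sketch of driving them directly (the require paths assume pako's unbundled source layout and are illustrative; in this bundle the same code is inlined as the modules shown above):

```javascript
const adler32 = require('pako/lib/zlib/adler32');
const crc32 = require('pako/lib/zlib/crc32');

const data = new Uint8Array([0x68, 0x65, 0x6c, 0x6c, 0x6f]); // "hello"

let a = 1; // Adler-32 starts from 1 (zlib convention, see strm.adler handling)
let c = 0; // CRC-32 starts from 0

// Checksums can be updated chunk by chunk; here it is a single chunk.
a = adler32(a, data, data.length, 0);
c = crc32(c, data, data.length, 0);

console.log((a >>> 0).toString(16)); // 62c0215 — Adler-32 of "hello"
console.log((c >>> 0).toString(16)); // CRC-32 of "hello"
```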
All deflate() output goes + * through this function so some applications may wish to modify it + * to avoid allocating a large strm->output buffer and copying into it. + * (See also read_buf()). + */ +function flush_pending(strm) { + var s = strm.state; + + //_tr_flush_bits(s); + var len = s.pending; + if (len > strm.avail_out) { + len = strm.avail_out; } + if (len === 0) { return; } - // 1. Remove any leading and trailing ASCII whitespace from label. - // 2. If label is an ASCII case-insensitive match for any of the - // labels listed in the table below, then return the - // corresponding encoding; otherwise return failure. - switch (label.trim().toLowerCase()) { - case 'unicode-1-1-utf-8': - case 'unicode11utf8': - case 'unicode20utf8': - case 'utf-8': - case 'utf8': - case 'x-unicode20utf8': - return 'UTF-8' - case '866': - case 'cp866': - case 'csibm866': - case 'ibm866': - return 'IBM866' - case 'csisolatin2': - case 'iso-8859-2': - case 'iso-ir-101': - case 'iso8859-2': - case 'iso88592': - case 'iso_8859-2': - case 'iso_8859-2:1987': - case 'l2': - case 'latin2': - return 'ISO-8859-2' - case 'csisolatin3': - case 'iso-8859-3': - case 'iso-ir-109': - case 'iso8859-3': - case 'iso88593': - case 'iso_8859-3': - case 'iso_8859-3:1988': - case 'l3': - case 'latin3': - return 'ISO-8859-3' - case 'csisolatin4': - case 'iso-8859-4': - case 'iso-ir-110': - case 'iso8859-4': - case 'iso88594': - case 'iso_8859-4': - case 'iso_8859-4:1988': - case 'l4': - case 'latin4': - return 'ISO-8859-4' - case 'csisolatincyrillic': - case 'cyrillic': - case 'iso-8859-5': - case 'iso-ir-144': - case 'iso8859-5': - case 'iso88595': - case 'iso_8859-5': - case 'iso_8859-5:1988': - return 'ISO-8859-5' - case 'arabic': - case 'asmo-708': - case 'csiso88596e': - case 'csiso88596i': - case 'csisolatinarabic': - case 'ecma-114': - case 'iso-8859-6': - case 'iso-8859-6-e': - case 'iso-8859-6-i': + utils.arraySet(strm.output, s.pending_buf, s.pending_out, len, strm.next_out); + strm.next_out += len; + s.pending_out += len; + strm.total_out += len; + strm.avail_out -= len; + s.pending -= len; + if (s.pending === 0) { + s.pending_out = 0; + } +} + + +function flush_block_only(s, last) { + trees._tr_flush_block(s, (s.block_start >= 0 ? s.block_start : -1), s.strstart - s.block_start, last); + s.block_start = s.strstart; + flush_pending(s.strm); +} + + +function put_byte(s, b) { + s.pending_buf[s.pending++] = b; +} + + +/* ========================================================================= + * Put a short in the pending buffer. The 16-bit value is put in MSB order. + * IN assertion: the stream state is correct and there is enough room in + * pending_buf. + */ +function putShortMSB(s, b) { +// put_byte(s, (Byte)(b >> 8)); +// put_byte(s, (Byte)(b & 0xff)); + s.pending_buf[s.pending++] = (b >>> 8) & 0xff; + s.pending_buf[s.pending++] = b & 0xff; +} + + +/* =========================================================================== + * Read a new buffer from the current input stream, update the adler32 + * and total number of bytes read. All deflate() input goes through + * this function so some applications may wish to modify it to avoid + * allocating a large strm->input buffer and copying from it. + * (See also flush_pending()). 
+ */ +function read_buf(strm, buf, start, size) { + var len = strm.avail_in; + + if (len > size) { len = size; } + if (len === 0) { return 0; } + + strm.avail_in -= len; + + // zmemcpy(buf, strm->next_in, len); + utils.arraySet(buf, strm.input, strm.next_in, len, start); + if (strm.state.wrap === 1) { + strm.adler = adler32(strm.adler, buf, len, start); + } + + else if (strm.state.wrap === 2) { + strm.adler = crc32(strm.adler, buf, len, start); + } + + strm.next_in += len; + strm.total_in += len; + + return len; +} + + +/* =========================================================================== + * Set match_start to the longest match starting at the given string and + * return its length. Matches shorter or equal to prev_length are discarded, + * in which case the result is equal to prev_length and match_start is + * garbage. + * IN assertions: cur_match is the head of the hash chain for the current + * string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1 + * OUT assertion: the match length is not greater than s->lookahead. + */ +function longest_match(s, cur_match) { + var chain_length = s.max_chain_length; /* max hash chain length */ + var scan = s.strstart; /* current string */ + var match; /* matched string */ + var len; /* length of current match */ + var best_len = s.prev_length; /* best match length so far */ + var nice_match = s.nice_match; /* stop if match long enough */ + var limit = (s.strstart > (s.w_size - MIN_LOOKAHEAD)) ? + s.strstart - (s.w_size - MIN_LOOKAHEAD) : 0/*NIL*/; + + var _win = s.window; // shortcut + + var wmask = s.w_mask; + var prev = s.prev; + + /* Stop when cur_match becomes <= limit. To simplify the code, + * we prevent matches with the string of window index 0. + */ + + var strend = s.strstart + MAX_MATCH; + var scan_end1 = _win[scan + best_len - 1]; + var scan_end = _win[scan + best_len]; + + /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16. + * It is easy to get rid of this optimization if necessary. + */ + // Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever"); + + /* Do not waste too much time if we already have a good match: */ + if (s.prev_length >= s.good_match) { + chain_length >>= 2; + } + /* Do not look for matches beyond the end of the input. This is necessary + * to make deflate deterministic. + */ + if (nice_match > s.lookahead) { nice_match = s.lookahead; } + + // Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead"); + + do { + // Assert(cur_match < s->strstart, "no future"); + match = cur_match; + + /* Skip to next match if the match length cannot increase + * or if the match length is less than 2. Note that the checks below + * for insufficient lookahead only occur occasionally for performance + * reasons. Therefore uninitialized memory will be accessed, and + * conditional jumps will be made that depend on those values. + * However the length of the match is limited to the lookahead, so + * the output of deflate is not affected by the uninitialized values. + */ + + if (_win[match + best_len] !== scan_end || + _win[match + best_len - 1] !== scan_end1 || + _win[match] !== _win[scan] || + _win[++match] !== _win[scan + 1]) { + continue; + } + + /* The check at best_len-1 can be removed because it will be made + * again later. (This heuristic is not always a win.) + * It is not necessary to compare scan[2] and match[2] since they + * are always equal when the other bytes match, given that + * the hash keys are equal and that HASH_BITS >= 8. 
+ */ + scan += 2; + match++; + // Assert(*scan == *match, "match[2]?"); + + /* We check for insufficient lookahead only every 8th comparison; + * the 256th check will be made at strstart+258. + */ + do { + /*jshint noempty:false*/ + } while (_win[++scan] === _win[++match] && _win[++scan] === _win[++match] && + _win[++scan] === _win[++match] && _win[++scan] === _win[++match] && + _win[++scan] === _win[++match] && _win[++scan] === _win[++match] && + _win[++scan] === _win[++match] && _win[++scan] === _win[++match] && + scan < strend); + + // Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan"); + + len = MAX_MATCH - (strend - scan); + scan = strend - MAX_MATCH; + + if (len > best_len) { + s.match_start = cur_match; + best_len = len; + if (len >= nice_match) { + break; + } + scan_end1 = _win[scan + best_len - 1]; + scan_end = _win[scan + best_len]; + } + } while ((cur_match = prev[cur_match & wmask]) > limit && --chain_length !== 0); + + if (best_len <= s.lookahead) { + return best_len; + } + return s.lookahead; +} + + +/* =========================================================================== + * Fill the window when the lookahead becomes insufficient. + * Updates strstart and lookahead. + * + * IN assertion: lookahead < MIN_LOOKAHEAD + * OUT assertions: strstart <= window_size-MIN_LOOKAHEAD + * At least one byte has been read, or avail_in == 0; reads are + * performed for at least two bytes (required for the zip translate_eol + * option -- not supported here). + */ +function fill_window(s) { + var _w_size = s.w_size; + var p, n, m, more, str; + + //Assert(s->lookahead < MIN_LOOKAHEAD, "already enough lookahead"); + + do { + more = s.window_size - s.lookahead - s.strstart; + + // JS ints have 32 bit, block below not needed + /* Deal with !@#$% 64K limit: */ + //if (sizeof(int) <= 2) { + // if (more == 0 && s->strstart == 0 && s->lookahead == 0) { + // more = wsize; + // + // } else if (more == (unsigned)(-1)) { + // /* Very unlikely, but possible on 16 bit machine if + // * strstart == 0 && lookahead == 1 (input done a byte at time) + // */ + // more--; + // } + //} + + + /* If the window is almost full and there is insufficient lookahead, + * move the upper half to the lower one to make room in the upper half. + */ + if (s.strstart >= _w_size + (_w_size - MIN_LOOKAHEAD)) { + + utils.arraySet(s.window, s.window, _w_size, _w_size, 0); + s.match_start -= _w_size; + s.strstart -= _w_size; + /* we now have strstart >= MAX_DIST */ + s.block_start -= _w_size; + + /* Slide the hash table (could be avoided with 32 bit values + at the expense of memory usage). We slide even when level == 0 + to keep the hash table consistent if we switch back to level > 0 + later. (Using level 0 permanently is not an optimal usage of + zlib, so we don't care about this pathological case.) + */ + + n = s.hash_size; + p = n; + do { + m = s.head[--p]; + s.head[p] = (m >= _w_size ? m - _w_size : 0); + } while (--n); + + n = _w_size; + p = n; + do { + m = s.prev[--p]; + s.prev[p] = (m >= _w_size ? m - _w_size : 0); + /* If n is not on any hash chain, prev[n] is garbage but + * its value will never be used. 
+ */ + } while (--n); + + more += _w_size; + } + if (s.strm.avail_in === 0) { + break; + } + + /* If there was no sliding: + * strstart <= WSIZE+MAX_DIST-1 && lookahead <= MIN_LOOKAHEAD - 1 && + * more == window_size - lookahead - strstart + * => more >= window_size - (MIN_LOOKAHEAD-1 + WSIZE + MAX_DIST-1) + * => more >= window_size - 2*WSIZE + 2 + * In the BIG_MEM or MMAP case (not yet supported), + * window_size == input_size + MIN_LOOKAHEAD && + * strstart + s->lookahead <= input_size => more >= MIN_LOOKAHEAD. + * Otherwise, window_size == 2*WSIZE so more >= 2. + * If there was sliding, more >= WSIZE. So in all cases, more >= 2. + */ + //Assert(more >= 2, "more < 2"); + n = read_buf(s.strm, s.window, s.strstart + s.lookahead, more); + s.lookahead += n; + + /* Initialize the hash value now that we have some input: */ + if (s.lookahead + s.insert >= MIN_MATCH) { + str = s.strstart - s.insert; + s.ins_h = s.window[str]; + + /* UPDATE_HASH(s, s->ins_h, s->window[str + 1]); */ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[str + 1]) & s.hash_mask; +//#if MIN_MATCH != 3 +// Call update_hash() MIN_MATCH-3 more times +//#endif + while (s.insert) { + /* UPDATE_HASH(s, s->ins_h, s->window[str + MIN_MATCH-1]); */ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[str + MIN_MATCH - 1]) & s.hash_mask; + + s.prev[str & s.w_mask] = s.head[s.ins_h]; + s.head[s.ins_h] = str; + str++; + s.insert--; + if (s.lookahead + s.insert < MIN_MATCH) { + break; + } + } + } + /* If the whole input has less than MIN_MATCH bytes, ins_h is garbage, + * but this is not important since only literal bytes will be emitted. + */ + + } while (s.lookahead < MIN_LOOKAHEAD && s.strm.avail_in !== 0); + + /* If the WIN_INIT bytes after the end of the current data have never been + * written, then zero those bytes in order to avoid memory check reports of + * the use of uninitialized (or uninitialised as Julian writes) bytes by + * the longest match routines. Update the high water mark for the next + * time through here. WIN_INIT is set to MAX_MATCH since the longest match + * routines allow scanning to strstart + MAX_MATCH, ignoring lookahead. + */ +// if (s.high_water < s.window_size) { +// var curr = s.strstart + s.lookahead; +// var init = 0; +// +// if (s.high_water < curr) { +// /* Previous high water mark below current data -- zero WIN_INIT +// * bytes or up to end of window, whichever is less. +// */ +// init = s.window_size - curr; +// if (init > WIN_INIT) +// init = WIN_INIT; +// zmemzero(s->window + curr, (unsigned)init); +// s->high_water = curr + init; +// } +// else if (s->high_water < (ulg)curr + WIN_INIT) { +// /* High water mark at or above current data, but below current data +// * plus WIN_INIT -- zero out to current data plus WIN_INIT, or up +// * to end of window, whichever is less. +// */ +// init = (ulg)curr + WIN_INIT - s->high_water; +// if (init > s->window_size - s->high_water) +// init = s->window_size - s->high_water; +// zmemzero(s->window + s->high_water, (unsigned)init); +// s->high_water += init; +// } +// } +// +// Assert((ulg)s->strstart <= s->window_size - MIN_LOOKAHEAD, +// "not enough room for search"); +} + +/* =========================================================================== + * Copy without compression as much as possible from the input stream, return + * the current block state. + * This function does not insert new strings in the dictionary since + * uncompressible data is probably not useful. This function is used + * only for the level=0 compression option. 
+ * NOTE: this function should be optimized to avoid extra copying from + * window to pending_buf. + */ +function deflate_stored(s, flush) { + /* Stored blocks are limited to 0xffff bytes, pending_buf is limited + * to pending_buf_size, and each stored block has a 5 byte header: + */ + var max_block_size = 0xffff; + + if (max_block_size > s.pending_buf_size - 5) { + max_block_size = s.pending_buf_size - 5; + } + + /* Copy as much as possible from input to output: */ + for (;;) { + /* Fill the window as much as possible: */ + if (s.lookahead <= 1) { + + //Assert(s->strstart < s->w_size+MAX_DIST(s) || + // s->block_start >= (long)s->w_size, "slide too late"); +// if (!(s.strstart < s.w_size + (s.w_size - MIN_LOOKAHEAD) || +// s.block_start >= s.w_size)) { +// throw new Error("slide too late"); +// } + + fill_window(s); + if (s.lookahead === 0 && flush === Z_NO_FLUSH) { + return BS_NEED_MORE; + } + + if (s.lookahead === 0) { + break; + } + /* flush the current block */ + } + //Assert(s->block_start >= 0L, "block gone"); +// if (s.block_start < 0) throw new Error("block gone"); + + s.strstart += s.lookahead; + s.lookahead = 0; + + /* Emit a stored block if pending_buf will be full: */ + var max_start = s.block_start + max_block_size; + + if (s.strstart === 0 || s.strstart >= max_start) { + /* strstart == 0 is possible when wraparound on 16-bit machine */ + s.lookahead = s.strstart - max_start; + s.strstart = max_start; + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + + + } + /* Flush if we may have to slide, otherwise block_start may become + * negative and the data will be gone: + */ + if (s.strstart - s.block_start >= (s.w_size - MIN_LOOKAHEAD)) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + } + + s.insert = 0; + + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true); + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED; + } + /***/ + return BS_FINISH_DONE; + } + + if (s.strstart > s.block_start) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + + return BS_NEED_MORE; +} + +/* =========================================================================== + * Compress as much as possible from the input stream, return the current + * block state. + * This function does not perform lazy evaluation of matches and inserts + * new strings in the dictionary only for unmatched strings or for short + * matches. It is used only for the fast compression options. + */ +function deflate_fast(s, flush) { + var hash_head; /* head of the hash chain */ + var bflush; /* set if current block must be flushed */ + + for (;;) { + /* Make sure that we always have enough lookahead, except + * at the end of the input file. We need MAX_MATCH bytes + * for the next match, plus MIN_MATCH bytes to insert the + * string following the next match. + */ + if (s.lookahead < MIN_LOOKAHEAD) { + fill_window(s); + if (s.lookahead < MIN_LOOKAHEAD && flush === Z_NO_FLUSH) { + return BS_NEED_MORE; + } + if (s.lookahead === 0) { + break; /* flush the current block */ + } + } + + /* Insert the string window[strstart .. 
strstart+2] in the + * dictionary, and set hash_head to the head of the hash chain: + */ + hash_head = 0/*NIL*/; + if (s.lookahead >= MIN_MATCH) { + /*** INSERT_STRING(s, s.strstart, hash_head); ***/ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask; + hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h]; + s.head[s.ins_h] = s.strstart; + /***/ + } + + /* Find the longest match, discarding those <= prev_length. + * At this point we have always match_length < MIN_MATCH + */ + if (hash_head !== 0/*NIL*/ && ((s.strstart - hash_head) <= (s.w_size - MIN_LOOKAHEAD))) { + /* To simplify the code, we prevent matches with the string + * of window index 0 (in particular we have to avoid a match + * of the string with itself at the start of the input file). + */ + s.match_length = longest_match(s, hash_head); + /* longest_match() sets match_start */ + } + if (s.match_length >= MIN_MATCH) { + // check_match(s, s.strstart, s.match_start, s.match_length); // for debug only + + /*** _tr_tally_dist(s, s.strstart - s.match_start, + s.match_length - MIN_MATCH, bflush); ***/ + bflush = trees._tr_tally(s, s.strstart - s.match_start, s.match_length - MIN_MATCH); + + s.lookahead -= s.match_length; + + /* Insert new strings in the hash table only if the match length + * is not too large. This saves time but degrades compression. + */ + if (s.match_length <= s.max_lazy_match/*max_insert_length*/ && s.lookahead >= MIN_MATCH) { + s.match_length--; /* string at strstart already in table */ + do { + s.strstart++; + /*** INSERT_STRING(s, s.strstart, hash_head); ***/ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask; + hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h]; + s.head[s.ins_h] = s.strstart; + /***/ + /* strstart never exceeds WSIZE-MAX_MATCH, so there are + * always MIN_MATCH bytes ahead. + */ + } while (--s.match_length !== 0); + s.strstart++; + } else + { + s.strstart += s.match_length; + s.match_length = 0; + s.ins_h = s.window[s.strstart]; + /* UPDATE_HASH(s, s.ins_h, s.window[s.strstart+1]); */ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + 1]) & s.hash_mask; + +//#if MIN_MATCH != 3 +// Call UPDATE_HASH() MIN_MATCH-3 more times +//#endif + /* If lookahead < MIN_MATCH, ins_h is garbage, but it does not + * matter since it will be recomputed at next deflate call. + */ + } + } else { + /* No match, output a literal byte */ + //Tracevv((stderr,"%c", s.window[s.strstart])); + /*** _tr_tally_lit(s, s.window[s.strstart], bflush); ***/ + bflush = trees._tr_tally(s, 0, s.window[s.strstart]); + + s.lookahead--; + s.strstart++; + } + if (bflush) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + } + s.insert = ((s.strstart < (MIN_MATCH - 1)) ? s.strstart : MIN_MATCH - 1); + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true); + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED; + } + /***/ + return BS_FINISH_DONE; + } + if (s.last_lit) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + return BS_BLOCK_DONE; +} + +/* =========================================================================== + * Same as above, but achieves better compression. We use a lazy + * evaluation for matches: a match is finally adopted only if there is + * no better match at the next window position. 
+ */ +function deflate_slow(s, flush) { + var hash_head; /* head of hash chain */ + var bflush; /* set if current block must be flushed */ + + var max_insert; + + /* Process the input block. */ + for (;;) { + /* Make sure that we always have enough lookahead, except + * at the end of the input file. We need MAX_MATCH bytes + * for the next match, plus MIN_MATCH bytes to insert the + * string following the next match. + */ + if (s.lookahead < MIN_LOOKAHEAD) { + fill_window(s); + if (s.lookahead < MIN_LOOKAHEAD && flush === Z_NO_FLUSH) { + return BS_NEED_MORE; + } + if (s.lookahead === 0) { break; } /* flush the current block */ + } + + /* Insert the string window[strstart .. strstart+2] in the + * dictionary, and set hash_head to the head of the hash chain: + */ + hash_head = 0/*NIL*/; + if (s.lookahead >= MIN_MATCH) { + /*** INSERT_STRING(s, s.strstart, hash_head); ***/ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask; + hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h]; + s.head[s.ins_h] = s.strstart; + /***/ + } + + /* Find the longest match, discarding those <= prev_length. + */ + s.prev_length = s.match_length; + s.prev_match = s.match_start; + s.match_length = MIN_MATCH - 1; + + if (hash_head !== 0/*NIL*/ && s.prev_length < s.max_lazy_match && + s.strstart - hash_head <= (s.w_size - MIN_LOOKAHEAD)/*MAX_DIST(s)*/) { + /* To simplify the code, we prevent matches with the string + * of window index 0 (in particular we have to avoid a match + * of the string with itself at the start of the input file). + */ + s.match_length = longest_match(s, hash_head); + /* longest_match() sets match_start */ + + if (s.match_length <= 5 && + (s.strategy === Z_FILTERED || (s.match_length === MIN_MATCH && s.strstart - s.match_start > 4096/*TOO_FAR*/))) { + + /* If prev_match is also MIN_MATCH, match_start is garbage + * but we will ignore the current match anyway. + */ + s.match_length = MIN_MATCH - 1; + } + } + /* If there was a match at the previous step and the current + * match is not better, output the previous match: + */ + if (s.prev_length >= MIN_MATCH && s.match_length <= s.prev_length) { + max_insert = s.strstart + s.lookahead - MIN_MATCH; + /* Do not insert strings in hash table beyond this. */ + + //check_match(s, s.strstart-1, s.prev_match, s.prev_length); + + /***_tr_tally_dist(s, s.strstart - 1 - s.prev_match, + s.prev_length - MIN_MATCH, bflush);***/ + bflush = trees._tr_tally(s, s.strstart - 1 - s.prev_match, s.prev_length - MIN_MATCH); + /* Insert in hash table all strings up to the end of the match. + * strstart-1 and strstart are already inserted. If there is not + * enough lookahead, the last two strings are not inserted in + * the hash table. + */ + s.lookahead -= s.prev_length - 1; + s.prev_length -= 2; + do { + if (++s.strstart <= max_insert) { + /*** INSERT_STRING(s, s.strstart, hash_head); ***/ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask; + hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h]; + s.head[s.ins_h] = s.strstart; + /***/ + } + } while (--s.prev_length !== 0); + s.match_available = 0; + s.match_length = MIN_MATCH - 1; + s.strstart++; + + if (bflush) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + + } else if (s.match_available) { + /* If there was no match at the previous position, output a + * single literal. 
If there was a match but the current match + * is longer, truncate the previous match to a single literal. + */ + //Tracevv((stderr,"%c", s->window[s->strstart-1])); + /*** _tr_tally_lit(s, s.window[s.strstart-1], bflush); ***/ + bflush = trees._tr_tally(s, 0, s.window[s.strstart - 1]); + + if (bflush) { + /*** FLUSH_BLOCK_ONLY(s, 0) ***/ + flush_block_only(s, false); + /***/ + } + s.strstart++; + s.lookahead--; + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + } else { + /* There is no previous match to compare with, wait for + * the next step to decide. + */ + s.match_available = 1; + s.strstart++; + s.lookahead--; + } + } + //Assert (flush != Z_NO_FLUSH, "no flush?"); + if (s.match_available) { + //Tracevv((stderr,"%c", s->window[s->strstart-1])); + /*** _tr_tally_lit(s, s.window[s.strstart-1], bflush); ***/ + bflush = trees._tr_tally(s, 0, s.window[s.strstart - 1]); + + s.match_available = 0; + } + s.insert = s.strstart < MIN_MATCH - 1 ? s.strstart : MIN_MATCH - 1; + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true); + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED; + } + /***/ + return BS_FINISH_DONE; + } + if (s.last_lit) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + + return BS_BLOCK_DONE; +} + + +/* =========================================================================== + * For Z_RLE, simply look for runs of bytes, generate matches only of distance + * one. Do not maintain a hash table. (It will be regenerated if this run of + * deflate switches away from Z_RLE.) + */ +function deflate_rle(s, flush) { + var bflush; /* set if current block must be flushed */ + var prev; /* byte at distance one to match */ + var scan, strend; /* scan goes up to strend for length of run */ + + var _win = s.window; + + for (;;) { + /* Make sure that we always have enough lookahead, except + * at the end of the input file. We need MAX_MATCH bytes + * for the longest run, plus one for the unrolled loop. 
+ */ + if (s.lookahead <= MAX_MATCH) { + fill_window(s); + if (s.lookahead <= MAX_MATCH && flush === Z_NO_FLUSH) { + return BS_NEED_MORE; + } + if (s.lookahead === 0) { break; } /* flush the current block */ + } + + /* See how many times the previous byte repeats */ + s.match_length = 0; + if (s.lookahead >= MIN_MATCH && s.strstart > 0) { + scan = s.strstart - 1; + prev = _win[scan]; + if (prev === _win[++scan] && prev === _win[++scan] && prev === _win[++scan]) { + strend = s.strstart + MAX_MATCH; + do { + /*jshint noempty:false*/ + } while (prev === _win[++scan] && prev === _win[++scan] && + prev === _win[++scan] && prev === _win[++scan] && + prev === _win[++scan] && prev === _win[++scan] && + prev === _win[++scan] && prev === _win[++scan] && + scan < strend); + s.match_length = MAX_MATCH - (strend - scan); + if (s.match_length > s.lookahead) { + s.match_length = s.lookahead; + } + } + //Assert(scan <= s->window+(uInt)(s->window_size-1), "wild scan"); + } + + /* Emit match if have run of MIN_MATCH or longer, else emit literal */ + if (s.match_length >= MIN_MATCH) { + //check_match(s, s.strstart, s.strstart - 1, s.match_length); + + /*** _tr_tally_dist(s, 1, s.match_length - MIN_MATCH, bflush); ***/ + bflush = trees._tr_tally(s, 1, s.match_length - MIN_MATCH); + + s.lookahead -= s.match_length; + s.strstart += s.match_length; + s.match_length = 0; + } else { + /* No match, output a literal byte */ + //Tracevv((stderr,"%c", s->window[s->strstart])); + /*** _tr_tally_lit(s, s.window[s.strstart], bflush); ***/ + bflush = trees._tr_tally(s, 0, s.window[s.strstart]); + + s.lookahead--; + s.strstart++; + } + if (bflush) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + } + s.insert = 0; + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true); + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED; + } + /***/ + return BS_FINISH_DONE; + } + if (s.last_lit) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + return BS_BLOCK_DONE; +} + +/* =========================================================================== + * For Z_HUFFMAN_ONLY, do not look for matches. Do not maintain a hash table. + * (It will be regenerated if this run of deflate switches away from Huffman.) + */ +function deflate_huff(s, flush) { + var bflush; /* set if current block must be flushed */ + + for (;;) { + /* Make sure that we have a literal to write. 
*/ + if (s.lookahead === 0) { + fill_window(s); + if (s.lookahead === 0) { + if (flush === Z_NO_FLUSH) { + return BS_NEED_MORE; + } + break; /* flush the current block */ + } + } + + /* Output a literal byte */ + s.match_length = 0; + //Tracevv((stderr,"%c", s->window[s->strstart])); + /*** _tr_tally_lit(s, s.window[s.strstart], bflush); ***/ + bflush = trees._tr_tally(s, 0, s.window[s.strstart]); + s.lookahead--; + s.strstart++; + if (bflush) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + } + s.insert = 0; + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true); + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED; + } + /***/ + return BS_FINISH_DONE; + } + if (s.last_lit) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + return BS_BLOCK_DONE; +} + +/* Values for max_lazy_match, good_match and max_chain_length, depending on + * the desired pack level (0..9). The values given below have been tuned to + * exclude worst case performance for pathological files. Better values may be + * found for specific files. + */ +function Config(good_length, max_lazy, nice_length, max_chain, func) { + this.good_length = good_length; + this.max_lazy = max_lazy; + this.nice_length = nice_length; + this.max_chain = max_chain; + this.func = func; +} + +var configuration_table; + +configuration_table = [ + /* good lazy nice chain */ + new Config(0, 0, 0, 0, deflate_stored), /* 0 store only */ + new Config(4, 4, 8, 4, deflate_fast), /* 1 max speed, no lazy matches */ + new Config(4, 5, 16, 8, deflate_fast), /* 2 */ + new Config(4, 6, 32, 32, deflate_fast), /* 3 */ + + new Config(4, 4, 16, 16, deflate_slow), /* 4 lazy matches */ + new Config(8, 16, 32, 32, deflate_slow), /* 5 */ + new Config(8, 16, 128, 128, deflate_slow), /* 6 */ + new Config(8, 32, 128, 256, deflate_slow), /* 7 */ + new Config(32, 128, 258, 1024, deflate_slow), /* 8 */ + new Config(32, 258, 258, 4096, deflate_slow) /* 9 max compression */ +]; + + +/* =========================================================================== + * Initialize the "longest match" routines for a new zlib stream + */ +function lm_init(s) { + s.window_size = 2 * s.w_size; + + /*** CLEAR_HASH(s); ***/ + zero(s.head); // Fill with NIL (= 0); + + /* Set the default configuration parameters: + */ + s.max_lazy_match = configuration_table[s.level].max_lazy; + s.good_match = configuration_table[s.level].good_length; + s.nice_match = configuration_table[s.level].nice_length; + s.max_chain_length = configuration_table[s.level].max_chain; + + s.strstart = 0; + s.block_start = 0; + s.lookahead = 0; + s.insert = 0; + s.match_length = s.prev_length = MIN_MATCH - 1; + s.match_available = 0; + s.ins_h = 0; +} + + +function DeflateState() { + this.strm = null; /* pointer back to this zlib stream */ + this.status = 0; /* as the name implies */ + this.pending_buf = null; /* output still pending */ + this.pending_buf_size = 0; /* size of pending_buf */ + this.pending_out = 0; /* next pending byte to output to the stream */ + this.pending = 0; /* nb of bytes in the pending buffer */ + this.wrap = 0; /* bit 0 true for zlib, bit 1 true for gzip */ + this.gzhead = null; /* gzip header information to write */ + this.gzindex = 0; /* where in extra, name, or comment */ + this.method = Z_DEFLATED; /* can only be DEFLATED */ + this.last_flush = -1; /* value of flush param for previous 
deflate call */ + + this.w_size = 0; /* LZ77 window size (32K by default) */ + this.w_bits = 0; /* log2(w_size) (8..16) */ + this.w_mask = 0; /* w_size - 1 */ + + this.window = null; + /* Sliding window. Input bytes are read into the second half of the window, + * and move to the first half later to keep a dictionary of at least wSize + * bytes. With this organization, matches are limited to a distance of + * wSize-MAX_MATCH bytes, but this ensures that IO is always + * performed with a length multiple of the block size. + */ + + this.window_size = 0; + /* Actual size of window: 2*wSize, except when the user input buffer + * is directly used as sliding window. + */ + + this.prev = null; + /* Link to older string with same hash index. To limit the size of this + * array to 64K, this link is maintained only for the last 32K strings. + * An index in this array is thus a window index modulo 32K. + */ + + this.head = null; /* Heads of the hash chains or NIL. */ + + this.ins_h = 0; /* hash index of string to be inserted */ + this.hash_size = 0; /* number of elements in hash table */ + this.hash_bits = 0; /* log2(hash_size) */ + this.hash_mask = 0; /* hash_size-1 */ + + this.hash_shift = 0; + /* Number of bits by which ins_h must be shifted at each input + * step. It must be such that after MIN_MATCH steps, the oldest + * byte no longer takes part in the hash key, that is: + * hash_shift * MIN_MATCH >= hash_bits + */ + + this.block_start = 0; + /* Window position at the beginning of the current output block. Gets + * negative when the window is moved backwards. + */ + + this.match_length = 0; /* length of best match */ + this.prev_match = 0; /* previous match */ + this.match_available = 0; /* set if previous match exists */ + this.strstart = 0; /* start of string to insert */ + this.match_start = 0; /* start of matching string */ + this.lookahead = 0; /* number of valid bytes ahead in window */ + + this.prev_length = 0; + /* Length of the best match at previous step. Matches not greater than this + * are discarded. This is used in the lazy match evaluation. + */ + + this.max_chain_length = 0; + /* To speed up deflation, hash chains are never searched beyond this + * length. A higher limit improves compression ratio but degrades the + * speed. + */ + + this.max_lazy_match = 0; + /* Attempt to find a better match only when the current match is strictly + * smaller than this value. This mechanism is used only for compression + * levels >= 4. + */ + // That's alias to max_lazy_match, don't use directly + //this.max_insert_length = 0; + /* Insert new strings in the hash table only if the match length is not + * greater than this length. This saves time but degrades compression. + * max_insert_length is used only for compression levels <= 3. 
+ */ + + this.level = 0; /* compression level (1..9) */ + this.strategy = 0; /* favor or force Huffman coding*/ + + this.good_match = 0; + /* Use a faster search when the previous match is longer than this */ + + this.nice_match = 0; /* Stop searching when current match exceeds this */ + + /* used by trees.c: */ + + /* Didn't use ct_data typedef below to suppress compiler warning */ + + // struct ct_data_s dyn_ltree[HEAP_SIZE]; /* literal and length tree */ + // struct ct_data_s dyn_dtree[2*D_CODES+1]; /* distance tree */ + // struct ct_data_s bl_tree[2*BL_CODES+1]; /* Huffman tree for bit lengths */ + + // Use flat array of DOUBLE size, with interleaved fata, + // because JS does not support effective + this.dyn_ltree = new utils.Buf16(HEAP_SIZE * 2); + this.dyn_dtree = new utils.Buf16((2 * D_CODES + 1) * 2); + this.bl_tree = new utils.Buf16((2 * BL_CODES + 1) * 2); + zero(this.dyn_ltree); + zero(this.dyn_dtree); + zero(this.bl_tree); + + this.l_desc = null; /* desc. for literal tree */ + this.d_desc = null; /* desc. for distance tree */ + this.bl_desc = null; /* desc. for bit length tree */ + + //ush bl_count[MAX_BITS+1]; + this.bl_count = new utils.Buf16(MAX_BITS + 1); + /* number of codes at each bit length for an optimal tree */ + + //int heap[2*L_CODES+1]; /* heap used to build the Huffman trees */ + this.heap = new utils.Buf16(2 * L_CODES + 1); /* heap used to build the Huffman trees */ + zero(this.heap); + + this.heap_len = 0; /* number of elements in the heap */ + this.heap_max = 0; /* element of largest frequency */ + /* The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used. + * The same heap array is used to build all trees. + */ + + this.depth = new utils.Buf16(2 * L_CODES + 1); //uch depth[2*L_CODES+1]; + zero(this.depth); + /* Depth of each subtree used as tie breaker for trees of equal frequency + */ + + this.l_buf = 0; /* buffer index for literals or lengths */ + + this.lit_bufsize = 0; + /* Size of match buffer for literals/lengths. There are 4 reasons for + * limiting lit_bufsize to 64K: + * - frequencies can be kept in 16 bit counters + * - if compression is not successful for the first block, all input + * data is still in the window so we can still emit a stored block even + * when input comes from standard input. (This can also be done for + * all blocks if lit_bufsize is not greater than 32K.) + * - if compression is not successful for a file smaller than 64K, we can + * even emit a stored file instead of a stored block (saving 5 bytes). + * This is applicable only for zip (not gzip or zlib). + * - creating new Huffman trees less frequently may not provide fast + * adaptation to changes in the input data statistics. (Take for + * example a binary file with poorly compressible code followed by + * a highly compressible string table.) Smaller buffer sizes give + * fast adaptation but have of course the overhead of transmitting + * trees more frequently. + * - I can't count above 4 + */ + + this.last_lit = 0; /* running index in l_buf */ + + this.d_buf = 0; + /* Buffer index for distances. To simplify the code, d_buf and l_buf have + * the same number of elements. To use different lengths, an extra flag + * array would be necessary. 
+ */ + + this.opt_len = 0; /* bit length of current block with optimal trees */ + this.static_len = 0; /* bit length of current block with static trees */ + this.matches = 0; /* number of string matches in current block */ + this.insert = 0; /* bytes at end of window left to insert */ + + + this.bi_buf = 0; + /* Output buffer. bits are inserted starting at the bottom (least + * significant bits). + */ + this.bi_valid = 0; + /* Number of valid bits in bi_buf. All bits above the last valid bit + * are always zero. + */ + + // Used for window memory init. We safely ignore it for JS. That makes + // sense only for pointers and memory check tools. + //this.high_water = 0; + /* High water mark offset in window for initialized bytes -- bytes above + * this are set to zero in order to avoid memory check warnings when + * longest match routines access bytes past the input. This is then + * updated to the new high water mark. + */ +} + + +function deflateResetKeep(strm) { + var s; + + if (!strm || !strm.state) { + return err(strm, Z_STREAM_ERROR); + } + + strm.total_in = strm.total_out = 0; + strm.data_type = Z_UNKNOWN; + + s = strm.state; + s.pending = 0; + s.pending_out = 0; + + if (s.wrap < 0) { + s.wrap = -s.wrap; + /* was made negative by deflate(..., Z_FINISH); */ + } + s.status = (s.wrap ? INIT_STATE : BUSY_STATE); + strm.adler = (s.wrap === 2) ? + 0 // crc32(0, Z_NULL, 0) + : + 1; // adler32(0, Z_NULL, 0) + s.last_flush = Z_NO_FLUSH; + trees._tr_init(s); + return Z_OK; +} + + +function deflateReset(strm) { + var ret = deflateResetKeep(strm); + if (ret === Z_OK) { + lm_init(strm.state); + } + return ret; +} + + +function deflateSetHeader(strm, head) { + if (!strm || !strm.state) { return Z_STREAM_ERROR; } + if (strm.state.wrap !== 2) { return Z_STREAM_ERROR; } + strm.state.gzhead = head; + return Z_OK; +} + + +function deflateInit2(strm, level, method, windowBits, memLevel, strategy) { + if (!strm) { // === Z_NULL + return Z_STREAM_ERROR; + } + var wrap = 1; + + if (level === Z_DEFAULT_COMPRESSION) { + level = 6; + } + + if (windowBits < 0) { /* suppress zlib wrapper */ + wrap = 0; + windowBits = -windowBits; + } + + else if (windowBits > 15) { + wrap = 2; /* write gzip wrapper instead */ + windowBits -= 16; + } + + + if (memLevel < 1 || memLevel > MAX_MEM_LEVEL || method !== Z_DEFLATED || + windowBits < 8 || windowBits > 15 || level < 0 || level > 9 || + strategy < 0 || strategy > Z_FIXED) { + return err(strm, Z_STREAM_ERROR); + } + + + if (windowBits === 8) { + windowBits = 9; + } + /* until 256-byte window bug fixed */ + + var s = new DeflateState(); + + strm.state = s; + s.strm = strm; + + s.wrap = wrap; + s.gzhead = null; + s.w_bits = windowBits; + s.w_size = 1 << s.w_bits; + s.w_mask = s.w_size - 1; + + s.hash_bits = memLevel + 7; + s.hash_size = 1 << s.hash_bits; + s.hash_mask = s.hash_size - 1; + s.hash_shift = ~~((s.hash_bits + MIN_MATCH - 1) / MIN_MATCH); + + s.window = new utils.Buf8(s.w_size * 2); + s.head = new utils.Buf16(s.hash_size); + s.prev = new utils.Buf16(s.w_size); + + // Don't need mem init magic for JS. 
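deflateInit2 above is where the high-level sugar options land: a negative windowBits selects a raw stream, windowBits > 15 selects the gzip wrapper, level -1 (Z_DEFAULT_COMPRESSION) maps to 6, and memLevel defaults to 8. A rough sketch of exercising those parameters through pako's documented options (the input and option values are illustrative):

```javascript
const pako = require('pako');

const input = new Uint8Array(1024).fill(97); // highly repetitive data

// zlib wrapper (default): windowBits in 8..15, level 0..9 (0 = stored blocks only).
const zlibData = pako.deflate(input, { level: 6, windowBits: 15, memLevel: 8 });

// gzip wrapper: internally windowBits > 15 (i.e. windowBits + 16), per deflateInit2.
const gzipData = pako.gzip(input);

// raw stream: negative windowBits internally, i.e. no header and no adler32 trailer.
const rawData = pako.deflateRaw(input, { level: 9 });

// Strategy constants from the table earlier in the bundle:
// 1 = Z_FILTERED, 2 = Z_HUFFMAN_ONLY, 3 = Z_RLE, 4 = Z_FIXED, 0 = Z_DEFAULT_STRATEGY.
const rle = pako.deflate(input, { strategy: 3 });

console.log(zlibData.length, gzipData.length, rawData.length, rle.length);
```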
+ //s.high_water = 0; /* nothing written to s->window yet */ + + s.lit_bufsize = 1 << (memLevel + 6); /* 16K elements by default */ + + s.pending_buf_size = s.lit_bufsize * 4; + + //overlay = (ushf *) ZALLOC(strm, s->lit_bufsize, sizeof(ush)+2); + //s->pending_buf = (uchf *) overlay; + s.pending_buf = new utils.Buf8(s.pending_buf_size); + + // It is offset from `s.pending_buf` (size is `s.lit_bufsize * 2`) + //s->d_buf = overlay + s->lit_bufsize/sizeof(ush); + s.d_buf = 1 * s.lit_bufsize; + + //s->l_buf = s->pending_buf + (1+sizeof(ush))*s->lit_bufsize; + s.l_buf = (1 + 2) * s.lit_bufsize; + + s.level = level; + s.strategy = strategy; + s.method = method; + + return deflateReset(strm); +} + +function deflateInit(strm, level) { + return deflateInit2(strm, level, Z_DEFLATED, MAX_WBITS, DEF_MEM_LEVEL, Z_DEFAULT_STRATEGY); +} + + +function deflate(strm, flush) { + var old_flush, s; + var beg, val; // for gzip header write only + + if (!strm || !strm.state || + flush > Z_BLOCK || flush < 0) { + return strm ? err(strm, Z_STREAM_ERROR) : Z_STREAM_ERROR; + } + + s = strm.state; + + if (!strm.output || + (!strm.input && strm.avail_in !== 0) || + (s.status === FINISH_STATE && flush !== Z_FINISH)) { + return err(strm, (strm.avail_out === 0) ? Z_BUF_ERROR : Z_STREAM_ERROR); + } + + s.strm = strm; /* just in case */ + old_flush = s.last_flush; + s.last_flush = flush; + + /* Write the header */ + if (s.status === INIT_STATE) { + + if (s.wrap === 2) { // GZIP header + strm.adler = 0; //crc32(0L, Z_NULL, 0); + put_byte(s, 31); + put_byte(s, 139); + put_byte(s, 8); + if (!s.gzhead) { // s->gzhead == Z_NULL + put_byte(s, 0); + put_byte(s, 0); + put_byte(s, 0); + put_byte(s, 0); + put_byte(s, 0); + put_byte(s, s.level === 9 ? 2 : + (s.strategy >= Z_HUFFMAN_ONLY || s.level < 2 ? + 4 : 0)); + put_byte(s, OS_CODE); + s.status = BUSY_STATE; + } + else { + put_byte(s, (s.gzhead.text ? 1 : 0) + + (s.gzhead.hcrc ? 2 : 0) + + (!s.gzhead.extra ? 0 : 4) + + (!s.gzhead.name ? 0 : 8) + + (!s.gzhead.comment ? 0 : 16) + ); + put_byte(s, s.gzhead.time & 0xff); + put_byte(s, (s.gzhead.time >> 8) & 0xff); + put_byte(s, (s.gzhead.time >> 16) & 0xff); + put_byte(s, (s.gzhead.time >> 24) & 0xff); + put_byte(s, s.level === 9 ? 2 : + (s.strategy >= Z_HUFFMAN_ONLY || s.level < 2 ? 
+ 4 : 0)); + put_byte(s, s.gzhead.os & 0xff); + if (s.gzhead.extra && s.gzhead.extra.length) { + put_byte(s, s.gzhead.extra.length & 0xff); + put_byte(s, (s.gzhead.extra.length >> 8) & 0xff); + } + if (s.gzhead.hcrc) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending, 0); + } + s.gzindex = 0; + s.status = EXTRA_STATE; + } + } + else // DEFLATE header + { + var header = (Z_DEFLATED + ((s.w_bits - 8) << 4)) << 8; + var level_flags = -1; + + if (s.strategy >= Z_HUFFMAN_ONLY || s.level < 2) { + level_flags = 0; + } else if (s.level < 6) { + level_flags = 1; + } else if (s.level === 6) { + level_flags = 2; + } else { + level_flags = 3; + } + header |= (level_flags << 6); + if (s.strstart !== 0) { header |= PRESET_DICT; } + header += 31 - (header % 31); + + s.status = BUSY_STATE; + putShortMSB(s, header); + + /* Save the adler32 of the preset dictionary: */ + if (s.strstart !== 0) { + putShortMSB(s, strm.adler >>> 16); + putShortMSB(s, strm.adler & 0xffff); + } + strm.adler = 1; // adler32(0L, Z_NULL, 0); + } + } + +//#ifdef GZIP + if (s.status === EXTRA_STATE) { + if (s.gzhead.extra/* != Z_NULL*/) { + beg = s.pending; /* start of bytes to update crc */ + + while (s.gzindex < (s.gzhead.extra.length & 0xffff)) { + if (s.pending === s.pending_buf_size) { + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + flush_pending(strm); + beg = s.pending; + if (s.pending === s.pending_buf_size) { + break; + } + } + put_byte(s, s.gzhead.extra[s.gzindex] & 0xff); + s.gzindex++; + } + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + if (s.gzindex === s.gzhead.extra.length) { + s.gzindex = 0; + s.status = NAME_STATE; + } + } + else { + s.status = NAME_STATE; + } + } + if (s.status === NAME_STATE) { + if (s.gzhead.name/* != Z_NULL*/) { + beg = s.pending; /* start of bytes to update crc */ + //int val; + + do { + if (s.pending === s.pending_buf_size) { + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + flush_pending(strm); + beg = s.pending; + if (s.pending === s.pending_buf_size) { + val = 1; + break; + } + } + // JS specific: little magic to add zero terminator to end of string + if (s.gzindex < s.gzhead.name.length) { + val = s.gzhead.name.charCodeAt(s.gzindex++) & 0xff; + } else { + val = 0; + } + put_byte(s, val); + } while (val !== 0); + + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + if (val === 0) { + s.gzindex = 0; + s.status = COMMENT_STATE; + } + } + else { + s.status = COMMENT_STATE; + } + } + if (s.status === COMMENT_STATE) { + if (s.gzhead.comment/* != Z_NULL*/) { + beg = s.pending; /* start of bytes to update crc */ + //int val; + + do { + if (s.pending === s.pending_buf_size) { + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + flush_pending(strm); + beg = s.pending; + if (s.pending === s.pending_buf_size) { + val = 1; + break; + } + } + // JS specific: little magic to add zero terminator to end of string + if (s.gzindex < s.gzhead.comment.length) { + val = s.gzhead.comment.charCodeAt(s.gzindex++) & 0xff; + } else { + val = 0; + } + put_byte(s, val); + } while (val !== 0); + + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + if (val === 0) { + s.status = HCRC_STATE; + } + } + else { + 
s.status = HCRC_STATE; + } + } + if (s.status === HCRC_STATE) { + if (s.gzhead.hcrc) { + if (s.pending + 2 > s.pending_buf_size) { + flush_pending(strm); + } + if (s.pending + 2 <= s.pending_buf_size) { + put_byte(s, strm.adler & 0xff); + put_byte(s, (strm.adler >> 8) & 0xff); + strm.adler = 0; //crc32(0L, Z_NULL, 0); + s.status = BUSY_STATE; + } + } + else { + s.status = BUSY_STATE; + } + } +//#endif + + /* Flush as much pending output as possible */ + if (s.pending !== 0) { + flush_pending(strm); + if (strm.avail_out === 0) { + /* Since avail_out is 0, deflate will be called again with + * more output space, but possibly with both pending and + * avail_in equal to zero. There won't be anything to do, + * but this is not an error situation so make sure we + * return OK instead of BUF_ERROR at next call of deflate: + */ + s.last_flush = -1; + return Z_OK; + } + + /* Make sure there is something to do and avoid duplicate consecutive + * flushes. For repeated and useless calls with Z_FINISH, we keep + * returning Z_STREAM_END instead of Z_BUF_ERROR. + */ + } else if (strm.avail_in === 0 && rank(flush) <= rank(old_flush) && + flush !== Z_FINISH) { + return err(strm, Z_BUF_ERROR); + } + + /* User must not provide more input after the first FINISH: */ + if (s.status === FINISH_STATE && strm.avail_in !== 0) { + return err(strm, Z_BUF_ERROR); + } + + /* Start a new block or continue the current one. + */ + if (strm.avail_in !== 0 || s.lookahead !== 0 || + (flush !== Z_NO_FLUSH && s.status !== FINISH_STATE)) { + var bstate = (s.strategy === Z_HUFFMAN_ONLY) ? deflate_huff(s, flush) : + (s.strategy === Z_RLE ? deflate_rle(s, flush) : + configuration_table[s.level].func(s, flush)); + + if (bstate === BS_FINISH_STARTED || bstate === BS_FINISH_DONE) { + s.status = FINISH_STATE; + } + if (bstate === BS_NEED_MORE || bstate === BS_FINISH_STARTED) { + if (strm.avail_out === 0) { + s.last_flush = -1; + /* avoid BUF_ERROR next call, see above */ + } + return Z_OK; + /* If flush != Z_NO_FLUSH && avail_out == 0, the next call + * of deflate should use the same flush parameter to make sure + * that the flush is complete. So we don't have to output an + * empty block here, this will be done at next call. This also + * ensures that for a very small output buffer, we emit at most + * one empty block. + */ + } + if (bstate === BS_BLOCK_DONE) { + if (flush === Z_PARTIAL_FLUSH) { + trees._tr_align(s); + } + else if (flush !== Z_BLOCK) { /* FULL_FLUSH or SYNC_FLUSH */ + + trees._tr_stored_block(s, 0, 0, false); + /* For a full flush, this empty block will be recognized + * as a special marker by inflate_sync(). 
+ */ + if (flush === Z_FULL_FLUSH) { + /*** CLEAR_HASH(s); ***/ /* forget history */ + zero(s.head); // Fill with NIL (= 0); + + if (s.lookahead === 0) { + s.strstart = 0; + s.block_start = 0; + s.insert = 0; + } + } + } + flush_pending(strm); + if (strm.avail_out === 0) { + s.last_flush = -1; /* avoid BUF_ERROR at next call, see above */ + return Z_OK; + } + } + } + //Assert(strm->avail_out > 0, "bug2"); + //if (strm.avail_out <= 0) { throw new Error("bug2");} + + if (flush !== Z_FINISH) { return Z_OK; } + if (s.wrap <= 0) { return Z_STREAM_END; } + + /* Write the trailer */ + if (s.wrap === 2) { + put_byte(s, strm.adler & 0xff); + put_byte(s, (strm.adler >> 8) & 0xff); + put_byte(s, (strm.adler >> 16) & 0xff); + put_byte(s, (strm.adler >> 24) & 0xff); + put_byte(s, strm.total_in & 0xff); + put_byte(s, (strm.total_in >> 8) & 0xff); + put_byte(s, (strm.total_in >> 16) & 0xff); + put_byte(s, (strm.total_in >> 24) & 0xff); + } + else + { + putShortMSB(s, strm.adler >>> 16); + putShortMSB(s, strm.adler & 0xffff); + } + + flush_pending(strm); + /* If avail_out is zero, the application will call deflate again + * to flush the rest. + */ + if (s.wrap > 0) { s.wrap = -s.wrap; } + /* write the trailer only once! */ + return s.pending !== 0 ? Z_OK : Z_STREAM_END; +} + +function deflateEnd(strm) { + var status; + + if (!strm/*== Z_NULL*/ || !strm.state/*== Z_NULL*/) { + return Z_STREAM_ERROR; + } + + status = strm.state.status; + if (status !== INIT_STATE && + status !== EXTRA_STATE && + status !== NAME_STATE && + status !== COMMENT_STATE && + status !== HCRC_STATE && + status !== BUSY_STATE && + status !== FINISH_STATE + ) { + return err(strm, Z_STREAM_ERROR); + } + + strm.state = null; + + return status === BUSY_STATE ? err(strm, Z_DATA_ERROR) : Z_OK; +} + + +/* ========================================================================= + * Initializes the compression dictionary from the given byte + * sequence without producing any compressed output. 
+ */ +function deflateSetDictionary(strm, dictionary) { + var dictLength = dictionary.length; + + var s; + var str, n; + var wrap; + var avail; + var next; + var input; + var tmpDict; + + if (!strm/*== Z_NULL*/ || !strm.state/*== Z_NULL*/) { + return Z_STREAM_ERROR; + } + + s = strm.state; + wrap = s.wrap; + + if (wrap === 2 || (wrap === 1 && s.status !== INIT_STATE) || s.lookahead) { + return Z_STREAM_ERROR; + } + + /* when using zlib wrappers, compute Adler-32 for provided dictionary */ + if (wrap === 1) { + /* adler32(strm->adler, dictionary, dictLength); */ + strm.adler = adler32(strm.adler, dictionary, dictLength, 0); + } + + s.wrap = 0; /* avoid computing Adler-32 in read_buf */ + + /* if dictionary would fill window, just replace the history */ + if (dictLength >= s.w_size) { + if (wrap === 0) { /* already empty otherwise */ + /*** CLEAR_HASH(s); ***/ + zero(s.head); // Fill with NIL (= 0); + s.strstart = 0; + s.block_start = 0; + s.insert = 0; + } + /* use the tail */ + // dictionary = dictionary.slice(dictLength - s.w_size); + tmpDict = new utils.Buf8(s.w_size); + utils.arraySet(tmpDict, dictionary, dictLength - s.w_size, s.w_size, 0); + dictionary = tmpDict; + dictLength = s.w_size; + } + /* insert dictionary into window and hash */ + avail = strm.avail_in; + next = strm.next_in; + input = strm.input; + strm.avail_in = dictLength; + strm.next_in = 0; + strm.input = dictionary; + fill_window(s); + while (s.lookahead >= MIN_MATCH) { + str = s.strstart; + n = s.lookahead - (MIN_MATCH - 1); + do { + /* UPDATE_HASH(s, s->ins_h, s->window[str + MIN_MATCH-1]); */ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[str + MIN_MATCH - 1]) & s.hash_mask; + + s.prev[str & s.w_mask] = s.head[s.ins_h]; + + s.head[s.ins_h] = str; + str++; + } while (--n); + s.strstart = str; + s.lookahead = MIN_MATCH - 1; + fill_window(s); + } + s.strstart += s.lookahead; + s.block_start = s.strstart; + s.insert = s.lookahead; + s.lookahead = 0; + s.match_length = s.prev_length = MIN_MATCH - 1; + s.match_available = 0; + strm.next_in = next; + strm.input = input; + strm.avail_in = avail; + s.wrap = wrap; + return Z_OK; +} + + +exports.deflateInit = deflateInit; +exports.deflateInit2 = deflateInit2; +exports.deflateReset = deflateReset; +exports.deflateResetKeep = deflateResetKeep; +exports.deflateSetHeader = deflateSetHeader; +exports.deflate = deflate; +exports.deflateEnd = deflateEnd; +exports.deflateSetDictionary = deflateSetDictionary; +exports.deflateInfo = 'pako deflate (from Nodeca project)'; + +/* Not implemented +exports.deflateBound = deflateBound; +exports.deflateCopy = deflateCopy; +exports.deflateParams = deflateParams; +exports.deflatePending = deflatePending; +exports.deflatePrime = deflatePrime; +exports.deflateTune = deflateTune; +*/ + + +/***/ }), + +/***/ 35105: +/***/ ((module) => { + +"use strict"; + + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. 
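Aside before the next bundled module: the exports above (deflateInit, deflateInit2, deflate, deflateEnd, deflateSetDictionary) are pako's low-level deflate entry points. A minimal driver sketch follows, assuming the un-bundled pako 1.x module paths (the bundle itself resolves these through __nccwpck_require__ ids) and an arbitrary 16K chunk size; the stream fields used are exactly the ones deflate() reads above: input, next_in, avail_in, output, next_out, avail_out.

```js
// Hypothetical driver, assuming the un-bundled pako 1.x sources are importable by path.
const zlib_deflate = require('pako/lib/zlib/deflate');
const ZStream = require('pako/lib/zlib/zstream');

const Z_FINISH = 4, Z_OK = 0, Z_STREAM_END = 1;
const Z_DEFLATED = 8, Z_DEFAULT_STRATEGY = 0;

function deflateWhole(data /* Uint8Array */) {
  const strm = new ZStream();
  // level 6, zlib wrapper with a 32K window (windowBits 15), memLevel 8
  zlib_deflate.deflateInit2(strm, 6, Z_DEFLATED, 15, 8, Z_DEFAULT_STRATEGY);

  strm.input = data;
  strm.next_in = 0;
  strm.avail_in = data.length;

  const chunks = [];
  let status;
  do {
    // Fresh output buffer each pass; deflate() fills it and advances next_out.
    strm.output = new Uint8Array(16384);
    strm.next_out = 0;
    strm.avail_out = strm.output.length;
    status = zlib_deflate.deflate(strm, Z_FINISH); // single shot: finish in one stream
    chunks.push(strm.output.subarray(0, strm.next_out));
  } while (status === Z_OK); // Z_STREAM_END (or an error code) ends the loop

  zlib_deflate.deflateEnd(strm);
  if (status !== Z_STREAM_END) throw new Error(strm.msg || 'deflate failed');
  return chunks; // concatenate for the final compressed byte stream
}
```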
If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source distribution. + +function GZheader() { + /* true if compressed data believed to be text */ + this.text = 0; + /* modification time */ + this.time = 0; + /* extra flags (not used when writing a gzip file) */ + this.xflags = 0; + /* operating system */ + this.os = 0; + /* pointer to extra field or Z_NULL if none */ + this.extra = null; + /* extra field length (valid if extra != Z_NULL) */ + this.extra_len = 0; // Actually, we don't need it in JS, + // but leave for few code modifications + + // + // Setup limits is not necessary because in js we should not preallocate memory + // for inflate use constant limit in 65536 bytes + // + + /* space at extra (only when reading header) */ + // this.extra_max = 0; + /* pointer to zero-terminated file name or Z_NULL */ + this.name = ''; + /* space at name (only when reading header) */ + // this.name_max = 0; + /* pointer to zero-terminated comment or Z_NULL */ + this.comment = ''; + /* space at comment (only when reading header) */ + // this.comm_max = 0; + /* true if there was or will be a header crc */ + this.hcrc = 0; + /* true when done reading gzip header (not used when writing a gzip file) */ + this.done = false; +} + +module.exports = GZheader; + + +/***/ }), + +/***/ 65349: +/***/ ((module) => { + +"use strict"; + + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source distribution. + +// See state defs from inflate.js +var BAD = 30; /* got a data error -- remain here until reset */ +var TYPE = 12; /* i: waiting for type bits, including last-flag bit */ + +/* + Decode literal, length, and distance codes and write out the resulting + literal and match bytes until either not enough input or output is + available, an end-of-block is encountered, or a data error is encountered. + When large enough input and output buffers are supplied to inflate(), for + example, a 16K input buffer and a 64K output buffer, more than 95% of the + inflate execution time is spent in this routine. 
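Aside before the inflate fast path continues: the GZheader shape above is what deflateSetHeader() (from the deflate module earlier) expects once a gzip wrapper is selected; its text, time, os, extra, name, comment and hcrc fields are the ones the gzip-header branches of deflate() read. A small sketch, again assuming the un-bundled pako 1.x paths:

```js
const zlib_deflate = require('pako/lib/zlib/deflate');
const ZStream = require('pako/lib/zlib/zstream');
const GZheader = require('pako/lib/zlib/gzheader');

const strm = new ZStream();
// windowBits 31 = 15 + 16, so deflateInit2() selects the gzip wrapper (wrap = 2)
zlib_deflate.deflateInit2(strm, 6, 8 /* Z_DEFLATED */, 31, 8, 0 /* Z_DEFAULT_STRATEGY */);

const head = new GZheader();
head.name = 'example.txt';                 // written NUL-terminated in the NAME state
head.time = Math.floor(Date.now() / 1000); // MTIME field, seconds since the epoch
head.os = 3;                               // 3 = Unix in the gzip OS byte
// Must be installed before the first deflate() call, while the state is still INIT_STATE.
zlib_deflate.deflateSetHeader(strm, head);
```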
+ + Entry assumptions: + + state.mode === LEN + strm.avail_in >= 6 + strm.avail_out >= 258 + start >= strm.avail_out + state.bits < 8 + + On return, state.mode is one of: + + LEN -- ran out of enough output space or enough available input + TYPE -- reached end of block code, inflate() to interpret next block + BAD -- error in block data + + Notes: + + - The maximum input bits used by a length/distance pair is 15 bits for the + length code, 5 bits for the length extra, 15 bits for the distance code, + and 13 bits for the distance extra. This totals 48 bits, or six bytes. + Therefore if strm.avail_in >= 6, then there is enough input to avoid + checking for available input while decoding. + + - The maximum bytes that a single length/distance pair can output is 258 + bytes, which is the maximum length that can be coded. inflate_fast() + requires strm.avail_out >= 258 for each loop to avoid checking for + output space. + */ +module.exports = function inflate_fast(strm, start) { + var state; + var _in; /* local strm.input */ + var last; /* have enough input while in < last */ + var _out; /* local strm.output */ + var beg; /* inflate()'s initial strm.output */ + var end; /* while out < end, enough space available */ +//#ifdef INFLATE_STRICT + var dmax; /* maximum distance from zlib header */ +//#endif + var wsize; /* window size or zero if not using window */ + var whave; /* valid bytes in the window */ + var wnext; /* window write index */ + // Use `s_window` instead `window`, avoid conflict with instrumentation tools + var s_window; /* allocated sliding window, if wsize != 0 */ + var hold; /* local strm.hold */ + var bits; /* local strm.bits */ + var lcode; /* local strm.lencode */ + var dcode; /* local strm.distcode */ + var lmask; /* mask for first level of length codes */ + var dmask; /* mask for first level of distance codes */ + var here; /* retrieved table entry */ + var op; /* code bits, operation, extra bits, or */ + /* window position, window bytes to copy */ + var len; /* match length, unused bytes */ + var dist; /* match distance */ + var from; /* where to copy match from */ + var from_source; + + + var input, output; // JS specific, because we have no pointers + + /* copy state to local variables */ + state = strm.state; + //here = state.here; + _in = strm.next_in; + input = strm.input; + last = _in + (strm.avail_in - 5); + _out = strm.next_out; + output = strm.output; + beg = _out - (start - strm.avail_out); + end = _out + (strm.avail_out - 257); +//#ifdef INFLATE_STRICT + dmax = state.dmax; +//#endif + wsize = state.wsize; + whave = state.whave; + wnext = state.wnext; + s_window = state.window; + hold = state.hold; + bits = state.bits; + lcode = state.lencode; + dcode = state.distcode; + lmask = (1 << state.lenbits) - 1; + dmask = (1 << state.distbits) - 1; + + + /* decode literals and length/distances until end-of-block or not enough + input data or output space */ + + top: + do { + if (bits < 15) { + hold += input[_in++] << bits; + bits += 8; + hold += input[_in++] << bits; + bits += 8; + } + + here = lcode[hold & lmask]; + + dolen: + for (;;) { // Goto emulation + op = here >>> 24/*here.bits*/; + hold >>>= op; + bits -= op; + op = (here >>> 16) & 0xff/*here.op*/; + if (op === 0) { /* literal */ + //Tracevv((stderr, here.val >= 0x20 && here.val < 0x7f ? 
+ // "inflate: literal '%c'\n" : + // "inflate: literal 0x%02x\n", here.val)); + output[_out++] = here & 0xffff/*here.val*/; + } + else if (op & 16) { /* length base */ + len = here & 0xffff/*here.val*/; + op &= 15; /* number of extra bits */ + if (op) { + if (bits < op) { + hold += input[_in++] << bits; + bits += 8; + } + len += hold & ((1 << op) - 1); + hold >>>= op; + bits -= op; + } + //Tracevv((stderr, "inflate: length %u\n", len)); + if (bits < 15) { + hold += input[_in++] << bits; + bits += 8; + hold += input[_in++] << bits; + bits += 8; + } + here = dcode[hold & dmask]; + + dodist: + for (;;) { // goto emulation + op = here >>> 24/*here.bits*/; + hold >>>= op; + bits -= op; + op = (here >>> 16) & 0xff/*here.op*/; + + if (op & 16) { /* distance base */ + dist = here & 0xffff/*here.val*/; + op &= 15; /* number of extra bits */ + if (bits < op) { + hold += input[_in++] << bits; + bits += 8; + if (bits < op) { + hold += input[_in++] << bits; + bits += 8; + } + } + dist += hold & ((1 << op) - 1); +//#ifdef INFLATE_STRICT + if (dist > dmax) { + strm.msg = 'invalid distance too far back'; + state.mode = BAD; + break top; + } +//#endif + hold >>>= op; + bits -= op; + //Tracevv((stderr, "inflate: distance %u\n", dist)); + op = _out - beg; /* max distance in output */ + if (dist > op) { /* see if copy from window */ + op = dist - op; /* distance back in window */ + if (op > whave) { + if (state.sane) { + strm.msg = 'invalid distance too far back'; + state.mode = BAD; + break top; + } + +// (!) This block is disabled in zlib defaults, +// don't enable it for binary compatibility +//#ifdef INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR +// if (len <= op - whave) { +// do { +// output[_out++] = 0; +// } while (--len); +// continue top; +// } +// len -= op - whave; +// do { +// output[_out++] = 0; +// } while (--op > whave); +// if (op === 0) { +// from = _out - dist; +// do { +// output[_out++] = output[from++]; +// } while (--len); +// continue top; +// } +//#endif + } + from = 0; // window index + from_source = s_window; + if (wnext === 0) { /* very common case */ + from += wsize - op; + if (op < len) { /* some from window */ + len -= op; + do { + output[_out++] = s_window[from++]; + } while (--op); + from = _out - dist; /* rest from output */ + from_source = output; + } + } + else if (wnext < op) { /* wrap around window */ + from += wsize + wnext - op; + op -= wnext; + if (op < len) { /* some from end of window */ + len -= op; + do { + output[_out++] = s_window[from++]; + } while (--op); + from = 0; + if (wnext < len) { /* some from start of window */ + op = wnext; + len -= op; + do { + output[_out++] = s_window[from++]; + } while (--op); + from = _out - dist; /* rest from output */ + from_source = output; + } + } + } + else { /* contiguous in window */ + from += wnext - op; + if (op < len) { /* some from window */ + len -= op; + do { + output[_out++] = s_window[from++]; + } while (--op); + from = _out - dist; /* rest from output */ + from_source = output; + } + } + while (len > 2) { + output[_out++] = from_source[from++]; + output[_out++] = from_source[from++]; + output[_out++] = from_source[from++]; + len -= 3; + } + if (len) { + output[_out++] = from_source[from++]; + if (len > 1) { + output[_out++] = from_source[from++]; + } + } + } + else { + from = _out - dist; /* copy direct from output */ + do { /* minimum length is three */ + output[_out++] = output[from++]; + output[_out++] = output[from++]; + output[_out++] = output[from++]; + len -= 3; + } while (len > 2); + if (len) { + 
output[_out++] = output[from++]; + if (len > 1) { + output[_out++] = output[from++]; + } + } + } + } + else if ((op & 64) === 0) { /* 2nd level distance code */ + here = dcode[(here & 0xffff)/*here.val*/ + (hold & ((1 << op) - 1))]; + continue dodist; + } + else { + strm.msg = 'invalid distance code'; + state.mode = BAD; + break top; + } + + break; // need to emulate goto via "continue" + } + } + else if ((op & 64) === 0) { /* 2nd level length code */ + here = lcode[(here & 0xffff)/*here.val*/ + (hold & ((1 << op) - 1))]; + continue dolen; + } + else if (op & 32) { /* end-of-block */ + //Tracevv((stderr, "inflate: end of block\n")); + state.mode = TYPE; + break top; + } + else { + strm.msg = 'invalid literal/length code'; + state.mode = BAD; + break top; + } + + break; // need to emulate goto via "continue" + } + } while (_in < last && _out < end); + + /* return unused bytes (on entry, bits < 8, so in won't go too far back) */ + len = bits >> 3; + _in -= len; + bits -= len << 3; + hold &= (1 << bits) - 1; + + /* update state and return */ + strm.next_in = _in; + strm.next_out = _out; + strm.avail_in = (_in < last ? 5 + (last - _in) : 5 - (_in - last)); + strm.avail_out = (_out < end ? 257 + (end - _out) : 257 - (_out - end)); + state.hold = hold; + state.bits = bits; + return; +}; + + +/***/ }), + +/***/ 90409: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source distribution. + +var utils = __nccwpck_require__(5483); +var adler32 = __nccwpck_require__(86924); +var crc32 = __nccwpck_require__(87242); +var inflate_fast = __nccwpck_require__(65349); +var inflate_table = __nccwpck_require__(56895); + +var CODES = 0; +var LENS = 1; +var DISTS = 2; + +/* Public constants ==========================================================*/ +/* ===========================================================================*/ + + +/* Allowed flush values; see deflate() and inflate() below for details */ +//var Z_NO_FLUSH = 0; +//var Z_PARTIAL_FLUSH = 1; +//var Z_SYNC_FLUSH = 2; +//var Z_FULL_FLUSH = 3; +var Z_FINISH = 4; +var Z_BLOCK = 5; +var Z_TREES = 6; + + +/* Return codes for the compression/decompression functions. Negative values + * are errors, positive values are used for special but normal events. 
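Aside: both the inflate_fast() routine that just ended and the inflate() state machine below read Huffman table entries as packed 32-bit integers rather than {bits, op, val} records; the layout is only implied by the /*here.bits*/, /*here.op*/ and /*here.val*/ comments. A small helper (the name is made up here, not part of the bundle) makes the packing explicit:

```js
// Entries produced by inflate_table() pack three fields into one 32-bit value:
//   bits 24..31  here.bits  number of code bits consumed from the accumulator
//   bits 16..23  here.op    operation flags: 0 = literal; 0x10 set = base value,
//                           low 4 bits give the extra-bit count; 0x20 set = end-of-block;
//                           0x40 set = invalid code; otherwise a link to a
//                           second-level table (see the branches above)
//   bits  0..15  here.val   literal byte, base length/distance, or sub-table offset
function unpackTableEntry(here) {
  return {
    bits: here >>> 24,
    op: (here >>> 16) & 0xff,
    val: here & 0xffff
  };
}

// The decode loops index a table with the low lenbits/distbits bits of the bit
// accumulator, e.g. lcode[hold & ((1 << state.lenbits) - 1)], then unpack as above.
```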
+ */ +var Z_OK = 0; +var Z_STREAM_END = 1; +var Z_NEED_DICT = 2; +//var Z_ERRNO = -1; +var Z_STREAM_ERROR = -2; +var Z_DATA_ERROR = -3; +var Z_MEM_ERROR = -4; +var Z_BUF_ERROR = -5; +//var Z_VERSION_ERROR = -6; + +/* The deflate compression method */ +var Z_DEFLATED = 8; + + +/* STATES ====================================================================*/ +/* ===========================================================================*/ + + +var HEAD = 1; /* i: waiting for magic header */ +var FLAGS = 2; /* i: waiting for method and flags (gzip) */ +var TIME = 3; /* i: waiting for modification time (gzip) */ +var OS = 4; /* i: waiting for extra flags and operating system (gzip) */ +var EXLEN = 5; /* i: waiting for extra length (gzip) */ +var EXTRA = 6; /* i: waiting for extra bytes (gzip) */ +var NAME = 7; /* i: waiting for end of file name (gzip) */ +var COMMENT = 8; /* i: waiting for end of comment (gzip) */ +var HCRC = 9; /* i: waiting for header crc (gzip) */ +var DICTID = 10; /* i: waiting for dictionary check value */ +var DICT = 11; /* waiting for inflateSetDictionary() call */ +var TYPE = 12; /* i: waiting for type bits, including last-flag bit */ +var TYPEDO = 13; /* i: same, but skip check to exit inflate on new block */ +var STORED = 14; /* i: waiting for stored size (length and complement) */ +var COPY_ = 15; /* i/o: same as COPY below, but only first time in */ +var COPY = 16; /* i/o: waiting for input or output to copy stored block */ +var TABLE = 17; /* i: waiting for dynamic block table lengths */ +var LENLENS = 18; /* i: waiting for code length code lengths */ +var CODELENS = 19; /* i: waiting for length/lit and distance code lengths */ +var LEN_ = 20; /* i: same as LEN below, but only first time in */ +var LEN = 21; /* i: waiting for length/lit/eob code */ +var LENEXT = 22; /* i: waiting for length extra bits */ +var DIST = 23; /* i: waiting for distance code */ +var DISTEXT = 24; /* i: waiting for distance extra bits */ +var MATCH = 25; /* o: waiting for output space to copy string */ +var LIT = 26; /* o: waiting for output space to write literal */ +var CHECK = 27; /* i: waiting for 32-bit check value */ +var LENGTH = 28; /* i: waiting for 32-bit length (gzip) */ +var DONE = 29; /* finished check, done -- remain here until reset */ +var BAD = 30; /* got a data error -- remain here until reset */ +var MEM = 31; /* got an inflate() memory error -- remain here until reset */ +var SYNC = 32; /* looking for synchronization bytes to restart inflate() */ + +/* ===========================================================================*/ + + + +var ENOUGH_LENS = 852; +var ENOUGH_DISTS = 592; +//var ENOUGH = (ENOUGH_LENS+ENOUGH_DISTS); + +var MAX_WBITS = 15; +/* 32K LZ77 window */ +var DEF_WBITS = MAX_WBITS; + + +function zswap32(q) { + return (((q >>> 24) & 0xff) + + ((q >>> 8) & 0xff00) + + ((q & 0xff00) << 8) + + ((q & 0xff) << 24)); +} + + +function InflateState() { + this.mode = 0; /* current inflate mode */ + this.last = false; /* true if processing last block */ + this.wrap = 0; /* bit 0 true for zlib, bit 1 true for gzip */ + this.havedict = false; /* true if dictionary provided */ + this.flags = 0; /* gzip header method and flags (0 if zlib) */ + this.dmax = 0; /* zlib header max distance (INFLATE_STRICT) */ + this.check = 0; /* protected copy of check value */ + this.total = 0; /* protected copy of output count */ + // TODO: may be {} + this.head = null; /* where to save gzip header information */ + + /* sliding window */ + this.wbits = 0; /* log base 2 of requested 
window size */ + this.wsize = 0; /* window size or zero if not using window */ + this.whave = 0; /* valid bytes in the window */ + this.wnext = 0; /* window write index */ + this.window = null; /* allocated sliding window, if needed */ + + /* bit accumulator */ + this.hold = 0; /* input bit accumulator */ + this.bits = 0; /* number of bits in "in" */ + + /* for string and stored block copying */ + this.length = 0; /* literal or length of data to copy */ + this.offset = 0; /* distance back to copy string from */ + + /* for table and code decoding */ + this.extra = 0; /* extra bits needed */ + + /* fixed and dynamic code tables */ + this.lencode = null; /* starting table for length/literal codes */ + this.distcode = null; /* starting table for distance codes */ + this.lenbits = 0; /* index bits for lencode */ + this.distbits = 0; /* index bits for distcode */ + + /* dynamic table building */ + this.ncode = 0; /* number of code length code lengths */ + this.nlen = 0; /* number of length code lengths */ + this.ndist = 0; /* number of distance code lengths */ + this.have = 0; /* number of code lengths in lens[] */ + this.next = null; /* next available space in codes[] */ + + this.lens = new utils.Buf16(320); /* temporary storage for code lengths */ + this.work = new utils.Buf16(288); /* work area for code table building */ + + /* + because we don't have pointers in js, we use lencode and distcode directly + as buffers so we don't need codes + */ + //this.codes = new utils.Buf32(ENOUGH); /* space for code tables */ + this.lendyn = null; /* dynamic table for length/literal codes (JS specific) */ + this.distdyn = null; /* dynamic table for distance codes (JS specific) */ + this.sane = 0; /* if false, allow invalid distance too far */ + this.back = 0; /* bits back of last unprocessed length/lit */ + this.was = 0; /* initial length of match */ +} + +function inflateResetKeep(strm) { + var state; + + if (!strm || !strm.state) { return Z_STREAM_ERROR; } + state = strm.state; + strm.total_in = strm.total_out = state.total = 0; + strm.msg = ''; /*Z_NULL*/ + if (state.wrap) { /* to support ill-conceived Java test suite */ + strm.adler = state.wrap & 1; + } + state.mode = HEAD; + state.last = 0; + state.havedict = 0; + state.dmax = 32768; + state.head = null/*Z_NULL*/; + state.hold = 0; + state.bits = 0; + //state.lencode = state.distcode = state.next = state.codes; + state.lencode = state.lendyn = new utils.Buf32(ENOUGH_LENS); + state.distcode = state.distdyn = new utils.Buf32(ENOUGH_DISTS); + + state.sane = 1; + state.back = -1; + //Tracev((stderr, "inflate: reset\n")); + return Z_OK; +} + +function inflateReset(strm) { + var state; + + if (!strm || !strm.state) { return Z_STREAM_ERROR; } + state = strm.state; + state.wsize = 0; + state.whave = 0; + state.wnext = 0; + return inflateResetKeep(strm); + +} + +function inflateReset2(strm, windowBits) { + var wrap; + var state; + + /* get the state */ + if (!strm || !strm.state) { return Z_STREAM_ERROR; } + state = strm.state; + + /* extract wrap request from windowBits parameter */ + if (windowBits < 0) { + wrap = 0; + windowBits = -windowBits; + } + else { + wrap = (windowBits >> 4) + 1; + if (windowBits < 48) { + windowBits &= 15; + } + } + + /* set number of window bits, free window if different */ + if (windowBits && (windowBits < 8 || windowBits > 15)) { + return Z_STREAM_ERROR; + } + if (state.window !== null && state.wbits !== windowBits) { + state.window = null; + } + + /* update state and reset the rest of it */ + state.wrap = wrap; + state.wbits = 
windowBits; + return inflateReset(strm); +} + +function inflateInit2(strm, windowBits) { + var ret; + var state; + + if (!strm) { return Z_STREAM_ERROR; } + //strm.msg = Z_NULL; /* in case we return an error */ + + state = new InflateState(); + + //if (state === Z_NULL) return Z_MEM_ERROR; + //Tracev((stderr, "inflate: allocated\n")); + strm.state = state; + state.window = null/*Z_NULL*/; + ret = inflateReset2(strm, windowBits); + if (ret !== Z_OK) { + strm.state = null/*Z_NULL*/; + } + return ret; +} + +function inflateInit(strm) { + return inflateInit2(strm, DEF_WBITS); +} + + +/* + Return state with length and distance decoding tables and index sizes set to + fixed code decoding. Normally this returns fixed tables from inffixed.h. + If BUILDFIXED is defined, then instead this routine builds the tables the + first time it's called, and returns those tables the first time and + thereafter. This reduces the size of the code by about 2K bytes, in + exchange for a little execution time. However, BUILDFIXED should not be + used for threaded applications, since the rewriting of the tables and virgin + may not be thread-safe. + */ +var virgin = true; + +var lenfix, distfix; // We have no pointers in JS, so keep tables separate + +function fixedtables(state) { + /* build fixed huffman tables if first call (may not be thread safe) */ + if (virgin) { + var sym; + + lenfix = new utils.Buf32(512); + distfix = new utils.Buf32(32); + + /* literal/length table */ + sym = 0; + while (sym < 144) { state.lens[sym++] = 8; } + while (sym < 256) { state.lens[sym++] = 9; } + while (sym < 280) { state.lens[sym++] = 7; } + while (sym < 288) { state.lens[sym++] = 8; } + + inflate_table(LENS, state.lens, 0, 288, lenfix, 0, state.work, { bits: 9 }); + + /* distance table */ + sym = 0; + while (sym < 32) { state.lens[sym++] = 5; } + + inflate_table(DISTS, state.lens, 0, 32, distfix, 0, state.work, { bits: 5 }); + + /* do this just once */ + virgin = false; + } + + state.lencode = lenfix; + state.lenbits = 9; + state.distcode = distfix; + state.distbits = 5; +} + + +/* + Update the window with the last wsize (normally 32K) bytes written before + returning. If window does not exist yet, create it. This is only called + when a window is already in use, or when output has been written during this + inflate call, but the end of the deflate stream has not been reached yet. + It is also called to create a window for dictionary data when a dictionary + is loaded. + + Providing output buffers larger than 32K to inflate() should provide a speed + advantage, since only the last 32K of output is copied to the sliding window + upon return from inflate(), and since all distances after the first 32K of + output will fall in the output data, making match copies simpler and faster. + The advantage may be dependent on the size of the processor's data caches. 
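Aside: the HEAD state of inflate() further below rejects a zlib stream unless (((hold & 0xff) << 8) + (hold >> 8)) % 31 is zero, i.e. CMF * 256 + FLG must be a multiple of 31, mirroring the header += 31 - (header % 31) step in deflate() above. A quick sanity check with the header those defaults actually produce:

```js
// 0x78 0x9c is the two-byte zlib header deflate() emits for windowBits 15 and level 6.
const CMF = 0x78; // method 8 (deflate) in the low nibble, CINFO 7 (32K window) in the high nibble
const FLG = 0x9c; // FLEVEL 2 (level 6 per the level_flags mapping above), FCHECK = 28
console.log(((CMF << 8) + FLG) % 31 === 0); // true: 30876 = 31 * 996, so the check passes
```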
+ */ +function updatewindow(strm, src, end, copy) { + var dist; + var state = strm.state; + + /* if it hasn't been done already, allocate space for the window */ + if (state.window === null) { + state.wsize = 1 << state.wbits; + state.wnext = 0; + state.whave = 0; + + state.window = new utils.Buf8(state.wsize); + } + + /* copy state->wsize or less output bytes into the circular window */ + if (copy >= state.wsize) { + utils.arraySet(state.window, src, end - state.wsize, state.wsize, 0); + state.wnext = 0; + state.whave = state.wsize; + } + else { + dist = state.wsize - state.wnext; + if (dist > copy) { + dist = copy; + } + //zmemcpy(state->window + state->wnext, end - copy, dist); + utils.arraySet(state.window, src, end - copy, dist, state.wnext); + copy -= dist; + if (copy) { + //zmemcpy(state->window, end - copy, copy); + utils.arraySet(state.window, src, end - copy, copy, 0); + state.wnext = copy; + state.whave = state.wsize; + } + else { + state.wnext += dist; + if (state.wnext === state.wsize) { state.wnext = 0; } + if (state.whave < state.wsize) { state.whave += dist; } + } + } + return 0; +} + +function inflate(strm, flush) { + var state; + var input, output; // input/output buffers + var next; /* next input INDEX */ + var put; /* next output INDEX */ + var have, left; /* available input and output */ + var hold; /* bit buffer */ + var bits; /* bits in bit buffer */ + var _in, _out; /* save starting available input and output */ + var copy; /* number of stored or match bytes to copy */ + var from; /* where to copy match bytes from */ + var from_source; + var here = 0; /* current decoding table entry */ + var here_bits, here_op, here_val; // paked "here" denormalized (JS specific) + //var last; /* parent table entry */ + var last_bits, last_op, last_val; // paked "last" denormalized (JS specific) + var len; /* length to copy for repeats, bits to drop */ + var ret; /* return code */ + var hbuf = new utils.Buf8(4); /* buffer for gzip header crc calculation */ + var opts; + + var n; // temporary var for NEED_BITS + + var order = /* permutation of code lengths */ + [ 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 ]; + + + if (!strm || !strm.state || !strm.output || + (!strm.input && strm.avail_in !== 0)) { + return Z_STREAM_ERROR; + } + + state = strm.state; + if (state.mode === TYPE) { state.mode = TYPEDO; } /* skip check */ + + + //--- LOAD() --- + put = strm.next_out; + output = strm.output; + left = strm.avail_out; + next = strm.next_in; + input = strm.input; + have = strm.avail_in; + hold = state.hold; + bits = state.bits; + //--- + + _in = have; + _out = left; + ret = Z_OK; + + inf_leave: // goto emulation + for (;;) { + switch (state.mode) { + case HEAD: + if (state.wrap === 0) { + state.mode = TYPEDO; + break; + } + //=== NEEDBITS(16); + while (bits < 16) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if ((state.wrap & 2) && hold === 0x8b1f) { /* gzip header */ + state.check = 0/*crc32(0L, Z_NULL, 0)*/; + //=== CRC2(state.check, hold); + hbuf[0] = hold & 0xff; + hbuf[1] = (hold >>> 8) & 0xff; + state.check = crc32(state.check, hbuf, 2, 0); + //===// + + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = FLAGS; + break; + } + state.flags = 0; /* expect zlib header */ + if (state.head) { + state.head.done = false; + } + if (!(state.wrap & 1) || /* check if zlib header allowed */ + (((hold & 0xff)/*BITS(8)*/ << 8) + (hold >> 8)) % 31) { + strm.msg = 'incorrect header check'; + 
state.mode = BAD; + break; + } + if ((hold & 0x0f)/*BITS(4)*/ !== Z_DEFLATED) { + strm.msg = 'unknown compression method'; + state.mode = BAD; + break; + } + //--- DROPBITS(4) ---// + hold >>>= 4; + bits -= 4; + //---// + len = (hold & 0x0f)/*BITS(4)*/ + 8; + if (state.wbits === 0) { + state.wbits = len; + } + else if (len > state.wbits) { + strm.msg = 'invalid window size'; + state.mode = BAD; + break; + } + state.dmax = 1 << len; + //Tracev((stderr, "inflate: zlib header ok\n")); + strm.adler = state.check = 1/*adler32(0L, Z_NULL, 0)*/; + state.mode = hold & 0x200 ? DICTID : TYPE; + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + break; + case FLAGS: + //=== NEEDBITS(16); */ + while (bits < 16) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.flags = hold; + if ((state.flags & 0xff) !== Z_DEFLATED) { + strm.msg = 'unknown compression method'; + state.mode = BAD; + break; + } + if (state.flags & 0xe000) { + strm.msg = 'unknown header flags set'; + state.mode = BAD; + break; + } + if (state.head) { + state.head.text = ((hold >> 8) & 1); + } + if (state.flags & 0x0200) { + //=== CRC2(state.check, hold); + hbuf[0] = hold & 0xff; + hbuf[1] = (hold >>> 8) & 0xff; + state.check = crc32(state.check, hbuf, 2, 0); + //===// + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = TIME; + /* falls through */ + case TIME: + //=== NEEDBITS(32); */ + while (bits < 32) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if (state.head) { + state.head.time = hold; + } + if (state.flags & 0x0200) { + //=== CRC4(state.check, hold) + hbuf[0] = hold & 0xff; + hbuf[1] = (hold >>> 8) & 0xff; + hbuf[2] = (hold >>> 16) & 0xff; + hbuf[3] = (hold >>> 24) & 0xff; + state.check = crc32(state.check, hbuf, 4, 0); + //=== + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = OS; + /* falls through */ + case OS: + //=== NEEDBITS(16); */ + while (bits < 16) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if (state.head) { + state.head.xflags = (hold & 0xff); + state.head.os = (hold >> 8); + } + if (state.flags & 0x0200) { + //=== CRC2(state.check, hold); + hbuf[0] = hold & 0xff; + hbuf[1] = (hold >>> 8) & 0xff; + state.check = crc32(state.check, hbuf, 2, 0); + //===// + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = EXLEN; + /* falls through */ + case EXLEN: + if (state.flags & 0x0400) { + //=== NEEDBITS(16); */ + while (bits < 16) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.length = hold; + if (state.head) { + state.head.extra_len = hold; + } + if (state.flags & 0x0200) { + //=== CRC2(state.check, hold); + hbuf[0] = hold & 0xff; + hbuf[1] = (hold >>> 8) & 0xff; + state.check = crc32(state.check, hbuf, 2, 0); + //===// + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + } + else if (state.head) { + state.head.extra = null/*Z_NULL*/; + } + state.mode = EXTRA; + /* falls through */ + case EXTRA: + if (state.flags & 0x0400) { + copy = state.length; + if (copy > have) { copy = have; } + if (copy) { + if (state.head) { + len = state.head.extra_len - state.length; + if (!state.head.extra) { + // Use untyped array for more convenient processing later + state.head.extra = new Array(state.head.extra_len); + } + utils.arraySet( + state.head.extra, + input, + next, + // extra field is limited 
to 65536 bytes + // - no need for additional size check + copy, + /*len + copy > state.head.extra_max - len ? state.head.extra_max : copy,*/ + len + ); + //zmemcpy(state.head.extra + len, next, + // len + copy > state.head.extra_max ? + // state.head.extra_max - len : copy); + } + if (state.flags & 0x0200) { + state.check = crc32(state.check, input, copy, next); + } + have -= copy; + next += copy; + state.length -= copy; + } + if (state.length) { break inf_leave; } + } + state.length = 0; + state.mode = NAME; + /* falls through */ + case NAME: + if (state.flags & 0x0800) { + if (have === 0) { break inf_leave; } + copy = 0; + do { + // TODO: 2 or 1 bytes? + len = input[next + copy++]; + /* use constant limit because in js we should not preallocate memory */ + if (state.head && len && + (state.length < 65536 /*state.head.name_max*/)) { + state.head.name += String.fromCharCode(len); + } + } while (len && copy < have); + + if (state.flags & 0x0200) { + state.check = crc32(state.check, input, copy, next); + } + have -= copy; + next += copy; + if (len) { break inf_leave; } + } + else if (state.head) { + state.head.name = null; + } + state.length = 0; + state.mode = COMMENT; + /* falls through */ + case COMMENT: + if (state.flags & 0x1000) { + if (have === 0) { break inf_leave; } + copy = 0; + do { + len = input[next + copy++]; + /* use constant limit because in js we should not preallocate memory */ + if (state.head && len && + (state.length < 65536 /*state.head.comm_max*/)) { + state.head.comment += String.fromCharCode(len); + } + } while (len && copy < have); + if (state.flags & 0x0200) { + state.check = crc32(state.check, input, copy, next); + } + have -= copy; + next += copy; + if (len) { break inf_leave; } + } + else if (state.head) { + state.head.comment = null; + } + state.mode = HCRC; + /* falls through */ + case HCRC: + if (state.flags & 0x0200) { + //=== NEEDBITS(16); */ + while (bits < 16) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if (hold !== (state.check & 0xffff)) { + strm.msg = 'header crc mismatch'; + state.mode = BAD; + break; + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + } + if (state.head) { + state.head.hcrc = ((state.flags >> 9) & 1); + state.head.done = true; + } + strm.adler = state.check = 0; + state.mode = TYPE; + break; + case DICTID: + //=== NEEDBITS(32); */ + while (bits < 32) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + strm.adler = state.check = zswap32(hold); + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = DICT; + /* falls through */ + case DICT: + if (state.havedict === 0) { + //--- RESTORE() --- + strm.next_out = put; + strm.avail_out = left; + strm.next_in = next; + strm.avail_in = have; + state.hold = hold; + state.bits = bits; + //--- + return Z_NEED_DICT; + } + strm.adler = state.check = 1/*adler32(0L, Z_NULL, 0)*/; + state.mode = TYPE; + /* falls through */ + case TYPE: + if (flush === Z_BLOCK || flush === Z_TREES) { break inf_leave; } + /* falls through */ + case TYPEDO: + if (state.last) { + //--- BYTEBITS() ---// + hold >>>= bits & 7; + bits -= bits & 7; + //---// + state.mode = CHECK; + break; + } + //=== NEEDBITS(3); */ + while (bits < 3) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.last = (hold & 0x01)/*BITS(1)*/; + //--- DROPBITS(1) ---// + hold >>>= 1; + bits -= 1; + //---// + + switch ((hold & 
0x03)/*BITS(2)*/) { + case 0: /* stored block */ + //Tracev((stderr, "inflate: stored block%s\n", + // state.last ? " (last)" : "")); + state.mode = STORED; + break; + case 1: /* fixed block */ + fixedtables(state); + //Tracev((stderr, "inflate: fixed codes block%s\n", + // state.last ? " (last)" : "")); + state.mode = LEN_; /* decode codes */ + if (flush === Z_TREES) { + //--- DROPBITS(2) ---// + hold >>>= 2; + bits -= 2; + //---// + break inf_leave; + } + break; + case 2: /* dynamic block */ + //Tracev((stderr, "inflate: dynamic codes block%s\n", + // state.last ? " (last)" : "")); + state.mode = TABLE; + break; + case 3: + strm.msg = 'invalid block type'; + state.mode = BAD; + } + //--- DROPBITS(2) ---// + hold >>>= 2; + bits -= 2; + //---// + break; + case STORED: + //--- BYTEBITS() ---// /* go to byte boundary */ + hold >>>= bits & 7; + bits -= bits & 7; + //---// + //=== NEEDBITS(32); */ + while (bits < 32) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if ((hold & 0xffff) !== ((hold >>> 16) ^ 0xffff)) { + strm.msg = 'invalid stored block lengths'; + state.mode = BAD; + break; + } + state.length = hold & 0xffff; + //Tracev((stderr, "inflate: stored length %u\n", + // state.length)); + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = COPY_; + if (flush === Z_TREES) { break inf_leave; } + /* falls through */ + case COPY_: + state.mode = COPY; + /* falls through */ + case COPY: + copy = state.length; + if (copy) { + if (copy > have) { copy = have; } + if (copy > left) { copy = left; } + if (copy === 0) { break inf_leave; } + //--- zmemcpy(put, next, copy); --- + utils.arraySet(output, input, next, copy, put); + //---// + have -= copy; + next += copy; + left -= copy; + put += copy; + state.length -= copy; + break; + } + //Tracev((stderr, "inflate: stored end\n")); + state.mode = TYPE; + break; + case TABLE: + //=== NEEDBITS(14); */ + while (bits < 14) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.nlen = (hold & 0x1f)/*BITS(5)*/ + 257; + //--- DROPBITS(5) ---// + hold >>>= 5; + bits -= 5; + //---// + state.ndist = (hold & 0x1f)/*BITS(5)*/ + 1; + //--- DROPBITS(5) ---// + hold >>>= 5; + bits -= 5; + //---// + state.ncode = (hold & 0x0f)/*BITS(4)*/ + 4; + //--- DROPBITS(4) ---// + hold >>>= 4; + bits -= 4; + //---// +//#ifndef PKZIP_BUG_WORKAROUND + if (state.nlen > 286 || state.ndist > 30) { + strm.msg = 'too many length or distance symbols'; + state.mode = BAD; + break; + } +//#endif + //Tracev((stderr, "inflate: table sizes ok\n")); + state.have = 0; + state.mode = LENLENS; + /* falls through */ + case LENLENS: + while (state.have < state.ncode) { + //=== NEEDBITS(3); + while (bits < 3) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.lens[order[state.have++]] = (hold & 0x07);//BITS(3); + //--- DROPBITS(3) ---// + hold >>>= 3; + bits -= 3; + //---// + } + while (state.have < 19) { + state.lens[order[state.have++]] = 0; + } + // We have separate tables & no pointers. 2 commented lines below not needed. 
+ //state.next = state.codes; + //state.lencode = state.next; + // Switch to use dynamic table + state.lencode = state.lendyn; + state.lenbits = 7; + + opts = { bits: state.lenbits }; + ret = inflate_table(CODES, state.lens, 0, 19, state.lencode, 0, state.work, opts); + state.lenbits = opts.bits; + + if (ret) { + strm.msg = 'invalid code lengths set'; + state.mode = BAD; + break; + } + //Tracev((stderr, "inflate: code lengths ok\n")); + state.have = 0; + state.mode = CODELENS; + /* falls through */ + case CODELENS: + while (state.have < state.nlen + state.ndist) { + for (;;) { + here = state.lencode[hold & ((1 << state.lenbits) - 1)];/*BITS(state.lenbits)*/ + here_bits = here >>> 24; + here_op = (here >>> 16) & 0xff; + here_val = here & 0xffff; + + if ((here_bits) <= bits) { break; } + //--- PULLBYTE() ---// + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + //---// + } + if (here_val < 16) { + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + state.lens[state.have++] = here_val; + } + else { + if (here_val === 16) { + //=== NEEDBITS(here.bits + 2); + n = here_bits + 2; + while (bits < n) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + if (state.have === 0) { + strm.msg = 'invalid bit length repeat'; + state.mode = BAD; + break; + } + len = state.lens[state.have - 1]; + copy = 3 + (hold & 0x03);//BITS(2); + //--- DROPBITS(2) ---// + hold >>>= 2; + bits -= 2; + //---// + } + else if (here_val === 17) { + //=== NEEDBITS(here.bits + 3); + n = here_bits + 3; + while (bits < n) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + len = 0; + copy = 3 + (hold & 0x07);//BITS(3); + //--- DROPBITS(3) ---// + hold >>>= 3; + bits -= 3; + //---// + } + else { + //=== NEEDBITS(here.bits + 7); + n = here_bits + 7; + while (bits < n) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + len = 0; + copy = 11 + (hold & 0x7f);//BITS(7); + //--- DROPBITS(7) ---// + hold >>>= 7; + bits -= 7; + //---// + } + if (state.have + copy > state.nlen + state.ndist) { + strm.msg = 'invalid bit length repeat'; + state.mode = BAD; + break; + } + while (copy--) { + state.lens[state.have++] = len; + } + } + } + + /* handle error breaks in while */ + if (state.mode === BAD) { break; } + + /* check for end-of-block code (better have one) */ + if (state.lens[256] === 0) { + strm.msg = 'invalid code -- missing end-of-block'; + state.mode = BAD; + break; + } + + /* build code tables -- note: do not change the lenbits or distbits + values here (9 and 6) without reading the comments in inftrees.h + concerning the ENOUGH constants, which depend on those values */ + state.lenbits = 9; + + opts = { bits: state.lenbits }; + ret = inflate_table(LENS, state.lens, 0, state.nlen, state.lencode, 0, state.work, opts); + // We have separate tables & no pointers. 2 commented lines below not needed. 
+ // state.next_index = opts.table_index; + state.lenbits = opts.bits; + // state.lencode = state.next; + + if (ret) { + strm.msg = 'invalid literal/lengths set'; + state.mode = BAD; + break; + } + + state.distbits = 6; + //state.distcode.copy(state.codes); + // Switch to use dynamic table + state.distcode = state.distdyn; + opts = { bits: state.distbits }; + ret = inflate_table(DISTS, state.lens, state.nlen, state.ndist, state.distcode, 0, state.work, opts); + // We have separate tables & no pointers. 2 commented lines below not needed. + // state.next_index = opts.table_index; + state.distbits = opts.bits; + // state.distcode = state.next; + + if (ret) { + strm.msg = 'invalid distances set'; + state.mode = BAD; + break; + } + //Tracev((stderr, 'inflate: codes ok\n')); + state.mode = LEN_; + if (flush === Z_TREES) { break inf_leave; } + /* falls through */ + case LEN_: + state.mode = LEN; + /* falls through */ + case LEN: + if (have >= 6 && left >= 258) { + //--- RESTORE() --- + strm.next_out = put; + strm.avail_out = left; + strm.next_in = next; + strm.avail_in = have; + state.hold = hold; + state.bits = bits; + //--- + inflate_fast(strm, _out); + //--- LOAD() --- + put = strm.next_out; + output = strm.output; + left = strm.avail_out; + next = strm.next_in; + input = strm.input; + have = strm.avail_in; + hold = state.hold; + bits = state.bits; + //--- + + if (state.mode === TYPE) { + state.back = -1; + } + break; + } + state.back = 0; + for (;;) { + here = state.lencode[hold & ((1 << state.lenbits) - 1)]; /*BITS(state.lenbits)*/ + here_bits = here >>> 24; + here_op = (here >>> 16) & 0xff; + here_val = here & 0xffff; + + if (here_bits <= bits) { break; } + //--- PULLBYTE() ---// + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + //---// + } + if (here_op && (here_op & 0xf0) === 0) { + last_bits = here_bits; + last_op = here_op; + last_val = here_val; + for (;;) { + here = state.lencode[last_val + + ((hold & ((1 << (last_bits + last_op)) - 1))/*BITS(last.bits + last.op)*/ >> last_bits)]; + here_bits = here >>> 24; + here_op = (here >>> 16) & 0xff; + here_val = here & 0xffff; + + if ((last_bits + here_bits) <= bits) { break; } + //--- PULLBYTE() ---// + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + //---// + } + //--- DROPBITS(last.bits) ---// + hold >>>= last_bits; + bits -= last_bits; + //---// + state.back += last_bits; + } + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + state.back += here_bits; + state.length = here_val; + if (here_op === 0) { + //Tracevv((stderr, here.val >= 0x20 && here.val < 0x7f ? 
+ // "inflate: literal '%c'\n" : + // "inflate: literal 0x%02x\n", here.val)); + state.mode = LIT; + break; + } + if (here_op & 32) { + //Tracevv((stderr, "inflate: end of block\n")); + state.back = -1; + state.mode = TYPE; + break; + } + if (here_op & 64) { + strm.msg = 'invalid literal/length code'; + state.mode = BAD; + break; + } + state.extra = here_op & 15; + state.mode = LENEXT; + /* falls through */ + case LENEXT: + if (state.extra) { + //=== NEEDBITS(state.extra); + n = state.extra; + while (bits < n) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.length += hold & ((1 << state.extra) - 1)/*BITS(state.extra)*/; + //--- DROPBITS(state.extra) ---// + hold >>>= state.extra; + bits -= state.extra; + //---// + state.back += state.extra; + } + //Tracevv((stderr, "inflate: length %u\n", state.length)); + state.was = state.length; + state.mode = DIST; + /* falls through */ + case DIST: + for (;;) { + here = state.distcode[hold & ((1 << state.distbits) - 1)];/*BITS(state.distbits)*/ + here_bits = here >>> 24; + here_op = (here >>> 16) & 0xff; + here_val = here & 0xffff; + + if ((here_bits) <= bits) { break; } + //--- PULLBYTE() ---// + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + //---// + } + if ((here_op & 0xf0) === 0) { + last_bits = here_bits; + last_op = here_op; + last_val = here_val; + for (;;) { + here = state.distcode[last_val + + ((hold & ((1 << (last_bits + last_op)) - 1))/*BITS(last.bits + last.op)*/ >> last_bits)]; + here_bits = here >>> 24; + here_op = (here >>> 16) & 0xff; + here_val = here & 0xffff; + + if ((last_bits + here_bits) <= bits) { break; } + //--- PULLBYTE() ---// + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + //---// + } + //--- DROPBITS(last.bits) ---// + hold >>>= last_bits; + bits -= last_bits; + //---// + state.back += last_bits; + } + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + state.back += here_bits; + if (here_op & 64) { + strm.msg = 'invalid distance code'; + state.mode = BAD; + break; + } + state.offset = here_val; + state.extra = (here_op) & 15; + state.mode = DISTEXT; + /* falls through */ + case DISTEXT: + if (state.extra) { + //=== NEEDBITS(state.extra); + n = state.extra; + while (bits < n) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.offset += hold & ((1 << state.extra) - 1)/*BITS(state.extra)*/; + //--- DROPBITS(state.extra) ---// + hold >>>= state.extra; + bits -= state.extra; + //---// + state.back += state.extra; + } +//#ifdef INFLATE_STRICT + if (state.offset > state.dmax) { + strm.msg = 'invalid distance too far back'; + state.mode = BAD; + break; + } +//#endif + //Tracevv((stderr, "inflate: distance %u\n", state.offset)); + state.mode = MATCH; + /* falls through */ + case MATCH: + if (left === 0) { break inf_leave; } + copy = _out - left; + if (state.offset > copy) { /* copy from window */ + copy = state.offset - copy; + if (copy > state.whave) { + if (state.sane) { + strm.msg = 'invalid distance too far back'; + state.mode = BAD; + break; + } +// (!) 
This block is disabled in zlib defaults, +// don't enable it for binary compatibility +//#ifdef INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR +// Trace((stderr, "inflate.c too far\n")); +// copy -= state.whave; +// if (copy > state.length) { copy = state.length; } +// if (copy > left) { copy = left; } +// left -= copy; +// state.length -= copy; +// do { +// output[put++] = 0; +// } while (--copy); +// if (state.length === 0) { state.mode = LEN; } +// break; +//#endif + } + if (copy > state.wnext) { + copy -= state.wnext; + from = state.wsize - copy; + } + else { + from = state.wnext - copy; + } + if (copy > state.length) { copy = state.length; } + from_source = state.window; + } + else { /* copy from output */ + from_source = output; + from = put - state.offset; + copy = state.length; + } + if (copy > left) { copy = left; } + left -= copy; + state.length -= copy; + do { + output[put++] = from_source[from++]; + } while (--copy); + if (state.length === 0) { state.mode = LEN; } + break; + case LIT: + if (left === 0) { break inf_leave; } + output[put++] = state.length; + left--; + state.mode = LEN; + break; + case CHECK: + if (state.wrap) { + //=== NEEDBITS(32); + while (bits < 32) { + if (have === 0) { break inf_leave; } + have--; + // Use '|' instead of '+' to make sure that result is signed + hold |= input[next++] << bits; + bits += 8; + } + //===// + _out -= left; + strm.total_out += _out; + state.total += _out; + if (_out) { + strm.adler = state.check = + /*UPDATE(state.check, put - _out, _out);*/ + (state.flags ? crc32(state.check, output, _out, put - _out) : adler32(state.check, output, _out, put - _out)); + + } + _out = left; + // NB: crc32 stored as signed 32-bit int, zswap32 returns signed too + if ((state.flags ? hold : zswap32(hold)) !== state.check) { + strm.msg = 'incorrect data check'; + state.mode = BAD; + break; + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + //Tracev((stderr, "inflate: check matches trailer\n")); + } + state.mode = LENGTH; + /* falls through */ + case LENGTH: + if (state.wrap && state.flags) { + //=== NEEDBITS(32); + while (bits < 32) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if (hold !== (state.total & 0xffffffff)) { + strm.msg = 'incorrect length check'; + state.mode = BAD; + break; + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + //Tracev((stderr, "inflate: length matches trailer\n")); + } + state.mode = DONE; + /* falls through */ + case DONE: + ret = Z_STREAM_END; + break inf_leave; + case BAD: + ret = Z_DATA_ERROR; + break inf_leave; + case MEM: + return Z_MEM_ERROR; + case SYNC: + /* falls through */ + default: + return Z_STREAM_ERROR; + } + } + + // inf_leave <- here is real place for "goto inf_leave", emulated via "break inf_leave" + + /* + Return from inflate(), updating the total counts and the check value. + If there was no progress during the inflate() call, return a buffer + error. Call updatewindow() to create and/or update the window state. + Note: a memory error from inflate() is non-recoverable. 
+ */ + + //--- RESTORE() --- + strm.next_out = put; + strm.avail_out = left; + strm.next_in = next; + strm.avail_in = have; + state.hold = hold; + state.bits = bits; + //--- + + if (state.wsize || (_out !== strm.avail_out && state.mode < BAD && + (state.mode < CHECK || flush !== Z_FINISH))) { + if (updatewindow(strm, strm.output, strm.next_out, _out - strm.avail_out)) { + state.mode = MEM; + return Z_MEM_ERROR; + } + } + _in -= strm.avail_in; + _out -= strm.avail_out; + strm.total_in += _in; + strm.total_out += _out; + state.total += _out; + if (state.wrap && _out) { + strm.adler = state.check = /*UPDATE(state.check, strm.next_out - _out, _out);*/ + (state.flags ? crc32(state.check, output, _out, strm.next_out - _out) : adler32(state.check, output, _out, strm.next_out - _out)); + } + strm.data_type = state.bits + (state.last ? 64 : 0) + + (state.mode === TYPE ? 128 : 0) + + (state.mode === LEN_ || state.mode === COPY_ ? 256 : 0); + if (((_in === 0 && _out === 0) || flush === Z_FINISH) && ret === Z_OK) { + ret = Z_BUF_ERROR; + } + return ret; +} + +function inflateEnd(strm) { + + if (!strm || !strm.state /*|| strm->zfree == (free_func)0*/) { + return Z_STREAM_ERROR; + } + + var state = strm.state; + if (state.window) { + state.window = null; + } + strm.state = null; + return Z_OK; +} + +function inflateGetHeader(strm, head) { + var state; + + /* check state */ + if (!strm || !strm.state) { return Z_STREAM_ERROR; } + state = strm.state; + if ((state.wrap & 2) === 0) { return Z_STREAM_ERROR; } + + /* save header structure */ + state.head = head; + head.done = false; + return Z_OK; +} + +function inflateSetDictionary(strm, dictionary) { + var dictLength = dictionary.length; + + var state; + var dictid; + var ret; + + /* check state */ + if (!strm /* == Z_NULL */ || !strm.state /* == Z_NULL */) { return Z_STREAM_ERROR; } + state = strm.state; + + if (state.wrap !== 0 && state.mode !== DICT) { + return Z_STREAM_ERROR; + } + + /* check for correct dictionary identifier */ + if (state.mode === DICT) { + dictid = 1; /* adler32(0, null, 0)*/ + /* dictid = adler32(dictid, dictionary, dictLength); */ + dictid = adler32(dictid, dictionary, dictLength, 0); + if (dictid !== state.check) { + return Z_DATA_ERROR; + } + } + /* copy dictionary to window using updatewindow(), which will amend the + existing dictionary if appropriate */ + ret = updatewindow(strm, dictionary, dictLength, dictLength); + if (ret) { + state.mode = MEM; + return Z_MEM_ERROR; + } + state.havedict = 1; + // Tracev((stderr, "inflate: dictionary set\n")); + return Z_OK; +} + +exports.inflateReset = inflateReset; +exports.inflateReset2 = inflateReset2; +exports.inflateResetKeep = inflateResetKeep; +exports.inflateInit = inflateInit; +exports.inflateInit2 = inflateInit2; +exports.inflate = inflate; +exports.inflateEnd = inflateEnd; +exports.inflateGetHeader = inflateGetHeader; +exports.inflateSetDictionary = inflateSetDictionary; +exports.inflateInfo = 'pako inflate (from Nodeca project)'; + +/* Not implemented +exports.inflateCopy = inflateCopy; +exports.inflateGetDictionary = inflateGetDictionary; +exports.inflateMark = inflateMark; +exports.inflatePrime = inflatePrime; +exports.inflateSync = inflateSync; +exports.inflateSyncPoint = inflateSyncPoint; +exports.inflateUndermine = inflateUndermine; +*/ + + +/***/ }), + +/***/ 56895: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This 
software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source distribution. + +var utils = __nccwpck_require__(5483); + +var MAXBITS = 15; +var ENOUGH_LENS = 852; +var ENOUGH_DISTS = 592; +//var ENOUGH = (ENOUGH_LENS+ENOUGH_DISTS); + +var CODES = 0; +var LENS = 1; +var DISTS = 2; + +var lbase = [ /* Length codes 257..285 base */ + 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31, + 35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0 +]; + +var lext = [ /* Length codes 257..285 extra */ + 16, 16, 16, 16, 16, 16, 16, 16, 17, 17, 17, 17, 18, 18, 18, 18, + 19, 19, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21, 16, 72, 78 +]; + +var dbase = [ /* Distance codes 0..29 base */ + 1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193, + 257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145, + 8193, 12289, 16385, 24577, 0, 0 +]; + +var dext = [ /* Distance codes 0..29 extra */ + 16, 16, 16, 16, 17, 17, 18, 18, 19, 19, 20, 20, 21, 21, 22, 22, + 23, 23, 24, 24, 25, 25, 26, 26, 27, 27, + 28, 28, 29, 29, 64, 64 +]; + +module.exports = function inflate_table(type, lens, lens_index, codes, table, table_index, work, opts) +{ + var bits = opts.bits; + //here = opts.here; /* table entry for duplication */ + + var len = 0; /* a code's length in bits */ + var sym = 0; /* index of code symbols */ + var min = 0, max = 0; /* minimum and maximum code lengths */ + var root = 0; /* number of index bits for root table */ + var curr = 0; /* number of index bits for current table */ + var drop = 0; /* code bits to drop for sub-table */ + var left = 0; /* number of prefix codes available */ + var used = 0; /* code entries in table used */ + var huff = 0; /* Huffman code */ + var incr; /* for incrementing code, index */ + var fill; /* index for replicating entries */ + var low; /* low bits for current root entry */ + var mask; /* mask for low root bits */ + var next; /* next available space in table */ + var base = null; /* base value table to use */ + var base_index = 0; +// var shoextra; /* extra bits table to use */ + var end; /* use base and extra for symbol > end */ + var count = new utils.Buf16(MAXBITS + 1); //[MAXBITS+1]; /* number of codes of each length */ + var offs = new utils.Buf16(MAXBITS + 1); //[MAXBITS+1]; /* offsets in table for each length */ + var extra = null; + var extra_index = 0; + + var here_bits, here_op, here_val; + + /* + Process a set of code lengths to create a canonical Huffman code. The + code lengths are lens[0..codes-1]. Each length corresponds to the + symbols 0..codes-1. The Huffman code is generated by first sorting the + symbols by length from short to long, and retaining the symbol order + for codes with equal lengths. 
Then the code starts with all zero bits + for the first code of the shortest length, and the codes are integer + increments for the same length, and zeros are appended as the length + increases. For the deflate format, these bits are stored backwards + from their more natural integer increment ordering, and so when the + decoding tables are built in the large loop below, the integer codes + are incremented backwards. + + This routine assumes, but does not check, that all of the entries in + lens[] are in the range 0..MAXBITS. The caller must assure this. + 1..MAXBITS is interpreted as that code length. zero means that that + symbol does not occur in this code. + + The codes are sorted by computing a count of codes for each length, + creating from that a table of starting indices for each length in the + sorted table, and then entering the symbols in order in the sorted + table. The sorted table is work[], with that space being provided by + the caller. + + The length counts are used for other purposes as well, i.e. finding + the minimum and maximum length codes, determining if there are any + codes at all, checking for a valid set of lengths, and looking ahead + at length counts to determine sub-table sizes when building the + decoding tables. + */ + + /* accumulate lengths for codes (assumes lens[] all in 0..MAXBITS) */ + for (len = 0; len <= MAXBITS; len++) { + count[len] = 0; + } + for (sym = 0; sym < codes; sym++) { + count[lens[lens_index + sym]]++; + } + + /* bound code lengths, force root to be within code lengths */ + root = bits; + for (max = MAXBITS; max >= 1; max--) { + if (count[max] !== 0) { break; } + } + if (root > max) { + root = max; + } + if (max === 0) { /* no symbols to code at all */ + //table.op[opts.table_index] = 64; //here.op = (var char)64; /* invalid code marker */ + //table.bits[opts.table_index] = 1; //here.bits = (var char)1; + //table.val[opts.table_index++] = 0; //here.val = (var short)0; + table[table_index++] = (1 << 24) | (64 << 16) | 0; + + + //table.op[opts.table_index] = 64; + //table.bits[opts.table_index] = 1; + //table.val[opts.table_index++] = 0; + table[table_index++] = (1 << 24) | (64 << 16) | 0; + + opts.bits = 1; + return 0; /* no symbols, but wait for decoding to report error */ + } + for (min = 1; min < max; min++) { + if (count[min] !== 0) { break; } + } + if (root < min) { + root = min; + } + + /* check for an over-subscribed or incomplete set of lengths */ + left = 1; + for (len = 1; len <= MAXBITS; len++) { + left <<= 1; + left -= count[len]; + if (left < 0) { + return -1; + } /* over-subscribed */ + } + if (left > 0 && (type === CODES || max !== 1)) { + return -1; /* incomplete set */ + } + + /* generate offsets into symbol table for each length for sorting */ + offs[1] = 0; + for (len = 1; len < MAXBITS; len++) { + offs[len + 1] = offs[len] + count[len]; + } + + /* sort symbols by length, by symbol order within each length */ + for (sym = 0; sym < codes; sym++) { + if (lens[lens_index + sym] !== 0) { + work[offs[lens[lens_index + sym]]++] = sym; + } + } + + /* + Create and fill in decoding tables. In this loop, the table being + filled is at next and has curr index bits. The code being used is huff + with length len. That code is converted to an index by dropping drop + bits off of the bottom. For codes where len is less than drop + curr, + those top drop + curr - len bits are incremented through all values to + fill the table with replicated entries. + + root is the number of index bits for the root table. 
When len exceeds + root, sub-tables are created pointed to by the root entry with an index + of the low root bits of huff. This is saved in low to check for when a + new sub-table should be started. drop is zero when the root table is + being filled, and drop is root when sub-tables are being filled. + + When a new sub-table is needed, it is necessary to look ahead in the + code lengths to determine what size sub-table is needed. The length + counts are used for this, and so count[] is decremented as codes are + entered in the tables. + + used keeps track of how many table entries have been allocated from the + provided *table space. It is checked for LENS and DIST tables against + the constants ENOUGH_LENS and ENOUGH_DISTS to guard against changes in + the initial root table size constants. See the comments in inftrees.h + for more information. + + sym increments through all symbols, and the loop terminates when + all codes of length max, i.e. all codes, have been processed. This + routine permits incomplete codes, so another loop after this one fills + in the rest of the decoding tables with invalid code markers. + */ + + /* set up for code type */ + // poor man optimization - use if-else instead of switch, + // to avoid deopts in old v8 + if (type === CODES) { + base = extra = work; /* dummy value--not used */ + end = 19; + + } else if (type === LENS) { + base = lbase; + base_index -= 257; + extra = lext; + extra_index -= 257; + end = 256; + + } else { /* DISTS */ + base = dbase; + extra = dext; + end = -1; + } + + /* initialize opts for loop */ + huff = 0; /* starting code */ + sym = 0; /* starting code symbol */ + len = min; /* starting code length */ + next = table_index; /* current table to fill in */ + curr = root; /* current table index bits */ + drop = 0; /* current bits to drop from code for index */ + low = -1; /* trigger new sub-table when len > root */ + used = 1 << root; /* use root table entries */ + mask = used - 1; /* mask for comparing low */ + + /* check available table space */ + if ((type === LENS && used > ENOUGH_LENS) || + (type === DISTS && used > ENOUGH_DISTS)) { + return 1; + } + + /* process all codes and make table entries */ + for (;;) { + /* create table entry */ + here_bits = len - drop; + if (work[sym] < end) { + here_op = 0; + here_val = work[sym]; + } + else if (work[sym] > end) { + here_op = extra[extra_index + work[sym]]; + here_val = base[base_index + work[sym]]; + } + else { + here_op = 32 + 64; /* end of block */ + here_val = 0; + } + + /* replicate for those indices with low len bits equal to huff */ + incr = 1 << (len - drop); + fill = 1 << curr; + min = fill; /* save offset to next table */ + do { + fill -= incr; + table[next + (huff >> drop) + fill] = (here_bits << 24) | (here_op << 16) | here_val |0; + } while (fill !== 0); + + /* backwards increment the len-bit code huff */ + incr = 1 << (len - 1); + while (huff & incr) { + incr >>= 1; + } + if (incr !== 0) { + huff &= incr - 1; + huff += incr; + } else { + huff = 0; + } + + /* go to next symbol, update count, len */ + sym++; + if (--count[len] === 0) { + if (len === max) { break; } + len = lens[lens_index + work[sym]]; + } + + /* create new sub-table if needed */ + if (len > root && (huff & mask) !== low) { + /* if first time, transition to sub-tables */ + if (drop === 0) { + drop = root; + } + + /* increment past last table */ + next += min; /* here min is 1 << curr */ + + /* determine length of next table */ + curr = len - drop; + left = 1 << curr; + while (curr + drop < max) { + left -= 
count[curr + drop]; + if (left <= 0) { break; } + curr++; + left <<= 1; + } + + /* check for enough space */ + used += 1 << curr; + if ((type === LENS && used > ENOUGH_LENS) || + (type === DISTS && used > ENOUGH_DISTS)) { + return 1; + } + + /* point entry in root table to sub-table */ + low = huff & mask; + /*table.op[low] = curr; + table.bits[low] = root; + table.val[low] = next - opts.table_index;*/ + table[low] = (root << 24) | (curr << 16) | (next - table_index) |0; + } + } + + /* fill in remaining table entry if code is incomplete (guaranteed to have + at most one remaining entry, since if the code is incomplete, the + maximum code length that was allowed to get this far is one bit) */ + if (huff !== 0) { + //table.op[next + huff] = 64; /* invalid code marker */ + //table.bits[next + huff] = len - drop; + //table.val[next + huff] = 0; + table[next + huff] = ((len - drop) << 24) | (64 << 16) |0; + } + + /* set return parameters */ + //opts.table_index += used; + opts.bits = root; + return 0; +}; + + +/***/ }), + +/***/ 1890: +/***/ ((module) => { + +"use strict"; + + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source distribution. + +module.exports = { + 2: 'need dictionary', /* Z_NEED_DICT 2 */ + 1: 'stream end', /* Z_STREAM_END 1 */ + 0: '', /* Z_OK 0 */ + '-1': 'file error', /* Z_ERRNO (-1) */ + '-2': 'stream error', /* Z_STREAM_ERROR (-2) */ + '-3': 'data error', /* Z_DATA_ERROR (-3) */ + '-4': 'insufficient memory', /* Z_MEM_ERROR (-4) */ + '-5': 'buffer error', /* Z_BUF_ERROR (-5) */ + '-6': 'incompatible version' /* Z_VERSION_ERROR (-6) */ +}; + + +/***/ }), + +/***/ 78754: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. 
This notice may not be removed or altered from any source distribution. + +/* eslint-disable space-unary-ops */ + +var utils = __nccwpck_require__(5483); + +/* Public constants ==========================================================*/ +/* ===========================================================================*/ + + +//var Z_FILTERED = 1; +//var Z_HUFFMAN_ONLY = 2; +//var Z_RLE = 3; +var Z_FIXED = 4; +//var Z_DEFAULT_STRATEGY = 0; + +/* Possible values of the data_type field (though see inflate()) */ +var Z_BINARY = 0; +var Z_TEXT = 1; +//var Z_ASCII = 1; // = Z_TEXT +var Z_UNKNOWN = 2; + +/*============================================================================*/ + + +function zero(buf) { var len = buf.length; while (--len >= 0) { buf[len] = 0; } } + +// From zutil.h + +var STORED_BLOCK = 0; +var STATIC_TREES = 1; +var DYN_TREES = 2; +/* The three kinds of block type */ + +var MIN_MATCH = 3; +var MAX_MATCH = 258; +/* The minimum and maximum match lengths */ + +// From deflate.h +/* =========================================================================== + * Internal compression state. + */ + +var LENGTH_CODES = 29; +/* number of length codes, not counting the special END_BLOCK code */ + +var LITERALS = 256; +/* number of literal bytes 0..255 */ + +var L_CODES = LITERALS + 1 + LENGTH_CODES; +/* number of Literal or Length codes, including the END_BLOCK code */ + +var D_CODES = 30; +/* number of distance codes */ + +var BL_CODES = 19; +/* number of codes used to transfer the bit lengths */ + +var HEAP_SIZE = 2 * L_CODES + 1; +/* maximum heap size */ + +var MAX_BITS = 15; +/* All codes must not exceed MAX_BITS bits */ + +var Buf_size = 16; +/* size of bit buffer in bi_buf */ + + +/* =========================================================================== + * Constants + */ + +var MAX_BL_BITS = 7; +/* Bit length codes must not exceed MAX_BL_BITS bits */ + +var END_BLOCK = 256; +/* end of block literal code */ + +var REP_3_6 = 16; +/* repeat previous bit length 3-6 times (2 bits of repeat count) */ + +var REPZ_3_10 = 17; +/* repeat a zero length 3-10 times (3 bits of repeat count) */ + +var REPZ_11_138 = 18; +/* repeat a zero length 11-138 times (7 bits of repeat count) */ + +/* eslint-disable comma-spacing,array-bracket-spacing */ +var extra_lbits = /* extra bits for each length code */ + [0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0]; + +var extra_dbits = /* extra bits for each distance code */ + [0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13]; + +var extra_blbits = /* extra bits for each bit length code */ + [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7]; + +var bl_order = + [16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15]; +/* eslint-enable comma-spacing,array-bracket-spacing */ + +/* The lengths of the bit length codes are sent in order of decreasing + * probability, to avoid transmitting the lengths for unused bit length codes. + */ + +/* =========================================================================== + * Local data. These are initialized only once. + */ + +// We pre-fill arrays with 0 to avoid uninitialized gaps + +var DIST_CODE_LEN = 512; /* see definition of array dist_code below */ + +// !!!! Use flat array instead of structure, Freq = i*2, Len = i*2+1 +var static_ltree = new Array((L_CODES + 2) * 2); +zero(static_ltree); +/* The static literal tree. Since the bit lengths are imposed, there is no + * need for the L_CODES extra codes used during heap construction. 
However + * The codes 286 and 287 are needed to build a canonical tree (see _tr_init + * below). + */ + +var static_dtree = new Array(D_CODES * 2); +zero(static_dtree); +/* The static distance tree. (Actually a trivial tree since all codes use + * 5 bits.) + */ + +var _dist_code = new Array(DIST_CODE_LEN); +zero(_dist_code); +/* Distance codes. The first 256 values correspond to the distances + * 3 .. 258, the last 256 values correspond to the top 8 bits of + * the 15 bit distances. + */ + +var _length_code = new Array(MAX_MATCH - MIN_MATCH + 1); +zero(_length_code); +/* length code for each normalized match length (0 == MIN_MATCH) */ + +var base_length = new Array(LENGTH_CODES); +zero(base_length); +/* First normalized length for each code (0 = MIN_MATCH) */ + +var base_dist = new Array(D_CODES); +zero(base_dist); +/* First normalized distance for each code (0 = distance of 1) */ + + +function StaticTreeDesc(static_tree, extra_bits, extra_base, elems, max_length) { + + this.static_tree = static_tree; /* static tree or NULL */ + this.extra_bits = extra_bits; /* extra bits for each code or NULL */ + this.extra_base = extra_base; /* base index for extra_bits */ + this.elems = elems; /* max number of elements in the tree */ + this.max_length = max_length; /* max bit length for the codes */ + + // show if `static_tree` has data or dummy - needed for monomorphic objects + this.has_stree = static_tree && static_tree.length; +} + + +var static_l_desc; +var static_d_desc; +var static_bl_desc; + + +function TreeDesc(dyn_tree, stat_desc) { + this.dyn_tree = dyn_tree; /* the dynamic tree */ + this.max_code = 0; /* largest code with non zero frequency */ + this.stat_desc = stat_desc; /* the corresponding static tree */ +} + + + +function d_code(dist) { + return dist < 256 ? _dist_code[dist] : _dist_code[256 + (dist >>> 7)]; +} + + +/* =========================================================================== + * Output a short LSB first on the stream. + * IN assertion: there is enough room in pendingBuf. + */ +function put_short(s, w) { +// put_byte(s, (uch)((w) & 0xff)); +// put_byte(s, (uch)((ush)(w) >> 8)); + s.pending_buf[s.pending++] = (w) & 0xff; + s.pending_buf[s.pending++] = (w >>> 8) & 0xff; +} + + +/* =========================================================================== + * Send a value on a given number of bits. + * IN assertion: length <= 16 and value fits in length bits. + */ +function send_bits(s, value, length) { + if (s.bi_valid > (Buf_size - length)) { + s.bi_buf |= (value << s.bi_valid) & 0xffff; + put_short(s, s.bi_buf); + s.bi_buf = value >> (Buf_size - s.bi_valid); + s.bi_valid += length - Buf_size; + } else { + s.bi_buf |= (value << s.bi_valid) & 0xffff; + s.bi_valid += length; + } +} + + +function send_code(s, c, tree) { + send_bits(s, tree[c * 2]/*.Code*/, tree[c * 2 + 1]/*.Len*/); +} + + +/* =========================================================================== + * Reverse the first len bits of a code, using straightforward code (a faster + * method would use a table) + * IN assertion: 1 <= len <= 15 + */ +function bi_reverse(code, len) { + var res = 0; + do { + res |= code & 1; + code >>>= 1; + res <<= 1; + } while (--len > 0); + return res >>> 1; +} + + +/* =========================================================================== + * Flush the bit buffer, keeping at most 7 bits in it. 
+ */ +function bi_flush(s) { + if (s.bi_valid === 16) { + put_short(s, s.bi_buf); + s.bi_buf = 0; + s.bi_valid = 0; + + } else if (s.bi_valid >= 8) { + s.pending_buf[s.pending++] = s.bi_buf & 0xff; + s.bi_buf >>= 8; + s.bi_valid -= 8; + } +} + + +/* =========================================================================== + * Compute the optimal bit lengths for a tree and update the total bit length + * for the current block. + * IN assertion: the fields freq and dad are set, heap[heap_max] and + * above are the tree nodes sorted by increasing frequency. + * OUT assertions: the field len is set to the optimal bit length, the + * array bl_count contains the frequencies for each bit length. + * The length opt_len is updated; static_len is also updated if stree is + * not null. + */ +function gen_bitlen(s, desc) +// deflate_state *s; +// tree_desc *desc; /* the tree descriptor */ +{ + var tree = desc.dyn_tree; + var max_code = desc.max_code; + var stree = desc.stat_desc.static_tree; + var has_stree = desc.stat_desc.has_stree; + var extra = desc.stat_desc.extra_bits; + var base = desc.stat_desc.extra_base; + var max_length = desc.stat_desc.max_length; + var h; /* heap index */ + var n, m; /* iterate over the tree elements */ + var bits; /* bit length */ + var xbits; /* extra bits */ + var f; /* frequency */ + var overflow = 0; /* number of elements with bit length too large */ + + for (bits = 0; bits <= MAX_BITS; bits++) { + s.bl_count[bits] = 0; + } + + /* In a first pass, compute the optimal bit lengths (which may + * overflow in the case of the bit length tree). + */ + tree[s.heap[s.heap_max] * 2 + 1]/*.Len*/ = 0; /* root of the heap */ + + for (h = s.heap_max + 1; h < HEAP_SIZE; h++) { + n = s.heap[h]; + bits = tree[tree[n * 2 + 1]/*.Dad*/ * 2 + 1]/*.Len*/ + 1; + if (bits > max_length) { + bits = max_length; + overflow++; + } + tree[n * 2 + 1]/*.Len*/ = bits; + /* We overwrite tree[n].Dad which is no longer needed */ + + if (n > max_code) { continue; } /* not a leaf node */ + + s.bl_count[bits]++; + xbits = 0; + if (n >= base) { + xbits = extra[n - base]; + } + f = tree[n * 2]/*.Freq*/; + s.opt_len += f * (bits + xbits); + if (has_stree) { + s.static_len += f * (stree[n * 2 + 1]/*.Len*/ + xbits); + } + } + if (overflow === 0) { return; } + + // Trace((stderr,"\nbit length overflow\n")); + /* This happens for example on obj2 and pic of the Calgary corpus */ + + /* Find the first bit length which could increase: */ + do { + bits = max_length - 1; + while (s.bl_count[bits] === 0) { bits--; } + s.bl_count[bits]--; /* move one leaf down the tree */ + s.bl_count[bits + 1] += 2; /* move one overflow item as its brother */ + s.bl_count[max_length]--; + /* The brother of the overflow item also moves one step up, + * but this does not affect bl_count[max_length] + */ + overflow -= 2; + } while (overflow > 0); + + /* Now recompute all bit lengths, scanning in increasing frequency. + * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all + * lengths instead of fixing only the wrong ones. This idea is taken + * from 'ar' written by Haruhiko Okumura.) 
+ */ + for (bits = max_length; bits !== 0; bits--) { + n = s.bl_count[bits]; + while (n !== 0) { + m = s.heap[--h]; + if (m > max_code) { continue; } + if (tree[m * 2 + 1]/*.Len*/ !== bits) { + // Trace((stderr,"code %d bits %d->%d\n", m, tree[m].Len, bits)); + s.opt_len += (bits - tree[m * 2 + 1]/*.Len*/) * tree[m * 2]/*.Freq*/; + tree[m * 2 + 1]/*.Len*/ = bits; + } + n--; + } + } +} + + +/* =========================================================================== + * Generate the codes for a given tree and bit counts (which need not be + * optimal). + * IN assertion: the array bl_count contains the bit length statistics for + * the given tree and the field len is set for all tree elements. + * OUT assertion: the field code is set for all tree elements of non + * zero code length. + */ +function gen_codes(tree, max_code, bl_count) +// ct_data *tree; /* the tree to decorate */ +// int max_code; /* largest code with non zero frequency */ +// ushf *bl_count; /* number of codes at each bit length */ +{ + var next_code = new Array(MAX_BITS + 1); /* next code value for each bit length */ + var code = 0; /* running code value */ + var bits; /* bit index */ + var n; /* code index */ + + /* The distribution counts are first used to generate the code values + * without bit reversal. + */ + for (bits = 1; bits <= MAX_BITS; bits++) { + next_code[bits] = code = (code + bl_count[bits - 1]) << 1; + } + /* Check that the bit counts in bl_count are consistent. The last code + * must be all ones. + */ + //Assert (code + bl_count[MAX_BITS]-1 == (1< length code (0..28) */ + length = 0; + for (code = 0; code < LENGTH_CODES - 1; code++) { + base_length[code] = length; + for (n = 0; n < (1 << extra_lbits[code]); n++) { + _length_code[length++] = code; + } + } + //Assert (length == 256, "tr_static_init: length != 256"); + /* Note that the length 255 (match length 258) can be represented + * in two different ways: code 284 + 5 bits or code 285, so we + * overwrite length_code[255] to use the best encoding: + */ + _length_code[length - 1] = code; + + /* Initialize the mapping dist (0..32K) -> dist code (0..29) */ + dist = 0; + for (code = 0; code < 16; code++) { + base_dist[code] = dist; + for (n = 0; n < (1 << extra_dbits[code]); n++) { + _dist_code[dist++] = code; + } + } + //Assert (dist == 256, "tr_static_init: dist != 256"); + dist >>= 7; /* from now on, all distances are divided by 128 */ + for (; code < D_CODES; code++) { + base_dist[code] = dist << 7; + for (n = 0; n < (1 << (extra_dbits[code] - 7)); n++) { + _dist_code[256 + dist++] = code; + } + } + //Assert (dist == 256, "tr_static_init: 256+dist != 512"); + + /* Construct the codes of the static literal tree */ + for (bits = 0; bits <= MAX_BITS; bits++) { + bl_count[bits] = 0; + } + + n = 0; + while (n <= 143) { + static_ltree[n * 2 + 1]/*.Len*/ = 8; + n++; + bl_count[8]++; + } + while (n <= 255) { + static_ltree[n * 2 + 1]/*.Len*/ = 9; + n++; + bl_count[9]++; + } + while (n <= 279) { + static_ltree[n * 2 + 1]/*.Len*/ = 7; + n++; + bl_count[7]++; + } + while (n <= 287) { + static_ltree[n * 2 + 1]/*.Len*/ = 8; + n++; + bl_count[8]++; + } + /* Codes 286 and 287 do not exist, but we must include them in the + * tree construction to get a canonical Huffman tree (longest code + * all ones) + */ + gen_codes(static_ltree, L_CODES + 1, bl_count); + + /* The static distance tree is trivial: */ + for (n = 0; n < D_CODES; n++) { + static_dtree[n * 2 + 1]/*.Len*/ = 5; + static_dtree[n * 2]/*.Code*/ = bi_reverse(n, 5); + } + + // Now data ready and we can init 
static trees + static_l_desc = new StaticTreeDesc(static_ltree, extra_lbits, LITERALS + 1, L_CODES, MAX_BITS); + static_d_desc = new StaticTreeDesc(static_dtree, extra_dbits, 0, D_CODES, MAX_BITS); + static_bl_desc = new StaticTreeDesc(new Array(0), extra_blbits, 0, BL_CODES, MAX_BL_BITS); + + //static_init_done = true; +} + + +/* =========================================================================== + * Initialize a new block. + */ +function init_block(s) { + var n; /* iterates over tree elements */ + + /* Initialize the trees. */ + for (n = 0; n < L_CODES; n++) { s.dyn_ltree[n * 2]/*.Freq*/ = 0; } + for (n = 0; n < D_CODES; n++) { s.dyn_dtree[n * 2]/*.Freq*/ = 0; } + for (n = 0; n < BL_CODES; n++) { s.bl_tree[n * 2]/*.Freq*/ = 0; } + + s.dyn_ltree[END_BLOCK * 2]/*.Freq*/ = 1; + s.opt_len = s.static_len = 0; + s.last_lit = s.matches = 0; +} + + +/* =========================================================================== + * Flush the bit buffer and align the output on a byte boundary + */ +function bi_windup(s) +{ + if (s.bi_valid > 8) { + put_short(s, s.bi_buf); + } else if (s.bi_valid > 0) { + //put_byte(s, (Byte)s->bi_buf); + s.pending_buf[s.pending++] = s.bi_buf; + } + s.bi_buf = 0; + s.bi_valid = 0; +} + +/* =========================================================================== + * Copy a stored block, storing first the length and its + * one's complement if requested. + */ +function copy_block(s, buf, len, header) +//DeflateState *s; +//charf *buf; /* the input data */ +//unsigned len; /* its length */ +//int header; /* true if block header must be written */ +{ + bi_windup(s); /* align on byte boundary */ + + if (header) { + put_short(s, len); + put_short(s, ~len); + } +// while (len--) { +// put_byte(s, *buf++); +// } + utils.arraySet(s.pending_buf, s.window, buf, len, s.pending); + s.pending += len; +} + +/* =========================================================================== + * Compares to subtrees, using the tree depth as tie breaker when + * the subtrees have equal frequency. This minimizes the worst case length. + */ +function smaller(tree, n, m, depth) { + var _n2 = n * 2; + var _m2 = m * 2; + return (tree[_n2]/*.Freq*/ < tree[_m2]/*.Freq*/ || + (tree[_n2]/*.Freq*/ === tree[_m2]/*.Freq*/ && depth[n] <= depth[m])); +} + +/* =========================================================================== + * Restore the heap property by moving down the tree starting at node k, + * exchanging a node with the smallest of its two sons if necessary, stopping + * when the heap property is re-established (each father smaller than its + * two sons). 
+ */ +function pqdownheap(s, tree, k) +// deflate_state *s; +// ct_data *tree; /* the tree to restore */ +// int k; /* node to move down */ +{ + var v = s.heap[k]; + var j = k << 1; /* left son of k */ + while (j <= s.heap_len) { + /* Set j to the smallest of the two sons: */ + if (j < s.heap_len && + smaller(tree, s.heap[j + 1], s.heap[j], s.depth)) { + j++; + } + /* Exit if v is smaller than both sons */ + if (smaller(tree, v, s.heap[j], s.depth)) { break; } + + /* Exchange v with the smallest son */ + s.heap[k] = s.heap[j]; + k = j; + + /* And continue down the tree, setting j to the left son of k */ + j <<= 1; + } + s.heap[k] = v; +} + + +// inlined manually +// var SMALLEST = 1; + +/* =========================================================================== + * Send the block data compressed using the given Huffman trees + */ +function compress_block(s, ltree, dtree) +// deflate_state *s; +// const ct_data *ltree; /* literal tree */ +// const ct_data *dtree; /* distance tree */ +{ + var dist; /* distance of matched string */ + var lc; /* match length or unmatched char (if dist == 0) */ + var lx = 0; /* running index in l_buf */ + var code; /* the code to send */ + var extra; /* number of extra bits to send */ + + if (s.last_lit !== 0) { + do { + dist = (s.pending_buf[s.d_buf + lx * 2] << 8) | (s.pending_buf[s.d_buf + lx * 2 + 1]); + lc = s.pending_buf[s.l_buf + lx]; + lx++; + + if (dist === 0) { + send_code(s, lc, ltree); /* send a literal byte */ + //Tracecv(isgraph(lc), (stderr," '%c' ", lc)); + } else { + /* Here, lc is the match length - MIN_MATCH */ + code = _length_code[lc]; + send_code(s, code + LITERALS + 1, ltree); /* send the length code */ + extra = extra_lbits[code]; + if (extra !== 0) { + lc -= base_length[code]; + send_bits(s, lc, extra); /* send the extra length bits */ + } + dist--; /* dist is now the match distance - 1 */ + code = d_code(dist); + //Assert (code < D_CODES, "bad d_code"); + + send_code(s, code, dtree); /* send the distance code */ + extra = extra_dbits[code]; + if (extra !== 0) { + dist -= base_dist[code]; + send_bits(s, dist, extra); /* send the extra distance bits */ + } + } /* literal or match pair ? */ + + /* Check that the overlay between pending_buf and d_buf+l_buf is ok: */ + //Assert((uInt)(s->pending) < s->lit_bufsize + 2*lx, + // "pendingBuf overflow"); + + } while (lx < s.last_lit); + } + + send_code(s, END_BLOCK, ltree); +} + + +/* =========================================================================== + * Construct one Huffman tree and assigns the code bit strings and lengths. + * Update the total bit length for the current block. + * IN assertion: the field freq is set for all tree elements. + * OUT assertions: the fields len and code are set to the optimal bit length + * and corresponding code. The length opt_len is updated; static_len is + * also updated if stree is not null. The field max_code is set. + */ +function build_tree(s, desc) +// deflate_state *s; +// tree_desc *desc; /* the tree descriptor */ +{ + var tree = desc.dyn_tree; + var stree = desc.stat_desc.static_tree; + var has_stree = desc.stat_desc.has_stree; + var elems = desc.stat_desc.elems; + var n, m; /* iterate over heap elements */ + var max_code = -1; /* largest code with non zero frequency */ + var node; /* new node being created */ + + /* Construct the initial heap, with least frequent element in + * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1]. + * heap[0] is not used. 
+ */ + s.heap_len = 0; + s.heap_max = HEAP_SIZE; + + for (n = 0; n < elems; n++) { + if (tree[n * 2]/*.Freq*/ !== 0) { + s.heap[++s.heap_len] = max_code = n; + s.depth[n] = 0; + + } else { + tree[n * 2 + 1]/*.Len*/ = 0; + } + } + + /* The pkzip format requires that at least one distance code exists, + * and that at least one bit should be sent even if there is only one + * possible code. So to avoid special checks later on we force at least + * two codes of non zero frequency. + */ + while (s.heap_len < 2) { + node = s.heap[++s.heap_len] = (max_code < 2 ? ++max_code : 0); + tree[node * 2]/*.Freq*/ = 1; + s.depth[node] = 0; + s.opt_len--; + + if (has_stree) { + s.static_len -= stree[node * 2 + 1]/*.Len*/; + } + /* node is 0 or 1 so it does not have extra bits */ + } + desc.max_code = max_code; + + /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree, + * establish sub-heaps of increasing lengths: + */ + for (n = (s.heap_len >> 1/*int /2*/); n >= 1; n--) { pqdownheap(s, tree, n); } + + /* Construct the Huffman tree by repeatedly combining the least two + * frequent nodes. + */ + node = elems; /* next internal node of the tree */ + do { + //pqremove(s, tree, n); /* n = node of least frequency */ + /*** pqremove ***/ + n = s.heap[1/*SMALLEST*/]; + s.heap[1/*SMALLEST*/] = s.heap[s.heap_len--]; + pqdownheap(s, tree, 1/*SMALLEST*/); + /***/ + + m = s.heap[1/*SMALLEST*/]; /* m = node of next least frequency */ + + s.heap[--s.heap_max] = n; /* keep the nodes sorted by frequency */ + s.heap[--s.heap_max] = m; + + /* Create a new node father of n and m */ + tree[node * 2]/*.Freq*/ = tree[n * 2]/*.Freq*/ + tree[m * 2]/*.Freq*/; + s.depth[node] = (s.depth[n] >= s.depth[m] ? s.depth[n] : s.depth[m]) + 1; + tree[n * 2 + 1]/*.Dad*/ = tree[m * 2 + 1]/*.Dad*/ = node; + + /* and insert the new node in the heap */ + s.heap[1/*SMALLEST*/] = node++; + pqdownheap(s, tree, 1/*SMALLEST*/); + + } while (s.heap_len >= 2); + + s.heap[--s.heap_max] = s.heap[1/*SMALLEST*/]; + + /* At this point, the fields freq and dad are set. We can now + * generate the bit lengths. + */ + gen_bitlen(s, desc); + + /* The field len is now set, we can generate the bit codes */ + gen_codes(tree, max_code, s.bl_count); +} + + +/* =========================================================================== + * Scan a literal or distance tree to determine the frequencies of the codes + * in the bit length tree. 
+ */ +function scan_tree(s, tree, max_code) +// deflate_state *s; +// ct_data *tree; /* the tree to be scanned */ +// int max_code; /* and its largest code of non zero frequency */ +{ + var n; /* iterates over all tree elements */ + var prevlen = -1; /* last emitted length */ + var curlen; /* length of current code */ + + var nextlen = tree[0 * 2 + 1]/*.Len*/; /* length of next code */ + + var count = 0; /* repeat count of the current code */ + var max_count = 7; /* max repeat count */ + var min_count = 4; /* min repeat count */ + + if (nextlen === 0) { + max_count = 138; + min_count = 3; + } + tree[(max_code + 1) * 2 + 1]/*.Len*/ = 0xffff; /* guard */ + + for (n = 0; n <= max_code; n++) { + curlen = nextlen; + nextlen = tree[(n + 1) * 2 + 1]/*.Len*/; + + if (++count < max_count && curlen === nextlen) { + continue; + + } else if (count < min_count) { + s.bl_tree[curlen * 2]/*.Freq*/ += count; + + } else if (curlen !== 0) { + + if (curlen !== prevlen) { s.bl_tree[curlen * 2]/*.Freq*/++; } + s.bl_tree[REP_3_6 * 2]/*.Freq*/++; + + } else if (count <= 10) { + s.bl_tree[REPZ_3_10 * 2]/*.Freq*/++; + + } else { + s.bl_tree[REPZ_11_138 * 2]/*.Freq*/++; + } + + count = 0; + prevlen = curlen; + + if (nextlen === 0) { + max_count = 138; + min_count = 3; + + } else if (curlen === nextlen) { + max_count = 6; + min_count = 3; + + } else { + max_count = 7; + min_count = 4; + } + } +} + + +/* =========================================================================== + * Send a literal or distance tree in compressed form, using the codes in + * bl_tree. + */ +function send_tree(s, tree, max_code) +// deflate_state *s; +// ct_data *tree; /* the tree to be scanned */ +// int max_code; /* and its largest code of non zero frequency */ +{ + var n; /* iterates over all tree elements */ + var prevlen = -1; /* last emitted length */ + var curlen; /* length of current code */ + + var nextlen = tree[0 * 2 + 1]/*.Len*/; /* length of next code */ + + var count = 0; /* repeat count of the current code */ + var max_count = 7; /* max repeat count */ + var min_count = 4; /* min repeat count */ + + /* tree[max_code+1].Len = -1; */ /* guard already set */ + if (nextlen === 0) { + max_count = 138; + min_count = 3; + } + + for (n = 0; n <= max_code; n++) { + curlen = nextlen; + nextlen = tree[(n + 1) * 2 + 1]/*.Len*/; + + if (++count < max_count && curlen === nextlen) { + continue; + + } else if (count < min_count) { + do { send_code(s, curlen, s.bl_tree); } while (--count !== 0); + + } else if (curlen !== 0) { + if (curlen !== prevlen) { + send_code(s, curlen, s.bl_tree); + count--; + } + //Assert(count >= 3 && count <= 6, " 3_6?"); + send_code(s, REP_3_6, s.bl_tree); + send_bits(s, count - 3, 2); + + } else if (count <= 10) { + send_code(s, REPZ_3_10, s.bl_tree); + send_bits(s, count - 3, 3); + + } else { + send_code(s, REPZ_11_138, s.bl_tree); + send_bits(s, count - 11, 7); + } + + count = 0; + prevlen = curlen; + if (nextlen === 0) { + max_count = 138; + min_count = 3; + + } else if (curlen === nextlen) { + max_count = 6; + min_count = 3; + + } else { + max_count = 7; + min_count = 4; + } + } +} + + +/* =========================================================================== + * Construct the Huffman tree for the bit lengths and return the index in + * bl_order of the last bit length code to send. 
+ */ +function build_bl_tree(s) { + var max_blindex; /* index of last bit length code of non zero freq */ + + /* Determine the bit length frequencies for literal and distance trees */ + scan_tree(s, s.dyn_ltree, s.l_desc.max_code); + scan_tree(s, s.dyn_dtree, s.d_desc.max_code); + + /* Build the bit length tree: */ + build_tree(s, s.bl_desc); + /* opt_len now includes the length of the tree representations, except + * the lengths of the bit lengths codes and the 5+5+4 bits for the counts. + */ + + /* Determine the number of bit length codes to send. The pkzip format + * requires that at least 4 bit length codes be sent. (appnote.txt says + * 3 but the actual value used is 4.) + */ + for (max_blindex = BL_CODES - 1; max_blindex >= 3; max_blindex--) { + if (s.bl_tree[bl_order[max_blindex] * 2 + 1]/*.Len*/ !== 0) { + break; + } + } + /* Update opt_len to include the bit length tree and counts */ + s.opt_len += 3 * (max_blindex + 1) + 5 + 5 + 4; + //Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld", + // s->opt_len, s->static_len)); + + return max_blindex; +} + + +/* =========================================================================== + * Send the header for a block using dynamic Huffman trees: the counts, the + * lengths of the bit length codes, the literal tree and the distance tree. + * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4. + */ +function send_all_trees(s, lcodes, dcodes, blcodes) +// deflate_state *s; +// int lcodes, dcodes, blcodes; /* number of codes for each tree */ +{ + var rank; /* index in bl_order */ + + //Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes"); + //Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES, + // "too many codes"); + //Tracev((stderr, "\nbl counts: ")); + send_bits(s, lcodes - 257, 5); /* not +255 as stated in appnote.txt */ + send_bits(s, dcodes - 1, 5); + send_bits(s, blcodes - 4, 4); /* not -3 as stated in appnote.txt */ + for (rank = 0; rank < blcodes; rank++) { + //Tracev((stderr, "\nbl code %2d ", bl_order[rank])); + send_bits(s, s.bl_tree[bl_order[rank] * 2 + 1]/*.Len*/, 3); + } + //Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent)); + + send_tree(s, s.dyn_ltree, lcodes - 1); /* literal tree */ + //Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent)); + + send_tree(s, s.dyn_dtree, dcodes - 1); /* distance tree */ + //Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent)); +} + + +/* =========================================================================== + * Check if the data type is TEXT or BINARY, using the following algorithm: + * - TEXT if the two conditions below are satisfied: + * a) There are no non-portable control characters belonging to the + * "black list" (0..6, 14..25, 28..31). + * b) There is at least one printable character belonging to the + * "white list" (9 {TAB}, 10 {LF}, 13 {CR}, 32..255). + * - BINARY otherwise. + * - The following partially-portable control characters form a + * "gray list" that is ignored in this detection algorithm: + * (7 {BEL}, 8 {BS}, 11 {VT}, 12 {FF}, 26 {SUB}, 27 {ESC}). + * IN assertion: the fields Freq of dyn_ltree are set. + */ +function detect_data_type(s) { + /* black_mask is the bit mask of black-listed bytes + * set bits 0..6, 14..25, and 28..31 + * 0xf3ffc07f = binary 11110011111111111100000001111111 + */ + var black_mask = 0xf3ffc07f; + var n; + + /* Check for non-textual ("black-listed") bytes. 
*/ + for (n = 0; n <= 31; n++, black_mask >>>= 1) { + if ((black_mask & 1) && (s.dyn_ltree[n * 2]/*.Freq*/ !== 0)) { + return Z_BINARY; + } + } + + /* Check for textual ("white-listed") bytes. */ + if (s.dyn_ltree[9 * 2]/*.Freq*/ !== 0 || s.dyn_ltree[10 * 2]/*.Freq*/ !== 0 || + s.dyn_ltree[13 * 2]/*.Freq*/ !== 0) { + return Z_TEXT; + } + for (n = 32; n < LITERALS; n++) { + if (s.dyn_ltree[n * 2]/*.Freq*/ !== 0) { + return Z_TEXT; + } + } + + /* There are no "black-listed" or "white-listed" bytes: + * this stream either is empty or has tolerated ("gray-listed") bytes only. + */ + return Z_BINARY; +} + + +var static_init_done = false; + +/* =========================================================================== + * Initialize the tree data structures for a new zlib stream. + */ +function _tr_init(s) +{ + + if (!static_init_done) { + tr_static_init(); + static_init_done = true; + } + + s.l_desc = new TreeDesc(s.dyn_ltree, static_l_desc); + s.d_desc = new TreeDesc(s.dyn_dtree, static_d_desc); + s.bl_desc = new TreeDesc(s.bl_tree, static_bl_desc); + + s.bi_buf = 0; + s.bi_valid = 0; + + /* Initialize the first block of the first file: */ + init_block(s); +} + + +/* =========================================================================== + * Send a stored block + */ +function _tr_stored_block(s, buf, stored_len, last) +//DeflateState *s; +//charf *buf; /* input block */ +//ulg stored_len; /* length of input block */ +//int last; /* one if this is the last block for a file */ +{ + send_bits(s, (STORED_BLOCK << 1) + (last ? 1 : 0), 3); /* send block type */ + copy_block(s, buf, stored_len, true); /* with header */ +} + + +/* =========================================================================== + * Send one empty static block to give enough lookahead for inflate. + * This takes 10 bits, of which 7 may remain in the bit buffer. + */ +function _tr_align(s) { + send_bits(s, STATIC_TREES << 1, 3); + send_code(s, END_BLOCK, static_ltree); + bi_flush(s); +} + + +/* =========================================================================== + * Determine the best encoding for the current block: dynamic trees, static + * trees or store, and output the encoded block to the zip file. + */ +function _tr_flush_block(s, buf, stored_len, last) +//DeflateState *s; +//charf *buf; /* input block, or NULL if too old */ +//ulg stored_len; /* length of input block */ +//int last; /* one if this is the last block for a file */ +{ + var opt_lenb, static_lenb; /* opt_len and static_len in bytes */ + var max_blindex = 0; /* index of last bit length code of non zero freq */ + + /* Build the Huffman trees unless a stored block is forced */ + if (s.level > 0) { + + /* Check if the file is binary or text */ + if (s.strm.data_type === Z_UNKNOWN) { + s.strm.data_type = detect_data_type(s); + } + + /* Construct the literal and distance trees */ + build_tree(s, s.l_desc); + // Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len, + // s->static_len)); + + build_tree(s, s.d_desc); + // Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len, + // s->static_len)); + /* At this point, opt_len and static_len are the total bit lengths of + * the compressed block data, excluding the tree representations. + */ + + /* Build the bit length tree for the above two trees, and get the index + * in bl_order of the last bit length code to send. + */ + max_blindex = build_bl_tree(s); + + /* Determine the best encoding. Compute the block lengths in bytes. 
*/ + opt_lenb = (s.opt_len + 3 + 7) >>> 3; + static_lenb = (s.static_len + 3 + 7) >>> 3; + + // Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ", + // opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len, + // s->last_lit)); + + if (static_lenb <= opt_lenb) { opt_lenb = static_lenb; } + + } else { + // Assert(buf != (char*)0, "lost buf"); + opt_lenb = static_lenb = stored_len + 5; /* force a stored block */ + } + + if ((stored_len + 4 <= opt_lenb) && (buf !== -1)) { + /* 4: two words for the lengths */ + + /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE. + * Otherwise we can't have processed more than WSIZE input bytes since + * the last block flush, because compression would have been + * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to + * transform a block into a stored block. + */ + _tr_stored_block(s, buf, stored_len, last); + + } else if (s.strategy === Z_FIXED || static_lenb === opt_lenb) { + + send_bits(s, (STATIC_TREES << 1) + (last ? 1 : 0), 3); + compress_block(s, static_ltree, static_dtree); + + } else { + send_bits(s, (DYN_TREES << 1) + (last ? 1 : 0), 3); + send_all_trees(s, s.l_desc.max_code + 1, s.d_desc.max_code + 1, max_blindex + 1); + compress_block(s, s.dyn_ltree, s.dyn_dtree); + } + // Assert (s->compressed_len == s->bits_sent, "bad compressed size"); + /* The above check is made mod 2^32, for files larger than 512 MB + * and uLong implemented on 32 bits. + */ + init_block(s); + + if (last) { + bi_windup(s); + } + // Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3, + // s->compressed_len-7*last)); +} + +/* =========================================================================== + * Save the match info and tally the frequency counts. Return true if + * the current block must be flushed. + */ +function _tr_tally(s, dist, lc) +// deflate_state *s; +// unsigned dist; /* distance of matched string */ +// unsigned lc; /* match length-MIN_MATCH or unmatched char (if dist==0) */ +{ + //var out_length, in_length, dcode; + + s.pending_buf[s.d_buf + s.last_lit * 2] = (dist >>> 8) & 0xff; + s.pending_buf[s.d_buf + s.last_lit * 2 + 1] = dist & 0xff; + + s.pending_buf[s.l_buf + s.last_lit] = lc & 0xff; + s.last_lit++; + + if (dist === 0) { + /* lc is the unmatched char */ + s.dyn_ltree[lc * 2]/*.Freq*/++; + } else { + s.matches++; + /* Here, lc is the match length - MIN_MATCH */ + dist--; /* dist = match distance - 1 */ + //Assert((ush)dist < (ush)MAX_DIST(s) && + // (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) && + // (ush)d_code(dist) < (ush)D_CODES, "_tr_tally: bad match"); + + s.dyn_ltree[(_length_code[lc] + LITERALS + 1) * 2]/*.Freq*/++; + s.dyn_dtree[d_code(dist) * 2]/*.Freq*/++; + } + +// (!) 
This block is disabled in zlib defaults, +// don't enable it for binary compatibility + +//#ifdef TRUNCATE_BLOCK +// /* Try to guess if it is profitable to stop the current block here */ +// if ((s.last_lit & 0x1fff) === 0 && s.level > 2) { +// /* Compute an upper bound for the compressed length */ +// out_length = s.last_lit*8; +// in_length = s.strstart - s.block_start; +// +// for (dcode = 0; dcode < D_CODES; dcode++) { +// out_length += s.dyn_dtree[dcode*2]/*.Freq*/ * (5 + extra_dbits[dcode]); +// } +// out_length >>>= 3; +// //Tracev((stderr,"\nlast_lit %u, in %ld, out ~%ld(%ld%%) ", +// // s->last_lit, in_length, out_length, +// // 100L - out_length*100L/in_length)); +// if (s.matches < (s.last_lit>>1)/*int /2*/ && out_length < (in_length>>1)/*int /2*/) { +// return true; +// } +// } +//#endif + + return (s.last_lit === s.lit_bufsize - 1); + /* We avoid equality with lit_bufsize because of wraparound at 64K + * on 16 bit machines and because stored blocks are restricted to + * 64K-1 bytes. + */ +} + +exports._tr_init = _tr_init; +exports._tr_stored_block = _tr_stored_block; +exports._tr_flush_block = _tr_flush_block; +exports._tr_tally = _tr_tally; +exports._tr_align = _tr_align; + + +/***/ }), + +/***/ 86442: +/***/ ((module) => { + +"use strict"; + + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source distribution. + +function ZStream() { + /* next input byte */ + this.input = null; // JS specific, because we have no pointers + this.next_in = 0; + /* number of bytes available at input */ + this.avail_in = 0; + /* total number of input bytes read so far */ + this.total_in = 0; + /* next output byte should be put there */ + this.output = null; // JS specific, because we have no pointers + this.next_out = 0; + /* remaining free space at output */ + this.avail_out = 0; + /* total number of bytes output so far */ + this.total_out = 0; + /* last error message, NULL if no error */ + this.msg = ''/*Z_NULL*/; + /* not visible by applications */ + this.state = null; + /* best guess about the data type: binary or text */ + this.data_type = 2/*Z_UNKNOWN*/; + /* adler32 value of the uncompressed data */ + this.adler = 0; +} + +module.exports = ZStream; + + +/***/ }), + +/***/ 11473: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +//through@2 handles this by default! 
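+// Note: this module is only a re-export; the real implementation is the
+// bundled module referenced by __nccwpck_require__(10421).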
+module.exports = __nccwpck_require__(10421) + + + +/***/ }), + +/***/ 64810: +/***/ ((module) => { + +"use strict"; + + +const processFn = (fn, options) => function (...args) { + const P = options.promiseModule; + + return new P((resolve, reject) => { + if (options.multiArgs) { + args.push((...result) => { + if (options.errorFirst) { + if (result[0]) { + reject(result); + } else { + result.shift(); + resolve(result); + } + } else { + resolve(result); + } + }); + } else if (options.errorFirst) { + args.push((error, result) => { + if (error) { + reject(error); + } else { + resolve(result); + } + }); + } else { + args.push(resolve); + } + + fn.apply(this, args); + }); +}; + +module.exports = (input, options) => { + options = Object.assign({ + exclude: [/.+(Sync|Stream)$/], + errorFirst: true, + promiseModule: Promise + }, options); + + const objType = typeof input; + if (!(input !== null && (objType === 'object' || objType === 'function'))) { + throw new TypeError(`Expected \`input\` to be a \`Function\` or \`Object\`, got \`${input === null ? 'null' : objType}\``); + } + + const filter = key => { + const match = pattern => typeof pattern === 'string' ? key === pattern : pattern.test(key); + return options.include ? options.include.some(match) : !options.exclude.some(match); + }; + + let ret; + if (objType === 'function') { + ret = function (...args) { + return options.excludeMain ? input(...args) : processFn(input, options).apply(this, args); + }; + } else { + ret = Object.create(Object.getPrototypeOf(input)); + } + + for (const key in input) { // eslint-disable-line guard-for-in + const property = input[key]; + ret[key] = typeof property === 'function' && filter(key) ? processFn(property, options) : property; + } + + return ret; +}; + + +/***/ }), + +/***/ 48208: +/***/ (function(module, exports, __nccwpck_require__) { + +/* module decorator */ module = __nccwpck_require__.nmd(module); +/*! + * Platform.js v1.3.6 + * Copyright 2014-2020 Benjamin Tan + * Copyright 2011-2013 John-David Dalton + * Available under MIT license + */ +;(function() { + 'use strict'; + + /** Used to determine if values are of the language type `Object`. */ + var objectTypes = { + 'function': true, + 'object': true + }; + + /** Used as a reference to the global object. */ + var root = (objectTypes[typeof window] && window) || this; + + /** Backup possible global object. */ + var oldRoot = root; + + /** Detect free variable `exports`. */ + var freeExports = objectTypes[typeof exports] && exports; + + /** Detect free variable `module`. */ + var freeModule = objectTypes["object"] && module && !module.nodeType && module; + + /** Detect free variable `global` from Node.js or Browserified code and use it as `root`. */ + var freeGlobal = freeExports && freeModule && typeof global == 'object' && global; + if (freeGlobal && (freeGlobal.global === freeGlobal || freeGlobal.window === freeGlobal || freeGlobal.self === freeGlobal)) { + root = freeGlobal; + } + + /** + * Used as the maximum length of an array-like object. + * See the [ES6 spec](http://people.mozilla.org/~jorendorff/es6-draft.html#sec-tolength) + * for more details. + */ + var maxSafeInteger = Math.pow(2, 53) - 1; + + /** Regular expression to detect Opera. */ + var reOpera = /\bOpera/; + + /** Possible global object. */ + var thisBinding = this; + + /** Used for native method references. */ + var objectProto = Object.prototype; + + /** Used to check for own properties of an object. 
*/ + var hasOwnProperty = objectProto.hasOwnProperty; + + /** Used to resolve the internal `[[Class]]` of values. */ + var toString = objectProto.toString; + + /*--------------------------------------------------------------------------*/ + + /** + * Capitalizes a string value. + * + * @private + * @param {string} string The string to capitalize. + * @returns {string} The capitalized string. + */ + function capitalize(string) { + string = String(string); + return string.charAt(0).toUpperCase() + string.slice(1); + } + + /** + * A utility function to clean up the OS name. + * + * @private + * @param {string} os The OS name to clean up. + * @param {string} [pattern] A `RegExp` pattern matching the OS name. + * @param {string} [label] A label for the OS. + */ + function cleanupOS(os, pattern, label) { + // Platform tokens are defined at: + // http://msdn.microsoft.com/en-us/library/ms537503(VS.85).aspx + // http://web.archive.org/web/20081122053950/http://msdn.microsoft.com/en-us/library/ms537503(VS.85).aspx + var data = { + '10.0': '10', + '6.4': '10 Technical Preview', + '6.3': '8.1', + '6.2': '8', + '6.1': 'Server 2008 R2 / 7', + '6.0': 'Server 2008 / Vista', + '5.2': 'Server 2003 / XP 64-bit', + '5.1': 'XP', + '5.01': '2000 SP1', + '5.0': '2000', + '4.0': 'NT', + '4.90': 'ME' + }; + // Detect Windows version from platform tokens. + if (pattern && label && /^Win/i.test(os) && !/^Windows Phone /i.test(os) && + (data = data[/[\d.]+$/.exec(os)])) { + os = 'Windows ' + data; + } + // Correct character case and cleanup string. + os = String(os); + + if (pattern && label) { + os = os.replace(RegExp(pattern, 'i'), label); + } + + os = format( + os.replace(/ ce$/i, ' CE') + .replace(/\bhpw/i, 'web') + .replace(/\bMacintosh\b/, 'Mac OS') + .replace(/_PowerPC\b/i, ' OS') + .replace(/\b(OS X) [^ \d]+/i, '$1') + .replace(/\bMac (OS X)\b/, '$1') + .replace(/\/(\d)/, ' $1') + .replace(/_/g, '.') + .replace(/(?: BePC|[ .]*fc[ \d.]+)$/i, '') + .replace(/\bx86\.64\b/gi, 'x86_64') + .replace(/\b(Windows Phone) OS\b/, '$1') + .replace(/\b(Chrome OS \w+) [\d.]+\b/, '$1') + .split(' on ')[0] + ); + + return os; + } + + /** + * An iteration utility for arrays and objects. + * + * @private + * @param {Array|Object} object The object to iterate over. + * @param {Function} callback The function called per iteration. + */ + function each(object, callback) { + var index = -1, + length = object ? object.length : 0; + + if (typeof length == 'number' && length > -1 && length <= maxSafeInteger) { + while (++index < length) { + callback(object[index], index, object); + } + } else { + forOwn(object, callback); + } + } + + /** + * Trim and conditionally capitalize string values. + * + * @private + * @param {string} string The string to format. + * @returns {string} The formatted string. + */ + function format(string) { + string = trim(string); + return /^(?:webOS|i(?:OS|P))/.test(string) + ? string + : capitalize(string); + } + + /** + * Iterates over an object's own properties, executing the `callback` for each. + * + * @private + * @param {Object} object The object to iterate over. + * @param {Function} callback The function executed per own property. + */ + function forOwn(object, callback) { + for (var key in object) { + if (hasOwnProperty.call(object, key)) { + callback(object[key], key, object); + } + } + } + + /** + * Gets the internal `[[Class]]` of a value. + * + * @private + * @param {*} value The value. + * @returns {string} The `[[Class]]`. + */ + function getClassOf(value) { + return value == null + ? 
capitalize(value) + : toString.call(value).slice(8, -1); + } + + /** + * Host objects can return type values that are different from their actual + * data type. The objects we are concerned with usually return non-primitive + * types of "object", "function", or "unknown". + * + * @private + * @param {*} object The owner of the property. + * @param {string} property The property to check. + * @returns {boolean} Returns `true` if the property value is a non-primitive, else `false`. + */ + function isHostType(object, property) { + var type = object != null ? typeof object[property] : 'number'; + return !/^(?:boolean|number|string|undefined)$/.test(type) && + (type == 'object' ? !!object[property] : true); + } + + /** + * Prepares a string for use in a `RegExp` by making hyphens and spaces optional. + * + * @private + * @param {string} string The string to qualify. + * @returns {string} The qualified string. + */ + function qualify(string) { + return String(string).replace(/([ -])(?!$)/g, '$1?'); + } + + /** + * A bare-bones `Array#reduce` like utility function. + * + * @private + * @param {Array} array The array to iterate over. + * @param {Function} callback The function called per iteration. + * @returns {*} The accumulated result. + */ + function reduce(array, callback) { + var accumulator = null; + each(array, function(value, index) { + accumulator = callback(accumulator, value, index, array); + }); + return accumulator; + } + + /** + * Removes leading and trailing whitespace from a string. + * + * @private + * @param {string} string The string to trim. + * @returns {string} The trimmed string. + */ + function trim(string) { + return String(string).replace(/^ +| +$/g, ''); + } + + /*--------------------------------------------------------------------------*/ + + /** + * Creates a new platform object. + * + * @memberOf platform + * @param {Object|string} [ua=navigator.userAgent] The user agent string or + * context object. + * @returns {Object} A platform object. + */ + function parse(ua) { + + /** The environment context object. */ + var context = root; + + /** Used to flag when a custom context is provided. */ + var isCustomContext = ua && typeof ua == 'object' && getClassOf(ua) != 'String'; + + // Juggle arguments. + if (isCustomContext) { + context = ua; + ua = null; + } + + /** Browser navigator object. */ + var nav = context.navigator || {}; + + /** Browser user agent string. */ + var userAgent = nav.userAgent || ''; + + ua || (ua = userAgent); + + /** Used to flag when `thisBinding` is the [ModuleScope]. */ + var isModuleScope = isCustomContext || thisBinding == oldRoot; + + /** Used to detect if browser is like Chrome. */ + var likeChrome = isCustomContext + ? !!nav.likeChrome + : /\bChrome\b/.test(ua) && !/internal|\n/i.test(toString.toString()); + + /** Internal `[[Class]]` value shortcuts. */ + var objectClass = 'Object', + airRuntimeClass = isCustomContext ? objectClass : 'ScriptBridgingProxyObject', + enviroClass = isCustomContext ? objectClass : 'Environment', + javaClass = (isCustomContext && context.java) ? 'JavaPackage' : getClassOf(context.java), + phantomClass = isCustomContext ? objectClass : 'RuntimeObject'; + + /** Detect Java environments. */ + var java = /\bJava/.test(javaClass) && context.java; + + /** Detect Rhino. */ + var rhino = java && getClassOf(context.environment) == enviroClass; + + /** A character to represent alpha. */ + var alpha = java ? 'a' : '\u03b1'; + + /** A character to represent beta. */ + var beta = java ? 
'b' : '\u03b2'; + + /** Browser document object. */ + var doc = context.document || {}; + + /** + * Detect Opera browser (Presto-based). + * http://www.howtocreate.co.uk/operaStuff/operaObject.html + * http://dev.opera.com/articles/view/opera-mini-web-content-authoring-guidelines/#operamini + */ + var opera = context.operamini || context.opera; + + /** Opera `[[Class]]`. */ + var operaClass = reOpera.test(operaClass = (isCustomContext && opera) ? opera['[[Class]]'] : getClassOf(opera)) + ? operaClass + : (opera = null); + + /*------------------------------------------------------------------------*/ + + /** Temporary variable used over the script's lifetime. */ + var data; + + /** The CPU architecture. */ + var arch = ua; + + /** Platform description array. */ + var description = []; + + /** Platform alpha/beta indicator. */ + var prerelease = null; + + /** A flag to indicate that environment features should be used to resolve the platform. */ + var useFeatures = ua == userAgent; + + /** The browser/environment version. */ + var version = useFeatures && opera && typeof opera.version == 'function' && opera.version(); + + /** A flag to indicate if the OS ends with "/ Version" */ + var isSpecialCasedOS; + + /* Detectable layout engines (order is important). */ + var layout = getLayout([ + { 'label': 'EdgeHTML', 'pattern': 'Edge' }, + 'Trident', + { 'label': 'WebKit', 'pattern': 'AppleWebKit' }, + 'iCab', + 'Presto', + 'NetFront', + 'Tasman', + 'KHTML', + 'Gecko' + ]); + + /* Detectable browser names (order is important). */ + var name = getName([ + 'Adobe AIR', + 'Arora', + 'Avant Browser', + 'Breach', + 'Camino', + 'Electron', + 'Epiphany', + 'Fennec', + 'Flock', + 'Galeon', + 'GreenBrowser', + 'iCab', + 'Iceweasel', + 'K-Meleon', + 'Konqueror', + 'Lunascape', + 'Maxthon', + { 'label': 'Microsoft Edge', 'pattern': '(?:Edge|Edg|EdgA|EdgiOS)' }, + 'Midori', + 'Nook Browser', + 'PaleMoon', + 'PhantomJS', + 'Raven', + 'Rekonq', + 'RockMelt', + { 'label': 'Samsung Internet', 'pattern': 'SamsungBrowser' }, + 'SeaMonkey', + { 'label': 'Silk', 'pattern': '(?:Cloud9|Silk-Accelerated)' }, + 'Sleipnir', + 'SlimBrowser', + { 'label': 'SRWare Iron', 'pattern': 'Iron' }, + 'Sunrise', + 'Swiftfox', + 'Vivaldi', + 'Waterfox', + 'WebPositive', + { 'label': 'Yandex Browser', 'pattern': 'YaBrowser' }, + { 'label': 'UC Browser', 'pattern': 'UCBrowser' }, + 'Opera Mini', + { 'label': 'Opera Mini', 'pattern': 'OPiOS' }, + 'Opera', + { 'label': 'Opera', 'pattern': 'OPR' }, + 'Chromium', + 'Chrome', + { 'label': 'Chrome', 'pattern': '(?:HeadlessChrome)' }, + { 'label': 'Chrome Mobile', 'pattern': '(?:CriOS|CrMo)' }, + { 'label': 'Firefox', 'pattern': '(?:Firefox|Minefield)' }, + { 'label': 'Firefox for iOS', 'pattern': 'FxiOS' }, + { 'label': 'IE', 'pattern': 'IEMobile' }, + { 'label': 'IE', 'pattern': 'MSIE' }, + 'Safari' + ]); + + /* Detectable products (order is important). 
*/ + var product = getProduct([ + { 'label': 'BlackBerry', 'pattern': 'BB10' }, + 'BlackBerry', + { 'label': 'Galaxy S', 'pattern': 'GT-I9000' }, + { 'label': 'Galaxy S2', 'pattern': 'GT-I9100' }, + { 'label': 'Galaxy S3', 'pattern': 'GT-I9300' }, + { 'label': 'Galaxy S4', 'pattern': 'GT-I9500' }, + { 'label': 'Galaxy S5', 'pattern': 'SM-G900' }, + { 'label': 'Galaxy S6', 'pattern': 'SM-G920' }, + { 'label': 'Galaxy S6 Edge', 'pattern': 'SM-G925' }, + { 'label': 'Galaxy S7', 'pattern': 'SM-G930' }, + { 'label': 'Galaxy S7 Edge', 'pattern': 'SM-G935' }, + 'Google TV', + 'Lumia', + 'iPad', + 'iPod', + 'iPhone', + 'Kindle', + { 'label': 'Kindle Fire', 'pattern': '(?:Cloud9|Silk-Accelerated)' }, + 'Nexus', + 'Nook', + 'PlayBook', + 'PlayStation Vita', + 'PlayStation', + 'TouchPad', + 'Transformer', + { 'label': 'Wii U', 'pattern': 'WiiU' }, + 'Wii', + 'Xbox One', + { 'label': 'Xbox 360', 'pattern': 'Xbox' }, + 'Xoom' + ]); + + /* Detectable manufacturers. */ + var manufacturer = getManufacturer({ + 'Apple': { 'iPad': 1, 'iPhone': 1, 'iPod': 1 }, + 'Alcatel': {}, + 'Archos': {}, + 'Amazon': { 'Kindle': 1, 'Kindle Fire': 1 }, + 'Asus': { 'Transformer': 1 }, + 'Barnes & Noble': { 'Nook': 1 }, + 'BlackBerry': { 'PlayBook': 1 }, + 'Google': { 'Google TV': 1, 'Nexus': 1 }, + 'HP': { 'TouchPad': 1 }, + 'HTC': {}, + 'Huawei': {}, + 'Lenovo': {}, + 'LG': {}, + 'Microsoft': { 'Xbox': 1, 'Xbox One': 1 }, + 'Motorola': { 'Xoom': 1 }, + 'Nintendo': { 'Wii U': 1, 'Wii': 1 }, + 'Nokia': { 'Lumia': 1 }, + 'Oppo': {}, + 'Samsung': { 'Galaxy S': 1, 'Galaxy S2': 1, 'Galaxy S3': 1, 'Galaxy S4': 1 }, + 'Sony': { 'PlayStation': 1, 'PlayStation Vita': 1 }, + 'Xiaomi': { 'Mi': 1, 'Redmi': 1 } + }); + + /* Detectable operating systems (order is important). */ + var os = getOS([ + 'Windows Phone', + 'KaiOS', + 'Android', + 'CentOS', + { 'label': 'Chrome OS', 'pattern': 'CrOS' }, + 'Debian', + { 'label': 'DragonFly BSD', 'pattern': 'DragonFly' }, + 'Fedora', + 'FreeBSD', + 'Gentoo', + 'Haiku', + 'Kubuntu', + 'Linux Mint', + 'OpenBSD', + 'Red Hat', + 'SuSE', + 'Ubuntu', + 'Xubuntu', + 'Cygwin', + 'Symbian OS', + 'hpwOS', + 'webOS ', + 'webOS', + 'Tablet OS', + 'Tizen', + 'Linux', + 'Mac OS X', + 'Macintosh', + 'Mac', + 'Windows 98;', + 'Windows ' + ]); + + /*------------------------------------------------------------------------*/ + + /** + * Picks the layout engine from an array of guesses. + * + * @private + * @param {Array} guesses An array of guesses. + * @returns {null|string} The detected layout engine. + */ + function getLayout(guesses) { + return reduce(guesses, function(result, guess) { + return result || RegExp('\\b' + ( + guess.pattern || qualify(guess) + ) + '\\b', 'i').exec(ua) && (guess.label || guess); + }); + } + + /** + * Picks the manufacturer from an array of guesses. + * + * @private + * @param {Array} guesses An object of guesses. + * @returns {null|string} The detected manufacturer. + */ + function getManufacturer(guesses) { + return reduce(guesses, function(result, value, key) { + // Lookup the manufacturer by product or scan the UA for the manufacturer. + return result || ( + value[product] || + value[/^[a-z]+(?: +[a-z]+\b)*/i.exec(product)] || + RegExp('\\b' + qualify(key) + '(?:\\b|\\w*\\d)', 'i').exec(ua) + ) && key; + }); + } + + /** + * Picks the browser name from an array of guesses. + * + * @private + * @param {Array} guesses An array of guesses. + * @returns {null|string} The detected browser name. 
+ */ + function getName(guesses) { + return reduce(guesses, function(result, guess) { + return result || RegExp('\\b' + ( + guess.pattern || qualify(guess) + ) + '\\b', 'i').exec(ua) && (guess.label || guess); + }); + } + + /** + * Picks the OS name from an array of guesses. + * + * @private + * @param {Array} guesses An array of guesses. + * @returns {null|string} The detected OS name. + */ + function getOS(guesses) { + return reduce(guesses, function(result, guess) { + var pattern = guess.pattern || qualify(guess); + if (!result && (result = + RegExp('\\b' + pattern + '(?:/[\\d.]+|[ \\w.]*)', 'i').exec(ua) + )) { + result = cleanupOS(result, pattern, guess.label || guess); + } + return result; + }); + } + + /** + * Picks the product name from an array of guesses. + * + * @private + * @param {Array} guesses An array of guesses. + * @returns {null|string} The detected product name. + */ + function getProduct(guesses) { + return reduce(guesses, function(result, guess) { + var pattern = guess.pattern || qualify(guess); + if (!result && (result = + RegExp('\\b' + pattern + ' *\\d+[.\\w_]*', 'i').exec(ua) || + RegExp('\\b' + pattern + ' *\\w+-[\\w]*', 'i').exec(ua) || + RegExp('\\b' + pattern + '(?:; *(?:[a-z]+[_-])?[a-z]+\\d+|[^ ();-]*)', 'i').exec(ua) + )) { + // Split by forward slash and append product version if needed. + if ((result = String((guess.label && !RegExp(pattern, 'i').test(guess.label)) ? guess.label : result).split('/'))[1] && !/[\d.]+/.test(result[0])) { + result[0] += ' ' + result[1]; + } + // Correct character case and cleanup string. + guess = guess.label || guess; + result = format(result[0] + .replace(RegExp(pattern, 'i'), guess) + .replace(RegExp('; *(?:' + guess + '[_-])?', 'i'), ' ') + .replace(RegExp('(' + guess + ')[-_.]?(\\w)', 'i'), '$1 $2')); + } + return result; + }); + } + + /** + * Resolves the version using an array of UA patterns. + * + * @private + * @param {Array} patterns An array of UA patterns. + * @returns {null|string} The detected version. + */ + function getVersion(patterns) { + return reduce(patterns, function(result, pattern) { + return result || (RegExp(pattern + + '(?:-[\\d.]+/|(?: for [\\w-]+)?[ /-])([\\d.]+[^ ();/_-]*)', 'i').exec(ua) || 0)[1] || null; + }); + } + + /** + * Returns `platform.description` when the platform object is coerced to a string. + * + * @name toString + * @memberOf platform + * @returns {string} Returns `platform.description` if available, else an empty string. + */ + function toStringPlatform() { + return this.description || ''; + } + + /*------------------------------------------------------------------------*/ + + // Convert layout to an array so we can add extra details. + layout && (layout = [layout]); + + // Detect Android products. + // Browsers on Android devices typically provide their product IDS after "Android;" + // up to "Build" or ") AppleWebKit". + // Example: + // "Mozilla/5.0 (Linux; Android 8.1.0; Moto G (5) Plus) AppleWebKit/537.36 + // (KHTML, like Gecko) Chrome/70.0.3538.80 Mobile Safari/537.36" + if (/\bAndroid\b/.test(os) && !product && + (data = /\bAndroid[^;]*;(.*?)(?:Build|\) AppleWebKit)\b/i.exec(ua))) { + product = trim(data[1]) + // Replace any language codes (eg. "en-US"). + .replace(/^[a-z]{2}-[a-z]{2};\s*/i, '') + || null; + } + // Detect product names that contain their manufacturer's name. 
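+    // Illustrative example of the normalisation below: when no product was
+    // detected, the manufacturer name itself is tried as a product guess;
+    // when both exist and the product starts with the manufacturer name,
+    // the separator is normalised, e.g. a token such as 'HTC_One' with
+    // manufacturer 'HTC' would read 'HTC One' afterwards.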
+ if (manufacturer && !product) { + product = getProduct([manufacturer]); + } else if (manufacturer && product) { + product = product + .replace(RegExp('^(' + qualify(manufacturer) + ')[-_.\\s]', 'i'), manufacturer + ' ') + .replace(RegExp('^(' + qualify(manufacturer) + ')[-_.]?(\\w)', 'i'), manufacturer + ' $2'); + } + // Clean up Google TV. + if ((data = /\bGoogle TV\b/.exec(product))) { + product = data[0]; + } + // Detect simulators. + if (/\bSimulator\b/i.test(ua)) { + product = (product ? product + ' ' : '') + 'Simulator'; + } + // Detect Opera Mini 8+ running in Turbo/Uncompressed mode on iOS. + if (name == 'Opera Mini' && /\bOPiOS\b/.test(ua)) { + description.push('running in Turbo/Uncompressed mode'); + } + // Detect IE Mobile 11. + if (name == 'IE' && /\blike iPhone OS\b/.test(ua)) { + data = parse(ua.replace(/like iPhone OS/, '')); + manufacturer = data.manufacturer; + product = data.product; + } + // Detect iOS. + else if (/^iP/.test(product)) { + name || (name = 'Safari'); + os = 'iOS' + ((data = / OS ([\d_]+)/i.exec(ua)) + ? ' ' + data[1].replace(/_/g, '.') + : ''); + } + // Detect Kubuntu. + else if (name == 'Konqueror' && /^Linux\b/i.test(os)) { + os = 'Kubuntu'; + } + // Detect Android browsers. + else if ((manufacturer && manufacturer != 'Google' && + ((/Chrome/.test(name) && !/\bMobile Safari\b/i.test(ua)) || /\bVita\b/.test(product))) || + (/\bAndroid\b/.test(os) && /^Chrome/.test(name) && /\bVersion\//i.test(ua))) { + name = 'Android Browser'; + os = /\bAndroid\b/.test(os) ? os : 'Android'; + } + // Detect Silk desktop/accelerated modes. + else if (name == 'Silk') { + if (!/\bMobi/i.test(ua)) { + os = 'Android'; + description.unshift('desktop mode'); + } + if (/Accelerated *= *true/i.test(ua)) { + description.unshift('accelerated'); + } + } + // Detect UC Browser speed mode. + else if (name == 'UC Browser' && /\bUCWEB\b/.test(ua)) { + description.push('speed mode'); + } + // Detect PaleMoon identifying as Firefox. + else if (name == 'PaleMoon' && (data = /\bFirefox\/([\d.]+)\b/.exec(ua))) { + description.push('identifying as Firefox ' + data[1]); + } + // Detect Firefox OS and products running Firefox. + else if (name == 'Firefox' && (data = /\b(Mobile|Tablet|TV)\b/i.exec(ua))) { + os || (os = 'Firefox OS'); + product || (product = data[1]); + } + // Detect false positives for Firefox/Safari. + else if (!name || (data = !/\bMinefield\b/i.test(ua) && /\b(?:Firefox|Safari)\b/.exec(name))) { + // Escape the `/` for Firefox 1. + if (name && !product && /[\/,]|^[^(]+?\)/.test(ua.slice(ua.indexOf(data + '/') + 8))) { + // Clear name of false positives. + name = null; + } + // Reassign a generic name. + if ((data = product || manufacturer || os) && + (product || manufacturer || /\b(?:Android|Symbian OS|Tablet OS|webOS)\b/.test(os))) { + name = /[a-z]+(?: Hat)?/i.exec(/\bAndroid\b/.test(os) ? os : data) + ' Browser'; + } + } + // Add Chrome version to description for Electron. + else if (name == 'Electron' && (data = (/\bChrome\/([\d.]+)\b/.exec(ua) || 0)[1])) { + description.push('Chromium ' + data); + } + // Detect non-Opera (Presto-based) versions (order is important). + if (!version) { + version = getVersion([ + '(?:Cloud9|CriOS|CrMo|Edge|Edg|EdgA|EdgiOS|FxiOS|HeadlessChrome|IEMobile|Iron|Opera ?Mini|OPiOS|OPR|Raven|SamsungBrowser|Silk(?!/[\\d.]+$)|UCBrowser|YaBrowser)', + 'Version', + qualify(name), + '(?:Firefox|Minefield|NetFront)' + ]); + } + // Detect stubborn layout engines. 
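+    // Illustrative: the checks below remap engines that the UA reports
+    // misleadingly; an 'Opera' name carrying the Chromium-era 'OPR/' token
+    // is treated as Blink while older builds stay on Presto, iCab versions
+    // above 3 are really WebKit, and WebKit-flagged PlayStation (non-Vita)
+    // UAs map to NetFront.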
+ if ((data = + layout == 'iCab' && parseFloat(version) > 3 && 'WebKit' || + /\bOpera\b/.test(name) && (/\bOPR\b/.test(ua) ? 'Blink' : 'Presto') || + /\b(?:Midori|Nook|Safari)\b/i.test(ua) && !/^(?:Trident|EdgeHTML)$/.test(layout) && 'WebKit' || + !layout && /\bMSIE\b/i.test(ua) && (os == 'Mac OS' ? 'Tasman' : 'Trident') || + layout == 'WebKit' && /\bPlayStation\b(?! Vita\b)/i.test(name) && 'NetFront' + )) { + layout = [data]; + } + // Detect Windows Phone 7 desktop mode. + if (name == 'IE' && (data = (/; *(?:XBLWP|ZuneWP)(\d+)/i.exec(ua) || 0)[1])) { + name += ' Mobile'; + os = 'Windows Phone ' + (/\+$/.test(data) ? data : data + '.x'); + description.unshift('desktop mode'); + } + // Detect Windows Phone 8.x desktop mode. + else if (/\bWPDesktop\b/i.test(ua)) { + name = 'IE Mobile'; + os = 'Windows Phone 8.x'; + description.unshift('desktop mode'); + version || (version = (/\brv:([\d.]+)/.exec(ua) || 0)[1]); + } + // Detect IE 11 identifying as other browsers. + else if (name != 'IE' && layout == 'Trident' && (data = /\brv:([\d.]+)/.exec(ua))) { + if (name) { + description.push('identifying as ' + name + (version ? ' ' + version : '')); + } + name = 'IE'; + version = data[1]; + } + // Leverage environment features. + if (useFeatures) { + // Detect server-side environments. + // Rhino has a global function while others have a global object. + if (isHostType(context, 'global')) { + if (java) { + data = java.lang.System; + arch = data.getProperty('os.arch'); + os = os || data.getProperty('os.name') + ' ' + data.getProperty('os.version'); + } + if (rhino) { + try { + version = context.require('ringo/engine').version.join('.'); + name = 'RingoJS'; + } catch(e) { + if ((data = context.system) && data.global.system == context.system) { + name = 'Narwhal'; + os || (os = data[0].os || null); + } + } + if (!name) { + name = 'Rhino'; + } + } + else if ( + typeof context.process == 'object' && !context.process.browser && + (data = context.process) + ) { + if (typeof data.versions == 'object') { + if (typeof data.versions.electron == 'string') { + description.push('Node ' + data.versions.node); + name = 'Electron'; + version = data.versions.electron; + } else if (typeof data.versions.nw == 'string') { + description.push('Chromium ' + version, 'Node ' + data.versions.node); + name = 'NW.js'; + version = data.versions.nw; + } + } + if (!name) { + name = 'Node.js'; + arch = data.arch; + os = data.platform; + version = /[\d.]+/.exec(data.version); + version = version ? version[0] : null; + } + } + } + // Detect Adobe AIR. + else if (getClassOf((data = context.runtime)) == airRuntimeClass) { + name = 'Adobe AIR'; + os = data.flash.system.Capabilities.os; + } + // Detect PhantomJS. + else if (getClassOf((data = context.phantom)) == phantomClass) { + name = 'PhantomJS'; + version = (data = data.version || null) && (data.major + '.' + data.minor + '.' + data.patch); + } + // Detect IE compatibility modes. + else if (typeof doc.documentMode == 'number' && (data = /\bTrident\/(\d+)/i.exec(ua))) { + // We're in compatibility mode when the Trident version + 4 doesn't + // equal the document mode. + version = [version, doc.documentMode]; + if ((data = +data[1] + 4) != version[1]) { + description.push('IE ' + version[1] + ' mode'); + layout && (layout[1] = ''); + version[1] = data; + } + version = name == 'IE' ? String(version[1].toFixed(1)) : version[0]; + } + // Detect IE 11 masking as other browsers. 
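+    // Illustrative: document.documentMode is an IE-specific property, so
+    // when it is a number but the UA parsed as Chrome or Firefox, the
+    // branch below re-labels the result as IE 11.0 on Trident/Windows and
+    // records the spoofed name as 'masking as <name> <version>' in the
+    // description.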
+ else if (typeof doc.documentMode == 'number' && /^(?:Chrome|Firefox)\b/.test(name)) { + description.push('masking as ' + name + ' ' + version); + name = 'IE'; + version = '11.0'; + layout = ['Trident']; + os = 'Windows'; + } + os = os && format(os); + } + // Detect prerelease phases. + if (version && (data = + /(?:[ab]|dp|pre|[ab]\d+pre)(?:\d+\+?)?$/i.exec(version) || + /(?:alpha|beta)(?: ?\d)?/i.exec(ua + ';' + (useFeatures && nav.appMinorVersion)) || + /\bMinefield\b/i.test(ua) && 'a' + )) { + prerelease = /b/i.test(data) ? 'beta' : 'alpha'; + version = version.replace(RegExp(data + '\\+?$'), '') + + (prerelease == 'beta' ? beta : alpha) + (/\d+\+?/.exec(data) || ''); + } + // Detect Firefox Mobile. + if (name == 'Fennec' || name == 'Firefox' && /\b(?:Android|Firefox OS|KaiOS)\b/.test(os)) { + name = 'Firefox Mobile'; + } + // Obscure Maxthon's unreliable version. + else if (name == 'Maxthon' && version) { + version = version.replace(/\.[\d.]+/, '.x'); + } + // Detect Xbox 360 and Xbox One. + else if (/\bXbox\b/i.test(product)) { + if (product == 'Xbox 360') { + os = null; + } + if (product == 'Xbox 360' && /\bIEMobile\b/.test(ua)) { + description.unshift('mobile mode'); + } + } + // Add mobile postfix. + else if ((/^(?:Chrome|IE|Opera)$/.test(name) || name && !product && !/Browser|Mobi/.test(name)) && + (os == 'Windows CE' || /Mobi/i.test(ua))) { + name += ' Mobile'; + } + // Detect IE platform preview. + else if (name == 'IE' && useFeatures) { + try { + if (context.external === null) { + description.unshift('platform preview'); + } + } catch(e) { + description.unshift('embedded'); + } + } + // Detect BlackBerry OS version. + // http://docs.blackberry.com/en/developers/deliverables/18169/HTTP_headers_sent_by_BB_Browser_1234911_11.jsp + else if ((/\bBlackBerry\b/.test(product) || /\bBB10\b/.test(ua)) && (data = + (RegExp(product.replace(/ +/g, ' *') + '/([.\\d]+)', 'i').exec(ua) || 0)[1] || + version + )) { + data = [data, /BB10/.test(ua)]; + os = (data[1] ? (product = null, manufacturer = 'BlackBerry') : 'Device Software') + ' ' + data[0]; + version = null; + } + // Detect Opera identifying/masking itself as another browser. + // http://www.opera.com/support/kb/view/843/ + else if (this != forOwn && product != 'Wii' && ( + (useFeatures && opera) || + (/Opera/.test(name) && /\b(?:MSIE|Firefox)\b/i.test(ua)) || + (name == 'Firefox' && /\bOS X (?:\d+\.){2,}/.test(os)) || + (name == 'IE' && ( + (os && !/^Win/.test(os) && version > 5.5) || + /\bWindows XP\b/.test(os) && version > 8 || + version == 8 && !/\bTrident\b/.test(ua) + )) + ) && !reOpera.test((data = parse.call(forOwn, ua.replace(reOpera, '') + ';'))) && data.name) { + // When "identifying", the UA contains both Opera and the other browser's name. + data = 'ing as ' + data.name + ((data = data.version) ? ' ' + data : ''); + if (reOpera.test(name)) { + if (/\bIE\b/.test(data) && os == 'Mac OS') { + os = null; + } + data = 'identify' + data; + } + // When "masking", the UA contains only the other browser's name. + else { + data = 'mask' + data; + if (operaClass) { + name = format(operaClass.replace(/([a-z])([A-Z])/g, '$1 $2')); + } else { + name = 'Opera'; + } + if (/\bIE\b/.test(data)) { + os = null; + } + if (!useFeatures) { + version = null; + } + } + layout = ['Presto']; + description.push(data); + } + // Detect WebKit Nightly and approximate Chrome/Safari versions. + if ((data = (/\bAppleWebKit\/([\d.]+\+?)/i.exec(ua) || 0)[1])) { + // Correct build number for numeric comparison. + // (e.g. 
"532.5" becomes "532.05") + data = [parseFloat(data.replace(/\.(\d)$/, '.0$1')), data]; + // Nightly builds are postfixed with a "+". + if (name == 'Safari' && data[1].slice(-1) == '+') { + name = 'WebKit Nightly'; + prerelease = 'alpha'; + version = data[1].slice(0, -1); + } + // Clear incorrect browser versions. + else if (version == data[1] || + version == (data[2] = (/\bSafari\/([\d.]+\+?)/i.exec(ua) || 0)[1])) { + version = null; + } + // Use the full Chrome version when available. + data[1] = (/\b(?:Headless)?Chrome\/([\d.]+)/i.exec(ua) || 0)[1]; + // Detect Blink layout engine. + if (data[0] == 537.36 && data[2] == 537.36 && parseFloat(data[1]) >= 28 && layout == 'WebKit') { + layout = ['Blink']; + } + // Detect JavaScriptCore. + // http://stackoverflow.com/questions/6768474/how-can-i-detect-which-javascript-engine-v8-or-jsc-is-used-at-runtime-in-androi + if (!useFeatures || (!likeChrome && !data[1])) { + layout && (layout[1] = 'like Safari'); + data = (data = data[0], data < 400 ? 1 : data < 500 ? 2 : data < 526 ? 3 : data < 533 ? 4 : data < 534 ? '4+' : data < 535 ? 5 : data < 537 ? 6 : data < 538 ? 7 : data < 601 ? 8 : data < 602 ? 9 : data < 604 ? 10 : data < 606 ? 11 : data < 608 ? 12 : '12'); + } else { + layout && (layout[1] = 'like Chrome'); + data = data[1] || (data = data[0], data < 530 ? 1 : data < 532 ? 2 : data < 532.05 ? 3 : data < 533 ? 4 : data < 534.03 ? 5 : data < 534.07 ? 6 : data < 534.10 ? 7 : data < 534.13 ? 8 : data < 534.16 ? 9 : data < 534.24 ? 10 : data < 534.30 ? 11 : data < 535.01 ? 12 : data < 535.02 ? '13+' : data < 535.07 ? 15 : data < 535.11 ? 16 : data < 535.19 ? 17 : data < 536.05 ? 18 : data < 536.10 ? 19 : data < 537.01 ? 20 : data < 537.11 ? '21+' : data < 537.13 ? 23 : data < 537.18 ? 24 : data < 537.24 ? 25 : data < 537.36 ? 26 : layout != 'Blink' ? '27' : '28'); + } + // Add the postfix of ".x" or "+" for approximate versions. + layout && (layout[1] += ' ' + (data += typeof data == 'number' ? '.x' : /[.+]/.test(data) ? '' : '+')); + // Obscure version for some Safari 1-2 releases. + if (name == 'Safari' && (!version || parseInt(version) > 45)) { + version = data; + } else if (name == 'Chrome' && /\bHeadlessChrome/i.test(ua)) { + description.unshift('headless'); + } + } + // Detect Opera desktop modes. + if (name == 'Opera' && (data = /\bzbov|zvav$/.exec(os))) { + name += ' '; + description.unshift('desktop mode'); + if (data == 'zvav') { + name += 'Mini'; + version = null; + } else { + name += 'Mobile'; + } + os = os.replace(RegExp(' *' + data + '$'), ''); + } + // Detect Chrome desktop mode. + else if (name == 'Safari' && /\bChrome\b/.exec(layout && layout[1])) { + description.unshift('desktop mode'); + name = 'Chrome Mobile'; + version = null; + + if (/\bOS X\b/.test(os)) { + manufacturer = 'Apple'; + os = 'iOS 4.3+'; + } else { + os = null; + } + } + // Newer versions of SRWare Iron uses the Chrome tag to indicate its version number. + else if (/\bSRWare Iron\b/.test(name) && !version) { + version = getVersion('Chrome'); + } + // Strip incorrect OS versions. + if (version && version.indexOf((data = /[\d.]+$/.exec(os))) == 0 && + ua.indexOf('/' + data + '-') > -1) { + os = trim(os.replace(data, '')); + } + // Ensure OS does not include the browser name. + if (os && os.indexOf(name) != -1 && !RegExp(name + ' OS').test(os)) { + os = os.replace(RegExp(' *' + qualify(name) + ' *'), ''); + } + // Add layout engine. 
+ if (layout && !/\b(?:Avant|Nook)\b/.test(name) && ( + /Browser|Lunascape|Maxthon/.test(name) || + name != 'Safari' && /^iOS/.test(os) && /\bSafari\b/.test(layout[1]) || + /^(?:Adobe|Arora|Breach|Midori|Opera|Phantom|Rekonq|Rock|Samsung Internet|Sleipnir|SRWare Iron|Vivaldi|Web)/.test(name) && layout[1])) { + // Don't add layout details to description if they are falsey. + (data = layout[layout.length - 1]) && description.push(data); + } + // Combine contextual information. + if (description.length) { + description = ['(' + description.join('; ') + ')']; + } + // Append manufacturer to description. + if (manufacturer && product && product.indexOf(manufacturer) < 0) { + description.push('on ' + manufacturer); + } + // Append product to description. + if (product) { + description.push((/^on /.test(description[description.length - 1]) ? '' : 'on ') + product); + } + // Parse the OS into an object. + if (os) { + data = / ([\d.+]+)$/.exec(os); + isSpecialCasedOS = data && os.charAt(os.length - data[0].length - 1) == '/'; + os = { + 'architecture': 32, + 'family': (data && !isSpecialCasedOS) ? os.replace(data[0], '') : os, + 'version': data ? data[1] : null, + 'toString': function() { + var version = this.version; + return this.family + ((version && !isSpecialCasedOS) ? ' ' + version : '') + (this.architecture == 64 ? ' 64-bit' : ''); + } + }; + } + // Add browser/OS architecture. + if ((data = /\b(?:AMD|IA|Win|WOW|x86_|x)64\b/i.exec(arch)) && !/\bi686\b/i.test(arch)) { + if (os) { + os.architecture = 64; + os.family = os.family.replace(RegExp(' *' + data), ''); + } + if ( + name && (/\bWOW64\b/i.test(ua) || + (useFeatures && /\w(?:86|32)$/.test(nav.cpuClass || nav.platform) && !/\bWin64; x64\b/i.test(ua))) + ) { + description.unshift('32-bit'); + } + } + // Chrome 39 and above on OS X is always 64-bit. + else if ( + os && /^OS X/.test(os.family) && + name == 'Chrome' && parseFloat(version) >= 39 + ) { + os.architecture = 64; + } + + ua || (ua = null); + + /*------------------------------------------------------------------------*/ + + /** + * The platform object. + * + * @name platform + * @type Object + */ + var platform = {}; + + /** + * The platform description. + * + * @memberOf platform + * @type string|null + */ + platform.description = ua; + + /** + * The name of the browser's layout engine. + * + * The list of common layout engines include: + * "Blink", "EdgeHTML", "Gecko", "Trident" and "WebKit" + * + * @memberOf platform + * @type string|null + */ + platform.layout = layout && layout[0]; + + /** + * The name of the product's manufacturer. + * + * The list of manufacturers include: + * "Apple", "Archos", "Amazon", "Asus", "Barnes & Noble", "BlackBerry", + * "Google", "HP", "HTC", "LG", "Microsoft", "Motorola", "Nintendo", + * "Nokia", "Samsung" and "Sony" + * + * @memberOf platform + * @type string|null + */ + platform.manufacturer = manufacturer; + + /** + * The name of the browser/environment. + * + * The list of common browser names include: + * "Chrome", "Electron", "Firefox", "Firefox for iOS", "IE", + * "Microsoft Edge", "PhantomJS", "Safari", "SeaMonkey", "Silk", + * "Opera Mini" and "Opera" + * + * Mobile versions of some browsers have "Mobile" appended to their name: + * eg. "Chrome Mobile", "Firefox Mobile", "IE Mobile" and "Opera Mobile" + * + * @memberOf platform + * @type string|null + */ + platform.name = name; + + /** + * The alpha/beta release indicator. 
+ * + * @memberOf platform + * @type string|null + */ + platform.prerelease = prerelease; + + /** + * The name of the product hosting the browser. + * + * The list of common products include: + * + * "BlackBerry", "Galaxy S4", "Lumia", "iPad", "iPod", "iPhone", "Kindle", + * "Kindle Fire", "Nexus", "Nook", "PlayBook", "TouchPad" and "Transformer" + * + * @memberOf platform + * @type string|null + */ + platform.product = product; + + /** + * The browser's user agent string. + * + * @memberOf platform + * @type string|null + */ + platform.ua = ua; + + /** + * The browser/environment version. + * + * @memberOf platform + * @type string|null + */ + platform.version = name && version; + + /** + * The name of the operating system. + * + * @memberOf platform + * @type Object + */ + platform.os = os || { + + /** + * The CPU architecture the OS is built for. + * + * @memberOf platform.os + * @type number|null + */ + 'architecture': null, + + /** + * The family of the OS. + * + * Common values include: + * "Windows", "Windows Server 2008 R2 / 7", "Windows Server 2008 / Vista", + * "Windows XP", "OS X", "Linux", "Ubuntu", "Debian", "Fedora", "Red Hat", + * "SuSE", "Android", "iOS" and "Windows Phone" + * + * @memberOf platform.os + * @type string|null + */ + 'family': null, + + /** + * The version of the OS. + * + * @memberOf platform.os + * @type string|null + */ + 'version': null, + + /** + * Returns the OS string. + * + * @memberOf platform.os + * @returns {string} The OS string. + */ + 'toString': function() { return 'null'; } + }; + + platform.parse = parse; + platform.toString = toStringPlatform; + + if (platform.version) { + description.unshift(version); + } + if (platform.name) { + description.unshift(name); + } + if (os && name && !(os == String(os).split(' ')[0] && (os == name.split(' ')[0] || product))) { + description.push(product ? '(' + os + ')' : 'on ' + os); + } + if (description.length) { + platform.description = description.join(' '); + } + return platform; + } + + /*--------------------------------------------------------------------------*/ + + // Export platform. + var platform = parse(); + + // Some AMD build optimizers, like r.js, check for condition patterns like the following: + if (typeof define == 'function' && typeof define.amd == 'object' && define.amd) { + // Expose platform on the global object to prevent errors when platform is + // loaded by a script tag in the presence of an AMD loader. + // See http://requirejs.org/docs/errors.html#mismatch for more details. + root.platform = platform; + + // Define as an anonymous module so platform can be aliased through path mapping. + define(function() { + return platform; + }); + } + // Check for `exports` after `define` in case a build optimizer adds an `exports` object. + else if (freeExports && freeModule) { + // Export for CommonJS support. + forOwn(platform, function(value, key) { + freeExports[key] = value; + }); + } + else { + // Export to the global object. + root.platform = platform; + } +}.call(this)); + + +/***/ }), + +/***/ 63329: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var parseUrl = (__nccwpck_require__(57310).parse); + +var DEFAULT_PORTS = { + ftp: 21, + gopher: 70, + http: 80, + https: 443, + ws: 80, + wss: 443, +}; + +var stringEndsWith = String.prototype.endsWith || function(s) { + return s.length <= this.length && + this.indexOf(s, this.length - s.length) !== -1; +}; + +/** + * @param {string|object} url - The URL, or the result from url.parse. 
+ * @return {string} The URL of the proxy that should handle the request to the + * given URL. If no proxy is set, this will be an empty string. + */ +function getProxyForUrl(url) { + var parsedUrl = typeof url === 'string' ? parseUrl(url) : url || {}; + var proto = parsedUrl.protocol; + var hostname = parsedUrl.host; + var port = parsedUrl.port; + if (typeof hostname !== 'string' || !hostname || typeof proto !== 'string') { + return ''; // Don't proxy URLs without a valid scheme or host. + } + + proto = proto.split(':', 1)[0]; + // Stripping ports in this way instead of using parsedUrl.hostname to make + // sure that the brackets around IPv6 addresses are kept. + hostname = hostname.replace(/:\d*$/, ''); + port = parseInt(port) || DEFAULT_PORTS[proto] || 0; + if (!shouldProxy(hostname, port)) { + return ''; // Don't proxy URLs that match NO_PROXY. + } + + var proxy = + getEnv('npm_config_' + proto + '_proxy') || + getEnv(proto + '_proxy') || + getEnv('npm_config_proxy') || + getEnv('all_proxy'); + if (proxy && proxy.indexOf('://') === -1) { + // Missing scheme in proxy, default to the requested URL's scheme. + proxy = proto + '://' + proxy; + } + return proxy; +} + +/** + * Determines whether a given URL should be proxied. + * + * @param {string} hostname - The host name of the URL. + * @param {number} port - The effective port of the URL. + * @returns {boolean} Whether the given URL should be proxied. + * @private + */ +function shouldProxy(hostname, port) { + var NO_PROXY = + (getEnv('npm_config_no_proxy') || getEnv('no_proxy')).toLowerCase(); + if (!NO_PROXY) { + return true; // Always proxy if NO_PROXY is not set. + } + if (NO_PROXY === '*') { + return false; // Never proxy if wildcard is set. + } + + return NO_PROXY.split(/[,\s]/).every(function(proxy) { + if (!proxy) { + return true; // Skip zero-length hosts. + } + var parsedProxy = proxy.match(/^(.+):(\d+)$/); + var parsedProxyHostname = parsedProxy ? parsedProxy[1] : proxy; + var parsedProxyPort = parsedProxy ? parseInt(parsedProxy[2]) : 0; + if (parsedProxyPort && parsedProxyPort !== port) { + return true; // Skip if ports don't match. + } + + if (!/^[.*]/.test(parsedProxyHostname)) { + // No wildcards, so stop proxying if there is an exact match. + return hostname !== parsedProxyHostname; + } + + if (parsedProxyHostname.charAt(0) === '*') { + // Remove leading wildcard. + parsedProxyHostname = parsedProxyHostname.slice(1); + } + // Stop proxying if the hostname ends with the no_proxy host. + return !stringEndsWith.call(hostname, parsedProxyHostname); + }); +} + +/** + * Get the value for an environment variable. + * + * @param {string} key - The name of the environment variable. + * @return {string} The value of the environment variable. 
+ * @private + */ +function getEnv(key) { + return process.env[key.toLowerCase()] || process.env[key.toUpperCase()] || ''; +} + +exports.getProxyForUrl = getProxyForUrl; + + +/***/ }), + +/***/ 18341: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var once = __nccwpck_require__(1223) +var eos = __nccwpck_require__(81205) +var fs = __nccwpck_require__(57147) // we only need fs to get the ReadStream and WriteStream prototypes + +var noop = function () {} +var ancient = /^v?\.0/.test(process.version) + +var isFn = function (fn) { + return typeof fn === 'function' +} + +var isFS = function (stream) { + if (!ancient) return false // newer node version do not need to care about fs is a special way + if (!fs) return false // browser + return (stream instanceof (fs.ReadStream || noop) || stream instanceof (fs.WriteStream || noop)) && isFn(stream.close) +} + +var isRequest = function (stream) { + return stream.setHeader && isFn(stream.abort) +} + +var destroyer = function (stream, reading, writing, callback) { + callback = once(callback) + + var closed = false + stream.on('close', function () { + closed = true + }) + + eos(stream, {readable: reading, writable: writing}, function (err) { + if (err) return callback(err) + closed = true + callback() + }) + + var destroyed = false + return function (err) { + if (closed) return + if (destroyed) return + destroyed = true + + if (isFS(stream)) return stream.close(noop) // use close for fs streams to avoid fd leaks + if (isRequest(stream)) return stream.abort() // request.destroy just do .end - .abort is what we want + + if (isFn(stream.destroy)) return stream.destroy() + + callback(err || new Error('stream was destroyed')) + } +} + +var call = function (fn) { + fn() +} + +var pipe = function (from, to) { + return from.pipe(to) +} + +var pump = function () { + var streams = Array.prototype.slice.call(arguments) + var callback = isFn(streams[streams.length - 1] || noop) && streams.pop() || noop + + if (Array.isArray(streams[0])) streams = streams[0] + if (streams.length < 2) throw new Error('pump requires two streams per minimum') + + var error + var destroys = streams.map(function (stream, i) { + var reading = i < streams.length - 1 + var writing = i > 0 + return destroyer(stream, reading, writing, function (err) { + if (!error) error = err + if (err) destroys.forEach(call) + if (reading) return + destroys.forEach(call) + callback(error) + }) + }) + + return streams.reduce(pipe) +} + +module.exports = pump + + +/***/ }), + +/***/ 74907: +/***/ ((module) => { + +"use strict"; + + +var replace = String.prototype.replace; +var percentTwenties = /%20/g; + +var Format = { + RFC1738: 'RFC1738', + RFC3986: 'RFC3986' +}; + +module.exports = { + 'default': Format.RFC3986, + formatters: { + RFC1738: function (value) { + return replace.call(value, percentTwenties, '+'); + }, + RFC3986: function (value) { + return String(value); + } + }, + RFC1738: Format.RFC1738, + RFC3986: Format.RFC3986 +}; + + +/***/ }), + +/***/ 22760: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var stringify = __nccwpck_require__(79954); +var parse = __nccwpck_require__(33912); +var formats = __nccwpck_require__(74907); + +module.exports = { + formats: formats, + parse: parse, + stringify: stringify +}; + + +/***/ }), + +/***/ 33912: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var utils = __nccwpck_require__(72360); + +var has = Object.prototype.hasOwnProperty; +var isArray = 
Array.isArray; + +var defaults = { + allowDots: false, + allowEmptyArrays: false, + allowPrototypes: false, + allowSparse: false, + arrayLimit: 20, + charset: 'utf-8', + charsetSentinel: false, + comma: false, + decodeDotInKeys: false, + decoder: utils.decode, + delimiter: '&', + depth: 5, + duplicates: 'combine', + ignoreQueryPrefix: false, + interpretNumericEntities: false, + parameterLimit: 1000, + parseArrays: true, + plainObjects: false, + strictNullHandling: false +}; + +var interpretNumericEntities = function (str) { + return str.replace(/&#(\d+);/g, function ($0, numberStr) { + return String.fromCharCode(parseInt(numberStr, 10)); + }); +}; + +var parseArrayValue = function (val, options) { + if (val && typeof val === 'string' && options.comma && val.indexOf(',') > -1) { + return val.split(','); + } + + return val; +}; + +// This is what browsers will submit when the ✓ character occurs in an +// application/x-www-form-urlencoded body and the encoding of the page containing +// the form is iso-8859-1, or when the submitted form has an accept-charset +// attribute of iso-8859-1. Presumably also with other charsets that do not contain +// the ✓ character, such as us-ascii. +var isoSentinel = 'utf8=%26%2310003%3B'; // encodeURIComponent('✓') + +// These are the percent-encoded utf-8 octets representing a checkmark, indicating that the request actually is utf-8 encoded. +var charsetSentinel = 'utf8=%E2%9C%93'; // encodeURIComponent('✓') + +var parseValues = function parseQueryStringValues(str, options) { + var obj = { __proto__: null }; + + var cleanStr = options.ignoreQueryPrefix ? str.replace(/^\?/, '') : str; + var limit = options.parameterLimit === Infinity ? undefined : options.parameterLimit; + var parts = cleanStr.split(options.delimiter, limit); + var skipIndex = -1; // Keep track of where the utf8 sentinel was found + var i; + + var charset = options.charset; + if (options.charsetSentinel) { + for (i = 0; i < parts.length; ++i) { + if (parts[i].indexOf('utf8=') === 0) { + if (parts[i] === charsetSentinel) { + charset = 'utf-8'; + } else if (parts[i] === isoSentinel) { + charset = 'iso-8859-1'; + } + skipIndex = i; + i = parts.length; // The eslint settings do not allow break; + } + } + } + + for (i = 0; i < parts.length; ++i) { + if (i === skipIndex) { + continue; + } + var part = parts[i]; + + var bracketEqualsPos = part.indexOf(']='); + var pos = bracketEqualsPos === -1 ? part.indexOf('=') : bracketEqualsPos + 1; + + var key, val; + if (pos === -1) { + key = options.decoder(part, defaults.decoder, charset, 'key'); + val = options.strictNullHandling ? null : ''; + } else { + key = options.decoder(part.slice(0, pos), defaults.decoder, charset, 'key'); + val = utils.maybeMap( + parseArrayValue(part.slice(pos + 1), options), + function (encodedVal) { + return options.decoder(encodedVal, defaults.decoder, charset, 'value'); + } + ); + } + + if (val && options.interpretNumericEntities && charset === 'iso-8859-1') { + val = interpretNumericEntities(val); + } + + if (part.indexOf('[]=') > -1) { + val = isArray(val) ? [val] : val; + } + + var existing = has.call(obj, key); + if (existing && options.duplicates === 'combine') { + obj[key] = utils.combine(obj[key], val); + } else if (!existing || options.duplicates === 'last') { + obj[key] = val; + } + } + + return obj; +}; + +var parseObject = function (chain, val, options, valuesParsed) { + var leaf = valuesParsed ? 
val : parseArrayValue(val, options); + + for (var i = chain.length - 1; i >= 0; --i) { + var obj; + var root = chain[i]; + + if (root === '[]' && options.parseArrays) { + obj = options.allowEmptyArrays && leaf === '' ? [] : [].concat(leaf); + } else { + obj = options.plainObjects ? Object.create(null) : {}; + var cleanRoot = root.charAt(0) === '[' && root.charAt(root.length - 1) === ']' ? root.slice(1, -1) : root; + var decodedRoot = options.decodeDotInKeys ? cleanRoot.replace(/%2E/g, '.') : cleanRoot; + var index = parseInt(decodedRoot, 10); + if (!options.parseArrays && decodedRoot === '') { + obj = { 0: leaf }; + } else if ( + !isNaN(index) + && root !== decodedRoot + && String(index) === decodedRoot + && index >= 0 + && (options.parseArrays && index <= options.arrayLimit) + ) { + obj = []; + obj[index] = leaf; + } else if (decodedRoot !== '__proto__') { + obj[decodedRoot] = leaf; + } + } + + leaf = obj; + } + + return leaf; +}; + +var parseKeys = function parseQueryStringKeys(givenKey, val, options, valuesParsed) { + if (!givenKey) { + return; + } + + // Transform dot notation to bracket notation + var key = options.allowDots ? givenKey.replace(/\.([^.[]+)/g, '[$1]') : givenKey; + + // The regex chunks + + var brackets = /(\[[^[\]]*])/; + var child = /(\[[^[\]]*])/g; + + // Get the parent + + var segment = options.depth > 0 && brackets.exec(key); + var parent = segment ? key.slice(0, segment.index) : key; + + // Stash the parent if it exists + + var keys = []; + if (parent) { + // If we aren't using plain objects, optionally prefix keys that would overwrite object prototype properties + if (!options.plainObjects && has.call(Object.prototype, parent)) { + if (!options.allowPrototypes) { + return; + } + } + + keys.push(parent); + } + + // Loop through children appending to the array until we hit depth + + var i = 0; + while (options.depth > 0 && (segment = child.exec(key)) !== null && i < options.depth) { + i += 1; + if (!options.plainObjects && has.call(Object.prototype, segment[1].slice(1, -1))) { + if (!options.allowPrototypes) { + return; + } + } + keys.push(segment[1]); + } + + // If there's a remainder, just add whatever is left + + if (segment) { + keys.push('[' + key.slice(segment.index) + ']'); + } + + return parseObject(keys, val, options, valuesParsed); +}; + +var normalizeParseOptions = function normalizeParseOptions(opts) { + if (!opts) { + return defaults; + } + + if (typeof opts.allowEmptyArrays !== 'undefined' && typeof opts.allowEmptyArrays !== 'boolean') { + throw new TypeError('`allowEmptyArrays` option can only be `true` or `false`, when provided'); + } + + if (typeof opts.decodeDotInKeys !== 'undefined' && typeof opts.decodeDotInKeys !== 'boolean') { + throw new TypeError('`decodeDotInKeys` option can only be `true` or `false`, when provided'); + } + + if (opts.decoder !== null && typeof opts.decoder !== 'undefined' && typeof opts.decoder !== 'function') { + throw new TypeError('Decoder has to be a function.'); + } + + if (typeof opts.charset !== 'undefined' && opts.charset !== 'utf-8' && opts.charset !== 'iso-8859-1') { + throw new TypeError('The charset option must be either utf-8, iso-8859-1, or undefined'); + } + var charset = typeof opts.charset === 'undefined' ? defaults.charset : opts.charset; + + var duplicates = typeof opts.duplicates === 'undefined' ? 
defaults.duplicates : opts.duplicates; + + if (duplicates !== 'combine' && duplicates !== 'first' && duplicates !== 'last') { + throw new TypeError('The duplicates option must be either combine, first, or last'); + } + + var allowDots = typeof opts.allowDots === 'undefined' ? opts.decodeDotInKeys === true ? true : defaults.allowDots : !!opts.allowDots; + + return { + allowDots: allowDots, + allowEmptyArrays: typeof opts.allowEmptyArrays === 'boolean' ? !!opts.allowEmptyArrays : defaults.allowEmptyArrays, + allowPrototypes: typeof opts.allowPrototypes === 'boolean' ? opts.allowPrototypes : defaults.allowPrototypes, + allowSparse: typeof opts.allowSparse === 'boolean' ? opts.allowSparse : defaults.allowSparse, + arrayLimit: typeof opts.arrayLimit === 'number' ? opts.arrayLimit : defaults.arrayLimit, + charset: charset, + charsetSentinel: typeof opts.charsetSentinel === 'boolean' ? opts.charsetSentinel : defaults.charsetSentinel, + comma: typeof opts.comma === 'boolean' ? opts.comma : defaults.comma, + decodeDotInKeys: typeof opts.decodeDotInKeys === 'boolean' ? opts.decodeDotInKeys : defaults.decodeDotInKeys, + decoder: typeof opts.decoder === 'function' ? opts.decoder : defaults.decoder, + delimiter: typeof opts.delimiter === 'string' || utils.isRegExp(opts.delimiter) ? opts.delimiter : defaults.delimiter, + // eslint-disable-next-line no-implicit-coercion, no-extra-parens + depth: (typeof opts.depth === 'number' || opts.depth === false) ? +opts.depth : defaults.depth, + duplicates: duplicates, + ignoreQueryPrefix: opts.ignoreQueryPrefix === true, + interpretNumericEntities: typeof opts.interpretNumericEntities === 'boolean' ? opts.interpretNumericEntities : defaults.interpretNumericEntities, + parameterLimit: typeof opts.parameterLimit === 'number' ? opts.parameterLimit : defaults.parameterLimit, + parseArrays: opts.parseArrays !== false, + plainObjects: typeof opts.plainObjects === 'boolean' ? opts.plainObjects : defaults.plainObjects, + strictNullHandling: typeof opts.strictNullHandling === 'boolean' ? opts.strictNullHandling : defaults.strictNullHandling + }; +}; + +module.exports = function (str, opts) { + var options = normalizeParseOptions(opts); + + if (str === '' || str === null || typeof str === 'undefined') { + return options.plainObjects ? Object.create(null) : {}; + } + + var tempObj = typeof str === 'string' ? parseValues(str, options) : str; + var obj = options.plainObjects ? Object.create(null) : {}; + + // Iterate over the keys and setup the new object + + var keys = Object.keys(tempObj); + for (var i = 0; i < keys.length; ++i) { + var key = keys[i]; + var newObj = parseKeys(key, tempObj[key], options, typeof str === 'string'); + obj = utils.merge(obj, newObj, options); + } + + if (options.allowSparse === true) { + return obj; + } + + return utils.compact(obj); +}; + + +/***/ }), + +/***/ 79954: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var getSideChannel = __nccwpck_require__(14334); +var utils = __nccwpck_require__(72360); +var formats = __nccwpck_require__(74907); +var has = Object.prototype.hasOwnProperty; + +var arrayPrefixGenerators = { + brackets: function brackets(prefix) { + return prefix + '[]'; + }, + comma: 'comma', + indices: function indices(prefix, key) { + return prefix + '[' + key + ']'; + }, + repeat: function repeat(prefix) { + return prefix; + } +}; + +var isArray = Array.isArray; +var push = Array.prototype.push; +var pushToArray = function (arr, valueOrArray) { + push.apply(arr, isArray(valueOrArray) ? 
valueOrArray : [valueOrArray]); +}; + +var toISO = Date.prototype.toISOString; + +var defaultFormat = formats['default']; +var defaults = { + addQueryPrefix: false, + allowDots: false, + allowEmptyArrays: false, + arrayFormat: 'indices', + charset: 'utf-8', + charsetSentinel: false, + delimiter: '&', + encode: true, + encodeDotInKeys: false, + encoder: utils.encode, + encodeValuesOnly: false, + format: defaultFormat, + formatter: formats.formatters[defaultFormat], + // deprecated + indices: false, + serializeDate: function serializeDate(date) { + return toISO.call(date); + }, + skipNulls: false, + strictNullHandling: false +}; + +var isNonNullishPrimitive = function isNonNullishPrimitive(v) { + return typeof v === 'string' + || typeof v === 'number' + || typeof v === 'boolean' + || typeof v === 'symbol' + || typeof v === 'bigint'; +}; + +var sentinel = {}; + +var stringify = function stringify( + object, + prefix, + generateArrayPrefix, + commaRoundTrip, + allowEmptyArrays, + strictNullHandling, + skipNulls, + encodeDotInKeys, + encoder, + filter, + sort, + allowDots, + serializeDate, + format, + formatter, + encodeValuesOnly, + charset, + sideChannel +) { + var obj = object; + + var tmpSc = sideChannel; + var step = 0; + var findFlag = false; + while ((tmpSc = tmpSc.get(sentinel)) !== void undefined && !findFlag) { + // Where object last appeared in the ref tree + var pos = tmpSc.get(object); + step += 1; + if (typeof pos !== 'undefined') { + if (pos === step) { + throw new RangeError('Cyclic object value'); + } else { + findFlag = true; // Break while + } + } + if (typeof tmpSc.get(sentinel) === 'undefined') { + step = 0; + } + } + + if (typeof filter === 'function') { + obj = filter(prefix, obj); + } else if (obj instanceof Date) { + obj = serializeDate(obj); + } else if (generateArrayPrefix === 'comma' && isArray(obj)) { + obj = utils.maybeMap(obj, function (value) { + if (value instanceof Date) { + return serializeDate(value); + } + return value; + }); + } + + if (obj === null) { + if (strictNullHandling) { + return encoder && !encodeValuesOnly ? encoder(prefix, defaults.encoder, charset, 'key', format) : prefix; + } + + obj = ''; + } + + if (isNonNullishPrimitive(obj) || utils.isBuffer(obj)) { + if (encoder) { + var keyValue = encodeValuesOnly ? prefix : encoder(prefix, defaults.encoder, charset, 'key', format); + return [formatter(keyValue) + '=' + formatter(encoder(obj, defaults.encoder, charset, 'value', format))]; + } + return [formatter(prefix) + '=' + formatter(String(obj))]; + } + + var values = []; + + if (typeof obj === 'undefined') { + return values; + } + + var objKeys; + if (generateArrayPrefix === 'comma' && isArray(obj)) { + // we need to join elements in + if (encodeValuesOnly && encoder) { + obj = utils.maybeMap(obj, encoder); + } + objKeys = [{ value: obj.length > 0 ? obj.join(',') || null : void undefined }]; + } else if (isArray(filter)) { + objKeys = filter; + } else { + var keys = Object.keys(obj); + objKeys = sort ? keys.sort(sort) : keys; + } + + var encodedPrefix = encodeDotInKeys ? prefix.replace(/\./g, '%2E') : prefix; + + var adjustedPrefix = commaRoundTrip && isArray(obj) && obj.length === 1 ? encodedPrefix + '[]' : encodedPrefix; + + if (allowEmptyArrays && isArray(obj) && obj.length === 0) { + return adjustedPrefix + '[]'; + } + + for (var j = 0; j < objKeys.length; ++j) { + var key = objKeys[j]; + var value = typeof key === 'object' && typeof key.value !== 'undefined' ? 
key.value : obj[key]; + + if (skipNulls && value === null) { + continue; + } + + var encodedKey = allowDots && encodeDotInKeys ? key.replace(/\./g, '%2E') : key; + var keyPrefix = isArray(obj) + ? typeof generateArrayPrefix === 'function' ? generateArrayPrefix(adjustedPrefix, encodedKey) : adjustedPrefix + : adjustedPrefix + (allowDots ? '.' + encodedKey : '[' + encodedKey + ']'); + + sideChannel.set(object, step); + var valueSideChannel = getSideChannel(); + valueSideChannel.set(sentinel, sideChannel); + pushToArray(values, stringify( + value, + keyPrefix, + generateArrayPrefix, + commaRoundTrip, + allowEmptyArrays, + strictNullHandling, + skipNulls, + encodeDotInKeys, + generateArrayPrefix === 'comma' && encodeValuesOnly && isArray(obj) ? null : encoder, + filter, + sort, + allowDots, + serializeDate, + format, + formatter, + encodeValuesOnly, + charset, + valueSideChannel + )); + } + + return values; +}; + +var normalizeStringifyOptions = function normalizeStringifyOptions(opts) { + if (!opts) { + return defaults; + } + + if (typeof opts.allowEmptyArrays !== 'undefined' && typeof opts.allowEmptyArrays !== 'boolean') { + throw new TypeError('`allowEmptyArrays` option can only be `true` or `false`, when provided'); + } + + if (typeof opts.encodeDotInKeys !== 'undefined' && typeof opts.encodeDotInKeys !== 'boolean') { + throw new TypeError('`encodeDotInKeys` option can only be `true` or `false`, when provided'); + } + + if (opts.encoder !== null && typeof opts.encoder !== 'undefined' && typeof opts.encoder !== 'function') { + throw new TypeError('Encoder has to be a function.'); + } + + var charset = opts.charset || defaults.charset; + if (typeof opts.charset !== 'undefined' && opts.charset !== 'utf-8' && opts.charset !== 'iso-8859-1') { + throw new TypeError('The charset option must be either utf-8, iso-8859-1, or undefined'); + } + + var format = formats['default']; + if (typeof opts.format !== 'undefined') { + if (!has.call(formats.formatters, opts.format)) { + throw new TypeError('Unknown format option provided.'); + } + format = opts.format; + } + var formatter = formats.formatters[format]; + + var filter = defaults.filter; + if (typeof opts.filter === 'function' || isArray(opts.filter)) { + filter = opts.filter; + } + + var arrayFormat; + if (opts.arrayFormat in arrayPrefixGenerators) { + arrayFormat = opts.arrayFormat; + } else if ('indices' in opts) { + arrayFormat = opts.indices ? 'indices' : 'repeat'; + } else { + arrayFormat = defaults.arrayFormat; + } + + if ('commaRoundTrip' in opts && typeof opts.commaRoundTrip !== 'boolean') { + throw new TypeError('`commaRoundTrip` must be a boolean, or absent'); + } + + var allowDots = typeof opts.allowDots === 'undefined' ? opts.encodeDotInKeys === true ? true : defaults.allowDots : !!opts.allowDots; + + return { + addQueryPrefix: typeof opts.addQueryPrefix === 'boolean' ? opts.addQueryPrefix : defaults.addQueryPrefix, + allowDots: allowDots, + allowEmptyArrays: typeof opts.allowEmptyArrays === 'boolean' ? !!opts.allowEmptyArrays : defaults.allowEmptyArrays, + arrayFormat: arrayFormat, + charset: charset, + charsetSentinel: typeof opts.charsetSentinel === 'boolean' ? opts.charsetSentinel : defaults.charsetSentinel, + commaRoundTrip: opts.commaRoundTrip, + delimiter: typeof opts.delimiter === 'undefined' ? defaults.delimiter : opts.delimiter, + encode: typeof opts.encode === 'boolean' ? opts.encode : defaults.encode, + encodeDotInKeys: typeof opts.encodeDotInKeys === 'boolean' ? 
opts.encodeDotInKeys : defaults.encodeDotInKeys, + encoder: typeof opts.encoder === 'function' ? opts.encoder : defaults.encoder, + encodeValuesOnly: typeof opts.encodeValuesOnly === 'boolean' ? opts.encodeValuesOnly : defaults.encodeValuesOnly, + filter: filter, + format: format, + formatter: formatter, + serializeDate: typeof opts.serializeDate === 'function' ? opts.serializeDate : defaults.serializeDate, + skipNulls: typeof opts.skipNulls === 'boolean' ? opts.skipNulls : defaults.skipNulls, + sort: typeof opts.sort === 'function' ? opts.sort : null, + strictNullHandling: typeof opts.strictNullHandling === 'boolean' ? opts.strictNullHandling : defaults.strictNullHandling + }; +}; + +module.exports = function (object, opts) { + var obj = object; + var options = normalizeStringifyOptions(opts); + + var objKeys; + var filter; + + if (typeof options.filter === 'function') { + filter = options.filter; + obj = filter('', obj); + } else if (isArray(options.filter)) { + filter = options.filter; + objKeys = filter; + } + + var keys = []; + + if (typeof obj !== 'object' || obj === null) { + return ''; + } + + var generateArrayPrefix = arrayPrefixGenerators[options.arrayFormat]; + var commaRoundTrip = generateArrayPrefix === 'comma' && options.commaRoundTrip; + + if (!objKeys) { + objKeys = Object.keys(obj); + } + + if (options.sort) { + objKeys.sort(options.sort); + } + + var sideChannel = getSideChannel(); + for (var i = 0; i < objKeys.length; ++i) { + var key = objKeys[i]; + + if (options.skipNulls && obj[key] === null) { + continue; + } + pushToArray(keys, stringify( + obj[key], + key, + generateArrayPrefix, + commaRoundTrip, + options.allowEmptyArrays, + options.strictNullHandling, + options.skipNulls, + options.encodeDotInKeys, + options.encode ? options.encoder : null, + options.filter, + options.sort, + options.allowDots, + options.serializeDate, + options.format, + options.formatter, + options.encodeValuesOnly, + options.charset, + sideChannel + )); + } + + var joined = keys.join(options.delimiter); + var prefix = options.addQueryPrefix === true ? '?' : ''; + + if (options.charsetSentinel) { + if (options.charset === 'iso-8859-1') { + // encodeURIComponent('✓'), the "numeric entity" representation of a checkmark + prefix += 'utf8=%26%2310003%3B&'; + } else { + // encodeURIComponent('✓') + prefix += 'utf8=%E2%9C%93&'; + } + } + + return joined.length > 0 ? prefix + joined : ''; +}; + + +/***/ }), + +/***/ 72360: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var formats = __nccwpck_require__(74907); + +var has = Object.prototype.hasOwnProperty; +var isArray = Array.isArray; + +var hexTable = (function () { + var array = []; + for (var i = 0; i < 256; ++i) { + array.push('%' + ((i < 16 ? '0' : '') + i.toString(16)).toUpperCase()); + } + + return array; +}()); + +var compactQueue = function compactQueue(queue) { + while (queue.length > 1) { + var item = queue.pop(); + var obj = item.obj[item.prop]; + + if (isArray(obj)) { + var compacted = []; + + for (var j = 0; j < obj.length; ++j) { + if (typeof obj[j] !== 'undefined') { + compacted.push(obj[j]); + } + } + + item.obj[item.prop] = compacted; + } + } +}; + +var arrayToObject = function arrayToObject(source, options) { + var obj = options && options.plainObjects ? 
Object.create(null) : {}; + for (var i = 0; i < source.length; ++i) { + if (typeof source[i] !== 'undefined') { + obj[i] = source[i]; + } + } + + return obj; +}; + +var merge = function merge(target, source, options) { + /* eslint no-param-reassign: 0 */ + if (!source) { + return target; + } + + if (typeof source !== 'object') { + if (isArray(target)) { + target.push(source); + } else if (target && typeof target === 'object') { + if ((options && (options.plainObjects || options.allowPrototypes)) || !has.call(Object.prototype, source)) { + target[source] = true; + } + } else { + return [target, source]; + } + + return target; + } + + if (!target || typeof target !== 'object') { + return [target].concat(source); + } + + var mergeTarget = target; + if (isArray(target) && !isArray(source)) { + mergeTarget = arrayToObject(target, options); + } + + if (isArray(target) && isArray(source)) { + source.forEach(function (item, i) { + if (has.call(target, i)) { + var targetItem = target[i]; + if (targetItem && typeof targetItem === 'object' && item && typeof item === 'object') { + target[i] = merge(targetItem, item, options); + } else { + target.push(item); + } + } else { + target[i] = item; + } + }); + return target; + } + + return Object.keys(source).reduce(function (acc, key) { + var value = source[key]; + + if (has.call(acc, key)) { + acc[key] = merge(acc[key], value, options); + } else { + acc[key] = value; + } + return acc; + }, mergeTarget); +}; + +var assign = function assignSingleSource(target, source) { + return Object.keys(source).reduce(function (acc, key) { + acc[key] = source[key]; + return acc; + }, target); +}; + +var decode = function (str, decoder, charset) { + var strWithoutPlus = str.replace(/\+/g, ' '); + if (charset === 'iso-8859-1') { + // unescape never throws, no try...catch needed: + return strWithoutPlus.replace(/%[0-9a-f]{2}/gi, unescape); + } + // utf-8 + try { + return decodeURIComponent(strWithoutPlus); + } catch (e) { + return strWithoutPlus; + } +}; + +var limit = 1024; + +/* eslint operator-linebreak: [2, "before"] */ + +var encode = function encode(str, defaultEncoder, charset, kind, format) { + // This code was originally written by Brian White (mscdex) for the io.js core querystring library. + // It has been adapted here for stricter adherence to RFC 3986 + if (str.length === 0) { + return str; + } + + var string = str; + if (typeof str === 'symbol') { + string = Symbol.prototype.toString.call(str); + } else if (typeof str !== 'string') { + string = String(str); + } + + if (charset === 'iso-8859-1') { + return escape(string).replace(/%u[0-9a-f]{4}/gi, function ($0) { + return '%26%23' + parseInt($0.slice(2), 16) + '%3B'; + }); + } + + var out = ''; + for (var j = 0; j < string.length; j += limit) { + var segment = string.length >= limit ? string.slice(j, j + limit) : string; + var arr = []; + + for (var i = 0; i < segment.length; ++i) { + var c = segment.charCodeAt(i); + if ( + c === 0x2D // - + || c === 0x2E // . 
+ || c === 0x5F // _ + || c === 0x7E // ~ + || (c >= 0x30 && c <= 0x39) // 0-9 + || (c >= 0x41 && c <= 0x5A) // a-z + || (c >= 0x61 && c <= 0x7A) // A-Z + || (format === formats.RFC1738 && (c === 0x28 || c === 0x29)) // ( ) + ) { + arr[arr.length] = segment.charAt(i); + continue; + } + + if (c < 0x80) { + arr[arr.length] = hexTable[c]; + continue; + } + + if (c < 0x800) { + arr[arr.length] = hexTable[0xC0 | (c >> 6)] + + hexTable[0x80 | (c & 0x3F)]; + continue; + } + + if (c < 0xD800 || c >= 0xE000) { + arr[arr.length] = hexTable[0xE0 | (c >> 12)] + + hexTable[0x80 | ((c >> 6) & 0x3F)] + + hexTable[0x80 | (c & 0x3F)]; + continue; + } + + i += 1; + c = 0x10000 + (((c & 0x3FF) << 10) | (segment.charCodeAt(i) & 0x3FF)); + + arr[arr.length] = hexTable[0xF0 | (c >> 18)] + + hexTable[0x80 | ((c >> 12) & 0x3F)] + + hexTable[0x80 | ((c >> 6) & 0x3F)] + + hexTable[0x80 | (c & 0x3F)]; + } + + out += arr.join(''); + } + + return out; +}; + +var compact = function compact(value) { + var queue = [{ obj: { o: value }, prop: 'o' }]; + var refs = []; + + for (var i = 0; i < queue.length; ++i) { + var item = queue[i]; + var obj = item.obj[item.prop]; + + var keys = Object.keys(obj); + for (var j = 0; j < keys.length; ++j) { + var key = keys[j]; + var val = obj[key]; + if (typeof val === 'object' && val !== null && refs.indexOf(val) === -1) { + queue.push({ obj: obj, prop: key }); + refs.push(val); + } + } + } + + compactQueue(queue); + + return value; +}; + +var isRegExp = function isRegExp(obj) { + return Object.prototype.toString.call(obj) === '[object RegExp]'; +}; + +var isBuffer = function isBuffer(obj) { + if (!obj || typeof obj !== 'object') { + return false; + } + + return !!(obj.constructor && obj.constructor.isBuffer && obj.constructor.isBuffer(obj)); +}; + +var combine = function combine(a, b) { + return [].concat(a, b); +}; + +var maybeMap = function maybeMap(val, fn) { + if (isArray(val)) { + var mapped = []; + for (var i = 0; i < val.length; i += 1) { + mapped.push(fn(val[i])); + } + return mapped; + } + return fn(val); +}; + +module.exports = { + arrayToObject: arrayToObject, + assign: assign, + combine: combine, + compact: compact, + decode: decode, + encode: encode, + isBuffer: isBuffer, + isRegExp: isRegExp, + maybeMap: maybeMap, + merge: merge +}; + + +/***/ }), + +/***/ 21867: +/***/ ((module, exports, __nccwpck_require__) => { + +/* eslint-disable node/no-deprecated-api */ +var buffer = __nccwpck_require__(14300) +var Buffer = buffer.Buffer + +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } +} +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} + +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} + +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + 
buf.fill(0) + } + return buf +} + +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) +} + +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} + + +/***/ }), + +/***/ 15118: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +/* eslint-disable node/no-deprecated-api */ + + + +var buffer = __nccwpck_require__(14300) +var Buffer = buffer.Buffer + +var safer = {} + +var key + +for (key in buffer) { + if (!buffer.hasOwnProperty(key)) continue + if (key === 'SlowBuffer' || key === 'Buffer') continue + safer[key] = buffer[key] +} + +var Safer = safer.Buffer = {} +for (key in Buffer) { + if (!Buffer.hasOwnProperty(key)) continue + if (key === 'allocUnsafe' || key === 'allocUnsafeSlow') continue + Safer[key] = Buffer[key] +} + +safer.Buffer.prototype = Buffer.prototype + +if (!Safer.from || Safer.from === Uint8Array.from) { + Safer.from = function (value, encodingOrOffset, length) { + if (typeof value === 'number') { + throw new TypeError('The "value" argument must not be of type number. Received type ' + typeof value) + } + if (value && typeof value.length === 'undefined') { + throw new TypeError('The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type ' + typeof value) + } + return Buffer(value, encodingOrOffset, length) + } +} + +if (!Safer.alloc) { + Safer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('The "size" argument must be of type number. Received type ' + typeof size) + } + if (size < 0 || size >= 2 * (1 << 30)) { + throw new RangeError('The value "' + size + '" is invalid for option "size"') + } + var buf = Buffer(size) + if (!fill || fill.length === 0) { + buf.fill(0) + } else if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + return buf + } +} + +if (!safer.kStringMaxLength) { + try { + safer.kStringMaxLength = process.binding('buffer').kStringMaxLength + } catch (e) { + // we can't determine kStringMaxLength in environments where process.binding + // is unsupported, so let's not set it + } +} + +if (!safer.constants) { + safer.constants = { + MAX_LENGTH: safer.kMaxLength + } + if (safer.kStringMaxLength) { + safer.constants.MAX_STRING_LENGTH = safer.kStringMaxLength + } +} + +module.exports = safer + + +/***/ }), + +/***/ 72043: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +;(function (sax) { // wrapper for non-node envs + sax.parser = function (strict, opt) { return new SAXParser(strict, opt) } + sax.SAXParser = SAXParser + sax.SAXStream = SAXStream + sax.createStream = createStream + + // When we pass the MAX_BUFFER_LENGTH position, start checking for buffer overruns. + // When we check, schedule the next check for MAX_BUFFER_LENGTH - (max(buffer lengths)), + // since that's the earliest that a buffer overrun could occur. This way, checks are + // as rare as required, but as often as necessary to ensure never crossing this bound. + // Furthermore, buffers are only tested at most once per write(), so passing a very + // large string into write() might have undesirable effects, but this is manageable by + // the caller, so it is assumed to be safe. 
Thus, a call to write() may, in the extreme + // edge case, result in creating at most one complete copy of the string passed in. + // Set to Infinity to have unlimited buffers. + sax.MAX_BUFFER_LENGTH = 64 * 1024 + + var buffers = [ + 'comment', 'sgmlDecl', 'textNode', 'tagName', 'doctype', + 'procInstName', 'procInstBody', 'entity', 'attribName', + 'attribValue', 'cdata', 'script' + ] + + sax.EVENTS = [ + 'text', + 'processinginstruction', + 'sgmldeclaration', + 'doctype', + 'comment', + 'opentagstart', + 'attribute', + 'opentag', + 'closetag', + 'opencdata', + 'cdata', + 'closecdata', + 'error', + 'end', + 'ready', + 'script', + 'opennamespace', + 'closenamespace' + ] + + function SAXParser (strict, opt) { + if (!(this instanceof SAXParser)) { + return new SAXParser(strict, opt) + } + + var parser = this + clearBuffers(parser) + parser.q = parser.c = '' + parser.bufferCheckPosition = sax.MAX_BUFFER_LENGTH + parser.opt = opt || {} + parser.opt.lowercase = parser.opt.lowercase || parser.opt.lowercasetags + parser.looseCase = parser.opt.lowercase ? 'toLowerCase' : 'toUpperCase' + parser.tags = [] + parser.closed = parser.closedRoot = parser.sawRoot = false + parser.tag = parser.error = null + parser.strict = !!strict + parser.noscript = !!(strict || parser.opt.noscript) + parser.state = S.BEGIN + parser.strictEntities = parser.opt.strictEntities + parser.ENTITIES = parser.strictEntities ? Object.create(sax.XML_ENTITIES) : Object.create(sax.ENTITIES) + parser.attribList = [] + + // namespaces form a prototype chain. + // it always points at the current tag, + // which protos to its parent tag. + if (parser.opt.xmlns) { + parser.ns = Object.create(rootNS) + } + + // disallow unquoted attribute values if not otherwise configured + // and strict mode is true + if (parser.opt.unquotedAttributeValues === undefined) { + parser.opt.unquotedAttributeValues = !strict; + } + + // mostly just for error reporting + parser.trackPosition = parser.opt.position !== false + if (parser.trackPosition) { + parser.position = parser.line = parser.column = 0 + } + emit(parser, 'onready') + } + + if (!Object.create) { + Object.create = function (o) { + function F () {} + F.prototype = o + var newf = new F() + return newf + } + } + + if (!Object.keys) { + Object.keys = function (o) { + var a = [] + for (var i in o) if (o.hasOwnProperty(i)) a.push(i) + return a + } + } + + function checkBufferLength (parser) { + var maxAllowed = Math.max(sax.MAX_BUFFER_LENGTH, 10) + var maxActual = 0 + for (var i = 0, l = buffers.length; i < l; i++) { + var len = parser[buffers[i]].length + if (len > maxAllowed) { + // Text/cdata nodes can get big, and since they're buffered, + // we can get here under normal conditions. + // Avoid issues by emitting the text node now, + // so at least it won't get any bigger. + switch (buffers[i]) { + case 'textNode': + closeText(parser) + break + + case 'cdata': + emitNode(parser, 'oncdata', parser.cdata) + parser.cdata = '' + break + + case 'script': + emitNode(parser, 'onscript', parser.script) + parser.script = '' + break + + default: + error(parser, 'Max buffer length exceeded: ' + buffers[i]) + } + } + maxActual = Math.max(maxActual, len) + } + // schedule the next check for the earliest possible buffer overrun. 
+ var m = sax.MAX_BUFFER_LENGTH - maxActual + parser.bufferCheckPosition = m + parser.position + } + + function clearBuffers (parser) { + for (var i = 0, l = buffers.length; i < l; i++) { + parser[buffers[i]] = '' + } + } + + function flushBuffers (parser) { + closeText(parser) + if (parser.cdata !== '') { + emitNode(parser, 'oncdata', parser.cdata) + parser.cdata = '' + } + if (parser.script !== '') { + emitNode(parser, 'onscript', parser.script) + parser.script = '' + } + } + + SAXParser.prototype = { + end: function () { end(this) }, + write: write, + resume: function () { this.error = null; return this }, + close: function () { return this.write(null) }, + flush: function () { flushBuffers(this) } + } + + var Stream + try { + Stream = (__nccwpck_require__(12781).Stream) + } catch (ex) { + Stream = function () {} + } + if (!Stream) Stream = function () {} + + var streamWraps = sax.EVENTS.filter(function (ev) { + return ev !== 'error' && ev !== 'end' + }) + + function createStream (strict, opt) { + return new SAXStream(strict, opt) + } + + function SAXStream (strict, opt) { + if (!(this instanceof SAXStream)) { + return new SAXStream(strict, opt) + } + + Stream.apply(this) + + this._parser = new SAXParser(strict, opt) + this.writable = true + this.readable = true + + var me = this + + this._parser.onend = function () { + me.emit('end') + } + + this._parser.onerror = function (er) { + me.emit('error', er) + + // if didn't throw, then means error was handled. + // go ahead and clear error, so we can write again. + me._parser.error = null + } + + this._decoder = null + + streamWraps.forEach(function (ev) { + Object.defineProperty(me, 'on' + ev, { + get: function () { + return me._parser['on' + ev] + }, + set: function (h) { + if (!h) { + me.removeAllListeners(ev) + me._parser['on' + ev] = h + return h + } + me.on(ev, h) + }, + enumerable: true, + configurable: false + }) + }) + } + + SAXStream.prototype = Object.create(Stream.prototype, { + constructor: { + value: SAXStream + } + }) + + SAXStream.prototype.write = function (data) { + if (typeof Buffer === 'function' && + typeof Buffer.isBuffer === 'function' && + Buffer.isBuffer(data)) { + if (!this._decoder) { + var SD = (__nccwpck_require__(71576).StringDecoder) + this._decoder = new SD('utf8') + } + data = this._decoder.write(data) + } + + this._parser.write(data.toString()) + this.emit('data', data) + return true + } + + SAXStream.prototype.end = function (chunk) { + if (chunk && chunk.length) { + this.write(chunk) + } + this._parser.end() + return true + } + + SAXStream.prototype.on = function (ev, handler) { + var me = this + if (!me._parser['on' + ev] && streamWraps.indexOf(ev) !== -1) { + me._parser['on' + ev] = function () { + var args = arguments.length === 1 ? [arguments[0]] : Array.apply(null, arguments) + args.splice(0, 0, ev) + me.emit.apply(me, args) + } + } + + return Stream.prototype.on.call(me, ev, handler) + } + + // this really needs to be replaced with character classes. + // XML allows all manner of ridiculous numbers and digits. 
+  var CDATA = '[CDATA['
+  var DOCTYPE = 'DOCTYPE'
+  var XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace'
+  var XMLNS_NAMESPACE = 'http://www.w3.org/2000/xmlns/'
+  var rootNS = { xml: XML_NAMESPACE, xmlns: XMLNS_NAMESPACE }
+
+  // http://www.w3.org/TR/REC-xml/#NT-NameStartChar
+  // This implementation works on strings, a single character at a time
+  // as such, it cannot ever support astral-plane characters (10000-EFFFF)
+  // without a significant breaking change to either this parser, or the
+  // JavaScript language. Implementation of an emoji-capable xml parser
+  // is left as an exercise for the reader.
+  var nameStart = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/
+
+  var nameBody = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/
+
+  var entityStart = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/
+  var entityBody = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/
+
+  function isWhitespace (c) {
+    return c === ' ' || c === '\n' || c === '\r' || c === '\t'
+  }
+
+  function isQuote (c) {
+    return c === '"' || c === '\''
+  }
+
+  function isAttribEnd (c) {
+    return c === '>' || isWhitespace(c)
+  }
+
+  function isMatch (regex, c) {
+    return regex.test(c)
+  }
+
+  function notMatch (regex, c) {
+    return !isMatch(regex, c)
+  }
+
+  var S = 0
+  sax.STATE = {
+    BEGIN: S++, // leading byte order mark or whitespace
+    BEGIN_WHITESPACE: S++, // leading whitespace
+    TEXT: S++, // general stuff
+    TEXT_ENTITY: S++, // & and such.
+    OPEN_WAKA: S++, // <
+    SGML_DECL: S++, //
+    SCRIPT: S++, //