diff --git a/dist/index.js b/dist/index.js
index 341bde3..0cfe3cb 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -1,7 +1,7 @@
 /******/ (() => { // webpackBootstrap
 /******/ 	var __webpack_modules__ = ({
 
-/***/ 27724:
+/***/ 32481:
 /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
 
 "use strict";
@@ -21,30 +21,30 @@ var __exportStar = (this && this.__exportStar) || function(m, exports) {
     for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-const client_1 = __nccwpck_require__(41932);
-__exportStar(__nccwpck_require__(68213), exports);
-__exportStar(__nccwpck_require__(3453), exports);
-__exportStar(__nccwpck_require__(41932), exports);
+const client_1 = __nccwpck_require__(69801);
+__exportStar(__nccwpck_require__(73511), exports);
+__exportStar(__nccwpck_require__(3380), exports);
+__exportStar(__nccwpck_require__(69801), exports);
 const client = new client_1.DefaultArtifactClient();
 exports["default"] = client;
 //# sourceMappingURL=artifact.js.map
 
 /***/ }),
 
-/***/ 23162:
+/***/ 1485:
 /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
 
 "use strict";
 
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.Timestamp = void 0;
-const runtime_1 = __nccwpck_require__(35435);
-const runtime_2 = __nccwpck_require__(35435);
-const runtime_3 = __nccwpck_require__(35435);
-const runtime_4 = __nccwpck_require__(35435);
-const runtime_5 = __nccwpck_require__(35435);
-const runtime_6 = __nccwpck_require__(35435);
-const runtime_7 = __nccwpck_require__(35435);
+const runtime_1 = __nccwpck_require__(82905);
+const runtime_2 = __nccwpck_require__(82905);
+const runtime_3 = __nccwpck_require__(82905);
+const runtime_4 = __nccwpck_require__(82905);
+const runtime_5 = __nccwpck_require__(82905);
+const runtime_6 = __nccwpck_require__(82905);
+const runtime_7 = __nccwpck_require__(82905);
 // @generated message type with reflection information, may provide speed optimized methods
 class Timestamp$Type extends runtime_7.MessageType {
     constructor() {
@@ -174,7 +174,7 @@ exports.Timestamp = new Timestamp$Type();
 
 /***/ }),
 
-/***/ 6913:
+/***/ 74006:
 /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
 
 "use strict";
@@ -221,13 +221,13 @@ exports.BytesValue = exports.StringValue = exports.BoolValue = exports.UInt32Val
 // where we need to distinguish between the absence of a primitive
 // typed field and its default value.
 //
-const runtime_1 = __nccwpck_require__(35435);
-const runtime_2 = __nccwpck_require__(35435);
-const runtime_3 = __nccwpck_require__(35435);
-const runtime_4 = __nccwpck_require__(35435);
-const runtime_5 = __nccwpck_require__(35435);
-const runtime_6 = __nccwpck_require__(35435);
-const runtime_7 = __nccwpck_require__(35435);
+const runtime_1 = __nccwpck_require__(82905);
+const runtime_2 = __nccwpck_require__(82905);
+const runtime_3 = __nccwpck_require__(82905);
+const runtime_4 = __nccwpck_require__(82905);
+const runtime_5 = __nccwpck_require__(82905);
+const runtime_6 = __nccwpck_require__(82905);
+const runtime_7 = __nccwpck_require__(82905);
 // @generated message type with reflection information, may provide speed optimized methods
 class DoubleValue$Type extends runtime_7.MessageType {
     constructor() {
@@ -790,7 +790,7 @@ exports.BytesValue = new BytesValue$Type();
 
 /***/ }),
 
-/***/ 19832:
+/***/ 23508:
 /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
 
 "use strict";
@@ -810,15 +810,15 @@ var __exportStar = (this && this.__exportStar) || function(m, exports) {
     for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-__exportStar(__nccwpck_require__(23162), exports);
-__exportStar(__nccwpck_require__(6913), exports);
-__exportStar(__nccwpck_require__(93570), exports);
-__exportStar(__nccwpck_require__(24728), exports);
+__exportStar(__nccwpck_require__(1485), exports);
+__exportStar(__nccwpck_require__(74006), exports);
+__exportStar(__nccwpck_require__(29037), exports);
+__exportStar(__nccwpck_require__(29117), exports);
 //# sourceMappingURL=index.js.map
 
 /***/ }),
 
-/***/ 93570:
+/***/ 29037:
 /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
 
 "use strict";
@@ -828,15 +828,15 @@ exports.ArtifactService = exports.DeleteArtifactResponse = exports.DeleteArtifac
 // @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
 // @generated from protobuf file "results/api/v1/artifact.proto" (package "github.actions.results.api.v1", syntax proto3)
 // tslint:disable
-const runtime_rpc_1 = __nccwpck_require__(40543);
-const runtime_1 = __nccwpck_require__(35435);
-const runtime_2 = __nccwpck_require__(35435);
-const runtime_3 = __nccwpck_require__(35435);
-const runtime_4 = __nccwpck_require__(35435);
-const runtime_5 = __nccwpck_require__(35435);
-const wrappers_1 = __nccwpck_require__(6913);
-const wrappers_2 = __nccwpck_require__(6913);
-const timestamp_1 = __nccwpck_require__(23162);
+const runtime_rpc_1 = __nccwpck_require__(84183);
+const runtime_1 = __nccwpck_require__(82905);
+const runtime_2 = __nccwpck_require__(82905);
+const runtime_3 = __nccwpck_require__(82905);
+const runtime_4 = __nccwpck_require__(82905);
+const runtime_5 = __nccwpck_require__(82905);
+const wrappers_1 = __nccwpck_require__(74006);
+const wrappers_2 = __nccwpck_require__(74006);
+const timestamp_1 = __nccwpck_require__(1485);
 // @generated message type with reflection information, may provide speed optimized methods
 class CreateArtifactRequest$Type extends runtime_5.MessageType {
     constructor() {
@@ -1529,7 +1529,7 @@ exports.ArtifactService = new runtime_rpc_1.ServiceType("github.actions.results.
 
 /***/ }),
 
-/***/ 24728:
+/***/ 29117:
 /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
 
 "use strict";
@@ -1545,8 +1545,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.createArtifactServiceServer = exports.ArtifactServiceMethodList = exports.ArtifactServiceMethod = exports.ArtifactServiceClientProtobuf = exports.ArtifactServiceClientJSON = void 0;
-const twirp_ts_1 = __nccwpck_require__(39231);
-const artifact_1 = __nccwpck_require__(93570);
+const twirp_ts_1 = __nccwpck_require__(41826);
+const artifact_1 = __nccwpck_require__(29037);
 class ArtifactServiceClientJSON {
     constructor(rpc) {
         this.rpc = rpc;
@@ -2044,7 +2044,7 @@ function handleArtifactServiceDeleteArtifactProtobuf(ctx, service, data, interce
 
 /***/ }),
 
-/***/ 41932:
+/***/ 69801:
 /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
 
 "use strict";
@@ -2071,14 +2071,14 @@ var __rest = (this && this.__rest) || function (s, e) {
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.DefaultArtifactClient = void 0;
-const core_1 = __nccwpck_require__(57335);
-const config_1 = __nccwpck_require__(60105);
-const upload_artifact_1 = __nccwpck_require__(73875);
-const download_artifact_1 = __nccwpck_require__(66418);
-const delete_artifact_1 = __nccwpck_require__(21552);
-const get_artifact_1 = __nccwpck_require__(53315);
-const list_artifacts_1 = __nccwpck_require__(10198);
-const errors_1 = __nccwpck_require__(3453);
+const core_1 = __nccwpck_require__(95127);
+const config_1 = __nccwpck_require__(59030);
+const upload_artifact_1 = __nccwpck_require__(72390);
+const download_artifact_1 = __nccwpck_require__(30789);
+const delete_artifact_1 = __nccwpck_require__(45847);
+const get_artifact_1 = __nccwpck_require__(95551);
+const list_artifacts_1 = __nccwpck_require__(56668);
+const errors_1 = __nccwpck_require__(3380);
 /**
  * The default artifact client that is used by the artifact action(s).
  */
@@ -2195,7 +2195,7 @@ exports.DefaultArtifactClient = DefaultArtifactClient;
 
 /***/ }),
 
-/***/ 21552:
+/***/ 45847:
 /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
 
 "use strict";
@@ -2211,18 +2211,18 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.deleteArtifactInternal = exports.deleteArtifactPublic = void 0;
-const core_1 = __nccwpck_require__(57335);
-const github_1 = __nccwpck_require__(8623);
-const user_agent_1 = __nccwpck_require__(47910);
-const retry_options_1 = __nccwpck_require__(33035);
-const utils_1 = __nccwpck_require__(92500);
-const plugin_request_log_1 = __nccwpck_require__(81160);
-const plugin_retry_1 = __nccwpck_require__(35336);
-const artifact_twirp_client_1 = __nccwpck_require__(42586);
-const util_1 = __nccwpck_require__(92274);
-const generated_1 = __nccwpck_require__(19832);
-const get_artifact_1 = __nccwpck_require__(53315);
-const errors_1 = __nccwpck_require__(3453);
+const core_1 = __nccwpck_require__(95127);
+const github_1 = __nccwpck_require__(53134);
+const user_agent_1 = __nccwpck_require__(66066);
+const retry_options_1 = __nccwpck_require__(64635);
+const utils_1 = __nccwpck_require__(5310);
+const plugin_request_log_1 = __nccwpck_require__(67823);
+const plugin_retry_1 = __nccwpck_require__(57293);
+const artifact_twirp_client_1 = __nccwpck_require__(17152);
+const util_1 = __nccwpck_require__(1574);
+const generated_1 = __nccwpck_require__(23508);
+const get_artifact_1 = __nccwpck_require__(95551);
+const errors_1 = __nccwpck_require__(3380);
 function deleteArtifactPublic(artifactName, workflowRunId, repositoryOwner, repositoryName, token) {
     var _a;
     return __awaiter(this, void 0, void 0, function* () {
@@ -2285,7 +2285,7 @@ exports.deleteArtifactInternal = deleteArtifactInternal;
 
 /***/ }),
 
-/***/ 66418:
+/***/ 30789:
 /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
 
 "use strict";
@@ -2328,16 +2328,16 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.downloadArtifactInternal = exports.downloadArtifactPublic = exports.streamExtractExternal = void 0;
 const promises_1 = __importDefault(__nccwpck_require__(73292));
-const github = __importStar(__nccwpck_require__(8623));
-const core = __importStar(__nccwpck_require__(57335));
-const httpClient = __importStar(__nccwpck_require__(94646));
-const unzip_stream_1 = __importDefault(__nccwpck_require__(149));
-const user_agent_1 = __nccwpck_require__(47910);
-const config_1 = __nccwpck_require__(60105);
-const artifact_twirp_client_1 = __nccwpck_require__(42586);
-const generated_1 = __nccwpck_require__(19832);
-const util_1 = __nccwpck_require__(92274);
-const errors_1 = __nccwpck_require__(3453);
+const github = __importStar(__nccwpck_require__(53134));
+const core = __importStar(__nccwpck_require__(95127));
+const httpClient = __importStar(__nccwpck_require__(76227));
+const unzip_stream_1 = __importDefault(__nccwpck_require__(67821));
+const user_agent_1 = __nccwpck_require__(66066);
+const config_1 = __nccwpck_require__(59030);
+const artifact_twirp_client_1 = __nccwpck_require__(17152);
+const generated_1 = __nccwpck_require__(23508);
+const util_1 = __nccwpck_require__(1574);
+const errors_1 = __nccwpck_require__(3380);
 const scrubQueryParameters = (url) => {
     const parsed = new URL(url);
     parsed.search = '';
@@ -2497,7 +2497,7 @@ function resolveOrCreateDirectory(downloadPath = (0, config_1.getGitHubWorkspace
 
 /***/ }),
 
-/***/ 53315:
+/***/ 95551:
 /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
 
 "use strict";
@@ -2536,17 +2536,17 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.getArtifactInternal = exports.getArtifactPublic = void 0;
-const github_1 = __nccwpck_require__(8623);
-const plugin_retry_1 = __nccwpck_require__(35336);
-const core = __importStar(__nccwpck_require__(57335));
-const utils_1 = __nccwpck_require__(92500);
-const retry_options_1 = __nccwpck_require__(33035);
-const plugin_request_log_1 = __nccwpck_require__(81160);
-const util_1 = __nccwpck_require__(92274);
-const user_agent_1 = __nccwpck_require__(47910);
-const artifact_twirp_client_1 = __nccwpck_require__(42586);
-const generated_1 = __nccwpck_require__(19832);
-const errors_1 = __nccwpck_require__(3453);
+const github_1 = __nccwpck_require__(53134);
+const plugin_retry_1 = __nccwpck_require__(57293);
+const core = __importStar(__nccwpck_require__(95127));
+const utils_1 = __nccwpck_require__(5310);
+const retry_options_1 = __nccwpck_require__(64635);
+const plugin_request_log_1 = __nccwpck_require__(67823);
+const util_1 = __nccwpck_require__(1574);
+const user_agent_1 = __nccwpck_require__(66066);
+const artifact_twirp_client_1 = __nccwpck_require__(17152);
+const generated_1 = __nccwpck_require__(23508);
+const errors_1 = __nccwpck_require__(3380);
 function getArtifactPublic(artifactName, workflowRunId, repositoryOwner, repositoryName, token) {
     var _a;
     return __awaiter(this, void 0, void 0, function* () {
@@ -2626,7 +2626,7 @@ exports.getArtifactInternal = getArtifactInternal;
 
 /***/ }),
 
-/***/ 10198:
+/***/ 56668:
 /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
 
 "use strict";
@@ -2642,16 +2642,16 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.listArtifactsInternal = exports.listArtifactsPublic = void 0;
-const core_1 = __nccwpck_require__(57335);
-const github_1 = __nccwpck_require__(8623);
-const user_agent_1 = __nccwpck_require__(47910);
-const retry_options_1 = __nccwpck_require__(33035);
-const utils_1 = __nccwpck_require__(92500);
-const plugin_request_log_1 = __nccwpck_require__(81160);
-const plugin_retry_1 = __nccwpck_require__(35336);
-const artifact_twirp_client_1 = __nccwpck_require__(42586);
-const util_1 = __nccwpck_require__(92274);
-const generated_1 = __nccwpck_require__(19832);
+const core_1 = __nccwpck_require__(95127);
+const github_1 = __nccwpck_require__(53134);
+const user_agent_1 = __nccwpck_require__(66066);
+const retry_options_1 = __nccwpck_require__(64635);
+const utils_1 = __nccwpck_require__(5310);
+const plugin_request_log_1 = __nccwpck_require__(67823);
+const plugin_retry_1 = __nccwpck_require__(57293);
+const artifact_twirp_client_1 = __nccwpck_require__(17152);
+const util_1 = __nccwpck_require__(1574);
+const generated_1 = __nccwpck_require__(23508);
 // Limiting to 1000 for perf reasons
 const maximumArtifactCount = 1000;
 const paginationCount = 100;
@@ -2772,7 +2772,7 @@ function filterLatest(artifacts) {
 
 /***/ }),
 
-/***/ 33035:
+/***/ 64635:
 /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
 
 "use strict";
@@ -2802,7 +2802,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.getRetryOptions = void 0;
-const core = __importStar(__nccwpck_require__(57335));
+const core = __importStar(__nccwpck_require__(95127));
 // Defaults for fetching artifacts
 const defaultMaxRetryNumber = 5;
 const defaultExemptStatusCodes = [400, 401, 403, 404, 422]; // https://github.com/octokit/plugin-retry.js/blob/9a2443746c350b3beedec35cf26e197ea318a261/src/index.ts#L14
@@ -2829,7 +2829,7 @@ exports.getRetryOptions = getRetryOptions;
 
 /***/ }),
 
-/***/ 42586:
+/***/ 17152:
 /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
 
 "use strict";
@@ -2845,13 +2845,13 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.internalArtifactTwirpClient = void 0;
-const http_client_1 = __nccwpck_require__(94646);
-const auth_1 = __nccwpck_require__(42991);
-const core_1 = __nccwpck_require__(57335);
-const generated_1 = __nccwpck_require__(19832);
-const config_1 = __nccwpck_require__(60105);
-const user_agent_1 = __nccwpck_require__(47910);
-const errors_1 = __nccwpck_require__(3453);
+const http_client_1 = __nccwpck_require__(76227);
+const auth_1 = __nccwpck_require__(75181);
+const core_1 = __nccwpck_require__(95127);
+const generated_1 = __nccwpck_require__(23508);
+const config_1 = __nccwpck_require__(59030);
+const user_agent_1 = __nccwpck_require__(66066);
+const errors_1 = __nccwpck_require__(3380);
 class ArtifactHttpClient {
     constructor(userAgent, maxAttempts, baseRetryIntervalMilliseconds, retryMultiplier) {
         this.maxAttempts = 5;
@@ -2988,7 +2988,7 @@ exports.internalArtifactTwirpClient = internalArtifactTwirpClient;
 
 /***/ }),
 
-/***/ 60105:
+/***/ 59030:
 /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
 
 "use strict";
@@ -3053,7 +3053,7 @@ exports.getConcurrency = getConcurrency;
 
 /***/ }),
 
-/***/ 3453:
+/***/ 3380:
 /***/ ((__unused_webpack_module, exports) => {
 
 "use strict";
@@ -3130,7 +3130,7 @@ UsageError.isUsageErrorMessage = (msg) => {
 
 /***/ }),
 
-/***/ 68213:
+/***/ 73511:
 /***/ ((__unused_webpack_module, exports) => {
 
 "use strict";
@@ -3140,7 +3140,7 @@ Object.defineProperty(exports, "__esModule", ({ value: true }));
 
 /***/ }),
 
-/***/ 47910:
+/***/ 66066:
 /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
 
 "use strict";
@@ -3160,7 +3160,7 @@ exports.getUserAgentString = getUserAgentString;
 
 /***/ }),
 
-/***/ 92274:
+/***/ 1574:
 /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
 
 "use strict";
@@ -3193,9 +3193,9 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.getBackendIdsFromToken = void 0;
-const core = __importStar(__nccwpck_require__(57335));
-const config_1 = __nccwpck_require__(60105);
-const jwt_decode_1 = __importDefault(__nccwpck_require__(44485));
+const core = __importStar(__nccwpck_require__(95127));
+const config_1 = __nccwpck_require__(59030);
+const jwt_decode_1 = __importDefault(__nccwpck_require__(33675));
 const InvalidJwtError = new Error('Failed to get backend IDs: The provided JWT token is invalid and/or missing claims');
 // uses the JWT token claims to get the
 // workflow run and workflow job run backend ids
@@ -3248,7 +3248,7 @@ exports.getBackendIdsFromToken = getBackendIdsFromToken;
 
 /***/ }),
 
-/***/ 87074:
+/***/ 94711:
 /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
 
 "use strict";
@@ -3287,12 +3287,12 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.uploadZipToBlobStorage = void 0;
-const storage_blob_1 = __nccwpck_require__(46045);
-const config_1 = __nccwpck_require__(60105);
-const core = __importStar(__nccwpck_require__(57335));
+const storage_blob_1 = __nccwpck_require__(48887);
+const config_1 = __nccwpck_require__(59030);
+const core = __importStar(__nccwpck_require__(95127));
 const crypto = __importStar(__nccwpck_require__(6113));
 const stream = __importStar(__nccwpck_require__(12781));
-const errors_1 = __nccwpck_require__(3453);
+const errors_1 = __nccwpck_require__(3380);
 function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
     return __awaiter(this, void 0, void 0, function* () {
         let uploadByteCount = 0;
@@ -3370,14 +3370,14 @@ exports.uploadZipToBlobStorage = uploadZipToBlobStorage;
 
 /***/ }),
 
-/***/ 16504:
+/***/ 37360:
 /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
 
 "use strict";
 
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.validateFilePath = exports.validateArtifactName = void 0;
-const core_1 = __nccwpck_require__(57335);
+const core_1 = __nccwpck_require__(95127);
 /**
  * Invalid characters that cannot be in the artifact name or an uploaded file. Will be rejected
  * from the server if attempted to be sent over. These characters are not allowed due to limitations with certain
@@ -3444,7 +3444,7 @@ exports.validateFilePath = validateFilePath;
 
 /***/ }),
 
-/***/ 93080:
+/***/ 27838:
 /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
 
 "use strict";
@@ -3474,8 +3474,8 @@ var __importStar = (this && this.__importStar) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.getExpiration = void 0;
-const generated_1 = __nccwpck_require__(19832);
-const core = __importStar(__nccwpck_require__(57335));
+const generated_1 = __nccwpck_require__(23508);
+const core = __importStar(__nccwpck_require__(95127));
 function getExpiration(retentionDays) {
     if (!retentionDays) {
         return undefined;
@@ -3505,7 +3505,7 @@ function getRetentionDays() {
 
 /***/ }),
 
-/***/ 73875:
+/***/ 72390:
 /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
 
 "use strict";
@@ -3544,16 +3544,16 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.uploadArtifact = void 0;
-const core = __importStar(__nccwpck_require__(57335));
-const retention_1 = __nccwpck_require__(93080);
-const path_and_artifact_name_validation_1 = __nccwpck_require__(16504);
-const artifact_twirp_client_1 = __nccwpck_require__(42586);
-const upload_zip_specification_1 = __nccwpck_require__(99425);
-const util_1 = __nccwpck_require__(92274);
-const blob_upload_1 = __nccwpck_require__(87074);
-const zip_1 = __nccwpck_require__(9614);
-const generated_1 = __nccwpck_require__(19832);
-const errors_1 = __nccwpck_require__(3453);
+const core = __importStar(__nccwpck_require__(95127));
+const retention_1 = __nccwpck_require__(27838);
+const path_and_artifact_name_validation_1 = __nccwpck_require__(37360);
+const artifact_twirp_client_1 = __nccwpck_require__(17152);
+const upload_zip_specification_1 = __nccwpck_require__(1985);
+const util_1 = __nccwpck_require__(1574);
+const blob_upload_1 = __nccwpck_require__(94711);
+const zip_1 = __nccwpck_require__(37164);
+const generated_1 = __nccwpck_require__(23508);
+const errors_1 = __nccwpck_require__(3380);
 function uploadArtifact(name, files, rootDirectory, options) {
     return __awaiter(this, void 0, void 0, function* () {
         (0, path_and_artifact_name_validation_1.validateArtifactName)(name);
@@ -3615,7 +3615,7 @@ exports.uploadArtifact = uploadArtifact;
 
 /***/ }),
 
-/***/ 99425:
+/***/ 1985:
 /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
 
 "use strict";
@@ -3646,9 +3646,9 @@ var __importStar = (this && this.__importStar) || function (mod) {
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.getUploadZipSpecification = exports.validateRootDirectory = void 0;
 const fs = __importStar(__nccwpck_require__(57147));
-const core_1 = __nccwpck_require__(57335);
+const core_1 = __nccwpck_require__(95127);
 const path_1 = __nccwpck_require__(71017);
-const path_and_artifact_name_validation_1 = __nccwpck_require__(16504);
+const path_and_artifact_name_validation_1 = __nccwpck_require__(37360);
 /**
  * Checks if a root directory exists and is valid
 * @param rootDirectory an absolute root directory path common to all input files that that will be trimmed from the final zip structure
@@ -3735,7 +3735,7 @@ exports.getUploadZipSpecification = getUploadZipSpecification;
 
 /***/ }),
 
-/***/ 9614:
+/***/ 37164:
 /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
 
 "use strict";
@@ -3775,10 +3775,10 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.createZipUploadStream = exports.ZipUploadStream = exports.DEFAULT_COMPRESSION_LEVEL = void 0;
 const stream = __importStar(__nccwpck_require__(12781));
-const archiver = __importStar(__nccwpck_require__(50126));
-const core = __importStar(__nccwpck_require__(57335));
+const archiver = __importStar(__nccwpck_require__(7531));
+const core = __importStar(__nccwpck_require__(95127));
 const fs_1 = __nccwpck_require__(57147);
-const config_1 = __nccwpck_require__(60105);
+const config_1 = __nccwpck_require__(59030);
 exports.DEFAULT_COMPRESSION_LEVEL = 6;
 // Custom stream transformer so we can set the highWaterMark property
 // See https://github.com/nodejs/node/issues/8855
@@ -3855,7 +3855,7 @@ const zipEndCallback = () => {
 
 /***/ }),
 
-/***/ 6859:
+/***/ 65604:
 /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
 
 "use strict";
@@ -3882,7 +3882,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.issue = exports.issueCommand = void 0;
 const os = __importStar(__nccwpck_require__(22037));
-const utils_1 = __nccwpck_require__(22549);
+const utils_1 = __nccwpck_require__(91245);
 /**
  * Commands
 *
@@ -3954,7 +3954,7 @@ function escapeProperty(s) {
 
 /***/ }),
 
-/***/ 57335:
+/***/ 95127:
 /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
 
 "use strict";
@@ -3989,12 +3989,12 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;
-const command_1 = __nccwpck_require__(6859);
-const file_command_1 = __nccwpck_require__(76419);
-const utils_1 = __nccwpck_require__(22549);
+const command_1 = __nccwpck_require__(65604);
+const file_command_1 = __nccwpck_require__(77352);
+const utils_1 = __nccwpck_require__(91245);
 const os = __importStar(__nccwpck_require__(22037));
 const path = __importStar(__nccwpck_require__(71017));
-const oidc_utils_1 = __nccwpck_require__(58925);
+const oidc_utils_1 = __nccwpck_require__(64457);
 /**
  * The code to exit an action
  */
@@ -4279,17 +4279,17 @@ exports.getIDToken = getIDToken;
 /**
  * Summary exports
  */
-var summary_1 = __nccwpck_require__(72389);
+var summary_1 = __nccwpck_require__(99124);
 Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } }));
 /**
 * @deprecated use core.summary
 */
-var summary_2 = __nccwpck_require__(72389);
+var summary_2 = __nccwpck_require__(99124);
 Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } }));
 /**
 * Path exports
 */
-var path_utils_1 = __nccwpck_require__(1426);
+var path_utils_1 = __nccwpck_require__(57169);
 Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } }));
 Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } }));
 Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } }));
@@ -4297,7 +4297,7 @@ Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: funct
 
 /***/ }),
 
-/***/ 76419:
+/***/ 77352:
 /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
 
 "use strict";
@@ -4328,8 +4328,8 @@ exports.prepareKeyValueMessage = exports.issueFileCommand = void 0;
 /* eslint-disable @typescript-eslint/no-explicit-any */
 const fs = __importStar(__nccwpck_require__(57147));
 const os = __importStar(__nccwpck_require__(22037));
-const uuid_1 = __nccwpck_require__(75474);
-const utils_1 = __nccwpck_require__(22549);
+const uuid_1 = __nccwpck_require__(89267);
+const utils_1 = __nccwpck_require__(91245);
 function issueFileCommand(command, message) {
     const filePath = process.env[`GITHUB_${command}`];
     if (!filePath) {
@@ -4362,7 +4362,7 @@ exports.prepareKeyValueMessage = prepareKeyValueMessage;
 
 /***/ }),
 
-/***/ 58925:
+/***/ 64457:
 /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
 
 "use strict";
@@ -4378,9 +4378,9 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.OidcClient = void 0;
-const http_client_1 = __nccwpck_require__(94646);
-const auth_1 = __nccwpck_require__(42991);
-const core_1 = __nccwpck_require__(57335);
+const http_client_1 = __nccwpck_require__(76227);
+const auth_1 = __nccwpck_require__(75181);
+const core_1 = __nccwpck_require__(95127);
 class OidcClient {
     static createHttpClient(allowRetry = true, maxRetry = 10) {
         const requestOptions = {
@@ -4412,7 +4412,7 @@ class OidcClient {
             .catch(error => {
             throw new Error(`Failed to get ID Token. \n
         Error Code : ${error.statusCode}\n
-        Error Message: ${error.result.message}`);
+        Error Message: ${error.message}`);
         });
         const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value;
         if (!id_token) {
@@ -4446,7 +4446,7 @@ exports.OidcClient = OidcClient;
 
 /***/ }),
 
-/***/ 1426:
+/***/ 57169:
 /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
 
 "use strict";
@@ -4511,7 +4511,7 @@ exports.toPlatformPath = toPlatformPath;
 
 /***/ }),
 
-/***/ 72389:
+/***/ 99124:
 /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
 
 "use strict";
@@ -4801,7 +4801,7 @@ exports.summary = _summary;
 
 /***/ }),
 
-/***/ 22549:
+/***/ 91245:
 /***/ ((__unused_webpack_module, exports) => {
 
 "use strict";
@@ -4848,7 +4848,7 @@ exports.toCommandProperties = toCommandProperties;
 
 /***/ }),
 
-/***/ 8283:
+/***/ 75210:
 /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
 
 "use strict";
@@ -4909,7 +4909,7 @@ exports.Context = Context;
 
 /***/ }),
 
-/***/ 8623:
+/***/ 53134:
 /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
 
 "use strict";
@@ -4935,8 +4935,8 @@ var __importStar = (this && this.__importStar) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.getOctokit = exports.context = void 0;
-const Context = __importStar(__nccwpck_require__(8283));
-const utils_1 = __nccwpck_require__(92500);
+const Context = __importStar(__nccwpck_require__(75210));
+const utils_1 = __nccwpck_require__(5310);
 exports.context = new Context.Context();
 /**
 * Returns a hydrated octokit ready to use for GitHub Actions
@@ -4953,7 +4953,7 @@ exports.getOctokit = getOctokit;
 
 /***/ }),
 
-/***/ 94334:
+/***/ 47329:
 /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
 
 "use strict";
@@ -4979,7 +4979,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0;
-const httpClient = __importStar(__nccwpck_require__(94646));
+const httpClient = __importStar(__nccwpck_require__(76227));
 function getAuthString(token, options) {
     if (!token && !options.auth) {
         throw new Error('Parameter token or opts.auth is required');
@@ -5003,7 +5003,7 @@ exports.getApiBaseUrl = getApiBaseUrl;
 
 /***/ }),
 
-/***/ 92500:
+/***/ 5310:
 /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
 
 "use strict";
@@ -5029,12 +5029,12 @@ var __importStar = (this && this.__importStar) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.getOctokitOptions = exports.GitHub = exports.defaults = exports.context = void 0;
-const Context = __importStar(__nccwpck_require__(8283));
-const Utils = __importStar(__nccwpck_require__(94334));
+const Context = __importStar(__nccwpck_require__(75210));
+const Utils = __importStar(__nccwpck_require__(47329));
 // octokit + plugins
-const core_1 = __nccwpck_require__(72762);
-const plugin_rest_endpoint_methods_1 = __nccwpck_require__(31478);
-const plugin_paginate_rest_1 = __nccwpck_require__(22517);
+const core_1 = __nccwpck_require__(6461);
+const plugin_rest_endpoint_methods_1 = __nccwpck_require__(96752);
+const plugin_paginate_rest_1 = __nccwpck_require__(49883);
 exports.context = new Context.Context();
 const baseUrl = Utils.getApiBaseUrl();
 exports.defaults = {
@@ -5064,7 +5064,7 @@ exports.getOctokitOptions = getOctokitOptions;
 
 /***/ }),
 
-/***/ 42991:
+/***/ 75181:
 /***/ (function(__unused_webpack_module, exports) {
 
 "use strict";
@@ -5152,7 +5152,7 @@ exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHand
 
 /***/ }),
 
-/***/ 94646:
+/***/ 76227:
 /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
 
 "use strict";
@@ -5194,9 +5194,9 @@ Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;
 const http = __importStar(__nccwpck_require__(13685));
 const https = __importStar(__nccwpck_require__(95687));
-const pm = __importStar(__nccwpck_require__(1856));
-const tunnel = __importStar(__nccwpck_require__(62150));
-const undici_1 = __nccwpck_require__(34397);
+const pm = __importStar(__nccwpck_require__(90603));
+const tunnel = __importStar(__nccwpck_require__(47265));
+const undici_1 = __nccwpck_require__(49164);
 var HttpCodes;
 (function (HttpCodes) {
     HttpCodes[HttpCodes["OK"] = 200] = "OK";
@@ -5811,7 +5811,7 @@ const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCa
 
 /***/ }),
 
-/***/ 1856:
+/***/ 90603:
 /***/ ((__unused_webpack_module, exports) => {
 
 "use strict";
@@ -5900,7 +5900,7 @@ function isLoopbackAddress(host) {
 
 /***/ }),
 
-/***/ 9694:
+/***/ 33483:
 /***/ ((__unused_webpack_module, exports) => {
 
 "use strict";
@@ -6147,7 +6147,7 @@ exports.AbortSignal = AbortSignal;
 
 /***/ }),
 
-/***/ 49584:
+/***/ 48887:
 /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
 
 "use strict";
@@ -6155,27 +6155,25 @@ exports.AbortSignal = AbortSignal;
 
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 
-var uuid = __nccwpck_require__(75474);
-var util = __nccwpck_require__(73837);
-var tslib = __nccwpck_require__(93167);
-var xml2js = __nccwpck_require__(25791);
-var coreUtil = __nccwpck_require__(55764);
-var logger$1 = __nccwpck_require__(52316);
-var coreAuth = __nccwpck_require__(61095);
-var os = __nccwpck_require__(22037);
-var http = __nccwpck_require__(13685);
-var https = __nccwpck_require__(95687);
-var abortController = __nccwpck_require__(9694);
-var tunnel = __nccwpck_require__(62150);
+var coreRestPipeline = __nccwpck_require__(78013);
+var tslib = __nccwpck_require__(12553);
+var coreAuth = __nccwpck_require__(59266);
+var coreUtil = __nccwpck_require__(30409);
+var coreHttpCompat = __nccwpck_require__(18442);
+var coreClient = __nccwpck_require__(7802);
+var coreXml = __nccwpck_require__(86518);
+var logger$1 = __nccwpck_require__(76085);
+var abortController = __nccwpck_require__(33483);
+var crypto = __nccwpck_require__(6113);
+var coreTracing = __nccwpck_require__(79030);
 var stream = __nccwpck_require__(12781);
-var FormData = __nccwpck_require__(85544);
-var node_fetch = __nccwpck_require__(26006);
-var coreTracing = __nccwpck_require__(51695);
-
-function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
+var coreLro = __nccwpck_require__(16855);
+var events = __nccwpck_require__(82361);
+var fs = __nccwpck_require__(57147);
+var util = __nccwpck_require__(73837);
+var buffer = __nccwpck_require__(14300);
 
-function _interopNamespace(e) {
-    if (e && e.__esModule) return e;
+function _interopNamespaceDefault(e) {
     var n = Object.create(null);
     if (e) {
         Object.keys(e).forEach(function (k) {
@@ -6188,6201 +6186,3261 @@ function _interopNamespace(e) {
             }
         });
     }
-    n["default"] = e;
+    n.default = e;
     return Object.freeze(n);
 }
 
-var xml2js__namespace = /*#__PURE__*/_interopNamespace(xml2js);
-var os__namespace = /*#__PURE__*/_interopNamespace(os);
-var http__namespace = /*#__PURE__*/_interopNamespace(http);
-var https__namespace = /*#__PURE__*/_interopNamespace(https);
-var tunnel__namespace = /*#__PURE__*/_interopNamespace(tunnel);
-var FormData__default = /*#__PURE__*/_interopDefaultLegacy(FormData);
-var node_fetch__default = /*#__PURE__*/_interopDefaultLegacy(node_fetch);
+var coreHttpCompat__namespace = /*#__PURE__*/_interopNamespaceDefault(coreHttpCompat);
+var coreClient__namespace = /*#__PURE__*/_interopNamespaceDefault(coreClient);
+var fs__namespace = /*#__PURE__*/_interopNamespaceDefault(fs);
+var util__namespace = /*#__PURE__*/_interopNamespaceDefault(util);
 
 // Copyright (c) Microsoft Corporation.
 // Licensed under the MIT license.
 /**
- * A collection of HttpHeaders that can be sent with a HTTP request.
+ * The `@azure/logger` configuration for this package.
  */
-function getHeaderKey(headerName) {
-    return headerName.toLowerCase();
-}
-function isHttpHeadersLike(object) {
-    if (object && typeof object === "object") {
-        const castObject = object;
-        if (typeof castObject.rawHeaders === "function" &&
-            typeof castObject.clone === "function" &&
-            typeof castObject.get === "function" &&
-            typeof castObject.set === "function" &&
-            typeof castObject.contains === "function" &&
-            typeof castObject.remove === "function" &&
-            typeof castObject.headersArray === "function" &&
-            typeof castObject.headerValues === "function" &&
-            typeof castObject.headerNames === "function" &&
-            typeof castObject.toJson === "function") {
-            return true;
-        }
-    }
-    return false;
-}
+const logger = logger$1.createClientLogger("storage-blob");
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
 /**
- * A collection of HTTP header key/value pairs.
+ * The base class from which all request policies derive.
  */
-class HttpHeaders {
-    constructor(rawHeaders) {
-        this._headersMap = {};
-        if (rawHeaders) {
-            for (const headerName in rawHeaders) {
-                this.set(headerName, rawHeaders[headerName]);
-            }
-        }
-    }
-    /**
-     * Set a header in this collection with the provided name and value. The name is
-     * case-insensitive.
-     * @param headerName - The name of the header to set. This value is case-insensitive.
-     * @param headerValue - The value of the header to set.
-     */
-    set(headerName, headerValue) {
-        this._headersMap[getHeaderKey(headerName)] = {
-            name: headerName,
-            value: headerValue.toString().trim(),
-        };
-    }
-    /**
-     * Get the header value for the provided header name, or undefined if no header exists in this
-     * collection with the provided name.
-     * @param headerName - The name of the header.
-     */
-    get(headerName) {
-        const header = this._headersMap[getHeaderKey(headerName)];
-        return !header ? undefined : header.value;
-    }
-    /**
-     * Get whether or not this header collection contains a header entry for the provided header name.
-     */
-    contains(headerName) {
-        return !!this._headersMap[getHeaderKey(headerName)];
-    }
-    /**
-     * Remove the header with the provided headerName. Return whether or not the header existed and
-     * was removed.
-     * @param headerName - The name of the header to remove.
-     */
-    remove(headerName) {
-        const result = this.contains(headerName);
-        delete this._headersMap[getHeaderKey(headerName)];
-        return result;
-    }
-    /**
-     * Get the headers that are contained this collection as an object.
-     */
-    rawHeaders() {
-        return this.toJson({ preserveCase: true });
-    }
-    /**
-     * Get the headers that are contained in this collection as an array.
-     */
-    headersArray() {
-        const headers = [];
-        for (const headerKey in this._headersMap) {
-            headers.push(this._headersMap[headerKey]);
-        }
-        return headers;
-    }
+class BaseRequestPolicy {
     /**
-     * Get the header names that are contained in this collection.
+     * The main method to implement that manipulates a request/response.
      */
-    headerNames() {
-        const headerNames = [];
-        const headers = this.headersArray();
-        for (let i = 0; i < headers.length; ++i) {
-            headerNames.push(headers[i].name);
-        }
-        return headerNames;
-    }
+    constructor(
    /**
-     * Get the header values that are contained in this collection.
+     * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline.
      */
-    headerValues() {
-        const headerValues = [];
-        const headers = this.headersArray();
-        for (let i = 0; i < headers.length; ++i) {
-            headerValues.push(headers[i].value);
-        }
-        return headerValues;
-    }
+    _nextPolicy,
    /**
-     * Get the JSON object representation of this HTTP header collection.
+     * The options that can be passed to a given request policy.
      */
-    toJson(options = {}) {
-        const result = {};
-        if (options.preserveCase) {
-            for (const headerKey in this._headersMap) {
-                const header = this._headersMap[headerKey];
-                result[header.name] = header.value;
-            }
-        }
-        else {
-            for (const headerKey in this._headersMap) {
-                const header = this._headersMap[headerKey];
-                result[getHeaderKey(header.name)] = header.value;
-            }
-        }
-        return result;
+    _options) {
+        this._nextPolicy = _nextPolicy;
+        this._options = _options;
     }
     /**
-     * Get the string representation of this HTTP header collection.
+     * Get whether or not a log with the provided log level should be logged.
+     * @param logLevel - The log level of the log that will be logged.
+     * @returns Whether or not a log with the provided log level should be logged.
      */
-    toString() {
-        return JSON.stringify(this.toJson({ preserveCase: true }));
+    shouldLog(logLevel) {
+        return this._options.shouldLog(logLevel);
     }
     /**
-     * Create a deep clone/copy of this HttpHeaders collection.
+     * Attempt to log the provided message to the provided logger. If no logger was provided or if
+     * the log level does not meat the logger's threshold, then nothing will be logged.
+     * @param logLevel - The log level of this log.
+     * @param message - The message of this log.
      */
-    clone() {
-        const resultPreservingCasing = {};
-        for (const headerKey in this._headersMap) {
-            const header = this._headersMap[headerKey];
-            resultPreservingCasing[header.name] = header.value;
-        }
-        return new HttpHeaders(resultPreservingCasing);
+    log(logLevel, message) {
+        this._options.log(logLevel, message);
     }
 }
 
 // Copyright (c) Microsoft Corporation.
 // Licensed under the MIT license.
+const SDK_VERSION = "12.23.0"; +const SERVICE_VERSION = "2024-05-04"; +const BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; // 256MB +const BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4000 * 1024 * 1024; // 4000MB +const BLOCK_BLOB_MAX_BLOCKS = 50000; +const DEFAULT_BLOCK_BUFFER_SIZE_BYTES = 8 * 1024 * 1024; // 8MB +const DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = 4 * 1024 * 1024; // 4MB +const DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = 5; +const REQUEST_TIMEOUT = 100 * 1000; // In ms /** - * Encodes a string in base64 format. - * @param value - The string to encode - */ -function encodeString(value) { - return Buffer.from(value).toString("base64"); -} -/** - * Encodes a byte array in base64 format. - * @param value - The Uint8Aray to encode - */ -function encodeByteArray(value) { - // Buffer.from accepts | -- the TypeScript definition is off here - // https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length - const bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer); - return bufferValue.toString("base64"); -} -/** - * Decodes a base64 string into a byte array. - * @param value - The base64 string to decode - */ -function decodeString(value) { - return Buffer.from(value, "base64"); -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * A set of constants used internally when processing requests. + * The OAuth scope to use with Azure Storage. */ -const Constants = { - /** - * The core-http version - */ - coreHttpVersion: "3.0.4", - /** - * Specifies HTTP. - */ - HTTP: "http:", - /** - * Specifies HTTPS. - */ - HTTPS: "https:", - /** - * Specifies HTTP Proxy. - */ - HTTP_PROXY: "HTTP_PROXY", - /** - * Specifies HTTPS Proxy. - */ - HTTPS_PROXY: "HTTPS_PROXY", - /** - * Specifies NO Proxy. - */ - NO_PROXY: "NO_PROXY", - /** - * Specifies ALL Proxy. - */ - ALL_PROXY: "ALL_PROXY", - HttpConstants: { - /** - * Http Verbs - */ - HttpVerbs: { - PUT: "PUT", - GET: "GET", - DELETE: "DELETE", - POST: "POST", - MERGE: "MERGE", - HEAD: "HEAD", - PATCH: "PATCH", - }, - StatusCodes: { - TooManyRequests: 429, - ServiceUnavailable: 503, - }, - }, - /** - * Defines constants for use with HTTP headers. - */ - HeaderConstants: { - /** - * The Authorization header. - */ - AUTHORIZATION: "authorization", - AUTHORIZATION_SCHEME: "Bearer", - /** - * The Retry-After response-header field can be used with a 503 (Service - * Unavailable) or 349 (Too Many Requests) responses to indicate how long - * the service is expected to be unavailable to the requesting client. - */ - RETRY_AFTER: "Retry-After", - /** - * The UserAgent header. 
- */ - USER_AGENT: "User-Agent", +const StorageOAuthScopes = "https://storage.azure.com/.default"; +const URLConstants = { + Parameters: { + FORCE_BROWSER_NO_CACHE: "_", + SIGNATURE: "sig", + SNAPSHOT: "snapshot", + VERSIONID: "versionid", + TIMEOUT: "timeout", }, }; +const HTTPURLConnection = { + HTTP_ACCEPTED: 202, + HTTP_CONFLICT: 409, + HTTP_NOT_FOUND: 404, + HTTP_PRECON_FAILED: 412, + HTTP_RANGE_NOT_SATISFIABLE: 416, +}; +const HeaderConstants = { + AUTHORIZATION: "Authorization", + AUTHORIZATION_SCHEME: "Bearer", + CONTENT_ENCODING: "Content-Encoding", + CONTENT_ID: "Content-ID", + CONTENT_LANGUAGE: "Content-Language", + CONTENT_LENGTH: "Content-Length", + CONTENT_MD5: "Content-Md5", + CONTENT_TRANSFER_ENCODING: "Content-Transfer-Encoding", + CONTENT_TYPE: "Content-Type", + COOKIE: "Cookie", + DATE: "date", + IF_MATCH: "if-match", + IF_MODIFIED_SINCE: "if-modified-since", + IF_NONE_MATCH: "if-none-match", + IF_UNMODIFIED_SINCE: "if-unmodified-since", + PREFIX_FOR_STORAGE: "x-ms-", + RANGE: "Range", + USER_AGENT: "User-Agent", + X_MS_CLIENT_REQUEST_ID: "x-ms-client-request-id", + X_MS_COPY_SOURCE: "x-ms-copy-source", + X_MS_DATE: "x-ms-date", + X_MS_ERROR_CODE: "x-ms-error-code", + X_MS_VERSION: "x-ms-version", +}; +const ETagNone = ""; +const ETagAny = "*"; +const SIZE_1_MB = 1 * 1024 * 1024; +const BATCH_MAX_REQUEST = 256; +const BATCH_MAX_PAYLOAD_IN_BYTES = 4 * SIZE_1_MB; +const HTTP_LINE_ENDING = "\r\n"; +const HTTP_VERSION_1_1 = "HTTP/1.1"; +const EncryptionAlgorithmAES25 = "AES256"; +const DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`; +const StorageBlobLoggingAllowedHeaderNames = [ + "Access-Control-Allow-Origin", + "Cache-Control", + "Content-Length", + "Content-Type", + "Date", + "Request-Id", + "traceparent", + "Transfer-Encoding", + "User-Agent", + "x-ms-client-request-id", + "x-ms-date", + "x-ms-error-code", + "x-ms-request-id", + "x-ms-return-client-request-id", + "x-ms-version", + "Accept-Ranges", + "Content-Disposition", + "Content-Encoding", + "Content-Language", + "Content-MD5", + "Content-Range", + "ETag", + "Last-Modified", + "Server", + "Vary", + "x-ms-content-crc64", + "x-ms-copy-action", + "x-ms-copy-completion-time", + "x-ms-copy-id", + "x-ms-copy-progress", + "x-ms-copy-status", + "x-ms-has-immutability-policy", + "x-ms-has-legal-hold", + "x-ms-lease-state", + "x-ms-lease-status", + "x-ms-range", + "x-ms-request-server-encrypted", + "x-ms-server-encrypted", + "x-ms-snapshot", + "x-ms-source-range", + "If-Match", + "If-Modified-Since", + "If-None-Match", + "If-Unmodified-Since", + "x-ms-access-tier", + "x-ms-access-tier-change-time", + "x-ms-access-tier-inferred", + "x-ms-account-kind", + "x-ms-archive-status", + "x-ms-blob-append-offset", + "x-ms-blob-cache-control", + "x-ms-blob-committed-block-count", + "x-ms-blob-condition-appendpos", + "x-ms-blob-condition-maxsize", + "x-ms-blob-content-disposition", + "x-ms-blob-content-encoding", + "x-ms-blob-content-language", + "x-ms-blob-content-length", + "x-ms-blob-content-md5", + "x-ms-blob-content-type", + "x-ms-blob-public-access", + "x-ms-blob-sequence-number", + "x-ms-blob-type", + "x-ms-copy-destination-snapshot", + "x-ms-creation-time", + "x-ms-default-encryption-scope", + "x-ms-delete-snapshots", + "x-ms-delete-type-permanent", + "x-ms-deny-encryption-scope-override", + "x-ms-encryption-algorithm", + 
"x-ms-if-sequence-number-eq", + "x-ms-if-sequence-number-le", + "x-ms-if-sequence-number-lt", + "x-ms-incremental-copy", + "x-ms-lease-action", + "x-ms-lease-break-period", + "x-ms-lease-duration", + "x-ms-lease-id", + "x-ms-lease-time", + "x-ms-page-write", + "x-ms-proposed-lease-id", + "x-ms-range-get-content-md5", + "x-ms-rehydrate-priority", + "x-ms-sequence-number-action", + "x-ms-sku-name", + "x-ms-source-content-md5", + "x-ms-source-if-match", + "x-ms-source-if-modified-since", + "x-ms-source-if-none-match", + "x-ms-source-if-unmodified-since", + "x-ms-tag-count", + "x-ms-encryption-key-sha256", + "x-ms-if-tags", + "x-ms-source-if-tags", +]; +const StorageBlobLoggingAllowedQueryParameters = [ + "comp", + "maxresults", + "rscc", + "rscd", + "rsce", + "rscl", + "rsct", + "se", + "si", + "sip", + "sp", + "spr", + "sr", + "srt", + "ss", + "st", + "sv", + "include", + "marker", + "prefix", + "copyid", + "restype", + "blockid", + "blocklisttype", + "delimiter", + "prevsnapshot", + "ske", + "skoid", + "sks", + "skt", + "sktid", + "skv", + "snapshot", +]; +const BlobUsesCustomerSpecifiedEncryptionMsg = "BlobUsesCustomerSpecifiedEncryption"; +const BlobDoesNotUseCustomerSpecifiedEncryption = "BlobDoesNotUseCustomerSpecifiedEncryption"; +/// List of ports used for path style addressing. +/// Path style addressing means that storage account is put in URI's Path segment in instead of in host. +const PathStylePorts = [ + "10000", + "10001", + "10002", + "10003", + "10004", + "10100", + "10101", + "10102", + "10103", + "10104", + "11000", + "11001", + "11002", + "11003", + "11004", + "11100", + "11101", + "11102", + "11103", + "11104", +]; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. /** - * Default key used to access the XML attributes. - */ -const XML_ATTRKEY = "$"; -/** - * Default key used to access the XML value content. - */ -const XML_CHARKEY = "_"; - -// Copyright (c) Microsoft Corporation. -const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i; -/** - * Encodes an URI. + * Reserved URL characters must be properly escaped for Storage services like Blob or File. * - * @param uri - The URI to be encoded. - * @returns The encoded URI. - */ -function encodeUri(uri) { - return encodeURIComponent(uri) - .replace(/!/g, "%21") - .replace(/"/g, "%27") - .replace(/\(/g, "%28") - .replace(/\)/g, "%29") - .replace(/\*/g, "%2A"); -} -/** - * Returns a stripped version of the Http Response which only contains body, - * headers and the status. + * ## URL encode and escape strategy for JS SDKs * - * @param response - The Http Response - * @returns The stripped version of Http Response. - */ -function stripResponse(response) { - const strippedResponse = {}; - strippedResponse.body = response.bodyAsText; - strippedResponse.headers = response.headers; - strippedResponse.status = response.status; - return strippedResponse; -} -/** - * Returns a stripped version of the Http Request that does not contain the - * Authorization header. + * When customers pass a URL string into XxxClient classes constructor, the URL string may already be URL encoded or not. + * But before sending to Azure Storage server, the URL must be encoded. However, it's hard for a SDK to guess whether the URL + * string has been encoded or not. We have 2 potential strategies, and chose strategy two for the XxxClient constructors. * - * @param request - The Http Request object - * @returns The stripped version of Http Request. 
- */ -function stripRequest(request) { - const strippedRequest = request.clone(); - if (strippedRequest.headers) { - strippedRequest.headers.remove("authorization"); - } - return strippedRequest; -} -/** - * Validates the given uuid as a string + * ### Strategy One: Assume the customer URL string is not encoded, and always encode URL string in SDK. * - * @param uuid - The uuid as a string that needs to be validated - * @returns True if the uuid is valid; false otherwise. - */ -function isValidUuid(uuid) { - return validUuidRegex.test(uuid); -} -/** - * Generated UUID + * This is what legacy V2 SDK does, simple and works for most of the cases. + * - When customer URL string is "http://account.blob.core.windows.net/con/b:", + * SDK will encode it to "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created. + * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A", + * SDK will encode it to "http://account.blob.core.windows.net/con/b%253A" and send to server. A blob named "b%3A" will be created. * - * @returns RFC4122 v4 UUID. - */ -function generateUuid() { - return uuid.v4(); -} -/** - * Executes an array of promises sequentially. Inspiration of this method is here: - * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises! + * But this strategy will make it not possible to create a blob with "?" in it's name. Because when customer URL string is + * "http://account.blob.core.windows.net/con/blob?name", the "?name" will be treated as URL paramter instead of blob name. + * If customer URL string is "http://account.blob.core.windows.net/con/blob%3Fname", a blob named "blob%3Fname" will be created. + * V2 SDK doesn't have this issue because it doesn't allow customer pass in a full URL, it accepts a separate blob name and encodeURIComponent for it. + * We cannot accept a SDK cannot create a blob name with "?". So we implement strategy two: + * + * ### Strategy Two: SDK doesn't assume the URL has been encoded or not. It will just escape the special characters. + * + * This is what V10 Blob Go SDK does. It accepts a URL type in Go, and call url.EscapedPath() to escape the special chars unescaped. + * - When customer URL string is "http://account.blob.core.windows.net/con/b:", + * SDK will escape ":" like "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created. + * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A", + * There is no special characters, so send "http://account.blob.core.windows.net/con/b%3A" to server. A blob named "b:" will be created. + * - When customer URL string is "http://account.blob.core.windows.net/con/b%253A", + * There is no special characters, so send "http://account.blob.core.windows.net/con/b%253A" to server. A blob named "b%3A" will be created. + * + * This strategy gives us flexibility to create with any special characters. But "%" will be treated as a special characters, if the URL string + * is not encoded, there shouldn't a "%" in the URL string, otherwise the URL is not a valid URL. + * If customer needs to create a blob with "%" in it's blob name, use "%25" instead of "%". Just like above 3rd sample. + * And following URL strings are invalid: + * - "http://account.blob.core.windows.net/con/b%" + * - "http://account.blob.core.windows.net/con/b%2" + * - "http://account.blob.core.windows.net/con/b%G" + * + * Another special character is "?", use "%2F" to represent a blob name with "?" 
in a URL string. + * + * ### Strategy for containerName, blobName or other specific XXXName parameters in methods such as `containerClient.getBlobClient(blobName)` + * + * We will apply strategy one, and call encodeURIComponent for these parameters like blobName. Because what customers passes in is a plain name instead of a URL. * - * @param promiseFactories - An array of promise factories(A function that return a promise) - * @param kickstart - Input to the first promise that is used to kickstart the promise chain. - * If not provided then the promise chain starts with undefined. - * @returns A chain of resolved or rejected promises + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-shares--directories--files--and-metadata + * + * @param url - */ -function executePromisesSequentially(promiseFactories, kickstart) { - let result = Promise.resolve(kickstart); - promiseFactories.forEach((promiseFactory) => { - result = result.then(promiseFactory); - }); - return result; +function escapeURLPath(url) { + const urlParsed = new URL(url); + let path = urlParsed.pathname; + path = path || "/"; + path = escape(path); + urlParsed.pathname = path; + return urlParsed.toString(); } -/** - * Converts a Promise to a callback. - * @param promise - The Promise to be converted to a callback - * @returns A function that takes the callback `(cb: Function) => void` - * @deprecated generated code should instead depend on responseToBody - */ -// eslint-disable-next-line @typescript-eslint/ban-types -function promiseToCallback(promise) { - if (typeof promise.then !== "function") { - throw new Error("The provided input is not a Promise."); +function getProxyUriFromDevConnString(connectionString) { + // Development Connection String + // https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string#connect-to-the-emulator-account-using-the-well-known-account-name-and-key + let proxyUri = ""; + if (connectionString.search("DevelopmentStorageProxyUri=") !== -1) { + // CONNECTION_STRING=UseDevelopmentStorage=true;DevelopmentStorageProxyUri=http://myProxyUri + const matchCredentials = connectionString.split(";"); + for (const element of matchCredentials) { + if (element.trim().startsWith("DevelopmentStorageProxyUri=")) { + proxyUri = element.trim().match("DevelopmentStorageProxyUri=(.*)")[1]; + } + } } - // eslint-disable-next-line @typescript-eslint/ban-types - return (cb) => { - promise - .then((data) => { - // eslint-disable-next-line promise/no-callback-in-promise - return cb(undefined, data); - }) - .catch((err) => { - // eslint-disable-next-line promise/no-callback-in-promise - cb(err); - }); - }; + return proxyUri; +} +function getValueInConnString(connectionString, argument) { + const elements = connectionString.split(";"); + for (const element of elements) { + if (element.trim().startsWith(argument)) { + return element.trim().match(argument + "=(.*)")[1]; + } + } + return ""; } /** - * Converts a Promise to a service callback. - * @param promise - The Promise of HttpOperationResponse to be converted to a service callback - * @returns A function that takes the service callback (cb: ServiceCallback): void + * Extracts the parts of an Azure Storage account connection string. + * + * @param connectionString - Connection string. + * @returns String key value pairs of the storage account's url and credentials. 
*/ -function promiseToServiceCallback(promise) { - if (typeof promise.then !== "function") { - throw new Error("The provided input is not a Promise."); +function extractConnectionStringParts(connectionString) { + let proxyUri = ""; + if (connectionString.startsWith("UseDevelopmentStorage=true")) { + // Development connection string + proxyUri = getProxyUriFromDevConnString(connectionString); + connectionString = DevelopmentConnectionString; } - return (cb) => { - promise - .then((data) => { - return process.nextTick(cb, undefined, data.parsedBody, data.request, data); - }) - .catch((err) => { - process.nextTick(cb, err); - }); - }; -} -function prepareXMLRootList(obj, elementName, xmlNamespaceKey, xmlNamespace) { - if (!Array.isArray(obj)) { - obj = [obj]; + // Matching BlobEndpoint in the Account connection string + let blobEndpoint = getValueInConnString(connectionString, "BlobEndpoint"); + // Slicing off '/' at the end if exists + // (The methods that use `extractConnectionStringParts` expect the url to not have `/` at the end) + blobEndpoint = blobEndpoint.endsWith("/") ? blobEndpoint.slice(0, -1) : blobEndpoint; + if (connectionString.search("DefaultEndpointsProtocol=") !== -1 && + connectionString.search("AccountKey=") !== -1) { + // Account connection string + let defaultEndpointsProtocol = ""; + let accountName = ""; + let accountKey = Buffer.from("accountKey", "base64"); + let endpointSuffix = ""; + // Get account name and key + accountName = getValueInConnString(connectionString, "AccountName"); + accountKey = Buffer.from(getValueInConnString(connectionString, "AccountKey"), "base64"); + if (!blobEndpoint) { + // BlobEndpoint is not present in the Account connection string + // Can be obtained from `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}` + defaultEndpointsProtocol = getValueInConnString(connectionString, "DefaultEndpointsProtocol"); + const protocol = defaultEndpointsProtocol.toLowerCase(); + if (protocol !== "https" && protocol !== "http") { + throw new Error("Invalid DefaultEndpointsProtocol in the provided Connection String. Expecting 'https' or 'http'"); + } + endpointSuffix = getValueInConnString(connectionString, "EndpointSuffix"); + if (!endpointSuffix) { + throw new Error("Invalid EndpointSuffix in the provided Connection String"); + } + blobEndpoint = `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; + } + if (!accountName) { + throw new Error("Invalid AccountName in the provided Connection String"); + } + else if (accountKey.length === 0) { + throw new Error("Invalid AccountKey in the provided Connection String"); + } + return { + kind: "AccountConnString", + url: blobEndpoint, + accountName, + accountKey, + proxyUri, + }; } - if (!xmlNamespaceKey || !xmlNamespace) { - return { [elementName]: obj }; + else { + // SAS connection string + let accountSas = getValueInConnString(connectionString, "SharedAccessSignature"); + let accountName = getValueInConnString(connectionString, "AccountName"); + // if accountName is empty, try to read it from BlobEndpoint + if (!accountName) { + accountName = getAccountNameFromUrl(blobEndpoint); + } + if (!blobEndpoint) { + throw new Error("Invalid BlobEndpoint in the provided SAS Connection String"); + } + else if (!accountSas) { + throw new Error("Invalid SharedAccessSignature in the provided SAS Connection String"); + } + // client constructors assume accountSas does *not* start with ? 
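+ // (e.g. a SAS token pasted as "?sv=2020-08-04&sig=..." would otherwise yield a double "?"
+ // when a client appends it to a resource URL as the query string)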
+ if (accountSas.startsWith("?")) {
+ accountSas = accountSas.substring(1);
+ }
+ return { kind: "SASConnString", url: blobEndpoint, accountName, accountSas };
 }
- const result = { [elementName]: obj };
- result[XML_ATTRKEY] = { [xmlNamespaceKey]: xmlNamespace };
- return result;
}
/**
- * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor
- * @param targetCtor - The target object on which the properties need to be applied.
- * @param sourceCtors - An array of source objects from which the properties need to be taken.
+ * Internal escape method implementing Strategy Two mentioned in the escapeURLPath() description.
+ *
+ * @param text -
 */
-function applyMixins(targetCtorParam, sourceCtors) {
- const castTargetCtorParam = targetCtorParam;
- sourceCtors.forEach((sourceCtor) => {
- Object.getOwnPropertyNames(sourceCtor.prototype).forEach((name) => {
- castTargetCtorParam.prototype[name] = sourceCtor.prototype[name];
- });
- });
+function escape(text) {
+ return encodeURIComponent(text)
+ .replace(/%2F/g, "/") // Don't escape for "/"
+ .replace(/'/g, "%27") // Escape for "'"
+ .replace(/\+/g, "%20")
+ .replace(/%25/g, "%"); // Revert encoded "%"
}
-const validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/;
/**
- * Indicates whether the given string is in ISO 8601 format.
- * @param value - The value to be validated for ISO 8601 duration format.
- * @returns `true` if valid, `false` otherwise.
+ * Append a string to the URL path, avoiding a duplicated "/" in front of the string
+ * when the URL path already ends with a "/".
+ *
+ * @param url - Source URL string
+ * @param name - String to be appended to URL
+ * @returns An updated URL string
 */
-function isDuration(value) {
- return validateISODuration.test(value);
+function appendToURLPath(url, name) {
+ const urlParsed = new URL(url);
+ let path = urlParsed.pathname;
+ path = path ? (path.endsWith("/") ? `${path}${name}` : `${path}/${name}`) : name;
+ urlParsed.pathname = path;
+ return urlParsed.toString();
}
/**
- * Replace all of the instances of searchValue in value with the provided replaceValue.
- * @param value - The value to search and replace in.
- * @param searchValue - The value to search for in the value argument.
- * @param replaceValue - The value to replace searchValue with in the value argument.
- * @returns The value where each instance of searchValue was replaced with replacedValue.
+ * Set a URL parameter name and value. If the name already exists in the URL parameters, the old
+ * value will be replaced by the new one. If no value is provided, the parameter will be deleted.
+ *
+ * @param url - Source URL string
+ * @param name - Parameter name
+ * @param value - Parameter value
+ * @returns An updated URL string
 */
-function replaceAll(value, searchValue, replaceValue) {
- return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || "");
+function setURLParameter(url, name, value) {
+ const urlParsed = new URL(url);
+ const encodedName = encodeURIComponent(name);
+ const encodedValue = value ? encodeURIComponent(value) : undefined;
+ // mutating searchParams will change the encoding, so we have to do this ourselves
+ const searchString = urlParsed.search === "" ? "?"
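+ // (an empty search is treated as a bare "?" so that the slice(1) below sees a uniform shape)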
: urlParsed.search; + const searchPieces = []; + for (const pair of searchString.slice(1).split("&")) { + if (pair) { + const [key] = pair.split("=", 2); + if (key !== encodedName) { + searchPieces.push(pair); + } + } + } + if (encodedValue) { + searchPieces.push(`${encodedName}=${encodedValue}`); + } + urlParsed.search = searchPieces.length ? `?${searchPieces.join("&")}` : ""; + return urlParsed.toString(); } /** - * Determines whether the given entity is a basic/primitive type - * (string, number, boolean, null, undefined). - * @param value - Any entity - * @returns true is it is primitive type, false otherwise. + * Get URL parameter by name. + * + * @param url - + * @param name - */ -function isPrimitiveType(value) { - return (typeof value !== "object" && typeof value !== "function") || value === null; -} -function getEnvironmentValue(name) { - if (process.env[name]) { - return process.env[name]; - } - else if (process.env[name.toLowerCase()]) { - return process.env[name.toLowerCase()]; - } - return undefined; +function getURLParameter(url, name) { + var _a; + const urlParsed = new URL(url); + return (_a = urlParsed.searchParams.get(name)) !== null && _a !== void 0 ? _a : undefined; } /** - * @internal - * @returns true when input is an object type that is not null, Array, RegExp, or Date. + * Set URL host. + * + * @param url - Source URL string + * @param host - New host string + * @returns An updated URL string */ -function isObject(input) { - return (typeof input === "object" && - input !== null && - !Array.isArray(input) && - !(input instanceof RegExp) && - !(input instanceof Date)); +function setURLHost(url, host) { + const urlParsed = new URL(url); + urlParsed.hostname = host; + return urlParsed.toString(); } - -// Copyright (c) Microsoft Corporation. -// This file contains utility code to serialize and deserialize network operations according to `OperationSpec` objects generated by AutoRest.TypeScript from OpenAPI specifications. /** - * Used to map raw response objects to final shapes. - * Helps packing and unpacking Dates and other encoded types that are not intrinsic to JSON. - * Also allows pulling values from headers, as well as inserting default values and constants. + * Get URL path from an URL string. + * + * @param url - Source URL string */ -class Serializer { - constructor( - /** - * The provided model mapper. - */ - modelMappers = {}, - /** - * Whether the contents are XML or not. - */ - isXML) { - this.modelMappers = modelMappers; - this.isXML = isXML; - } - /** - * Validates constraints, if any. This function will throw if the provided value does not respect those constraints. - * @param mapper - The definition of data models. - * @param value - The value. - * @param objectName - Name of the object. Used in the error messages. - * @deprecated Removing the constraints validation on client side. 
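+// A usage sketch for setURLParameter above (hypothetical values):
+//   setURLParameter("https://host/path?a=1&b=2", "b", "3") -> "https://host/path?a=1&b=3"
+//   setURLParameter("https://host/path?a=1&b=2", "b")      -> "https://host/path?a=1" (no value deletes the parameter)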
- */ - validateConstraints(mapper, value, objectName) { - const failValidation = (constraintName, constraintValue) => { - throw new Error(`"${objectName}" with value "${value}" should satisfy the constraint "${constraintName}": ${constraintValue}.`); - }; - if (mapper.constraints && value != undefined) { - const valueAsNumber = value; - const { ExclusiveMaximum, ExclusiveMinimum, InclusiveMaximum, InclusiveMinimum, MaxItems, MaxLength, MinItems, MinLength, MultipleOf, Pattern, UniqueItems, } = mapper.constraints; - if (ExclusiveMaximum != undefined && valueAsNumber >= ExclusiveMaximum) { - failValidation("ExclusiveMaximum", ExclusiveMaximum); - } - if (ExclusiveMinimum != undefined && valueAsNumber <= ExclusiveMinimum) { - failValidation("ExclusiveMinimum", ExclusiveMinimum); - } - if (InclusiveMaximum != undefined && valueAsNumber > InclusiveMaximum) { - failValidation("InclusiveMaximum", InclusiveMaximum); - } - if (InclusiveMinimum != undefined && valueAsNumber < InclusiveMinimum) { - failValidation("InclusiveMinimum", InclusiveMinimum); - } - const valueAsArray = value; - if (MaxItems != undefined && valueAsArray.length > MaxItems) { - failValidation("MaxItems", MaxItems); - } - if (MaxLength != undefined && valueAsArray.length > MaxLength) { - failValidation("MaxLength", MaxLength); - } - if (MinItems != undefined && valueAsArray.length < MinItems) { - failValidation("MinItems", MinItems); - } - if (MinLength != undefined && valueAsArray.length < MinLength) { - failValidation("MinLength", MinLength); - } - if (MultipleOf != undefined && valueAsNumber % MultipleOf !== 0) { - failValidation("MultipleOf", MultipleOf); - } - if (Pattern) { - const pattern = typeof Pattern === "string" ? new RegExp(Pattern) : Pattern; - if (typeof value !== "string" || value.match(pattern) === null) { - failValidation("Pattern", Pattern); - } - } - if (UniqueItems && - valueAsArray.some((item, i, ar) => ar.indexOf(item) !== i)) { - failValidation("UniqueItems", UniqueItems); - } - } - } - /** - * Serialize the given object based on its metadata defined in the mapper. - * - * @param mapper - The mapper which defines the metadata of the serializable object. - * @param object - A valid Javascript object to be serialized. - * @param objectName - Name of the serialized object. - * @param options - additional options to deserialization. - * @returns A valid serialized Javascript object. - */ - serialize(mapper, object, objectName, options = {}) { - var _a, _b, _c; - const updatedOptions = { - rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", - includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, - xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, - }; - let payload = {}; - const mapperType = mapper.type.name; - if (!objectName) { - objectName = mapper.serializedName; - } - if (mapperType.match(/^Sequence$/i) !== null) { - payload = []; - } - if (mapper.isConstant) { - object = mapper.defaultValue; - } - // This table of allowed values should help explain - // the mapper.required and mapper.nullable properties. - // X means "neither undefined or null are allowed". 
- // || required - // || true | false - // nullable || ========================== - // true || null | undefined/null - // false || X | undefined - // undefined || X | undefined/null - const { required, nullable } = mapper; - if (required && nullable && object === undefined) { - throw new Error(`${objectName} cannot be undefined.`); - } - if (required && !nullable && object == undefined) { - throw new Error(`${objectName} cannot be null or undefined.`); - } - if (!required && nullable === false && object === null) { - throw new Error(`${objectName} cannot be null.`); - } - if (object == undefined) { - payload = object; - } - else { - if (mapperType.match(/^any$/i) !== null) { - payload = object; - } - else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/i) !== null) { - payload = serializeBasicTypes(mapperType, objectName, object); - } - else if (mapperType.match(/^Enum$/i) !== null) { - const enumMapper = mapper; - payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object); - } - else if (mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/i) !== null) { - payload = serializeDateTypes(mapperType, object, objectName); - } - else if (mapperType.match(/^ByteArray$/i) !== null) { - payload = serializeByteArrayType(objectName, object); - } - else if (mapperType.match(/^Base64Url$/i) !== null) { - payload = serializeBase64UrlType(objectName, object); - } - else if (mapperType.match(/^Sequence$/i) !== null) { - payload = serializeSequenceType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); - } - else if (mapperType.match(/^Dictionary$/i) !== null) { - payload = serializeDictionaryType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); - } - else if (mapperType.match(/^Composite$/i) !== null) { - payload = serializeCompositeType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); - } - } - return payload; +function getURLPath(url) { + try { + const urlParsed = new URL(url); + return urlParsed.pathname; } - /** - * Deserialize the given object based on its metadata defined in the mapper. - * - * @param mapper - The mapper which defines the metadata of the serializable object. - * @param responseBody - A valid Javascript entity to be deserialized. - * @param objectName - Name of the deserialized object. - * @param options - Controls behavior of XML parser and builder. - * @returns A valid deserialized Javascript object. - */ - deserialize(mapper, responseBody, objectName, options = {}) { - var _a, _b, _c; - const updatedOptions = { - rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", - includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, - xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, - }; - if (responseBody == undefined) { - if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) { - // Edge case for empty XML non-wrapped lists. xml2js can't distinguish - // between the list being empty versus being missing, - // so let's do the more user-friendly thing and return an empty list. 
- responseBody = []; - } - // specifically check for undefined as default value can be a falsey value `0, "", false, null` - if (mapper.defaultValue !== undefined) { - responseBody = mapper.defaultValue; - } - return responseBody; - } - let payload; - const mapperType = mapper.type.name; - if (!objectName) { - objectName = mapper.serializedName; - } - if (mapperType.match(/^Composite$/i) !== null) { - payload = deserializeCompositeType(this, mapper, responseBody, objectName, updatedOptions); - } - else { - if (this.isXML) { - const xmlCharKey = updatedOptions.xmlCharKey; - const castResponseBody = responseBody; - /** - * If the mapper specifies this as a non-composite type value but the responseBody contains - * both header ("$" i.e., XML_ATTRKEY) and body ("#" i.e., XML_CHARKEY) properties, - * then just reduce the responseBody value to the body ("#" i.e., XML_CHARKEY) property. - */ - if (castResponseBody[XML_ATTRKEY] != undefined && - castResponseBody[xmlCharKey] != undefined) { - responseBody = castResponseBody[xmlCharKey]; - } - } - if (mapperType.match(/^Number$/i) !== null) { - payload = parseFloat(responseBody); - if (isNaN(payload)) { - payload = responseBody; - } - } - else if (mapperType.match(/^Boolean$/i) !== null) { - if (responseBody === "true") { - payload = true; - } - else if (responseBody === "false") { - payload = false; - } - else { - payload = responseBody; - } - } - else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/i) !== null) { - payload = responseBody; - } - else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/i) !== null) { - payload = new Date(responseBody); - } - else if (mapperType.match(/^UnixTime$/i) !== null) { - payload = unixTimeToDate(responseBody); - } - else if (mapperType.match(/^ByteArray$/i) !== null) { - payload = decodeString(responseBody); - } - else if (mapperType.match(/^Base64Url$/i) !== null) { - payload = base64UrlToByteArray(responseBody); - } - else if (mapperType.match(/^Sequence$/i) !== null) { - payload = deserializeSequenceType(this, mapper, responseBody, objectName, updatedOptions); - } - else if (mapperType.match(/^Dictionary$/i) !== null) { - payload = deserializeDictionaryType(this, mapper, responseBody, objectName, updatedOptions); - } - } - if (mapper.isConstant) { - payload = mapper.defaultValue; - } - return payload; + catch (e) { + return undefined; } } -function trimEnd(str, ch) { - let len = str.length; - while (len - 1 >= 0 && str[len - 1] === ch) { - --len; +/** + * Get URL scheme from an URL string. + * + * @param url - Source URL string + */ +function getURLScheme(url) { + try { + const urlParsed = new URL(url); + return urlParsed.protocol.endsWith(":") ? urlParsed.protocol.slice(0, -1) : urlParsed.protocol; } - return str.substr(0, len); -} -function bufferToBase64Url(buffer) { - if (!buffer) { + catch (e) { return undefined; } - if (!(buffer instanceof Uint8Array)) { - throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`); - } - // Uint8Array to Base64. - const str = encodeByteArray(buffer); - // Base64 to Base64Url. - return trimEnd(str, "=").replace(/\+/g, "-").replace(/\//g, "_"); } -function base64UrlToByteArray(str) { - if (!str) { - return undefined; +/** + * Get URL path and query from an URL string. 
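+ * For example (sample URL), "https://account.blob.core.windows.net/con/b?sv=2020" yields "/con/b?sv=2020".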
+ * + * @param url - Source URL string + */ +function getURLPathAndQuery(url) { + const urlParsed = new URL(url); + const pathString = urlParsed.pathname; + if (!pathString) { + throw new RangeError("Invalid url without valid path."); } - if (str && typeof str.valueOf() !== "string") { - throw new Error("Please provide an input of type string for converting to Uint8Array"); + let queryString = urlParsed.search || ""; + queryString = queryString.trim(); + if (queryString !== "") { + queryString = queryString.startsWith("?") ? queryString : `?${queryString}`; // Ensure query string start with '?' } - // Base64Url to Base64. - str = str.replace(/-/g, "+").replace(/_/g, "/"); - // Base64 to Uint8Array. - return decodeString(str); + return `${pathString}${queryString}`; } -function splitSerializeName(prop) { - const classes = []; - let partialclass = ""; - if (prop) { - const subwords = prop.split("."); - for (const item of subwords) { - if (item.charAt(item.length - 1) === "\\") { - partialclass += item.substr(0, item.length - 1) + "."; - } - else { - partialclass += item; - classes.push(partialclass); - partialclass = ""; - } - } +/** + * Get URL query key value pairs from an URL string. + * + * @param url - + */ +function getURLQueries(url) { + let queryString = new URL(url).search; + if (!queryString) { + return {}; } - return classes; + queryString = queryString.trim(); + queryString = queryString.startsWith("?") ? queryString.substring(1) : queryString; + let querySubStrings = queryString.split("&"); + querySubStrings = querySubStrings.filter((value) => { + const indexOfEqual = value.indexOf("="); + const lastIndexOfEqual = value.lastIndexOf("="); + return (indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1); + }); + const queries = {}; + for (const querySubString of querySubStrings) { + const splitResults = querySubString.split("="); + const key = splitResults[0]; + const value = splitResults[1]; + queries[key] = value; + } + return queries; } -function dateToUnixTime(d) { - if (!d) { - return undefined; +/** + * Append a string to URL query. + * + * @param url - Source URL string. + * @param queryParts - String to be appended to the URL query. + * @returns An updated URL string. + */ +function appendToURLQuery(url, queryParts) { + const urlParsed = new URL(url); + let query = urlParsed.search; + if (query) { + query += "&" + queryParts; } - if (typeof d.valueOf() === "string") { - d = new Date(d); + else { + query = queryParts; } - return Math.floor(d.getTime() / 1000); + urlParsed.search = query; + return urlParsed.toString(); } -function unixTimeToDate(n) { - if (!n) { - return undefined; +/** + * Rounds a date off to seconds. + * + * @param date - + * @param withMilliseconds - If true, YYYY-MM-DDThh:mm:ss.fffffffZ will be returned; + * If false, YYYY-MM-DDThh:mm:ssZ will be returned. + * @returns Date string in ISO8061 format, with or without 7 milliseconds component + */ +function truncatedISO8061Date(date, withMilliseconds = true) { + // Date.toISOString() will return like "2018-10-29T06:34:36.139Z" + const dateString = date.toISOString(); + return withMilliseconds + ? dateString.substring(0, dateString.length - 1) + "0000" + "Z" + : dateString.substring(0, dateString.length - 5) + "Z"; +} +/** + * Base64 encode. + * + * @param content - + */ +function base64encode(content) { + return !coreUtil.isNode ? btoa(content) : Buffer.from(content).toString("base64"); +} +/** + * Generate a 64 bytes base64 block ID string. 
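+ * For example, generateBlockID("block-", 0) pads the index to a 48-character source string
+ * ("block-" followed by the index zero-padded to 42 digits) and base64-encodes it into a 64-character block ID.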
+ * + * @param blockIndex - + */ +function generateBlockID(blockIDPrefix, blockIndex) { + // To generate a 64 bytes base64 string, source string should be 48 + const maxSourceStringLength = 48; + // A blob can have a maximum of 100,000 uncommitted blocks at any given time + const maxBlockIndexLength = 6; + const maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength; + if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) { + blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength); } - return new Date(n * 1000); + const res = blockIDPrefix + + padStart(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, "0"); + return base64encode(res); } -function serializeBasicTypes(typeName, objectName, value) { - if (value !== null && value !== undefined) { - if (typeName.match(/^Number$/i) !== null) { - if (typeof value !== "number") { - throw new Error(`${objectName} with value ${value} must be of type number.`); - } - } - else if (typeName.match(/^String$/i) !== null) { - if (typeof value.valueOf() !== "string") { - throw new Error(`${objectName} with value "${value}" must be of type string.`); - } - } - else if (typeName.match(/^Uuid$/i) !== null) { - if (!(typeof value.valueOf() === "string" && isValidUuid(value))) { - throw new Error(`${objectName} with value "${value}" must be of type string and a valid uuid.`); - } - } - else if (typeName.match(/^Boolean$/i) !== null) { - if (typeof value !== "boolean") { - throw new Error(`${objectName} with value ${value} must be of type boolean.`); +/** + * Delay specified time interval. + * + * @param timeInMs - + * @param aborter - + * @param abortError - + */ +async function delay(timeInMs, aborter, abortError) { + return new Promise((resolve, reject) => { + /* eslint-disable-next-line prefer-const */ + let timeout; + const abortHandler = () => { + if (timeout !== undefined) { + clearTimeout(timeout); } - } - else if (typeName.match(/^Stream$/i) !== null) { - const objectType = typeof value; - if (objectType !== "string" && - objectType !== "function" && - !(value instanceof ArrayBuffer) && - !ArrayBuffer.isView(value) && - !((typeof Blob === "function" || typeof Blob === "object") && value instanceof Blob)) { - throw new Error(`${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream.`); + reject(abortError); + }; + const resolveHandler = () => { + if (aborter !== undefined) { + aborter.removeEventListener("abort", abortHandler); } + resolve(); + }; + timeout = setTimeout(resolveHandler, timeInMs); + if (aborter !== undefined) { + aborter.addEventListener("abort", abortHandler); } - } - return value; + }); } -function serializeEnumType(objectName, allowedValues, value) { - if (!allowedValues) { - throw new Error(`Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`); +/** + * String.prototype.padStart() + * + * @param currentString - + * @param targetLength - + * @param padString - + */ +function padStart(currentString, targetLength, padString = " ") { + // @ts-expect-error: TS doesn't know this code needs to run downlevel sometimes + if (String.prototype.padStart) { + return currentString.padStart(targetLength, padString); } - const isPresent = allowedValues.some((item) => { - if (typeof item.valueOf() === "string") { - return item.toLowerCase() === value.toLowerCase(); - } - return item === value; - }); - if (!isPresent) { - throw new Error(`${value} is not a valid value for ${objectName}. 
The valid values are: ${JSON.stringify(allowedValues)}.`); + padString = padString || " "; + if (currentString.length > targetLength) { + return currentString; } - return value; -} -function serializeByteArrayType(objectName, value) { - let returnValue = ""; - if (value != undefined) { - if (!(value instanceof Uint8Array)) { - throw new Error(`${objectName} must be of type Uint8Array.`); + else { + targetLength = targetLength - currentString.length; + if (targetLength > padString.length) { + padString += padString.repeat(targetLength / padString.length); } - returnValue = encodeByteArray(value); + return padString.slice(0, targetLength) + currentString; } - return returnValue; } -function serializeBase64UrlType(objectName, value) { - let returnValue = ""; - if (value != undefined) { - if (!(value instanceof Uint8Array)) { - throw new Error(`${objectName} must be of type Uint8Array.`); - } - returnValue = bufferToBase64Url(value) || ""; - } - return returnValue; +/** + * If two strings are equal when compared case insensitive. + * + * @param str1 - + * @param str2 - + */ +function iEqual(str1, str2) { + return str1.toLocaleLowerCase() === str2.toLocaleLowerCase(); } -function serializeDateTypes(typeName, value, objectName) { - if (value != undefined) { - if (typeName.match(/^Date$/i) !== null) { - if (!(value instanceof Date || - (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { - throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); - } - value = - value instanceof Date - ? value.toISOString().substring(0, 10) - : new Date(value).toISOString().substring(0, 10); - } - else if (typeName.match(/^DateTime$/i) !== null) { - if (!(value instanceof Date || - (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { - throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); - } - value = value instanceof Date ? value.toISOString() : new Date(value).toISOString(); - } - else if (typeName.match(/^DateTimeRfc1123$/i) !== null) { - if (!(value instanceof Date || - (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { - throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`); - } - value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString(); - } - else if (typeName.match(/^UnixTime$/i) !== null) { - if (!(value instanceof Date || - (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { - throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format ` + - `for it to be serialized in UnixTime/Epoch format.`); - } - value = dateToUnixTime(value); - } - else if (typeName.match(/^TimeSpan$/i) !== null) { - if (!isDuration(value)) { - throw new Error(`${objectName} must be a string in ISO 8601 format. 
Instead was "${value}".`); - } +/** + * Extracts account name from the url + * @param url - url to extract the account name from + * @returns with the account name + */ +function getAccountNameFromUrl(url) { + const parsedUrl = new URL(url); + let accountName; + try { + if (parsedUrl.hostname.split(".")[1] === "blob") { + // `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; + accountName = parsedUrl.hostname.split(".")[0]; } - } - return value; -} -function serializeSequenceType(serializer, mapper, object, objectName, isXml, options) { - if (!Array.isArray(object)) { - throw new Error(`${objectName} must be of type Array.`); - } - const elementType = mapper.type.element; - if (!elementType || typeof elementType !== "object") { - throw new Error(`element" metadata for an Array must be defined in the ` + - `mapper and it must of type "object" in ${objectName}.`); - } - const tempArray = []; - for (let i = 0; i < object.length; i++) { - const serializedValue = serializer.serialize(elementType, object[i], objectName, options); - if (isXml && elementType.xmlNamespace) { - const xmlnsKey = elementType.xmlNamespacePrefix - ? `xmlns:${elementType.xmlNamespacePrefix}` - : "xmlns"; - if (elementType.type.name === "Composite") { - tempArray[i] = Object.assign({}, serializedValue); - tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; - } - else { - tempArray[i] = {}; - tempArray[i][options.xmlCharKey] = serializedValue; - tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; - } + else if (isIpEndpointStyle(parsedUrl)) { + // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/ + // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/ + // .getPath() -> /devstoreaccount1/ + accountName = parsedUrl.pathname.split("/")[1]; } else { - tempArray[i] = serializedValue; + // Custom domain case: "https://customdomain.com/containername/blob". + accountName = ""; } + return accountName; } - return tempArray; -} -function serializeDictionaryType(serializer, mapper, object, objectName, isXml, options) { - if (typeof object !== "object") { - throw new Error(`${objectName} must be of type object.`); - } - const valueType = mapper.type.value; - if (!valueType || typeof valueType !== "object") { - throw new Error(`"value" metadata for a Dictionary must be defined in the ` + - `mapper and it must of type "object" in ${objectName}.`); + catch (error) { + throw new Error("Unable to extract accountName with provided information."); } - const tempDictionary = {}; - for (const key of Object.keys(object)) { - const serializedValue = serializer.serialize(valueType, object[key], objectName, options); - // If the element needs an XML namespace we need to add it within the $ property - tempDictionary[key] = getXmlObjectValue(valueType, serializedValue, isXml, options); +} +function isIpEndpointStyle(parsedUrl) { + const host = parsedUrl.host; + // Case 1: Ipv6, use a broad regex to find out candidates whose host contains two ':'. + // Case 2: localhost(:port) or host.docker.internal, use broad regex to match port part. + // Case 3: Ipv4, use broad regex which just check if host contains Ipv4. + // For valid host please refer to https://man7.org/linux/man-pages/man7/hostname.7.html. 
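+ // e.g. hosts like "localhost:10001", "127.0.0.1:10000" and "[::1]:10000" match the regex,
+ // while "account.blob.core.windows.net" falls through to the PathStylePorts check below.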
+ return (/^.*:.*:.*$|^(localhost|host.docker.internal)(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/.test(host) || + (Boolean(parsedUrl.port) && PathStylePorts.includes(parsedUrl.port))); +} +/** + * Convert Tags to encoded string. + * + * @param tags - + */ +function toBlobTagsString(tags) { + if (tags === undefined) { + return undefined; } - // Add the namespace to the root element if needed - if (isXml && mapper.xmlNamespace) { - const xmlnsKey = mapper.xmlNamespacePrefix ? `xmlns:${mapper.xmlNamespacePrefix}` : "xmlns"; - const result = tempDictionary; - result[XML_ATTRKEY] = { [xmlnsKey]: mapper.xmlNamespace }; - return result; + const tagPairs = []; + for (const key in tags) { + if (Object.prototype.hasOwnProperty.call(tags, key)) { + const value = tags[key]; + tagPairs.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`); + } } - return tempDictionary; + return tagPairs.join("&"); } /** - * Resolves the additionalProperties property from a referenced mapper. - * @param serializer - The serializer containing the entire set of mappers. - * @param mapper - The composite mapper to resolve. - * @param objectName - Name of the object being serialized. + * Convert Tags type to BlobTags. + * + * @param tags - */ -function resolveAdditionalProperties(serializer, mapper, objectName) { - const additionalProperties = mapper.type.additionalProperties; - if (!additionalProperties && mapper.type.className) { - const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); - return modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.additionalProperties; +function toBlobTags(tags) { + if (tags === undefined) { + return undefined; } - return additionalProperties; + const res = { + blobTagSet: [], + }; + for (const key in tags) { + if (Object.prototype.hasOwnProperty.call(tags, key)) { + const value = tags[key]; + res.blobTagSet.push({ + key, + value, + }); + } + } + return res; } /** - * Finds the mapper referenced by `className`. - * @param serializer - The serializer containing the entire set of mappers - * @param mapper - The composite mapper to resolve - * @param objectName - Name of the object being serialized + * Covert BlobTags to Tags type. + * + * @param tags - */ -function resolveReferencedMapper(serializer, mapper, objectName) { - const className = mapper.type.className; - if (!className) { - throw new Error(`Class name for model "${objectName}" is not provided in the mapper "${JSON.stringify(mapper, undefined, 2)}".`); +function toTags(tags) { + if (tags === undefined) { + return undefined; } - return serializer.modelMappers[className]; + const res = {}; + for (const blobTag of tags.blobTagSet) { + res[blobTag.key] = blobTag.value; + } + return res; } /** - * Resolves a composite mapper's modelProperties. - * @param serializer - The serializer containing the entire set of mappers - * @param mapper - The composite mapper to resolve + * Convert BlobQueryTextConfiguration to QuerySerialization type. + * + * @param textConfiguration - */ -function resolveModelProperties(serializer, mapper, objectName) { - let modelProps = mapper.type.modelProperties; - if (!modelProps) { - const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); - if (!modelMapper) { - throw new Error(`mapper() cannot be null or undefined for model "${mapper.type.className}".`); - } - modelProps = modelMapper === null || modelMapper === void 0 ? 
void 0 : modelMapper.type.modelProperties; - if (!modelProps) { - throw new Error(`modelProperties cannot be null or undefined in the ` + - `mapper "${JSON.stringify(modelMapper)}" of type "${mapper.type.className}" for object "${objectName}".`); - } +function toQuerySerialization(textConfiguration) { + if (textConfiguration === undefined) { + return undefined; + } + switch (textConfiguration.kind) { + case "csv": + return { + format: { + type: "delimited", + delimitedTextConfiguration: { + columnSeparator: textConfiguration.columnSeparator || ",", + fieldQuote: textConfiguration.fieldQuote || "", + recordSeparator: textConfiguration.recordSeparator, + escapeChar: textConfiguration.escapeCharacter || "", + headersPresent: textConfiguration.hasHeaders || false, + }, + }, + }; + case "json": + return { + format: { + type: "json", + jsonTextConfiguration: { + recordSeparator: textConfiguration.recordSeparator, + }, + }, + }; + case "arrow": + return { + format: { + type: "arrow", + arrowConfiguration: { + schema: textConfiguration.schema, + }, + }, + }; + case "parquet": + return { + format: { + type: "parquet", + }, + }; + default: + throw Error("Invalid BlobQueryTextConfiguration."); } - return modelProps; } -function serializeCompositeType(serializer, mapper, object, objectName, isXml, options) { - if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { - mapper = getPolymorphicMapper(serializer, mapper, object, "clientName"); +function parseObjectReplicationRecord(objectReplicationRecord) { + if (!objectReplicationRecord) { + return undefined; } - if (object != undefined) { - const payload = {}; - const modelProps = resolveModelProperties(serializer, mapper, objectName); - for (const key of Object.keys(modelProps)) { - const propertyMapper = modelProps[key]; - if (propertyMapper.readOnly) { - continue; - } - let propName; - let parentObject = payload; - if (serializer.isXML) { - if (propertyMapper.xmlIsWrapped) { - propName = propertyMapper.xmlName; - } - else { - propName = propertyMapper.xmlElementName || propertyMapper.xmlName; - } - } - else { - const paths = splitSerializeName(propertyMapper.serializedName); - propName = paths.pop(); - for (const pathName of paths) { - const childObject = parentObject[pathName]; - if (childObject == undefined && - (object[key] != undefined || propertyMapper.defaultValue !== undefined)) { - parentObject[pathName] = {}; - } - parentObject = parentObject[pathName]; - } - } - if (parentObject != undefined) { - if (isXml && mapper.xmlNamespace) { - const xmlnsKey = mapper.xmlNamespacePrefix - ? `xmlns:${mapper.xmlNamespacePrefix}` - : "xmlns"; - parentObject[XML_ATTRKEY] = Object.assign(Object.assign({}, parentObject[XML_ATTRKEY]), { [xmlnsKey]: mapper.xmlNamespace }); - } - const propertyObjectName = propertyMapper.serializedName !== "" - ? objectName + "." 
+ propertyMapper.serializedName - : objectName; - let toSerialize = object[key]; - const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); - if (polymorphicDiscriminator && - polymorphicDiscriminator.clientName === key && - toSerialize == undefined) { - toSerialize = mapper.serializedName; - } - const serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName, options); - if (serializedValue !== undefined && propName != undefined) { - const value = getXmlObjectValue(propertyMapper, serializedValue, isXml, options); - if (isXml && propertyMapper.xmlIsAttribute) { - // XML_ATTRKEY, i.e., $ is the key attributes are kept under in xml2js. - // This keeps things simple while preventing name collision - // with names in user documents. - parentObject[XML_ATTRKEY] = parentObject[XML_ATTRKEY] || {}; - parentObject[XML_ATTRKEY][propName] = serializedValue; - } - else if (isXml && propertyMapper.xmlIsWrapped) { - parentObject[propName] = { [propertyMapper.xmlElementName]: value }; - } - else { - parentObject[propName] = value; - } - } - } + if ("policy-id" in objectReplicationRecord) { + // If the dictionary contains a key with policy id, we are not required to do any parsing since + // the policy id should already be stored in the ObjectReplicationDestinationPolicyId. + return undefined; + } + const orProperties = []; + for (const key in objectReplicationRecord) { + const ids = key.split("_"); + const policyPrefix = "or-"; + if (ids[0].startsWith(policyPrefix)) { + ids[0] = ids[0].substring(policyPrefix.length); } - const additionalPropertiesMapper = resolveAdditionalProperties(serializer, mapper, objectName); - if (additionalPropertiesMapper) { - const propNames = Object.keys(modelProps); - for (const clientPropName in object) { - const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName); - if (isAdditionalProperty) { - payload[clientPropName] = serializer.serialize(additionalPropertiesMapper, object[clientPropName], objectName + '["' + clientPropName + '"]', options); - } - } + const rule = { + ruleId: ids[1], + replicationStatus: objectReplicationRecord[key], + }; + const policyIndex = orProperties.findIndex((policy) => policy.policyId === ids[0]); + if (policyIndex > -1) { + orProperties[policyIndex].rules.push(rule); + } + else { + orProperties.push({ + policyId: ids[0], + rules: [rule], + }); } - return payload; } - return object; + return orProperties; } -function getXmlObjectValue(propertyMapper, serializedValue, isXml, options) { - if (!isXml || !propertyMapper.xmlNamespace) { - return serializedValue; +function httpAuthorizationToString(httpAuthorization) { + return httpAuthorization ? httpAuthorization.scheme + " " + httpAuthorization.value : undefined; +} +function BlobNameToString(name) { + if (name.encoded) { + return decodeURIComponent(name.content); } - const xmlnsKey = propertyMapper.xmlNamespacePrefix - ? 
`xmlns:${propertyMapper.xmlNamespacePrefix}` - : "xmlns"; - const xmlNamespace = { [xmlnsKey]: propertyMapper.xmlNamespace }; - if (["Composite"].includes(propertyMapper.type.name)) { - if (serializedValue[XML_ATTRKEY]) { - return serializedValue; + else { + return name.content; + } +} +function ConvertInternalResponseOfListBlobFlat(internalResponse) { + return Object.assign(Object.assign({}, internalResponse), { segment: { + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); + return blobItem; + }), + } }); +} +function ConvertInternalResponseOfListBlobHierarchy(internalResponse) { + var _a; + return Object.assign(Object.assign({}, internalResponse), { segment: { + blobPrefixes: (_a = internalResponse.segment.blobPrefixes) === null || _a === void 0 ? void 0 : _a.map((blobPrefixInternal) => { + const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) }); + return blobPrefix; + }), + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); + return blobItem; + }), + } }); +} +function* ExtractPageRangeInfoItems(getPageRangesSegment) { + let pageRange = []; + let clearRange = []; + if (getPageRangesSegment.pageRange) + pageRange = getPageRangesSegment.pageRange; + if (getPageRangesSegment.clearRange) + clearRange = getPageRangesSegment.clearRange; + let pageRangeIndex = 0; + let clearRangeIndex = 0; + while (pageRangeIndex < pageRange.length && clearRangeIndex < clearRange.length) { + if (pageRange[pageRangeIndex].start < clearRange[clearRangeIndex].start) { + yield { + start: pageRange[pageRangeIndex].start, + end: pageRange[pageRangeIndex].end, + isClear: false, + }; + ++pageRangeIndex; } else { - const result = Object.assign({}, serializedValue); - result[XML_ATTRKEY] = xmlNamespace; - return result; + yield { + start: clearRange[clearRangeIndex].start, + end: clearRange[clearRangeIndex].end, + isClear: true, + }; + ++clearRangeIndex; } } - const result = {}; - result[options.xmlCharKey] = serializedValue; - result[XML_ATTRKEY] = xmlNamespace; - return result; + for (; pageRangeIndex < pageRange.length; ++pageRangeIndex) { + yield { + start: pageRange[pageRangeIndex].start, + end: pageRange[pageRangeIndex].end, + isClear: false, + }; + } + for (; clearRangeIndex < clearRange.length; ++clearRangeIndex) { + yield { + start: clearRange[clearRangeIndex].start, + end: clearRange[clearRangeIndex].end, + isClear: true, + }; + } } -function isSpecialXmlProperty(propertyName, options) { - return [XML_ATTRKEY, options.xmlCharKey].includes(propertyName); +/** + * Escape the blobName but keep path separator ('/'). + */ +function EscapePath(blobName) { + const split = blobName.split("/"); + for (let i = 0; i < split.length; i++) { + split[i] = encodeURIComponent(split[i]); + } + return split.join("/"); } -function deserializeCompositeType(serializer, mapper, responseBody, objectName, options) { - var _a, _b; - const xmlCharKey = (_a = options.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY; - if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { - mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName"); +/** + * A typesafe helper for ensuring that a given response object has + * the original _response attached. 
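+ * (at runtime it simply returns the same object when `_response` is present and throws a TypeError otherwise)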
+ * @param response - A response object from calling a client operation + * @returns The same object, but with known _response property + */ +function assertResponse(response) { + if (`_response` in response) { + return response; } - const modelProps = resolveModelProperties(serializer, mapper, objectName); - let instance = {}; - const handledPropertyNames = []; - for (const key of Object.keys(modelProps)) { - const propertyMapper = modelProps[key]; - const paths = splitSerializeName(modelProps[key].serializedName); - handledPropertyNames.push(paths[0]); - const { serializedName, xmlName, xmlElementName } = propertyMapper; - let propertyObjectName = objectName; - if (serializedName !== "" && serializedName !== undefined) { - propertyObjectName = objectName + "." + serializedName; - } - const headerCollectionPrefix = propertyMapper.headerCollectionPrefix; - if (headerCollectionPrefix) { - const dictionary = {}; - for (const headerKey of Object.keys(responseBody)) { - if (headerKey.startsWith(headerCollectionPrefix)) { - dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(propertyMapper.type.value, responseBody[headerKey], propertyObjectName, options); - } - handledPropertyNames.push(headerKey); - } - instance[key] = dictionary; - } - else if (serializer.isXML) { - if (propertyMapper.xmlIsAttribute && responseBody[XML_ATTRKEY]) { - instance[key] = serializer.deserialize(propertyMapper, responseBody[XML_ATTRKEY][xmlName], propertyObjectName, options); - } - else if (propertyMapper.xmlIsMsText) { - if (responseBody[xmlCharKey] !== undefined) { - instance[key] = responseBody[xmlCharKey]; - } - else if (typeof responseBody === "string") { - // The special case where xml parser parses "content" into JSON of - // `{ name: "content"}` instead of `{ name: { "_": "content" }}` - instance[key] = responseBody; - } - } - else { - const propertyName = xmlElementName || xmlName || serializedName; - if (propertyMapper.xmlIsWrapped) { - /* a list of wrapped by - For the xml example below - - ... - ... - - the responseBody has - { - Cors: { - CorsRule: [{...}, {...}] - } - } - xmlName is "Cors" and xmlElementName is"CorsRule". - */ - const wrapped = responseBody[xmlName]; - const elementList = (_b = wrapped === null || wrapped === void 0 ? void 0 : wrapped[xmlElementName]) !== null && _b !== void 0 ? _b : []; - instance[key] = serializer.deserialize(propertyMapper, elementList, propertyObjectName, options); - handledPropertyNames.push(xmlName); - } - else { - const property = responseBody[propertyName]; - instance[key] = serializer.deserialize(propertyMapper, property, propertyObjectName, options); - handledPropertyNames.push(propertyName); - } - } - } - else { - // deserialize the property if it is present in the provided responseBody instance - let propertyInstance; - let res = responseBody; - // traversing the object step by step. - for (const item of paths) { - if (!res) - break; - res = res[item]; - } - propertyInstance = res; - const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator; - // checking that the model property name (key)(ex: "fishtype") and the - // clientName of the polymorphicDiscriminator {metadata} (ex: "fishtype") - // instead of the serializedName of the polymorphicDiscriminator (ex: "fish.type") - // is a better approach. The generator is not consistent with escaping '\.' 
in the - // serializedName of the property (ex: "fish\.type") that is marked as polymorphic discriminator - // and the serializedName of the metadata polymorphicDiscriminator (ex: "fish.type"). However, - // the clientName transformation of the polymorphicDiscriminator (ex: "fishtype") and - // the transformation of model property name (ex: "fishtype") is done consistently. - // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator. - if (polymorphicDiscriminator && - key === polymorphicDiscriminator.clientName && - propertyInstance == undefined) { - propertyInstance = mapper.serializedName; - } - let serializedValue; - // paging - if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === "") { - propertyInstance = responseBody[key]; - const arrayInstance = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); - // Copy over any properties that have already been added into the instance, where they do - // not exist on the newly de-serialized array - for (const [k, v] of Object.entries(instance)) { - if (!Object.prototype.hasOwnProperty.call(arrayInstance, k)) { - arrayInstance[k] = v; - } - } - instance = arrayInstance; - } - else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) { - serializedValue = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); - instance[key] = serializedValue; - } - } - } - const additionalPropertiesMapper = mapper.type.additionalProperties; - if (additionalPropertiesMapper) { - const isAdditionalProperty = (responsePropName) => { - for (const clientPropName in modelProps) { - const paths = splitSerializeName(modelProps[clientPropName].serializedName); - if (paths[0] === responsePropName) { - return false; - } - } - return true; - }; - for (const responsePropName in responseBody) { - if (isAdditionalProperty(responsePropName)) { - instance[responsePropName] = serializer.deserialize(additionalPropertiesMapper, responseBody[responsePropName], objectName + '["' + responsePropName + '"]', options); - } - } - } - else if (responseBody) { - for (const key of Object.keys(responseBody)) { - if (instance[key] === undefined && - !handledPropertyNames.includes(key) && - !isSpecialXmlProperty(key, options)) { - instance[key] = responseBody[key]; - } - } - } - return instance; -} -function deserializeDictionaryType(serializer, mapper, responseBody, objectName, options) { - const value = mapper.type.value; - if (!value || typeof value !== "object") { - throw new Error(`"value" metadata for a Dictionary must be defined in the ` + - `mapper and it must of type "object" in ${objectName}`); - } - if (responseBody) { - const tempDictionary = {}; - for (const key of Object.keys(responseBody)) { - tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName, options); - } - return tempDictionary; - } - return responseBody; -} -function deserializeSequenceType(serializer, mapper, responseBody, objectName, options) { - const element = mapper.type.element; - if (!element || typeof element !== "object") { - throw new Error(`element" metadata for an Array must be defined in the ` + - `mapper and it must of type "object" in ${objectName}`); - } - if (responseBody) { - if (!Array.isArray(responseBody)) { - // xml2js will interpret a single element array as just the element, so force it to be an array - responseBody = [responseBody]; - } - const tempArray = []; - for (let i = 0; i < responseBody.length; i++) { - tempArray[i] = 
serializer.deserialize(element, responseBody[i], `${objectName}[${i}]`, options); - } - return tempArray; - } - return responseBody; -} -function getPolymorphicMapper(serializer, mapper, object, polymorphicPropertyName) { - const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); - if (polymorphicDiscriminator) { - const discriminatorName = polymorphicDiscriminator[polymorphicPropertyName]; - if (discriminatorName != undefined) { - const discriminatorValue = object[discriminatorName]; - if (discriminatorValue != undefined) { - const typeName = mapper.type.uberParent || mapper.type.className; - const indexDiscriminator = discriminatorValue === typeName - ? discriminatorValue - : typeName + "." + discriminatorValue; - const polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator]; - if (polymorphicMapper) { - mapper = polymorphicMapper; - } - } - } - } - return mapper; -} -function getPolymorphicDiscriminatorRecursively(serializer, mapper) { - return (mapper.type.polymorphicDiscriminator || - getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) || - getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)); -} -function getPolymorphicDiscriminatorSafely(serializer, typeName) { - return (typeName && - serializer.modelMappers[typeName] && - serializer.modelMappers[typeName].type.polymorphicDiscriminator); -} -/** - * Utility function that serializes an object that might contain binary information into a plain object, array or a string. - */ -function serializeObject(toSerialize) { - const castToSerialize = toSerialize; - if (toSerialize == undefined) - return undefined; - if (toSerialize instanceof Uint8Array) { - toSerialize = encodeByteArray(toSerialize); - return toSerialize; - } - else if (toSerialize instanceof Date) { - return toSerialize.toISOString(); - } - else if (Array.isArray(toSerialize)) { - const array = []; - for (let i = 0; i < toSerialize.length; i++) { - array.push(serializeObject(toSerialize[i])); - } - return array; - } - else if (typeof toSerialize === "object") { - const dictionary = {}; - for (const property in toSerialize) { - dictionary[property] = serializeObject(castToSerialize[property]); - } - return dictionary; - } - return toSerialize; -} -/** - * Utility function to create a K:V from a list of strings - */ -function strEnum(o) { - const result = {}; - for (const key of o) { - result[key] = key; - } - return result; + throw new TypeError(`Unexpected response object ${response}`); } -/** - * String enum containing the string types of property mappers. - */ -// eslint-disable-next-line @typescript-eslint/no-redeclare -const MapperType = strEnum([ - "Base64Url", - "Boolean", - "ByteArray", - "Composite", - "Date", - "DateTime", - "DateTimeRfc1123", - "Dictionary", - "Enum", - "Number", - "Object", - "Sequence", - "String", - "Stream", - "TimeSpan", - "UnixTime", -]); // Copyright (c) Microsoft Corporation. -function isWebResourceLike(object) { - if (object && typeof object === "object") { - const castObject = object; - if (typeof castObject.url === "string" && - typeof castObject.method === "string" && - typeof castObject.headers === "object" && - isHttpHeadersLike(castObject.headers) && - typeof castObject.validateRequestProperties === "function" && - typeof castObject.prepare === "function" && - typeof castObject.clone === "function") { - return true; - } - } - return false; -} +// Licensed under the MIT license. /** - * Creates a new WebResource object. 
- * - * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary - * properties to initiate a request. + * RetryPolicy types. */ -class WebResource { - constructor(url, method, body, query, headers, streamResponseBody, withCredentials, abortSignal, timeout, onUploadProgress, onDownloadProgress, proxySettings, keepAlive, decompressResponse, streamResponseStatusCodes) { - this.streamResponseBody = streamResponseBody; - this.streamResponseStatusCodes = streamResponseStatusCodes; - this.url = url || ""; - this.method = method || "GET"; - this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers); - this.body = body; - this.query = query; - this.formData = undefined; - this.withCredentials = withCredentials || false; - this.abortSignal = abortSignal; - this.timeout = timeout || 0; - this.onUploadProgress = onUploadProgress; - this.onDownloadProgress = onDownloadProgress; - this.proxySettings = proxySettings; - this.keepAlive = keepAlive; - this.decompressResponse = decompressResponse; - this.requestId = this.headers.get("x-ms-client-request-id") || generateUuid(); - } - /** - * Validates that the required properties such as method, url, headers["Content-Type"], - * headers["accept-language"] are defined. It will throw an error if one of the above - * mentioned properties are not defined. - */ - validateRequestProperties() { - if (!this.method) { - throw new Error("WebResource.method is required."); - } - if (!this.url) { - throw new Error("WebResource.url is required."); - } - } +exports.StorageRetryPolicyType = void 0; +(function (StorageRetryPolicyType) { /** - * Prepares the request. - * @param options - Options to provide for preparing the request. - * @returns Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline. + * Exponential retry. Retry time delay grows exponentially. */ - prepare(options) { - if (!options) { - throw new Error("options object is required"); - } - if (options.method === undefined || - options.method === null || - typeof options.method.valueOf() !== "string") { - throw new Error("options.method must be a string."); - } - if (options.url && options.pathTemplate) { - throw new Error("options.url and options.pathTemplate are mutually exclusive. Please provide exactly one of them."); - } - if ((options.pathTemplate === undefined || - options.pathTemplate === null || - typeof options.pathTemplate.valueOf() !== "string") && - (options.url === undefined || - options.url === null || - typeof options.url.valueOf() !== "string")) { - throw new Error("Please provide exactly one of options.pathTemplate or options.url."); - } - // set the url if it is provided. - if (options.url) { - if (typeof options.url !== "string") { - throw new Error('options.url must be of type "string".'); - } - this.url = options.url; - } - // set the method - if (options.method) { - const validMethods = ["GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", "PATCH", "TRACE"]; - if (validMethods.indexOf(options.method.toUpperCase()) === -1) { - throw new Error('The provided method "' + - options.method + - '" is invalid. 
Supported HTTP methods are: ' + - JSON.stringify(validMethods)); - } - } - this.method = options.method.toUpperCase(); - // construct the url if path template is provided - if (options.pathTemplate) { - const { pathTemplate, pathParameters } = options; - if (typeof pathTemplate !== "string") { - throw new Error('options.pathTemplate must be of type "string".'); - } - if (!options.baseUrl) { - options.baseUrl = "https://management.azure.com"; - } - const baseUrl = options.baseUrl; - let url = baseUrl + - (baseUrl.endsWith("/") ? "" : "/") + - (pathTemplate.startsWith("/") ? pathTemplate.slice(1) : pathTemplate); - const segments = url.match(/({[\w-]*\s*[\w-]*})/gi); - if (segments && segments.length) { - if (!pathParameters) { - throw new Error(`pathTemplate: ${pathTemplate} has been provided. Hence, options.pathParameters must also be provided.`); - } - segments.forEach(function (item) { - const pathParamName = item.slice(1, -1); - const pathParam = pathParameters[pathParamName]; - if (pathParam === null || - pathParam === undefined || - !(typeof pathParam === "string" || typeof pathParam === "object")) { - const stringifiedPathParameters = JSON.stringify(pathParameters, undefined, 2); - throw new Error(`pathTemplate: ${pathTemplate} contains the path parameter ${pathParamName}` + - ` however, it is not present in parameters: ${stringifiedPathParameters}.` + - `The value of the path parameter can either be a "string" of the form { ${pathParamName}: "some sample value" } or ` + - `it can be an "object" of the form { "${pathParamName}": { value: "some sample value", skipUrlEncoding: true } }.`); - } - if (typeof pathParam.valueOf() === "string") { - url = url.replace(item, encodeURIComponent(pathParam)); - } - if (typeof pathParam.valueOf() === "object") { - if (!pathParam.value) { - throw new Error(`options.pathParameters[${pathParamName}] is of type "object" but it does not contain a "value" property.`); - } - if (pathParam.skipUrlEncoding) { - url = url.replace(item, pathParam.value); - } - else { - url = url.replace(item, encodeURIComponent(pathParam.value)); - } - } - }); - } - this.url = url; - } - // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option. - if (options.queryParameters) { - const queryParameters = options.queryParameters; - if (typeof queryParameters !== "object") { - throw new Error(`options.queryParameters must be of type object. It should be a JSON object ` + - `of "query-parameter-name" as the key and the "query-parameter-value" as the value. ` + - `The "query-parameter-value" may be fo type "string" or an "object" of the form { value: "query-parameter-value", skipUrlEncoding: true }.`); - } - // append question mark if it is not present in the url - if (this.url && this.url.indexOf("?") === -1) { - this.url += "?"; - } - // construct queryString - const queryParams = []; - // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest(). 
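// [Editor's note] A minimal, runnable sketch (not part of the bundle) of the
// query-string assembly performed by this removed WebResource.prepare() code:
// string parameter values are URI-encoded, while object-shaped values may set
// skipUrlEncoding to be appended verbatim. The `queryParameters` literal
// below is a hypothetical example.
const queryParameters = {
    "api-version": "2021-08-06",
    prefix: { value: "logs/2024", skipUrlEncoding: true },
};
const queryParams = [];
for (const [name, param] of Object.entries(queryParameters)) {
    if (typeof param === "string") {
        queryParams.push(`${name}=${encodeURIComponent(param)}`);
    } else if (param && typeof param === "object") {
        queryParams.push(`${name}=${param.skipUrlEncoding ? param.value : encodeURIComponent(param.value)}`);
    }
}
console.log("?" + queryParams.join("&")); // ?api-version=2021-08-06&prefix=logs/2024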
- this.query = {}; - for (const queryParamName in queryParameters) { - const queryParam = queryParameters[queryParamName]; - if (queryParam) { - if (typeof queryParam === "string") { - queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam)); - this.query[queryParamName] = encodeURIComponent(queryParam); - } - else if (typeof queryParam === "object") { - if (!queryParam.value) { - throw new Error(`options.queryParameters[${queryParamName}] is of type "object" but it does not contain a "value" property.`); - } - if (queryParam.skipUrlEncoding) { - queryParams.push(queryParamName + "=" + queryParam.value); - this.query[queryParamName] = queryParam.value; - } - else { - queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam.value)); - this.query[queryParamName] = encodeURIComponent(queryParam.value); - } - } - } - } // end-of-for - // append the queryString - this.url += queryParams.join("&"); - } - // add headers to the request if they are provided - if (options.headers) { - const headers = options.headers; - for (const headerName of Object.keys(options.headers)) { - this.headers.set(headerName, headers[headerName]); - } - } - // ensure accept-language is set correctly - if (!this.headers.get("accept-language")) { - this.headers.set("accept-language", "en-US"); - } - // ensure the request-id is set correctly - if (!this.headers.get("x-ms-client-request-id") && !options.disableClientRequestId) { - this.headers.set("x-ms-client-request-id", this.requestId); - } - // default - if (!this.headers.get("Content-Type")) { - this.headers.set("Content-Type", "application/json; charset=utf-8"); - } - // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicitly - this.body = options.body; - if (options.body !== undefined && options.body !== null) { - // body as a stream special case. set the body as-is and check for some special request headers specific to sending a stream. - if (options.bodyIsStream) { - if (!this.headers.get("Transfer-Encoding")) { - this.headers.set("Transfer-Encoding", "chunked"); - } - if (this.headers.get("Content-Type") !== "application/octet-stream") { - this.headers.set("Content-Type", "application/octet-stream"); - } - } - else { - if (options.serializationMapper) { - this.body = new Serializer(options.mappers).serialize(options.serializationMapper, options.body, "requestBody"); - } - if (!options.disableJsonStringifyOnBody) { - this.body = JSON.stringify(options.body); - } - } - } - if (options.spanOptions) { - this.spanOptions = options.spanOptions; - } - if (options.tracingContext) { - this.tracingContext = options.tracingContext; - } - this.abortSignal = options.abortSignal; - this.onDownloadProgress = options.onDownloadProgress; - this.onUploadProgress = options.onUploadProgress; - return this; - } + StorageRetryPolicyType[StorageRetryPolicyType["EXPONENTIAL"] = 0] = "EXPONENTIAL"; /** - * Clone this WebResource HTTP request object. - * @returns The clone of this WebResource HTTP request object. + * Linear retry. Retry time delay grows linearly. 
*/ - clone() { - const result = new WebResource(this.url, this.method, this.body, this.query, this.headers && this.headers.clone(), this.streamResponseBody, this.withCredentials, this.abortSignal, this.timeout, this.onUploadProgress, this.onDownloadProgress, this.proxySettings, this.keepAlive, this.decompressResponse, this.streamResponseStatusCodes); - if (this.formData) { - result.formData = this.formData; - } - if (this.operationSpec) { - result.operationSpec = this.operationSpec; - } - if (this.shouldDeserialize) { - result.shouldDeserialize = this.shouldDeserialize; - } - if (this.operationResponseGetter) { - result.operationResponseGetter = this.operationResponseGetter; - } - return result; - } -} - -// Copyright (c) Microsoft Corporation. + StorageRetryPolicyType[StorageRetryPolicyType["FIXED"] = 1] = "FIXED"; +})(exports.StorageRetryPolicyType || (exports.StorageRetryPolicyType = {})); +// Default values of StorageRetryOptions +const DEFAULT_RETRY_OPTIONS$1 = { + maxRetryDelayInMs: 120 * 1000, + maxTries: 4, + retryDelayInMs: 4 * 1000, + retryPolicyType: exports.StorageRetryPolicyType.EXPONENTIAL, + secondaryHost: "", + tryTimeoutInMs: undefined, // Use server side default timeout strategy +}; +const RETRY_ABORT_ERROR$1 = new abortController.AbortError("The operation was aborted."); /** - * A class that handles the query portion of a URLBuilder. + * Retry policy with exponential retry and linear retry implemented. */ -class URLQuery { - constructor() { - this._rawQuery = {}; - } +class StorageRetryPolicy extends BaseRequestPolicy { /** - * Get whether or not there any query parameters in this URLQuery. + * Creates an instance of RetryPolicy. + * + * @param nextPolicy - + * @param options - + * @param retryOptions - */ - any() { - return Object.keys(this._rawQuery).length > 0; + constructor(nextPolicy, options, retryOptions = DEFAULT_RETRY_OPTIONS$1) { + super(nextPolicy, options); + // Initialize retry options + this.retryOptions = { + retryPolicyType: retryOptions.retryPolicyType + ? retryOptions.retryPolicyType + : DEFAULT_RETRY_OPTIONS$1.retryPolicyType, + maxTries: retryOptions.maxTries && retryOptions.maxTries >= 1 + ? Math.floor(retryOptions.maxTries) + : DEFAULT_RETRY_OPTIONS$1.maxTries, + tryTimeoutInMs: retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0 + ? retryOptions.tryTimeoutInMs + : DEFAULT_RETRY_OPTIONS$1.tryTimeoutInMs, + retryDelayInMs: retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0 + ? Math.min(retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs + ? retryOptions.maxRetryDelayInMs + : DEFAULT_RETRY_OPTIONS$1.maxRetryDelayInMs) + : DEFAULT_RETRY_OPTIONS$1.retryDelayInMs, + maxRetryDelayInMs: retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0 + ? retryOptions.maxRetryDelayInMs + : DEFAULT_RETRY_OPTIONS$1.maxRetryDelayInMs, + secondaryHost: retryOptions.secondaryHost + ? retryOptions.secondaryHost + : DEFAULT_RETRY_OPTIONS$1.secondaryHost, + }; } /** - * Get the keys of the query string. + * Sends request. + * + * @param request - */ - keys() { - return Object.keys(this._rawQuery); + async sendRequest(request) { + return this.attemptSendRequest(request, false, 1); } /** - * Set a query parameter with the provided name and value. If the parameterValue is undefined or - * empty, then this will attempt to remove an existing query parameter with the provided - * parameterName. + * Decide and perform next retry. Won't mutate request parameter. 
+ * + * @param request - + * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then + * the resource was not found. This may be due to replication delay. So, in this + * case, we'll never try the secondary again for this operation. + * @param attempt - How many retries has been attempted to performed, starting from 1, which includes + * the attempt will be performed by this method call. */ - set(parameterName, parameterValue) { - const caseParameterValue = parameterValue; - if (parameterName) { - if (caseParameterValue !== undefined && caseParameterValue !== null) { - const newValue = Array.isArray(caseParameterValue) - ? caseParameterValue - : caseParameterValue.toString(); - this._rawQuery[parameterName] = newValue; + async attemptSendRequest(request, secondaryHas404, attempt) { + const newRequest = request.clone(); + const isPrimaryRetry = secondaryHas404 || + !this.retryOptions.secondaryHost || + !(request.method === "GET" || request.method === "HEAD" || request.method === "OPTIONS") || + attempt % 2 === 1; + if (!isPrimaryRetry) { + newRequest.url = setURLHost(newRequest.url, this.retryOptions.secondaryHost); + } + // Set the server-side timeout query parameter "timeout=[seconds]" + if (this.retryOptions.tryTimeoutInMs) { + newRequest.url = setURLParameter(newRequest.url, URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1000).toString()); + } + let response; + try { + logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); + response = await this._nextPolicy.sendRequest(newRequest); + if (!this.shouldRetry(isPrimaryRetry, attempt, response)) { + return response; } - else { - delete this._rawQuery[parameterName]; + secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404); + } + catch (err) { + logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`); + if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) { + throw err; } } + await this.delay(isPrimaryRetry, attempt, request.abortSignal); + return this.attemptSendRequest(request, secondaryHas404, ++attempt); } /** - * Get the value of the query parameter with the provided name. If no parameter exists with the - * provided parameter name, then undefined will be returned. - */ - get(parameterName) { - return parameterName ? this._rawQuery[parameterName] : undefined; - } - /** - * Get the string representation of this query. The return value will not start with a "?". + * Decide whether to retry according to last HTTP response and retry counters. 
+ * + * @param isPrimaryRetry - + * @param attempt - + * @param response - + * @param err - */ - toString() { - let result = ""; - for (const parameterName in this._rawQuery) { - if (result) { - result += "&"; - } - const parameterValue = this._rawQuery[parameterName]; - if (Array.isArray(parameterValue)) { - const parameterStrings = []; - for (const parameterValueElement of parameterValue) { - parameterStrings.push(`${parameterName}=${parameterValueElement}`); + shouldRetry(isPrimaryRetry, attempt, response, err) { + if (attempt >= this.retryOptions.maxTries) { + logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions + .maxTries}, no further try.`); + return false; + } + // Handle network failures, you may need to customize the list when you implement + // your own http client + const retriableErrors = [ + "ETIMEDOUT", + "ESOCKETTIMEDOUT", + "ECONNREFUSED", + "ECONNRESET", + "ENOENT", + "ENOTFOUND", + "TIMEOUT", + "EPIPE", + "REQUEST_SEND_ERROR", // For default xhr based http client provided in ms-rest-js + ]; + if (err) { + for (const retriableError of retriableErrors) { + if (err.name.toUpperCase().includes(retriableError) || + err.message.toUpperCase().includes(retriableError) || + (err.code && err.code.toString().toUpperCase() === retriableError)) { + logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); + return true; } - result += parameterStrings.join("&"); - } - else { - result += `${parameterName}=${parameterValue}`; } } - return result; - } - /** - * Parse a URLQuery from the provided text. - */ - static parse(text) { - const result = new URLQuery(); - if (text) { - if (text.startsWith("?")) { - text = text.substring(1); - } - let currentState = "ParameterName"; - let parameterName = ""; - let parameterValue = ""; - for (let i = 0; i < text.length; ++i) { - const currentCharacter = text[i]; - switch (currentState) { - case "ParameterName": - switch (currentCharacter) { - case "=": - currentState = "ParameterValue"; - break; - case "&": - parameterName = ""; - parameterValue = ""; - break; - default: - parameterName += currentCharacter; - break; - } - break; - case "ParameterValue": - switch (currentCharacter) { - case "&": - result.set(parameterName, parameterValue); - parameterName = ""; - parameterValue = ""; - currentState = "ParameterName"; - break; - default: - parameterValue += currentCharacter; - break; - } - break; - default: - throw new Error("Unrecognized URLQuery parse state: " + currentState); - } + // If attempt was against the secondary & it returned a StatusNotFound (404), then + // the resource was not found. This may be due to replication delay. So, in this + // case, we'll never try the secondary again for this operation. + if (response || err) { + const statusCode = response ? response.status : err ? err.statusCode : 0; + if (!isPrimaryRetry && statusCode === 404) { + logger.info(`RetryPolicy: Secondary access with 404, will retry.`); + return true; } - if (currentState === "ParameterValue") { - result.set(parameterName, parameterValue); + // Server internal error or server timeout + if (statusCode === 503 || statusCode === 500) { + logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); + return true; } } - return result; + if ((err === null || err === void 0 ? void 0 : err.code) === "PARSE_ERROR" && (err === null || err === void 0 ? 
void 0 : err.message.startsWith(`Error "Error: Unclosed root tag`))) { + logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); + return true; + } + return false; } -} -/** - * A class that handles creating, modifying, and parsing URLs. - */ -class URLBuilder { /** - * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL - * (such as a host, port, path, or query), those parts will be added to this URL as well. + * Delay a calculated time between retries. + * + * @param isPrimaryRetry - + * @param attempt - + * @param abortSignal - */ - setScheme(scheme) { - if (!scheme) { - this._scheme = undefined; + async delay(isPrimaryRetry, attempt, abortSignal) { + let delayTimeInMs = 0; + if (isPrimaryRetry) { + switch (this.retryOptions.retryPolicyType) { + case exports.StorageRetryPolicyType.EXPONENTIAL: + delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs); + break; + case exports.StorageRetryPolicyType.FIXED: + delayTimeInMs = this.retryOptions.retryDelayInMs; + break; + } } else { - this.set(scheme, "SCHEME"); + delayTimeInMs = Math.random() * 1000; } + logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); + return delay(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR$1); } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * StorageRetryPolicyFactory is a factory class helping generating {@link StorageRetryPolicy} objects. + */ +class StorageRetryPolicyFactory { /** - * Get the scheme that has been set in this URL. + * Creates an instance of StorageRetryPolicyFactory. + * @param retryOptions - */ - getScheme() { - return this._scheme; + constructor(retryOptions) { + this.retryOptions = retryOptions; } /** - * Set the host for this URL. If the provided host contains other parts of a URL (such as a - * port, path, or query), those parts will be added to this URL as well. + * Creates a StorageRetryPolicy object. + * + * @param nextPolicy - + * @param options - */ - setHost(host) { - if (!host) { - this._host = undefined; - } - else { - this.set(host, "SCHEME_OR_HOST"); - } + create(nextPolicy, options) { + return new StorageRetryPolicy(nextPolicy, options, this.retryOptions); } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Credential policy used to sign HTTP(S) requests before sending. This is an + * abstract class. + */ +class CredentialPolicy extends BaseRequestPolicy { /** - * Get the host that has been set in this URL. + * Sends out request. + * + * @param request - */ - getHost() { - return this._host; + sendRequest(request) { + return this._nextPolicy.sendRequest(this.signRequest(request)); } /** - * Set the port for this URL. If the provided port contains other parts of a URL (such as a - * path or query), those parts will be added to this URL as well. + * Child classes must implement this method with request signing. This method + * will be executed in {@link sendRequest}. + * + * @param request - */ - setPort(port) { - if (port === undefined || port === null || port === "") { - this._port = undefined; - } - else { - this.set(port.toString(), "PORT"); - } + signRequest(request) { + // Child classes must override this method with request signing. This method + // will be executed in sendRequest(). + return request; } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
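// [Editor's note] A minimal, runnable sketch (not part of the bundle) of the
// delay schedule implemented by StorageRetryPolicy.delay() above, using the
// default retryDelayInMs = 4000 and maxRetryDelayInMs = 120000. Retries
// against the secondary endpoint instead sleep a random 0-1000 ms.
const retryDelayInMs = 4 * 1000;
const maxRetryDelayInMs = 120 * 1000;
for (let attempt = 1; attempt <= 5; attempt++) {
    // EXPONENTIAL policy: (2^(attempt-1) - 1) * retryDelayInMs, capped at maxRetryDelayInMs
    const delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * retryDelayInMs, maxRetryDelayInMs);
    console.log(`attempt ${attempt}: wait ${delayTimeInMs} ms`); // 0, 4000, 12000, 28000, 60000 ms
}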
+/** + * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP request with a shared key. + */ +class StorageSharedKeyCredentialPolicy extends CredentialPolicy { /** - * Get the port that has been set in this URL. + * Creates an instance of StorageSharedKeyCredentialPolicy. + * @param nextPolicy - + * @param options - + * @param factory - */ - getPort() { - return this._port; + constructor(nextPolicy, options, factory) { + super(nextPolicy, options); + this.factory = factory; } /** - * Set the path for this URL. If the provided path contains a query, then it will be added to - * this URL as well. + * Signs request. + * + * @param request - */ - setPath(path) { - if (!path) { - this._path = undefined; - } - else { - const schemeIndex = path.indexOf("://"); - if (schemeIndex !== -1) { - const schemeStart = path.lastIndexOf("/", schemeIndex); - // Make sure to only grab the URL part of the path before setting the state back to SCHEME - // this will handle cases such as "/a/b/c/https://microsoft.com" => "https://microsoft.com" - this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), "SCHEME"); - } - else { - this.set(path, "PATH"); - } - } - } - /** - * Append the provided path to this URL's existing path. If the provided path contains a query, - * then it will be added to this URL as well. - */ - appendPath(path) { - if (path) { - let currentPath = this.getPath(); - if (currentPath) { - if (!currentPath.endsWith("/")) { - currentPath += "/"; - } - if (path.startsWith("/")) { - path = path.substring(1); - } - path = currentPath + path; - } - this.set(path, "PATH"); + signRequest(request) { + request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString()); + if (request.body && + (typeof request.body === "string" || request.body !== undefined) && + request.body.length > 0) { + request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); } + const stringToSign = [ + request.method.toUpperCase(), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE), + this.getHeaderValueToSign(request, HeaderConstants.DATE), + this.getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE), + this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH), + this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH), + this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), + this.getHeaderValueToSign(request, HeaderConstants.RANGE), + ].join("\n") + + "\n" + + this.getCanonicalizedHeadersString(request) + + this.getCanonicalizedResourceString(request); + const signature = this.factory.computeHMACSHA256(stringToSign); + request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${this.factory.accountName}:${signature}`); + // console.log(`[URL]:${request.url}`); + // console.log(`[HEADERS]:${request.headers.toString()}`); + // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`); + // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`); + return request; } /** - * Get the path that has been set in this URL. - */ - getPath() { - return this._path; - } - /** - * Set the query in this URL. + * Retrieve header value according to shared key sign rules. 
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + * + * @param request - + * @param headerName - */ - setQuery(query) { - if (!query) { - this._query = undefined; + getHeaderValueToSign(request, headerName) { + const value = request.headers.get(headerName); + if (!value) { + return ""; } - else { - this._query = URLQuery.parse(query); + // When using version 2015-02-21 or later, if Content-Length is zero, then + // set the Content-Length part of the StringToSign to an empty string. + // https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") { + return ""; } + return value; } /** - * Set a query parameter with the provided name and value in this URL's query. If the provided - * query parameter value is undefined or empty, then the query parameter will be removed if it - * existed. + * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: + * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. + * 2. Convert each HTTP header name to lowercase. + * 3. Sort the headers lexicographically by header name, in ascending order. + * Each header may appear only once in the string. + * 4. Replace any linear whitespace in the header value with a single space. + * 5. Trim any whitespace around the colon in the header. + * 6. Finally, append a new-line character to each canonicalized header in the resulting list. + * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. + * + * @param request - */ - setQueryParameter(queryParameterName, queryParameterValue) { - if (queryParameterName) { - if (!this._query) { - this._query = new URLQuery(); + getCanonicalizedHeadersString(request) { + let headersArray = request.headers.headersArray().filter((value) => { + return value.name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE); + }); + headersArray.sort((a, b) => { + return a.name.toLowerCase().localeCompare(b.name.toLowerCase()); + }); + // Remove duplicate headers + headersArray = headersArray.filter((value, index, array) => { + if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { + return false; } - this._query.set(queryParameterName, queryParameterValue); - } - } - /** - * Get the value of the query parameter with the provided query parameter name. If no query - * parameter exists with the provided name, then undefined will be returned. - */ - getQueryParameterValue(queryParameterName) { - return this._query ? this._query.get(queryParameterName) : undefined; - } - /** - * Get the query in this URL. - */ - getQuery() { - return this._query ? this._query.toString() : undefined; + return true; + }); + let canonicalizedHeadersStringToSign = ""; + headersArray.forEach((header) => { + canonicalizedHeadersStringToSign += `${header.name + .toLowerCase() + .trimRight()}:${header.value.trimLeft()}\n`; + }); + return canonicalizedHeadersStringToSign; } /** - * Set the parts of this URL by parsing the provided text using the provided startState. + * Retrieves the webResource canonicalized resource string. 
+ * + * @param request - */ - set(text, startState) { - const tokenizer = new URLTokenizer(text, startState); - while (tokenizer.next()) { - const token = tokenizer.current(); - let tokenPath; - if (token) { - switch (token.type) { - case "SCHEME": - this._scheme = token.text || undefined; - break; - case "HOST": - this._host = token.text || undefined; - break; - case "PORT": - this._port = token.text || undefined; - break; - case "PATH": - tokenPath = token.text || undefined; - if (!this._path || this._path === "/" || tokenPath !== "/") { - this._path = tokenPath; - } - break; - case "QUERY": - this._query = URLQuery.parse(token.text); - break; - default: - throw new Error(`Unrecognized URLTokenType: ${token.type}`); + getCanonicalizedResourceString(request) { + const path = getURLPath(request.url) || "/"; + let canonicalizedResourceString = ""; + canonicalizedResourceString += `/${this.factory.accountName}${path}`; + const queries = getURLQueries(request.url); + const lowercaseQueries = {}; + if (queries) { + const queryKeys = []; + for (const key in queries) { + if (Object.prototype.hasOwnProperty.call(queries, key)) { + const lowercaseKey = key.toLowerCase(); + lowercaseQueries[lowercaseKey] = queries[key]; + queryKeys.push(lowercaseKey); } } + queryKeys.sort(); + for (const key of queryKeys) { + canonicalizedResourceString += `\n${key}:${decodeURIComponent(lowercaseQueries[key])}`; + } } + return canonicalizedResourceString; } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Credential is an abstract class for Azure Storage HTTP requests signing. This + * class will host an credentialPolicyCreator factory which generates CredentialPolicy. + */ +class Credential { /** - * Serializes the URL as a string. - * @returns the URL as a string. + * Creates a RequestPolicy object. + * + * @param _nextPolicy - + * @param _options - */ - toString() { - let result = ""; - if (this._scheme) { - result += `${this._scheme}://`; - } - if (this._host) { - result += this._host; - } - if (this._port) { - result += `:${this._port}`; - } - if (this._path) { - if (!this._path.startsWith("/")) { - result += "/"; - } - result += this._path; - } - if (this._query && this._query.any()) { - result += `?${this._query.toString()}`; - } - return result; + create(_nextPolicy, _options) { + throw new Error("Method should be implemented in children classes."); } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * StorageSharedKeyCredential for account key authorization of Azure Storage service. + */ +class StorageSharedKeyCredential extends Credential { /** - * If the provided searchValue is found in this URLBuilder, then replace it with the provided - * replaceValue. + * Creates an instance of StorageSharedKeyCredential. + * @param accountName - + * @param accountKey - */ - replaceAll(searchValue, replaceValue) { - if (searchValue) { - this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue)); - this.setHost(replaceAll(this.getHost(), searchValue, replaceValue)); - this.setPort(replaceAll(this.getPort(), searchValue, replaceValue)); - this.setPath(replaceAll(this.getPath(), searchValue, replaceValue)); - this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue)); - } + constructor(accountName, accountKey) { + super(); + this.accountName = accountName; + this.accountKey = Buffer.from(accountKey, "base64"); } /** - * Parses a given string URL into a new {@link URLBuilder}. 
+ * Creates a StorageSharedKeyCredentialPolicy object. + * + * @param nextPolicy - + * @param options - */ - static parse(text) { - const result = new URLBuilder(); - result.set(text, "SCHEME_OR_HOST"); - return result; - } -} -class URLToken { - constructor(text, type) { - this.text = text; - this.type = type; - } - static scheme(text) { - return new URLToken(text, "SCHEME"); - } - static host(text) { - return new URLToken(text, "HOST"); - } - static port(text) { - return new URLToken(text, "PORT"); - } - static path(text) { - return new URLToken(text, "PATH"); + create(nextPolicy, options) { + return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this); } - static query(text) { - return new URLToken(text, "QUERY"); + /** + * Generates a hash signature for an HTTP request or for a SAS. + * + * @param stringToSign - + */ + computeHMACSHA256(stringToSign) { + return crypto.createHmac("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); } } + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * Get whether or not the provided character (single character string) is an alphanumeric (letter or - * digit) character. - */ -function isAlphaNumericCharacter(character) { - const characterCode = character.charCodeAt(0); - return ((48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ || - (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ || - (97 /* 'a' */ <= characterCode && characterCode <= 122) /* 'z' */); -} -/** - * A class that tokenizes URL strings. + * AnonymousCredentialPolicy is used with HTTP(S) requests that read public resources + * or for use with Shared Access Signatures (SAS). */ -class URLTokenizer { - constructor(_text, state) { - this._text = _text; - this._textLength = _text ? _text.length : 0; - this._currentState = state !== undefined && state !== null ? state : "SCHEME_OR_HOST"; - this._currentIndex = 0; - } +class AnonymousCredentialPolicy extends CredentialPolicy { /** - * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer - * hasn't started or has finished tokenizing. + * Creates an instance of AnonymousCredentialPolicy. + * @param nextPolicy - + * @param options - */ - current() { - return this._currentToken; + // The base class has a protected constructor. Adding a public one to enable constructing of this class. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * AnonymousCredential provides a credentialPolicyCreator member used to create + * AnonymousCredentialPolicy objects. AnonymousCredentialPolicy is used with + * HTTP(S) requests that read public resources or for use with Shared Access + * Signatures (SAS). + */ +class AnonymousCredential extends Credential { /** - * Advance to the next URLToken and return whether or not a URLToken was found. + * Creates an {@link AnonymousCredentialPolicy} object. 
+ * + * @param nextPolicy - + * @param options - */ - next() { - if (!hasCurrentCharacter(this)) { - this._currentToken = undefined; - } - else { - switch (this._currentState) { - case "SCHEME": - nextScheme(this); - break; - case "SCHEME_OR_HOST": - nextSchemeOrHost(this); - break; - case "HOST": - nextHost(this); - break; - case "PORT": - nextPort(this); - break; - case "PATH": - nextPath(this); - break; - case "QUERY": - nextQuery(this); - break; - default: - throw new Error(`Unrecognized URLTokenizerState: ${this._currentState}`); - } - } - return !!this._currentToken; + create(nextPolicy, options) { + return new AnonymousCredentialPolicy(nextPolicy, options); } } -/** - * Read the remaining characters from this Tokenizer's character stream. - */ -function readRemaining(tokenizer) { - let result = ""; - if (tokenizer._currentIndex < tokenizer._textLength) { - result = tokenizer._text.substring(tokenizer._currentIndex); - tokenizer._currentIndex = tokenizer._textLength; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +let _defaultHttpClient; +function getCachedDefaultHttpClient() { + if (!_defaultHttpClient) { + _defaultHttpClient = coreRestPipeline.createDefaultHttpClient(); } - return result; + return _defaultHttpClient; } + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * Whether or not this URLTokenizer has a current character. + * The programmatic identifier of the StorageBrowserPolicy. */ -function hasCurrentCharacter(tokenizer) { - return tokenizer._currentIndex < tokenizer._textLength; -} +const storageBrowserPolicyName = "storageBrowserPolicy"; /** - * Get the character in the text string at the current index. + * storageBrowserPolicy is a policy used to prevent browsers from caching requests + * and to remove cookies and explicit content-length headers. */ -function getCurrentCharacter(tokenizer) { - return tokenizer._text[tokenizer._currentIndex]; +function storageBrowserPolicy() { + return { + name: storageBrowserPolicyName, + async sendRequest(request, next) { + if (coreUtil.isNode) { + return next(request); + } + if (request.method === "GET" || request.method === "HEAD") { + request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, new Date().getTime().toString()); + } + request.headers.delete(HeaderConstants.COOKIE); + // According to XHR standards, content-length should be fully controlled by browsers + request.headers.delete(HeaderConstants.CONTENT_LENGTH); + return next(request); + }, + }; } + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * Advance to the character in text that is "step" characters ahead. If no step value is provided, - * then step will default to 1. + * Name of the {@link storageRetryPolicy} */ -function nextCharacter(tokenizer, step) { - if (hasCurrentCharacter(tokenizer)) { - if (!step) { - step = 1; - } - tokenizer._currentIndex += step; - } -} +const storageRetryPolicyName = "storageRetryPolicy"; /** - * Starting with the current character, peek "charactersToPeek" number of characters ahead in this - * Tokenizer's stream of characters. + * RetryPolicy types. 
*/ -function peekCharacters(tokenizer, charactersToPeek) { - let endIndex = tokenizer._currentIndex + charactersToPeek; - if (tokenizer._textLength < endIndex) { - endIndex = tokenizer._textLength; - } - return tokenizer._text.substring(tokenizer._currentIndex, endIndex); -} +var StorageRetryPolicyType; +(function (StorageRetryPolicyType) { + /** + * Exponential retry. Retry time delay grows exponentially. + */ + StorageRetryPolicyType[StorageRetryPolicyType["EXPONENTIAL"] = 0] = "EXPONENTIAL"; + /** + * Linear retry. Retry time delay grows linearly. + */ + StorageRetryPolicyType[StorageRetryPolicyType["FIXED"] = 1] = "FIXED"; +})(StorageRetryPolicyType || (StorageRetryPolicyType = {})); +// Default values of StorageRetryOptions +const DEFAULT_RETRY_OPTIONS = { + maxRetryDelayInMs: 120 * 1000, + maxTries: 4, + retryDelayInMs: 4 * 1000, + retryPolicyType: StorageRetryPolicyType.EXPONENTIAL, + secondaryHost: "", + tryTimeoutInMs: undefined, // Use server side default timeout strategy +}; +const retriableErrors = [ + "ETIMEDOUT", + "ESOCKETTIMEDOUT", + "ECONNREFUSED", + "ECONNRESET", + "ENOENT", + "ENOTFOUND", + "TIMEOUT", + "EPIPE", + "REQUEST_SEND_ERROR", +]; +const RETRY_ABORT_ERROR = new abortController.AbortError("The operation was aborted."); /** - * Read characters from this Tokenizer until the end of the stream or until the provided condition - * is false when provided the current character. + * Retry policy with exponential retry and linear retry implemented. */ -function readWhile(tokenizer, condition) { - let result = ""; - while (hasCurrentCharacter(tokenizer)) { - const currentCharacter = getCurrentCharacter(tokenizer); - if (!condition(currentCharacter)) { - break; +function storageRetryPolicy(options = {}) { + var _a, _b, _c, _d, _e, _f; + const retryPolicyType = (_a = options.retryPolicyType) !== null && _a !== void 0 ? _a : DEFAULT_RETRY_OPTIONS.retryPolicyType; + const maxTries = (_b = options.maxTries) !== null && _b !== void 0 ? _b : DEFAULT_RETRY_OPTIONS.maxTries; + const retryDelayInMs = (_c = options.retryDelayInMs) !== null && _c !== void 0 ? _c : DEFAULT_RETRY_OPTIONS.retryDelayInMs; + const maxRetryDelayInMs = (_d = options.maxRetryDelayInMs) !== null && _d !== void 0 ? _d : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs; + const secondaryHost = (_e = options.secondaryHost) !== null && _e !== void 0 ? _e : DEFAULT_RETRY_OPTIONS.secondaryHost; + const tryTimeoutInMs = (_f = options.tryTimeoutInMs) !== null && _f !== void 0 ? _f : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs; + function shouldRetry({ isPrimaryRetry, attempt, response, error, }) { + var _a, _b; + if (attempt >= maxTries) { + logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${maxTries}, no further try.`); + return false; + } + if (error) { + for (const retriableError of retriableErrors) { + if (error.name.toUpperCase().includes(retriableError) || + error.message.toUpperCase().includes(retriableError) || + (error.code && error.code.toString().toUpperCase() === retriableError)) { + logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); + return true; + } + } + if ((error === null || error === void 0 ? void 0 : error.code) === "PARSE_ERROR" && + (error === null || error === void 0 ? 
void 0 : error.message.startsWith(`Error "Error: Unclosed root tag`))) { + logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); + return true; + } + } + // If attempt was against the secondary & it returned a StatusNotFound (404), then + // the resource was not found. This may be due to replication delay. So, in this + // case, we'll never try the secondary again for this operation. + if (response || error) { + const statusCode = (_b = (_a = response === null || response === void 0 ? void 0 : response.status) !== null && _a !== void 0 ? _a : error === null || error === void 0 ? void 0 : error.statusCode) !== null && _b !== void 0 ? _b : 0; + if (!isPrimaryRetry && statusCode === 404) { + logger.info(`RetryPolicy: Secondary access with 404, will retry.`); + return true; + } + // Server internal error or server timeout + if (statusCode === 503 || statusCode === 500) { + logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); + return true; + } + } + return false; + } + function calculateDelay(isPrimaryRetry, attempt) { + let delayTimeInMs = 0; + if (isPrimaryRetry) { + switch (retryPolicyType) { + case StorageRetryPolicyType.EXPONENTIAL: + delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * retryDelayInMs, maxRetryDelayInMs); + break; + case StorageRetryPolicyType.FIXED: + delayTimeInMs = retryDelayInMs; + break; + } } else { - result += currentCharacter; - nextCharacter(tokenizer); + delayTimeInMs = Math.random() * 1000; } + logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); + return delayTimeInMs; } - return result; + return { + name: storageRetryPolicyName, + async sendRequest(request, next) { + // Set the server-side timeout query parameter "timeout=[seconds]" + if (tryTimeoutInMs) { + request.url = setURLParameter(request.url, URLConstants.Parameters.TIMEOUT, String(Math.floor(tryTimeoutInMs / 1000))); + } + const primaryUrl = request.url; + const secondaryUrl = secondaryHost ? setURLHost(request.url, secondaryHost) : undefined; + let secondaryHas404 = false; + let attempt = 1; + let retryAgain = true; + let response; + let error; + while (retryAgain) { + const isPrimaryRetry = secondaryHas404 || + !secondaryUrl || + !["GET", "HEAD", "OPTIONS"].includes(request.method) || + attempt % 2 === 1; + request.url = isPrimaryRetry ? primaryUrl : secondaryUrl; + response = undefined; + error = undefined; + try { + logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); + response = await next(request); + secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404); + } + catch (e) { + if (coreRestPipeline.isRestError(e)) { + logger.error(`RetryPolicy: Caught error, message: ${e.message}, code: ${e.code}`); + error = e; + } + else { + logger.error(`RetryPolicy: Caught error, message: ${coreUtil.getErrorMessage(e)}`); + throw e; + } + } + retryAgain = shouldRetry({ isPrimaryRetry, attempt, response, error }); + if (retryAgain) { + await delay(calculateDelay(isPrimaryRetry, attempt), request.abortSignal, RETRY_ABORT_ERROR); + } + attempt++; + } + if (response) { + return response; + } + throw error !== null && error !== void 0 ? error : new coreRestPipeline.RestError("RetryPolicy failed without known error."); + }, + }; } + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * Read characters from this Tokenizer until a non-alphanumeric character or the end of the - * character stream is reached. 
+ * The programmatic identifier of the storageSharedKeyCredentialPolicy. */ -function readWhileLetterOrDigit(tokenizer) { - return readWhile(tokenizer, (character) => isAlphaNumericCharacter(character)); -} +const storageSharedKeyCredentialPolicyName = "storageSharedKeyCredentialPolicy"; /** - * Read characters from this Tokenizer until one of the provided terminating characters is read or - * the end of the character stream is reached. + * storageSharedKeyCredentialPolicy handles signing requests using storage account keys. */ -function readUntilCharacter(tokenizer, ...terminatingCharacters) { - return readWhile(tokenizer, (character) => terminatingCharacters.indexOf(character) === -1); -} -function nextScheme(tokenizer) { - const scheme = readWhileLetterOrDigit(tokenizer); - tokenizer._currentToken = URLToken.scheme(scheme); - if (!hasCurrentCharacter(tokenizer)) { - tokenizer._currentState = "DONE"; - } - else { - tokenizer._currentState = "HOST"; - } -} -function nextSchemeOrHost(tokenizer) { - const schemeOrHost = readUntilCharacter(tokenizer, ":", "/", "?"); - if (!hasCurrentCharacter(tokenizer)) { - tokenizer._currentToken = URLToken.host(schemeOrHost); - tokenizer._currentState = "DONE"; - } - else if (getCurrentCharacter(tokenizer) === ":") { - if (peekCharacters(tokenizer, 3) === "://") { - tokenizer._currentToken = URLToken.scheme(schemeOrHost); - tokenizer._currentState = "HOST"; - } - else { - tokenizer._currentToken = URLToken.host(schemeOrHost); - tokenizer._currentState = "PORT"; +function storageSharedKeyCredentialPolicy(options) { + function signRequest(request) { + request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString()); + if (request.body && + (typeof request.body === "string" || Buffer.isBuffer(request.body)) && + request.body.length > 0) { + request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); } + const stringToSign = [ + request.method.toUpperCase(), + getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE), + getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING), + getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH), + getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5), + getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE), + getHeaderValueToSign(request, HeaderConstants.DATE), + getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE), + getHeaderValueToSign(request, HeaderConstants.IF_MATCH), + getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH), + getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), + getHeaderValueToSign(request, HeaderConstants.RANGE), + ].join("\n") + + "\n" + + getCanonicalizedHeadersString(request) + + getCanonicalizedResourceString(request); + const signature = crypto.createHmac("sha256", options.accountKey) + .update(stringToSign, "utf8") + .digest("base64"); + request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${options.accountName}:${signature}`); + // console.log(`[URL]:${request.url}`); + // console.log(`[HEADERS]:${request.headers.toString()}`); + // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`); + // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`); } - else { - tokenizer._currentToken = URLToken.host(schemeOrHost); - if (getCurrentCharacter(tokenizer) === "/") { - tokenizer._currentState = "PATH"; + /** + * Retrieve header value according to shared key sign rules. 
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + */ + function getHeaderValueToSign(request, headerName) { + const value = request.headers.get(headerName); + if (!value) { + return ""; } - else { - tokenizer._currentState = "QUERY"; + // When using version 2015-02-21 or later, if Content-Length is zero, then + // set the Content-Length part of the StringToSign to an empty string. + // https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") { + return ""; } + return value; } -} -function nextHost(tokenizer) { - if (peekCharacters(tokenizer, 3) === "://") { - nextCharacter(tokenizer, 3); - } - const host = readUntilCharacter(tokenizer, ":", "/", "?"); - tokenizer._currentToken = URLToken.host(host); - if (!hasCurrentCharacter(tokenizer)) { - tokenizer._currentState = "DONE"; - } - else if (getCurrentCharacter(tokenizer) === ":") { - tokenizer._currentState = "PORT"; - } - else if (getCurrentCharacter(tokenizer) === "/") { - tokenizer._currentState = "PATH"; - } - else { - tokenizer._currentState = "QUERY"; - } -} -function nextPort(tokenizer) { - if (getCurrentCharacter(tokenizer) === ":") { - nextCharacter(tokenizer); - } - const port = readUntilCharacter(tokenizer, "/", "?"); - tokenizer._currentToken = URLToken.port(port); - if (!hasCurrentCharacter(tokenizer)) { - tokenizer._currentState = "DONE"; - } - else if (getCurrentCharacter(tokenizer) === "/") { - tokenizer._currentState = "PATH"; - } - else { - tokenizer._currentState = "QUERY"; - } -} -function nextPath(tokenizer) { - const path = readUntilCharacter(tokenizer, "?"); - tokenizer._currentToken = URLToken.path(path); - if (!hasCurrentCharacter(tokenizer)) { - tokenizer._currentState = "DONE"; - } - else { - tokenizer._currentState = "QUERY"; + /** + * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: + * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. + * 2. Convert each HTTP header name to lowercase. + * 3. Sort the headers lexicographically by header name, in ascending order. + * Each header may appear only once in the string. + * 4. Replace any linear whitespace in the header value with a single space. + * 5. Trim any whitespace around the colon in the header. + * 6. Finally, append a new-line character to each canonicalized header in the resulting list. + * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. 
+ * + */ + function getCanonicalizedHeadersString(request) { + let headersArray = []; + for (const [name, value] of request.headers) { + if (name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE)) { + headersArray.push({ name, value }); + } + } + headersArray.sort((a, b) => { + return a.name.toLowerCase().localeCompare(b.name.toLowerCase()); + }); + // Remove duplicate headers + headersArray = headersArray.filter((value, index, array) => { + if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { + return false; + } + return true; + }); + let canonicalizedHeadersStringToSign = ""; + headersArray.forEach((header) => { + canonicalizedHeadersStringToSign += `${header.name + .toLowerCase() + .trimRight()}:${header.value.trimLeft()}\n`; + }); + return canonicalizedHeadersStringToSign; } -} -function nextQuery(tokenizer) { - if (getCurrentCharacter(tokenizer) === "?") { - nextCharacter(tokenizer); + function getCanonicalizedResourceString(request) { + const path = getURLPath(request.url) || "/"; + let canonicalizedResourceString = ""; + canonicalizedResourceString += `/${options.accountName}${path}`; + const queries = getURLQueries(request.url); + const lowercaseQueries = {}; + if (queries) { + const queryKeys = []; + for (const key in queries) { + if (Object.prototype.hasOwnProperty.call(queries, key)) { + const lowercaseKey = key.toLowerCase(); + lowercaseQueries[lowercaseKey] = queries[key]; + queryKeys.push(lowercaseKey); + } + } + queryKeys.sort(); + for (const key of queryKeys) { + canonicalizedResourceString += `\n${key}:${decodeURIComponent(lowercaseQueries[key])}`; + } + } + return canonicalizedResourceString; } - const query = readRemaining(tokenizer); - tokenizer._currentToken = URLToken.query(query); - tokenizer._currentState = "DONE"; + return { + name: storageSharedKeyCredentialPolicyName, + async sendRequest(request, next) { + signRequest(request); + return next(request); + }, + }; } // Copyright (c) Microsoft Corporation. -function createProxyAgent(requestUrl, proxySettings, headers) { - const host = URLBuilder.parse(proxySettings.host).getHost(); - if (!host) { - throw new Error("Expecting a non-empty host in proxy settings."); - } - if (!isValidPort(proxySettings.port)) { - throw new Error("Expecting a valid port number in the range of [0, 65535] in proxy settings."); - } - const tunnelOptions = { - proxy: { - host: host, - port: proxySettings.port, - headers: (headers && headers.rawHeaders()) || {}, - }, - }; - if (proxySettings.username && proxySettings.password) { - tunnelOptions.proxy.proxyAuth = `${proxySettings.username}:${proxySettings.password}`; +// Licensed under the MIT license. +/** + * StorageBrowserPolicy will handle differences between Node.js and browser runtime, including: + * + * 1. Browsers cache GET/HEAD requests by adding conditional headers such as 'IF_MODIFIED_SINCE'. + * StorageBrowserPolicy is a policy used to add a timestamp query to GET/HEAD request URL + * thus avoid the browser cache. + * + * 2. Remove cookie header for security + * + * 3. Remove content-length header to avoid browsers warning + */ +class StorageBrowserPolicy extends BaseRequestPolicy { + /** + * Creates an instance of StorageBrowserPolicy. + * @param nextPolicy - + * @param options - + */ + // The base class has a protected constructor. Adding a public one to enable constructing of this class. 
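// [Editor's note] A minimal, runnable sketch (not part of the bundle) of the
// cache-busting this policy applies to GET/HEAD requests in browsers: a
// timestamp query parameter (URLConstants.Parameters.FORCE_BROWSER_NO_CACHE
// in the real code; the "_cachebust" name below is a stand-in) defeats the
// browser cache, and the cookie/content-length headers are dropped.
const url = new URL("https://account.blob.core.windows.net/container/blob");
url.searchParams.set("_cachebust", Date.now().toString());
console.log(url.toString()); // ...blob?_cachebust=<epoch-millis>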
+ /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); } - else if (proxySettings.username) { - tunnelOptions.proxy.proxyAuth = `${proxySettings.username}`; + /** + * Sends out request. + * + * @param request - + */ + async sendRequest(request) { + if (coreUtil.isNode) { + return this._nextPolicy.sendRequest(request); + } + if (request.method.toUpperCase() === "GET" || request.method.toUpperCase() === "HEAD") { + request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, new Date().getTime().toString()); + } + request.headers.remove(HeaderConstants.COOKIE); + // According to XHR standards, content-length should be fully controlled by browsers + request.headers.remove(HeaderConstants.CONTENT_LENGTH); + return this._nextPolicy.sendRequest(request); } - const isRequestHttps = isUrlHttps(requestUrl); - const isProxyHttps = isUrlHttps(proxySettings.host); - const proxyAgent = { - isHttps: isRequestHttps, - agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions), - }; - return proxyAgent; -} -function isUrlHttps(url) { - const urlScheme = URLBuilder.parse(url).getScheme() || ""; - return urlScheme.toLowerCase() === "https"; } -function createTunnel(isRequestHttps, isProxyHttps, tunnelOptions) { - if (isRequestHttps && isProxyHttps) { - return tunnel__namespace.httpsOverHttps(tunnelOptions); - } - else if (isRequestHttps && !isProxyHttps) { - return tunnel__namespace.httpsOverHttp(tunnelOptions); - } - else if (!isRequestHttps && isProxyHttps) { - return tunnel__namespace.httpOverHttps(tunnelOptions); - } - else { - return tunnel__namespace.httpOverHttp(tunnelOptions); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * StorageBrowserPolicyFactory is a factory class helping generating StorageBrowserPolicy objects. + */ +class StorageBrowserPolicyFactory { + /** + * Creates a StorageBrowserPolicyFactory object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageBrowserPolicy(nextPolicy, options); } } -function isValidPort(port) { - // any port in 0-65535 range is valid (RFC 793) even though almost all implementations - // will reserve 0 for a specific purpose, and a range of numbers for ephemeral ports - return 0 <= port && port <= 65535; -} // Copyright (c) Microsoft Corporation. 
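// [Editor's note] A minimal, runnable sketch (not part of the bundle) of the
// shared-key signing performed by storageSharedKeyCredentialPolicy earlier in
// this bundle: x-ms-* headers are lower-cased, sorted, de-duplicated, and
// joined as "name:value\n"; that string follows the method plus eleven
// standard header values, then the canonicalized resource, and the whole
// string-to-sign is HMAC-SHA256 signed with the account key. The header
// values, resource path, and "mock-account-key" below are placeholders.
const crypto = require("crypto");
const headers = [
    { name: "x-ms-version", value: "2023-11-03" },
    { name: "X-MS-Date", value: "Tue, 01 Jan 2030 00:00:00 GMT" },
];
const canonicalizedHeaders = headers
    .map((h) => ({ name: h.name.toLowerCase().trimRight(), value: h.value.trimLeft() }))
    .sort((a, b) => a.name.localeCompare(b.name))
    .map((h) => `${h.name}:${h.value}\n`)
    .join("");
const stringToSign = ["GET", "", "", "", "", "", "", "", "", "", "", ""].join("\n") +
    "\n" + canonicalizedHeaders + "/account/container/blob";
const signature = crypto.createHmac("sha256", "mock-account-key").update(stringToSign, "utf8").digest("base64");
console.log(signature); // base64 signature used in the Authorization header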
-const RedactedString = "REDACTED"; -const defaultAllowedHeaderNames = [ - "x-ms-client-request-id", - "x-ms-return-client-request-id", - "x-ms-useragent", - "x-ms-correlation-request-id", - "x-ms-request-id", - "client-request-id", - "ms-cv", - "return-client-request-id", - "traceparent", - "Access-Control-Allow-Credentials", - "Access-Control-Allow-Headers", - "Access-Control-Allow-Methods", - "Access-Control-Allow-Origin", - "Access-Control-Expose-Headers", - "Access-Control-Max-Age", - "Access-Control-Request-Headers", - "Access-Control-Request-Method", - "Origin", - "Accept", - "Accept-Encoding", - "Cache-Control", - "Connection", - "Content-Length", - "Content-Type", - "Date", - "ETag", - "Expires", - "If-Match", - "If-Modified-Since", - "If-None-Match", - "If-Unmodified-Since", - "Last-Modified", - "Pragma", - "Request-Id", - "Retry-After", - "Server", - "Transfer-Encoding", - "User-Agent", - "WWW-Authenticate", -]; -const defaultAllowedQueryParameters = ["api-version"]; -class Sanitizer { - constructor({ allowedHeaderNames = [], allowedQueryParameters = [] } = {}) { - allowedHeaderNames = Array.isArray(allowedHeaderNames) - ? defaultAllowedHeaderNames.concat(allowedHeaderNames) - : defaultAllowedHeaderNames; - allowedQueryParameters = Array.isArray(allowedQueryParameters) - ? defaultAllowedQueryParameters.concat(allowedQueryParameters) - : defaultAllowedQueryParameters; - this.allowedHeaderNames = new Set(allowedHeaderNames.map((n) => n.toLowerCase())); - this.allowedQueryParameters = new Set(allowedQueryParameters.map((p) => p.toLowerCase())); - } - sanitize(obj) { - const seen = new Set(); - return JSON.stringify(obj, (key, value) => { - // Ensure Errors include their interesting non-enumerable members - if (value instanceof Error) { - return Object.assign(Object.assign({}, value), { name: value.name, message: value.message }); - } - if (key === "_headersMap") { - return this.sanitizeHeaders(value); - } - else if (key === "url") { - return this.sanitizeUrl(value); - } - else if (key === "query") { - return this.sanitizeQuery(value); - } - else if (key === "body") { - // Don't log the request body - return undefined; - } - else if (key === "response") { - // Don't log response again - return undefined; - } - else if (key === "operationSpec") { - // When using sendOperationRequest, the request carries a massive - // field with the autorest spec. No need to log it. 
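// [Editor's note] Returning undefined from a JSON.stringify replacer omits
// the property entirely; that is how the "body", "response", and
// "operationSpec" branches in this removed Sanitizer drop those values from
// the serialized log output.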
- return undefined; - } - else if (Array.isArray(value) || isObject(value)) { - if (seen.has(value)) { - return "[Circular]"; - } - seen.add(value); - } - return value; - }, 2); - } - sanitizeHeaders(value) { - return this.sanitizeObject(value, this.allowedHeaderNames, (v, k) => v[k].value); - } - sanitizeQuery(value) { - return this.sanitizeObject(value, this.allowedQueryParameters, (v, k) => v[k]); - } - sanitizeObject(value, allowedKeys, accessor) { - if (typeof value !== "object" || value === null) { - return value; - } - const sanitized = {}; - for (const k of Object.keys(value)) { - if (allowedKeys.has(k.toLowerCase())) { - sanitized[k] = accessor(value, k); - } - else { - sanitized[k] = RedactedString; - } - } - return sanitized; - } - sanitizeUrl(value) { - if (typeof value !== "string" || value === null) { - return value; - } - const urlBuilder = URLBuilder.parse(value); - const queryString = urlBuilder.getQuery(); - if (!queryString) { - return value; - } - const query = URLQuery.parse(queryString); - for (const k of query.keys()) { - if (!this.allowedQueryParameters.has(k.toLowerCase())) { - query.set(k, RedactedString); - } - } - urlBuilder.setQuery(query.toString()); - return urlBuilder.toString(); - } -} - -// Copyright (c) Microsoft Corporation. -const custom = util.inspect.custom; - -// Copyright (c) Microsoft Corporation. -const errorSanitizer = new Sanitizer(); -/** - * An error resulting from an HTTP request to a service endpoint. - */ -class RestError extends Error { - constructor(message, code, statusCode, request, response) { - super(message); - this.name = "RestError"; - this.code = code; - this.statusCode = statusCode; - this.request = request; - this.response = response; - Object.setPrototypeOf(this, RestError.prototype); - } - /** - * Logging method for util.inspect in Node - */ - [custom]() { - return `RestError: ${this.message} \n ${errorSanitizer.sanitize(this)}`; - } -} -/** - * A constant string to identify errors that may arise when making an HTTP request that indicates an issue with the transport layer (e.g. the hostname of the URL cannot be resolved via DNS.) - */ -RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; +// Licensed under the MIT license. /** - * A constant string to identify errors that may arise from parsing an incoming HTTP response. Usually indicates a malformed HTTP body, such as an encoded JSON payload that is incomplete. + * A helper to decide if a given argument satisfies the Pipeline contract + * @param pipeline - An argument that may be a Pipeline + * @returns true when the argument satisfies the Pipeline contract */ -RestError.PARSE_ERROR = "PARSE_ERROR"; - -// Copyright (c) Microsoft Corporation. -const logger = logger$1.createClientLogger("core-http"); - -// Copyright (c) Microsoft Corporation. -function getCachedAgent(isHttps, agentCache) { - return isHttps ? 
agentCache.httpsAgent : agentCache.httpAgent; -} -class ReportTransform extends stream.Transform { - constructor(progressCallback) { - super(); - this.progressCallback = progressCallback; - this.loadedBytes = 0; - } - _transform(chunk, _encoding, callback) { - this.push(chunk); - this.loadedBytes += chunk.length; - this.progressCallback({ loadedBytes: this.loadedBytes }); - callback(undefined); +function isPipelineLike(pipeline) { + if (!pipeline || typeof pipeline !== "object") { + return false; } -} -function isReadableStream(body) { - return body && typeof body.pipe === "function"; -} -function isStreamComplete(stream, aborter) { - return new Promise((resolve) => { - stream.once("close", () => { - aborter === null || aborter === void 0 ? void 0 : aborter.abort(); - resolve(); - }); - stream.once("end", resolve); - stream.once("error", resolve); - }); -} -/** - * Transforms a set of headers into the key/value pair defined by {@link HttpHeadersLike} - */ -function parseHeaders(headers) { - const httpHeaders = new HttpHeaders(); - headers.forEach((value, key) => { - httpHeaders.set(key, value); - }); - return httpHeaders; + const castPipeline = pipeline; + return (Array.isArray(castPipeline.factories) && + typeof castPipeline.options === "object" && + typeof castPipeline.toServiceClientOptions === "function"); } /** - * An HTTP client that uses `node-fetch`. + * A Pipeline class containing HTTP request policies. + * You can create a default Pipeline by calling {@link newPipeline}. + * Or you can create a Pipeline with your own policies by the constructor of Pipeline. + * + * Refer to {@link newPipeline} and provided policies before implementing your + * customized Pipeline. */ -class NodeFetchHttpClient { - constructor() { - // a mapping of proxy settings string `${host}:${port}:${username}:${password}` to agent - this.proxyAgentMap = new Map(); - this.keepAliveAgents = {}; - } - /** - * Provides minimum viable error handling and the logic that executes the abstract methods. - * @param httpRequest - Object representing the outgoing HTTP request. - * @returns An object representing the incoming HTTP response. 
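// ReportTransform above is a small Transform stream that counts bytes in
// flight and reports a running total to a progress callback. A self-contained
// sketch of the same pattern using only Node's built-in stream module:
const { Transform } = require("stream");
class ByteCountingStream extends Transform {
    constructor(onProgress) {
        super();
        this.onProgress = onProgress;
        this.loadedBytes = 0;
    }
    _transform(chunk, _encoding, callback) {
        this.loadedBytes += chunk.length;
        this.onProgress({ loadedBytes: this.loadedBytes });
        callback(undefined, chunk); // pass the chunk through unchanged
    }
}
// Usage: source.pipe(new ByteCountingStream(console.log)).pipe(destination);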
- */ - async sendRequest(httpRequest) { - var _a; - if (!httpRequest && typeof httpRequest !== "object") { - throw new Error("'httpRequest' (WebResourceLike) cannot be null or undefined and must be of type object."); - } - const abortController$1 = new abortController.AbortController(); - let abortListener; - if (httpRequest.abortSignal) { - if (httpRequest.abortSignal.aborted) { - throw new abortController.AbortError("The operation was aborted."); - } - abortListener = (event) => { - if (event.type === "abort") { - abortController$1.abort(); - } - }; - httpRequest.abortSignal.addEventListener("abort", abortListener); - } - if (httpRequest.timeout) { - setTimeout(() => { - abortController$1.abort(); - }, httpRequest.timeout); - } - if (httpRequest.formData) { - const formData = httpRequest.formData; - const requestForm = new FormData__default["default"](); - const appendFormValue = (key, value) => { - // value function probably returns a stream so we can provide a fresh stream on each retry - if (typeof value === "function") { - value = value(); - } - if (value && - Object.prototype.hasOwnProperty.call(value, "value") && - Object.prototype.hasOwnProperty.call(value, "options")) { - requestForm.append(key, value.value, value.options); - } - else { - requestForm.append(key, value); - } - }; - for (const formKey of Object.keys(formData)) { - const formValue = formData[formKey]; - if (Array.isArray(formValue)) { - for (let j = 0; j < formValue.length; j++) { - appendFormValue(formKey, formValue[j]); - } - } - else { - appendFormValue(formKey, formValue); - } - } - httpRequest.body = requestForm; - httpRequest.formData = undefined; - const contentType = httpRequest.headers.get("Content-Type"); - if (contentType && contentType.indexOf("multipart/form-data") !== -1) { - if (typeof requestForm.getBoundary === "function") { - httpRequest.headers.set("Content-Type", `multipart/form-data; boundary=${requestForm.getBoundary()}`); - } - else { - // browser will automatically apply a suitable content-type header - httpRequest.headers.remove("Content-Type"); - } - } - } - let body = httpRequest.body - ? typeof httpRequest.body === "function" - ? httpRequest.body() - : httpRequest.body - : undefined; - if (httpRequest.onUploadProgress && httpRequest.body) { - const onUploadProgress = httpRequest.onUploadProgress; - const uploadReportStream = new ReportTransform(onUploadProgress); - if (isReadableStream(body)) { - body.pipe(uploadReportStream); - } - else { - uploadReportStream.end(body); - } - body = uploadReportStream; - } - const platformSpecificRequestInit = await this.prepareRequest(httpRequest); - const requestInit = Object.assign({ body: body, headers: httpRequest.headers.rawHeaders(), method: httpRequest.method, - // the types for RequestInit are from the browser, which expects AbortSignal to - // have `reason` and `throwIfAborted`, but these don't exist on our polyfill - // for Node. - signal: abortController$1.signal, redirect: "manual" }, platformSpecificRequestInit); - let operationResponse; - try { - const response = await this.fetch(httpRequest.url, requestInit); - const headers = parseHeaders(response.headers); - const streaming = ((_a = httpRequest.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(response.status)) || - httpRequest.streamResponseBody; - operationResponse = { - headers: headers, - request: httpRequest, - status: response.status, - readableStreamBody: streaming - ? response.body - : undefined, - bodyAsText: !streaming ? 
await response.text() : undefined, - }; - const onDownloadProgress = httpRequest.onDownloadProgress; - if (onDownloadProgress) { - const responseBody = response.body || undefined; - if (isReadableStream(responseBody)) { - const downloadReportStream = new ReportTransform(onDownloadProgress); - responseBody.pipe(downloadReportStream); - operationResponse.readableStreamBody = downloadReportStream; - } - else { - const length = parseInt(headers.get("Content-Length")) || undefined; - if (length) { - // Calling callback for non-stream response for consistency with browser - onDownloadProgress({ loadedBytes: length }); - } - } - } - await this.processRequest(operationResponse); - return operationResponse; - } - catch (error) { - const fetchError = error; - if (fetchError.code === "ENOTFOUND") { - throw new RestError(fetchError.message, RestError.REQUEST_SEND_ERROR, undefined, httpRequest); - } - else if (fetchError.type === "aborted") { - throw new abortController.AbortError("The operation was aborted."); - } - throw fetchError; - } - finally { - // clean up event listener - if (httpRequest.abortSignal && abortListener) { - let uploadStreamDone = Promise.resolve(); - if (isReadableStream(body)) { - uploadStreamDone = isStreamComplete(body); - } - let downloadStreamDone = Promise.resolve(); - if (isReadableStream(operationResponse === null || operationResponse === void 0 ? void 0 : operationResponse.readableStreamBody)) { - downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody, abortController$1); - } - Promise.all([uploadStreamDone, downloadStreamDone]) - .then(() => { - var _a; - (_a = httpRequest.abortSignal) === null || _a === void 0 ? void 0 : _a.removeEventListener("abort", abortListener); - return; - }) - .catch((e) => { - logger.warning("Error when cleaning up abortListener on httpRequest", e); - }); - } - } - } - getOrCreateAgent(httpRequest) { - var _a; - const isHttps = isUrlHttps(httpRequest.url); - // At the moment, proxy settings and keepAlive are mutually - // exclusive because the 'tunnel' library currently lacks the - // ability to create a proxy with keepAlive turned on. - if (httpRequest.proxySettings) { - const { host, port, username, password } = httpRequest.proxySettings; - const key = `${host}:${port}:${username}:${password}`; - const proxyAgents = (_a = this.proxyAgentMap.get(key)) !== null && _a !== void 0 ? _a : {}; - let agent = getCachedAgent(isHttps, proxyAgents); - if (agent) { - return agent; - } - const tunnel = createProxyAgent(httpRequest.url, httpRequest.proxySettings, httpRequest.headers); - agent = tunnel.agent; - if (tunnel.isHttps) { - proxyAgents.httpsAgent = tunnel.agent; - } - else { - proxyAgents.httpAgent = tunnel.agent; - } - this.proxyAgentMap.set(key, proxyAgents); - return agent; - } - else if (httpRequest.keepAlive) { - let agent = getCachedAgent(isHttps, this.keepAliveAgents); - if (agent) { - return agent; - } - const agentOptions = { - keepAlive: httpRequest.keepAlive, - }; - if (isHttps) { - agent = this.keepAliveAgents.httpsAgent = new https__namespace.Agent(agentOptions); - } - else { - agent = this.keepAliveAgents.httpAgent = new http__namespace.Agent(agentOptions); - } - return agent; - } - else { - return isHttps ? https__namespace.globalAgent : http__namespace.globalAgent; - } - } - /** - * Uses `node-fetch` to perform the request. 
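// A sketch of the agent-caching rule in getOrCreateAgent above, minus the
// proxy branch: keep-alive requests share one lazily created Agent per
// scheme, and everything else falls back to the global agent. Uses only
// Node's built-in http/https modules.
const http = require("http");
const https = require("https");
const keepAliveAgents = {};
function pickAgent(isHttps, keepAlive) {
    if (!keepAlive) {
        return isHttps ? https.globalAgent : http.globalAgent;
    }
    const key = isHttps ? "httpsAgent" : "httpAgent";
    if (!keepAliveAgents[key]) {
        const Agent = isHttps ? https.Agent : http.Agent;
        keepAliveAgents[key] = new Agent({ keepAlive: true });
    }
    return keepAliveAgents[key];
}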
- */ - // eslint-disable-next-line @azure/azure-sdk/ts-apisurface-standardized-verbs - async fetch(input, init) { - return node_fetch__default["default"](input, init); - } +class Pipeline { /** - * Prepares a request based on the provided web resource. + * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface. + * + * @param factories - + * @param options - */ - async prepareRequest(httpRequest) { - const requestInit = {}; - // Set the http(s) agent - requestInit.agent = this.getOrCreateAgent(httpRequest); - requestInit.compress = httpRequest.decompressResponse; - return requestInit; + constructor(factories, options = {}) { + this.factories = factories; + this.options = options; } /** - * Process an HTTP response. + * Transfer Pipeline object to ServiceClientOptions object which is required by + * ServiceClient constructor. + * + * @returns The ServiceClientOptions object from this Pipeline. */ - async processRequest(_operationResponse) { - /* no_op */ + toServiceClientOptions() { + return { + httpClient: this.options.httpClient, + requestPolicyFactories: this.factories, + }; } } - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * The different levels of logs that can be used with the HttpPipelineLogger. - */ -exports.HttpPipelineLogLevel = void 0; -(function (HttpPipelineLogLevel) { - /** - * A log level that indicates that no logs will be logged. - */ - HttpPipelineLogLevel[HttpPipelineLogLevel["OFF"] = 0] = "OFF"; - /** - * An error log. - */ - HttpPipelineLogLevel[HttpPipelineLogLevel["ERROR"] = 1] = "ERROR"; - /** - * A warning log. - */ - HttpPipelineLogLevel[HttpPipelineLogLevel["WARNING"] = 2] = "WARNING"; - /** - * An information log. - */ - HttpPipelineLogLevel[HttpPipelineLogLevel["INFO"] = 3] = "INFO"; -})(exports.HttpPipelineLogLevel || (exports.HttpPipelineLogLevel = {})); - -// Copyright (c) Microsoft Corporation. /** - * Converts an OperationOptions to a RequestOptionsBase + * Creates a new Pipeline object with Credential provided. * - * @param opts - OperationOptions object to convert to RequestOptionsBase + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param pipelineOptions - Optional. Options. + * @returns A new Pipeline object. */ -function operationOptionsToRequestOptionsBase(opts) { - const { requestOptions, tracingOptions } = opts, additionalOptions = tslib.__rest(opts, ["requestOptions", "tracingOptions"]); - let result = additionalOptions; - if (requestOptions) { - result = Object.assign(Object.assign({}, result), requestOptions); +function newPipeline(credential, pipelineOptions = {}) { + if (!credential) { + credential = new AnonymousCredential(); } - if (tracingOptions) { - result.tracingContext = tracingOptions.tracingContext; - // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier. - result.spanOptions = tracingOptions === null || tracingOptions === void 0 ? 
void 0 : tracingOptions.spanOptions; + const pipeline = new Pipeline([], pipelineOptions); + pipeline._credential = credential; + return pipeline; +} +function processDownlevelPipeline(pipeline) { + const knownFactoryFunctions = [ + isAnonymousCredential, + isStorageSharedKeyCredential, + isCoreHttpBearerTokenFactory, + isStorageBrowserPolicyFactory, + isStorageRetryPolicyFactory, + isStorageTelemetryPolicyFactory, + isCoreHttpPolicyFactory, + ]; + if (pipeline.factories.length) { + const novelFactories = pipeline.factories.filter((factory) => { + return !knownFactoryFunctions.some((knownFactory) => knownFactory(factory)); + }); + if (novelFactories.length) { + const hasInjector = novelFactories.some((factory) => isInjectorPolicyFactory(factory)); + // if there are any left over, wrap in a requestPolicyFactoryPolicy + return { + wrappedPolicies: coreHttpCompat.createRequestPolicyFactoryPolicy(novelFactories), + afterRetry: hasInjector, + }; + } } - return result; + return undefined; } - -// Copyright (c) Microsoft Corporation. -/** - * The base class from which all request policies derive. - */ -class BaseRequestPolicy { - /** - * The main method to implement that manipulates a request/response. - */ - constructor( - /** - * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline. - */ - _nextPolicy, - /** - * The options that can be passed to a given request policy. - */ - _options) { - this._nextPolicy = _nextPolicy; - this._options = _options; - } - /** - * Get whether or not a log with the provided log level should be logged. - * @param logLevel - The log level of the log that will be logged. - * @returns Whether or not a log with the provided log level should be logged. - */ - shouldLog(logLevel) { - return this._options.shouldLog(logLevel); - } - /** - * Attempt to log the provided message to the provided logger. If no logger was provided or if - * the log level does not meat the logger's threshold, then nothing will be logged. - * @param logLevel - The log level of this log. - * @param message - The message of this log. - */ - log(logLevel, message) { - this._options.log(logLevel, message); +function getCoreClientOptions(pipeline) { + var _a; + const _b = pipeline.options, { httpClient: v1Client } = _b, restOptions = tslib.__rest(_b, ["httpClient"]); + let httpClient = pipeline._coreHttpClient; + if (!httpClient) { + httpClient = v1Client ? coreHttpCompat.convertHttpClient(v1Client) : getCachedDefaultHttpClient(); + pipeline._coreHttpClient = httpClient; + } + let corePipeline = pipeline._corePipeline; + if (!corePipeline) { + const packageDetails = `azsdk-js-azure-storage-blob/${SDK_VERSION}`; + const userAgentPrefix = restOptions.userAgentOptions && restOptions.userAgentOptions.userAgentPrefix + ? 
`${restOptions.userAgentOptions.userAgentPrefix} ${packageDetails}` + : `${packageDetails}`; + corePipeline = coreClient.createClientPipeline(Object.assign(Object.assign({}, restOptions), { loggingOptions: { + additionalAllowedHeaderNames: StorageBlobLoggingAllowedHeaderNames, + additionalAllowedQueryParameters: StorageBlobLoggingAllowedQueryParameters, + logger: logger.info, + }, userAgentOptions: { + userAgentPrefix, + }, serializationOptions: { + stringifyXML: coreXml.stringifyXML, + serializerOptions: { + xml: { + // Use customized XML char key of "#" so we can deserialize metadata + // with "_" key + xmlCharKey: "#", + }, + }, + }, deserializationOptions: { + parseXML: coreXml.parseXML, + serializerOptions: { + xml: { + // Use customized XML char key of "#" so we can deserialize metadata + // with "_" key + xmlCharKey: "#", + }, + }, + } })); + corePipeline.removePolicy({ phase: "Retry" }); + corePipeline.removePolicy({ name: coreRestPipeline.decompressResponsePolicyName }); + corePipeline.addPolicy(storageRetryPolicy(restOptions.retryOptions), { phase: "Retry" }); + corePipeline.addPolicy(storageBrowserPolicy()); + const downlevelResults = processDownlevelPipeline(pipeline); + if (downlevelResults) { + corePipeline.addPolicy(downlevelResults.wrappedPolicies, downlevelResults.afterRetry ? { afterPhase: "Retry" } : undefined); + } + const credential = getCredentialFromPipeline(pipeline); + if (coreAuth.isTokenCredential(credential)) { + corePipeline.addPolicy(coreRestPipeline.bearerTokenAuthenticationPolicy({ + credential, + scopes: (_a = restOptions.audience) !== null && _a !== void 0 ? _a : StorageOAuthScopes, + challengeCallbacks: { authorizeRequestOnChallenge: coreClient.authorizeRequestOnTenantChallenge }, + }), { phase: "Sign" }); + } + else if (credential instanceof StorageSharedKeyCredential) { + corePipeline.addPolicy(storageSharedKeyCredentialPolicy({ + accountName: credential.accountName, + accountKey: credential.accountKey, + }), { phase: "Sign" }); + } + pipeline._corePipeline = corePipeline; + } + return Object.assign(Object.assign({}, restOptions), { allowInsecureConnection: true, httpClient, pipeline: corePipeline }); +} +function getCredentialFromPipeline(pipeline) { + // see if we squirreled one away on the type itself + if (pipeline._credential) { + return pipeline._credential; + } + // if it came from another package, loop over the factories and look for one like before + let credential = new AnonymousCredential(); + for (const factory of pipeline.factories) { + if (coreAuth.isTokenCredential(factory.credential)) { + // Only works if the factory has been attached a "credential" property. + // We do that in newPipeline() when using TokenCredential. + credential = factory.credential; + } + else if (isStorageSharedKeyCredential(factory)) { + return factory; + } + } + return credential; +} +function isStorageSharedKeyCredential(factory) { + if (factory instanceof StorageSharedKeyCredential) { + return true; } + return factory.constructor.name === "StorageSharedKeyCredential"; } -/** - * Optional properties that can be used when creating a RequestPolicy. - */ -class RequestPolicyOptions { - constructor(_logger) { - this._logger = _logger; +function isAnonymousCredential(factory) { + if (factory instanceof AnonymousCredential) { + return true; } - /** - * Get whether or not a log with the provided log level should be logged. - * @param logLevel - The log level of the log that will be logged. - * @returns Whether or not a log with the provided log level should be logged. 
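// isStorageSharedKeyCredential and the other factory checks above fall back
// on constructor.name because instanceof fails when two copies of the same
// class end up in a bundle (for example, one from this package and one from a
// consumer's duplicated dependency). The generic shape of that check, as a
// sketch:
function isInstanceAcrossBundles(value, ctor) {
    if (value instanceof ctor) {
        return true;
    }
    // Tolerate an identically named class from a second copy of the package.
    return value != null && value.constructor && value.constructor.name === ctor.name;
}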
- */ - shouldLog(logLevel) { - return (!!this._logger && - logLevel !== exports.HttpPipelineLogLevel.OFF && - logLevel <= this._logger.minimumLogLevel); + return factory.constructor.name === "AnonymousCredential"; +} +function isCoreHttpBearerTokenFactory(factory) { + return coreAuth.isTokenCredential(factory.credential); +} +function isStorageBrowserPolicyFactory(factory) { + if (factory instanceof StorageBrowserPolicyFactory) { + return true; } - /** - * Attempt to log the provided message to the provided logger. If no logger was provided or if - * the log level does not meet the logger's threshold, then nothing will be logged. - * @param logLevel - The log level of this log. - * @param message - The message of this log. - */ - log(logLevel, message) { - if (this._logger && this.shouldLog(logLevel)) { - this._logger.log(logLevel, message); - } + return factory.constructor.name === "StorageBrowserPolicyFactory"; +} +function isStorageRetryPolicyFactory(factory) { + if (factory instanceof StorageRetryPolicyFactory) { + return true; } + return factory.constructor.name === "StorageRetryPolicyFactory"; } - -// Copyright (c) Microsoft Corporation. -// Note: The reason we re-define all of the xml2js default settings (version 2.0) here is because the default settings object exposed -// by the xm2js library is mutable. See https://github.com/Leonidas-from-XIV/node-xml2js/issues/536 -// By creating a new copy of the settings each time we instantiate the parser, -// we are safeguarding against the possibility of the default settings being mutated elsewhere unintentionally. -const xml2jsDefaultOptionsV2 = { - explicitCharkey: false, - trim: false, - normalize: false, - normalizeTags: false, - attrkey: XML_ATTRKEY, - explicitArray: true, - ignoreAttrs: false, - mergeAttrs: false, - explicitRoot: true, - validator: undefined, - xmlns: false, - explicitChildren: false, - preserveChildrenOrder: false, - childkey: "$$", - charsAsChildren: false, - includeWhiteChars: false, - async: false, - strict: true, - attrNameProcessors: undefined, - attrValueProcessors: undefined, - tagNameProcessors: undefined, - valueProcessors: undefined, - rootName: "root", - xmldec: { - version: "1.0", - encoding: "UTF-8", - standalone: true, - }, - doctype: undefined, - renderOpts: { - pretty: true, - indent: " ", - newline: "\n", - }, - headless: false, - chunkSize: 10000, - emptyTag: "", - cdata: false, -}; -// The xml2js settings for general XML parsing operations. -const xml2jsParserSettings = Object.assign({}, xml2jsDefaultOptionsV2); -xml2jsParserSettings.explicitArray = false; -// The xml2js settings for general XML building operations. -const xml2jsBuilderSettings = Object.assign({}, xml2jsDefaultOptionsV2); -xml2jsBuilderSettings.explicitArray = false; -xml2jsBuilderSettings.renderOpts = { - pretty: false, -}; -/** - * Converts given JSON object to XML string - * @param obj - JSON object to be converted into XML string - * @param opts - Options that govern the parsing of given JSON object - */ -function stringifyXML(obj, opts = {}) { - var _a; - xml2jsBuilderSettings.rootName = opts.rootName; - xml2jsBuilderSettings.charkey = (_a = opts.xmlCharKey) !== null && _a !== void 0 ? 
_a : XML_CHARKEY; - const builder = new xml2js__namespace.Builder(xml2jsBuilderSettings); - return builder.buildObject(obj); +function isStorageTelemetryPolicyFactory(factory) { + return factory.constructor.name === "TelemetryPolicyFactory"; } -/** - * Converts given XML string into JSON - * @param str - String containing the XML content to be parsed into JSON - * @param opts - Options that govern the parsing of given xml string - */ -function parseXML(str, opts = {}) { - var _a; - xml2jsParserSettings.explicitRoot = !!opts.includeRoot; - xml2jsParserSettings.charkey = (_a = opts.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY; - const xmlParser = new xml2js__namespace.Parser(xml2jsParserSettings); - return new Promise((resolve, reject) => { - if (!str) { - reject(new Error("Document is empty")); - } - else { - xmlParser.parseString(str, (err, res) => { - if (err) { - reject(err); - } - else { - resolve(res); - } - }); - } +function isInjectorPolicyFactory(factory) { + return factory.constructor.name === "InjectorPolicyFactory"; +} +function isCoreHttpPolicyFactory(factory) { + const knownPolicies = [ + "GenerateClientRequestIdPolicy", + "TracingPolicy", + "LogPolicy", + "ProxyPolicy", + "DisableResponseDecompressionPolicy", + "KeepAlivePolicy", + "DeserializationPolicy", + ]; + const mockHttpClient = { + sendRequest: async (request) => { + return { + request, + headers: request.headers.clone(), + status: 500, + }; + }, + }; + const mockRequestPolicyOptions = { + log(_logLevel, _message) { + /* do nothing */ + }, + shouldLog(_logLevel) { + return false; + }, + }; + const policyInstance = factory.create(mockHttpClient, mockRequestPolicyOptions); + const policyName = policyInstance.constructor.name; + // bundlers sometimes add a custom suffix to the class name to make it unique + return knownPolicies.some((knownPolicyName) => { + return policyName.startsWith(knownPolicyName); }); } -// Copyright (c) Microsoft Corporation. -/** - * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they - * pass through the HTTP pipeline. +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
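// parseXML above adapts xml2js's callback API to a Promise by hand. The same
// shape via Node's util.promisify, as a sketch (assumes the xml2js package is
// available, as it is in this bundle; options simplified):
const { promisify } = require("util");
const xml2js = require("xml2js");
async function parseXmlString(xml) {
    if (!xml) {
        throw new Error("Document is empty");
    }
    const parser = new xml2js.Parser({ explicitArray: false });
    return promisify(parser.parseString.bind(parser))(xml);
}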
*/ -function deserializationPolicy(deserializationContentTypes, parsingOptions) { - return { - create: (nextPolicy, options) => { - return new DeserializationPolicy(nextPolicy, options, deserializationContentTypes, parsingOptions); +const BlobServiceProperties = { + serializedName: "BlobServiceProperties", + xmlName: "StorageServiceProperties", + type: { + name: "Composite", + className: "BlobServiceProperties", + modelProperties: { + blobAnalyticsLogging: { + serializedName: "Logging", + xmlName: "Logging", + type: { + name: "Composite", + className: "Logging", + }, + }, + hourMetrics: { + serializedName: "HourMetrics", + xmlName: "HourMetrics", + type: { + name: "Composite", + className: "Metrics", + }, + }, + minuteMetrics: { + serializedName: "MinuteMetrics", + xmlName: "MinuteMetrics", + type: { + name: "Composite", + className: "Metrics", + }, + }, + cors: { + serializedName: "Cors", + xmlName: "Cors", + xmlIsWrapped: true, + xmlElementName: "CorsRule", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "CorsRule", + }, + }, + }, + }, + defaultServiceVersion: { + serializedName: "DefaultServiceVersion", + xmlName: "DefaultServiceVersion", + type: { + name: "String", + }, + }, + deleteRetentionPolicy: { + serializedName: "DeleteRetentionPolicy", + xmlName: "DeleteRetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy", + }, + }, + staticWebsite: { + serializedName: "StaticWebsite", + xmlName: "StaticWebsite", + type: { + name: "Composite", + className: "StaticWebsite", + }, + }, }, - }; -} -const defaultJsonContentTypes = ["application/json", "text/json"]; -const defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; -const DefaultDeserializationOptions = { - expectedContentTypes: { - json: defaultJsonContentTypes, - xml: defaultXmlContentTypes, }, }; -/** - * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the - * HTTP pipeline. - */ -class DeserializationPolicy extends BaseRequestPolicy { - constructor(nextPolicy, requestPolicyOptions, deserializationContentTypes, parsingOptions = {}) { - var _a; - super(nextPolicy, requestPolicyOptions); - this.jsonContentTypes = - (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes; - this.xmlContentTypes = - (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes; - this.xmlCharKey = (_a = parsingOptions.xmlCharKey) !== null && _a !== void 0 ? 
_a : XML_CHARKEY; - } - async sendRequest(request) { - return this._nextPolicy.sendRequest(request).then((response) => deserializeResponseBody(this.jsonContentTypes, this.xmlContentTypes, response, { - xmlCharKey: this.xmlCharKey, - })); - } -} -function getOperationResponse(parsedResponse) { - let result; - const request = parsedResponse.request; - const operationSpec = request.operationSpec; - if (operationSpec) { - const operationResponseGetter = request.operationResponseGetter; - if (!operationResponseGetter) { - result = operationSpec.responses[parsedResponse.status]; - } - else { - result = operationResponseGetter(operationSpec, parsedResponse); - } - } - return result; -} -function shouldDeserializeResponse(parsedResponse) { - const shouldDeserialize = parsedResponse.request.shouldDeserialize; - let result; - if (shouldDeserialize === undefined) { - result = true; - } - else if (typeof shouldDeserialize === "boolean") { - result = shouldDeserialize; - } - else { - result = shouldDeserialize(parsedResponse); - } - return result; -} -/** - * Given a particular set of content types to parse as either JSON or XML, consumes the HTTP response to produce the result object defined by the request's {@link OperationSpec}. - * @param jsonContentTypes - Response content types to parse the body as JSON. - * @param xmlContentTypes - Response content types to parse the body as XML. - * @param response - HTTP Response from the pipeline. - * @param options - Options to the serializer, mostly for configuring the XML parser if needed. - * @returns A parsed {@link HttpOperationResponse} object that can be returned by the {@link ServiceClient}. - */ -function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, options = {}) { - var _a, _b, _c; - const updatedOptions = { - rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", - includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, - xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, - }; - return parse(jsonContentTypes, xmlContentTypes, response, updatedOptions).then((parsedResponse) => { - if (!shouldDeserializeResponse(parsedResponse)) { - return parsedResponse; - } - const operationSpec = parsedResponse.request.operationSpec; - if (!operationSpec || !operationSpec.responses) { - return parsedResponse; - } - const responseSpec = getOperationResponse(parsedResponse); - const { error, shouldReturnResponse } = handleErrorResponse(parsedResponse, operationSpec, responseSpec); - if (error) { - throw error; - } - else if (shouldReturnResponse) { - return parsedResponse; - } - // An operation response spec does exist for current status code, so - // use it to deserialize the response. - if (responseSpec) { - if (responseSpec.bodyMapper) { - let valueToDeserialize = parsedResponse.parsedBody; - if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) { - valueToDeserialize = - typeof valueToDeserialize === "object" - ? 
valueToDeserialize[responseSpec.bodyMapper.xmlElementName] - : []; - } - try { - parsedResponse.parsedBody = operationSpec.serializer.deserialize(responseSpec.bodyMapper, valueToDeserialize, "operationRes.parsedBody", options); - } - catch (innerError) { - const restError = new RestError(`Error ${innerError} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`, undefined, parsedResponse.status, parsedResponse.request, parsedResponse); - throw restError; - } - } - else if (operationSpec.httpMethod === "HEAD") { - // head methods never have a body, but we return a boolean to indicate presence/absence of the resource - parsedResponse.parsedBody = response.status >= 200 && response.status < 300; - } - if (responseSpec.headersMapper) { - parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(responseSpec.headersMapper, parsedResponse.headers.toJson(), "operationRes.parsedHeaders", options); - } - } - return parsedResponse; - }); -} -function isOperationSpecEmpty(operationSpec) { - const expectedStatusCodes = Object.keys(operationSpec.responses); - return (expectedStatusCodes.length === 0 || - (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === "default")); -} -function handleErrorResponse(parsedResponse, operationSpec, responseSpec) { - var _a; - const isSuccessByStatus = 200 <= parsedResponse.status && parsedResponse.status < 300; - const isExpectedStatusCode = isOperationSpecEmpty(operationSpec) - ? isSuccessByStatus - : !!responseSpec; - if (isExpectedStatusCode) { - if (responseSpec) { - if (!responseSpec.isError) { - return { error: null, shouldReturnResponse: false }; - } - } - else { - return { error: null, shouldReturnResponse: false }; - } - } - const errorResponseSpec = responseSpec !== null && responseSpec !== void 0 ? responseSpec : operationSpec.responses.default; - const streaming = ((_a = parsedResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(parsedResponse.status)) || - parsedResponse.request.streamResponseBody; - const initialErrorMessage = streaming - ? `Unexpected status code: ${parsedResponse.status}` - : parsedResponse.bodyAsText; - const error = new RestError(initialErrorMessage, undefined, parsedResponse.status, parsedResponse.request, parsedResponse); - // If the item failed but there's no error spec or default spec to deserialize the error, - // we should fail so we just throw the parsed response - if (!errorResponseSpec) { - throw error; - } - const defaultBodyMapper = errorResponseSpec.bodyMapper; - const defaultHeadersMapper = errorResponseSpec.headersMapper; - try { - // If error response has a body, try to deserialize it using default body mapper. - // Then try to extract error code & message from it - if (parsedResponse.parsedBody) { - const parsedBody = parsedResponse.parsedBody; - let parsedError; - if (defaultBodyMapper) { - let valueToDeserialize = parsedBody; - if (operationSpec.isXML && defaultBodyMapper.type.name === MapperType.Sequence) { - valueToDeserialize = - typeof parsedBody === "object" ? 
parsedBody[defaultBodyMapper.xmlElementName] : []; - } - parsedError = operationSpec.serializer.deserialize(defaultBodyMapper, valueToDeserialize, "error.response.parsedBody"); - } - const internalError = parsedBody.error || parsedError || parsedBody; - error.code = internalError.code; - if (internalError.message) { - error.message = internalError.message; - } - if (defaultBodyMapper) { - error.response.parsedBody = parsedError; - } - } - // If error response has headers, try to deserialize it using default header mapper - if (parsedResponse.headers && defaultHeadersMapper) { - error.response.parsedHeaders = operationSpec.serializer.deserialize(defaultHeadersMapper, parsedResponse.headers.toJson(), "operationRes.parsedHeaders"); - } - } - catch (defaultError) { - error.message = `Error "${defaultError.message}" occurred in deserializing the responseBody - "${parsedResponse.bodyAsText}" for the default response.`; - } - return { error, shouldReturnResponse: false }; -} -function parse(jsonContentTypes, xmlContentTypes, operationResponse, opts) { - var _a; - const errorHandler = (err) => { - const msg = `Error "${err}" occurred while parsing the response body - ${operationResponse.bodyAsText}.`; - const errCode = err.code || RestError.PARSE_ERROR; - const e = new RestError(msg, errCode, operationResponse.status, operationResponse.request, operationResponse); - return Promise.reject(e); - }; - const streaming = ((_a = operationResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(operationResponse.status)) || - operationResponse.request.streamResponseBody; - if (!streaming && operationResponse.bodyAsText) { - const text = operationResponse.bodyAsText; - const contentType = operationResponse.headers.get("Content-Type") || ""; - const contentComponents = !contentType - ? [] - : contentType.split(";").map((component) => component.toLowerCase()); - if (contentComponents.length === 0 || - contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)) { - return new Promise((resolve) => { - operationResponse.parsedBody = JSON.parse(text); - resolve(operationResponse); - }).catch(errorHandler); - } - else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) { - return parseXML(text, opts) - .then((body) => { - operationResponse.parsedBody = body; - return operationResponse; - }) - .catch(errorHandler); - } - } - return Promise.resolve(operationResponse); -} - -// Copyright (c) Microsoft Corporation. -/** - * By default, HTTP connections are maintained for future requests. - */ -const DefaultKeepAliveOptions = { - enable: true, -}; -/** - * Creates a policy that controls whether HTTP connections are maintained on future requests. - * @param keepAliveOptions - Keep alive options. By default, HTTP connections are maintained for future requests. 
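// The branch at the end of parse() above dispatches on Content-Type: the
// header is split on ";", each component is lowercased, and the components
// are matched against the JSON and XML content-type lists. In isolation, as a
// simplified sketch that also trims whitespace:
function classifyBody(contentType, jsonTypes, xmlTypes) {
    const components = (contentType || "")
        .split(";")
        .map((component) => component.trim().toLowerCase())
        .filter((component) => component.length > 0);
    if (components.length === 0 || components.some((c) => jsonTypes.includes(c))) {
        return "json"; // attempt JSON.parse
    }
    if (components.some((c) => xmlTypes.includes(c))) {
        return "xml"; // attempt parseXML
    }
    return "unparsed";
}
// classifyBody("application/json; charset=utf-8",
//     ["application/json", "text/json"],
//     ["application/xml", "application/atom+xml"]) -> "json"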
- * @returns An instance of the {@link KeepAlivePolicy} - */ -function keepAlivePolicy(keepAliveOptions) { - return { - create: (nextPolicy, options) => { - return new KeepAlivePolicy(nextPolicy, options, keepAliveOptions || DefaultKeepAliveOptions); +const Logging = { + serializedName: "Logging", + type: { + name: "Composite", + className: "Logging", + modelProperties: { + version: { + serializedName: "Version", + required: true, + xmlName: "Version", + type: { + name: "String", + }, + }, + deleteProperty: { + serializedName: "Delete", + required: true, + xmlName: "Delete", + type: { + name: "Boolean", + }, + }, + read: { + serializedName: "Read", + required: true, + xmlName: "Read", + type: { + name: "Boolean", + }, + }, + write: { + serializedName: "Write", + required: true, + xmlName: "Write", + type: { + name: "Boolean", + }, + }, + retentionPolicy: { + serializedName: "RetentionPolicy", + xmlName: "RetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy", + }, + }, }, - }; -} -/** - * KeepAlivePolicy is a policy used to control keep alive settings for every request. - */ -class KeepAlivePolicy extends BaseRequestPolicy { - /** - * Creates an instance of KeepAlivePolicy. - * - * @param nextPolicy - - * @param options - - * @param keepAliveOptions - - */ - constructor(nextPolicy, options, keepAliveOptions) { - super(nextPolicy, options); - this.keepAliveOptions = keepAliveOptions; - } - /** - * Sends out request. - * - * @param request - - * @returns - */ - async sendRequest(request) { - request.keepAlive = this.keepAliveOptions.enable; - return this._nextPolicy.sendRequest(request); - } -} - -// Copyright (c) Microsoft Corporation. -/** - * Methods that are allowed to follow redirects 301 and 302 - */ -const allowedRedirect = ["GET", "HEAD"]; -const DefaultRedirectOptions = { - handleRedirects: true, - maxRetries: 20, + }, }; -/** - * Creates a redirect policy, which sends a repeats the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. - * @param maximumRetries - Maximum number of redirects to follow. - * @returns An instance of the {@link RedirectPolicy} - */ -function redirectPolicy(maximumRetries = 20) { - return { - create: (nextPolicy, options) => { - return new RedirectPolicy(nextPolicy, options, maximumRetries); +const RetentionPolicy = { + serializedName: "RetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy", + modelProperties: { + enabled: { + serializedName: "Enabled", + required: true, + xmlName: "Enabled", + type: { + name: "Boolean", + }, + }, + days: { + constraints: { + InclusiveMinimum: 1, + }, + serializedName: "Days", + xmlName: "Days", + type: { + name: "Number", + }, + }, }, - }; -} -/** - * Resends the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. 
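// Every policy in this file is produced by a factory of the same shape: an
// object whose create(nextPolicy, options) returns something with a
// sendRequest method, as keepAlivePolicy above illustrates. The smallest
// possible member of that family, a do-nothing pass-through policy, as a
// sketch:
function passthroughPolicy() {
    return {
        create: (nextPolicy, _options) => ({
            sendRequest: (request) => nextPolicy.sendRequest(request),
        }),
    };
}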
- */ -class RedirectPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, maxRetries = 20) { - super(nextPolicy, options); - this.maxRetries = maxRetries; - } - sendRequest(request) { - return this._nextPolicy - .sendRequest(request) - .then((response) => handleRedirect(this, response, 0)); - } -} -function handleRedirect(policy, response, currentRetries) { - const { request, status } = response; - const locationHeader = response.headers.get("location"); - if (locationHeader && - (status === 300 || - (status === 301 && allowedRedirect.includes(request.method)) || - (status === 302 && allowedRedirect.includes(request.method)) || - (status === 303 && request.method === "POST") || - status === 307) && - (!policy.maxRetries || currentRetries < policy.maxRetries)) { - const builder = URLBuilder.parse(request.url); - builder.setPath(locationHeader); - request.url = builder.toString(); - // POST request with Status code 303 should be converted into a - // redirected GET request if the redirect url is present in the location header - if (status === 303) { - request.method = "GET"; - delete request.body; - } - return policy._nextPolicy - .sendRequest(request) - .then((res) => handleRedirect(policy, res, currentRetries + 1)); - } - return Promise.resolve(response); -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const DEFAULT_CLIENT_RETRY_COUNT = 3; -// intervals are in ms -const DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30; -const DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90; -const DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3; -function isNumber(n) { - return typeof n === "number"; -} -/** - * @internal - * Determines if the operation should be retried. - * - * @param retryLimit - Specifies the max number of retries. - * @param predicate - Initial chekck on whether to retry based on given responses or errors - * @param retryData - The retry data. - * @returns True if the operation qualifies for a retry; false otherwise. - */ -function shouldRetry(retryLimit, predicate, retryData, response, error) { - if (!predicate(response, error)) { - return false; - } - return retryData.retryCount < retryLimit; -} -/** - * @internal - * Updates the retry data for the next attempt. - * - * @param retryOptions - specifies retry interval, and its lower bound and upper bound. - * @param retryData - The retry data. - * @param err - The operation"s error, if any. - */ -function updateRetryData(retryOptions, retryData = { retryCount: 0, retryInterval: 0 }, err) { - if (err) { - if (retryData.error) { - err.innerError = retryData.error; - } - retryData.error = err; - } - // Adjust retry count - retryData.retryCount++; - // Adjust retry interval - let incrementDelta = Math.pow(2, retryData.retryCount - 1) - 1; - const boundedRandDelta = retryOptions.retryInterval * 0.8 + - Math.floor(Math.random() * (retryOptions.retryInterval * 0.4)); - incrementDelta *= boundedRandDelta; - retryData.retryInterval = Math.min(retryOptions.minRetryInterval + incrementDelta, retryOptions.maxRetryInterval); - return retryData; -} - -// Copyright (c) Microsoft Corporation. -/** - * Policy that retries the request as many times as configured for as long as the max retry time interval specified, each retry waiting longer to begin than the last time. - * @param retryCount - Maximum number of retries. - * @param retryInterval - Base time between retries. - * @param maxRetryInterval - Maximum time to wait between retries. 
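// The delay computed by updateRetryData above, isolated into a pure function:
// exponential growth (2^(n-1) - 1) scaled by a jittered base interval in
// roughly [0.8, 1.2) of retryInterval, then clamped to maxRetryInterval.
function nextRetryDelayMs(retryCount, { retryInterval, minRetryInterval, maxRetryInterval }) {
    const incrementDelta = Math.pow(2, retryCount - 1) - 1;
    const jitteredBase = retryInterval * 0.8 +
        Math.floor(Math.random() * (retryInterval * 0.4));
    return Math.min(minRetryInterval + incrementDelta * jitteredBase, maxRetryInterval);
}
// With the defaults above (30s base, 90s cap, 0 minimum): retry 1 waits 0ms,
// retry 2 waits roughly 24-36s, retry 3 lands at or near the 90s cap.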
- */ -function exponentialRetryPolicy(retryCount, retryInterval, maxRetryInterval) { - return { - create: (nextPolicy, options) => { - return new ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, maxRetryInterval); + }, +}; +const Metrics = { + serializedName: "Metrics", + type: { + name: "Composite", + className: "Metrics", + modelProperties: { + version: { + serializedName: "Version", + xmlName: "Version", + type: { + name: "String", + }, + }, + enabled: { + serializedName: "Enabled", + required: true, + xmlName: "Enabled", + type: { + name: "Boolean", + }, + }, + includeAPIs: { + serializedName: "IncludeAPIs", + xmlName: "IncludeAPIs", + type: { + name: "Boolean", + }, + }, + retentionPolicy: { + serializedName: "RetentionPolicy", + xmlName: "RetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy", + }, + }, }, - }; -} -/** - * Describes the Retry Mode type. Currently supporting only Exponential. - */ -exports.RetryMode = void 0; -(function (RetryMode) { - /** - * Currently supported retry mode. - * Each time a retry happens, it will take exponentially more time than the last time. - */ - RetryMode[RetryMode["Exponential"] = 0] = "Exponential"; -})(exports.RetryMode || (exports.RetryMode = {})); -const DefaultRetryOptions = { - maxRetries: DEFAULT_CLIENT_RETRY_COUNT, - retryDelayInMs: DEFAULT_CLIENT_RETRY_INTERVAL, - maxRetryDelayInMs: DEFAULT_CLIENT_MAX_RETRY_INTERVAL, + }, }; -/** - * Instantiates a new "ExponentialRetryPolicyFilter" instance. - */ -class ExponentialRetryPolicy extends BaseRequestPolicy { - /** - * @param nextPolicy - The next RequestPolicy in the pipeline chain. - * @param options - The options for this RequestPolicy. - * @param retryCount - The client retry count. - * @param retryInterval - The client retry interval, in milliseconds. - * @param minRetryInterval - The minimum retry interval, in milliseconds. - * @param maxRetryInterval - The maximum retry interval, in milliseconds. - */ - constructor(nextPolicy, options, retryCount, retryInterval, maxRetryInterval) { - super(nextPolicy, options); - this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT; - this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; - this.maxRetryInterval = isNumber(maxRetryInterval) - ? maxRetryInterval - : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; - } - sendRequest(request) { - return this._nextPolicy - .sendRequest(request.clone()) - .then((response) => retry$1(this, request, response)) - .catch((error) => retry$1(this, request, error.response, undefined, error)); - } -} -async function retry$1(policy, request, response, retryData, requestError) { - function shouldPolicyRetry(responseParam) { - const statusCode = responseParam === null || responseParam === void 0 ? void 0 : responseParam.status; - if (statusCode === 503 && (response === null || response === void 0 ? 
void 0 : response.headers.get(Constants.HeaderConstants.RETRY_AFTER))) { - return false; - } - if (statusCode === undefined || - (statusCode < 500 && statusCode !== 408) || - statusCode === 501 || - statusCode === 505) { - return false; - } - return true; - } - retryData = updateRetryData({ - retryInterval: policy.retryInterval, - minRetryInterval: 0, - maxRetryInterval: policy.maxRetryInterval, - }, retryData, requestError); - const isAborted = request.abortSignal && request.abortSignal.aborted; - if (!isAborted && shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, response)) { - logger.info(`Retrying request in ${retryData.retryInterval}`); - try { - await coreUtil.delay(retryData.retryInterval); - const res = await policy._nextPolicy.sendRequest(request.clone()); - return retry$1(policy, request, res, retryData); - } - catch (err) { - return retry$1(policy, request, response, retryData, err); - } - } - else if (isAborted || requestError || !response) { - // If the operation failed in the end, return all errors instead of just the last one - const err = retryData.error || - new RestError("Failed to send the request.", RestError.REQUEST_SEND_ERROR, response && response.status, response && response.request, response); - throw err; - } - else { - return response; - } -} - -// Copyright (c) Microsoft Corporation. -/** - * Creates a policy that logs information about the outgoing request and the incoming responses. - * @param loggingOptions - Logging options. - * @returns An instance of the {@link LogPolicy} - */ -function logPolicy(loggingOptions = {}) { - return { - create: (nextPolicy, options) => { - return new LogPolicy(nextPolicy, options, loggingOptions); +const CorsRule = { + serializedName: "CorsRule", + type: { + name: "Composite", + className: "CorsRule", + modelProperties: { + allowedOrigins: { + serializedName: "AllowedOrigins", + required: true, + xmlName: "AllowedOrigins", + type: { + name: "String", + }, + }, + allowedMethods: { + serializedName: "AllowedMethods", + required: true, + xmlName: "AllowedMethods", + type: { + name: "String", + }, + }, + allowedHeaders: { + serializedName: "AllowedHeaders", + required: true, + xmlName: "AllowedHeaders", + type: { + name: "String", + }, + }, + exposedHeaders: { + serializedName: "ExposedHeaders", + required: true, + xmlName: "ExposedHeaders", + type: { + name: "String", + }, + }, + maxAgeInSeconds: { + constraints: { + InclusiveMinimum: 0, + }, + serializedName: "MaxAgeInSeconds", + required: true, + xmlName: "MaxAgeInSeconds", + type: { + name: "Number", + }, + }, }, - }; -} -/** - * A policy that logs information about the outgoing request and the incoming responses. - */ -class LogPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, { logger: logger$1 = logger.info, allowedHeaderNames = [], allowedQueryParameters = [], } = {}) { - super(nextPolicy, options); - this.logger = logger$1; - this.sanitizer = new Sanitizer({ allowedHeaderNames, allowedQueryParameters }); - } - /** - * Header names whose values will be logged when logging is enabled. Defaults to - * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers - * specified in this field will be added to that list. Any other values will - * be written to logs as "REDACTED". - * @deprecated Pass these into the constructor instead. - */ - get allowedHeaderNames() { - return this.sanitizer.allowedHeaderNames; - } - /** - * Header names whose values will be logged when logging is enabled. 
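// The retry predicate inside retry$1 above, restated as a pure function:
// retry on 408 and on 5xx, except 501 and 505, and never on a 503 that
// already carries a Retry-After header.
function isStatusRetriable(statusCode, hasRetryAfterHeader) {
    if (statusCode === 503 && hasRetryAfterHeader) {
        return false;
    }
    if (statusCode === undefined ||
        (statusCode < 500 && statusCode !== 408) ||
        statusCode === 501 ||
        statusCode === 505) {
        return false;
    }
    return true;
}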
Defaults to - * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers - * specified in this field will be added to that list. Any other values will - * be written to logs as "REDACTED". - * @deprecated Pass these into the constructor instead. - */ - set allowedHeaderNames(allowedHeaderNames) { - this.sanitizer.allowedHeaderNames = allowedHeaderNames; - } - /** - * Query string names whose values will be logged when logging is enabled. By default no - * query string values are logged. - * @deprecated Pass these into the constructor instead. - */ - get allowedQueryParameters() { - return this.sanitizer.allowedQueryParameters; - } - /** - * Query string names whose values will be logged when logging is enabled. By default no - * query string values are logged. - * @deprecated Pass these into the constructor instead. - */ - set allowedQueryParameters(allowedQueryParameters) { - this.sanitizer.allowedQueryParameters = allowedQueryParameters; - } - sendRequest(request) { - if (!this.logger.enabled) - return this._nextPolicy.sendRequest(request); - this.logRequest(request); - return this._nextPolicy.sendRequest(request).then((response) => this.logResponse(response)); - } - logRequest(request) { - this.logger(`Request: ${this.sanitizer.sanitize(request)}`); - } - logResponse(response) { - this.logger(`Response status code: ${response.status}`); - this.logger(`Headers: ${this.sanitizer.sanitize(response.headers)}`); - return response; - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Get the path to this parameter's value as a dotted string (a.b.c). - * @param parameter - The parameter to get the path string for. - * @returns The path to this parameter's value as a dotted string. - */ -function getPathStringFromParameter(parameter) { - return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper); -} -function getPathStringFromParameterPath(parameterPath, mapper) { - let result; - if (typeof parameterPath === "string") { - result = parameterPath; - } - else if (Array.isArray(parameterPath)) { - result = parameterPath.join("."); - } - else { - result = mapper.serializedName; - } - return result; -} - -// Copyright (c) Microsoft Corporation. -/** - * Gets the list of status codes for streaming responses. - * @internal - */ -function getStreamResponseStatusCodes(operationSpec) { - const result = new Set(); - for (const statusCode in operationSpec.responses) { - const operationResponse = operationSpec.responses[statusCode]; - if (operationResponse.bodyMapper && - operationResponse.bodyMapper.type.name === MapperType.Stream) { - result.add(Number(statusCode)); - } - } - return result; -} - -// Copyright (c) Microsoft Corporation. -function getDefaultUserAgentKey() { - return Constants.HeaderConstants.USER_AGENT; -} -function getPlatformSpecificData() { - const runtimeInfo = { - key: "Node", - value: process.version, - }; - const osInfo = { - key: "OS", - value: `(${os__namespace.arch()}-${os__namespace.type()}-${os__namespace.release()})`, - }; - return [runtimeInfo, osInfo]; -} - -// Copyright (c) Microsoft Corporation. -function getRuntimeInfo() { - const msRestRuntime = { - key: "core-http", - value: Constants.coreHttpVersion, - }; - return [msRestRuntime]; -} -function getUserAgentString(telemetryInfo, keySeparator = " ", valueSeparator = "/") { - return telemetryInfo - .map((info) => { - const value = info.value ? 
`${valueSeparator}${info.value}` : ""; - return `${info.key}${value}`; - }) - .join(keySeparator); -} -const getDefaultUserAgentHeaderName = getDefaultUserAgentKey; -/** - * The default approach to generate user agents. - * Uses static information from this package, plus system information available from the runtime. - */ -function getDefaultUserAgentValue() { - const runtimeInfo = getRuntimeInfo(); - const platformSpecificData = getPlatformSpecificData(); - const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData)); - return userAgent; -} -/** - * Returns a policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. - * @param userAgentData - Telemetry information. - * @returns A new {@link UserAgentPolicy}. - */ -function userAgentPolicy(userAgentData) { - const key = !userAgentData || userAgentData.key === undefined || userAgentData.key === null - ? getDefaultUserAgentKey() - : userAgentData.key; - const value = !userAgentData || userAgentData.value === undefined || userAgentData.value === null - ? getDefaultUserAgentValue() - : userAgentData.value; - return { - create: (nextPolicy, options) => { - return new UserAgentPolicy(nextPolicy, options, key, value); + }, +}; +const StaticWebsite = { + serializedName: "StaticWebsite", + type: { + name: "Composite", + className: "StaticWebsite", + modelProperties: { + enabled: { + serializedName: "Enabled", + required: true, + xmlName: "Enabled", + type: { + name: "Boolean", + }, + }, + indexDocument: { + serializedName: "IndexDocument", + xmlName: "IndexDocument", + type: { + name: "String", + }, + }, + errorDocument404Path: { + serializedName: "ErrorDocument404Path", + xmlName: "ErrorDocument404Path", + type: { + name: "String", + }, + }, + defaultIndexDocumentPath: { + serializedName: "DefaultIndexDocumentPath", + xmlName: "DefaultIndexDocumentPath", + type: { + name: "String", + }, + }, }, - }; -} -/** - * A policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. - */ -class UserAgentPolicy extends BaseRequestPolicy { - constructor(_nextPolicy, _options, headerKey, headerValue) { - super(_nextPolicy, _options); - this._nextPolicy = _nextPolicy; - this._options = _options; - this.headerKey = headerKey; - this.headerValue = headerValue; - } - sendRequest(request) { - this.addUserAgentHeader(request); - return this._nextPolicy.sendRequest(request); - } - /** - * Adds the user agent header to the outgoing request. - */ - addUserAgentHeader(request) { - if (!request.headers) { - request.headers = new HttpHeaders(); - } - if (!request.headers.get(this.headerKey) && this.headerValue) { - request.headers.set(this.headerKey, this.headerValue); - } - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * The format that will be used to join an array of values together for a query parameter value. - */ -exports.QueryCollectionFormat = void 0; -(function (QueryCollectionFormat) { - /** - * CSV: Each pair of segments joined by a single comma. - */ - QueryCollectionFormat["Csv"] = ","; - /** - * SSV: Each pair of segments joined by a single space character. - */ - QueryCollectionFormat["Ssv"] = " "; - /** - * TSV: Each pair of segments joined by a single tab character. - */ - QueryCollectionFormat["Tsv"] = "\t"; - /** - * Pipes: Each pair of segments joined by a single pipe character. 
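// How getUserAgentString above assembles the header value: each TelemetryInfo
// entry becomes "key/value" (or just "key" when there is no value) and the
// entries are joined with spaces. The sample values below are illustrative,
// not the real version numbers.
function buildUserAgent(telemetryInfo) {
    return telemetryInfo
        .map(({ key, value }) => (value ? `${key}/${value}` : key))
        .join(" ");
}
// buildUserAgent([
//     { key: "core-http", value: "2.2.0" },
//     { key: "Node", value: process.version },
//     { key: "OS", value: "(x64-Linux-5.15)" },
// ]) -> e.g. "core-http/2.2.0 Node/v20.0.0 OS/(x64-Linux-5.15)"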
- */ - QueryCollectionFormat["Pipes"] = "|"; - /** - * Denotes this is an array of values that should be passed to the server in multiple key/value pairs, e.g. `?queryParam=value1&queryParam=value2` - */ - QueryCollectionFormat["Multi"] = "Multi"; -})(exports.QueryCollectionFormat || (exports.QueryCollectionFormat = {})); - -// Copyright (c) Microsoft Corporation. -// Default options for the cycler if none are provided -const DEFAULT_CYCLER_OPTIONS = { - forcedRefreshWindowInMs: 1000, - retryIntervalInMs: 3000, - refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry + }, }; -/** - * Converts an an unreliable access token getter (which may resolve with null) - * into an AccessTokenGetter by retrying the unreliable getter in a regular - * interval. - * - * @param getAccessToken - a function that produces a promise of an access - * token that may fail by returning null - * @param retryIntervalInMs - the time (in milliseconds) to wait between retry - * attempts - * @param timeoutInMs - the timestamp after which the refresh attempt will fail, - * throwing an exception - * @returns - a promise that, if it resolves, will resolve with an access token - */ -async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { - // This wrapper handles exceptions gracefully as long as we haven't exceeded - // the timeout. - async function tryGetAccessToken() { - if (Date.now() < timeoutInMs) { - try { - return await getAccessToken(); - } - catch (_a) { - return null; - } - } - else { - const finalToken = await getAccessToken(); - // Timeout is up, so throw if it's still null - if (finalToken === null) { - throw new Error("Failed to refresh access token."); - } - return finalToken; - } - } - let token = await tryGetAccessToken(); - while (token === null) { - await coreUtil.delay(retryIntervalInMs); - token = await tryGetAccessToken(); - } - return token; -} -/** - * Creates a token cycler from a credential, scopes, and optional settings. - * - * A token cycler represents a way to reliably retrieve a valid access token - * from a TokenCredential. It will handle initializing the token, refreshing it - * when it nears expiration, and synchronizes refresh attempts to avoid - * concurrency hazards. - * - * @param credential - the underlying TokenCredential that provides the access - * token - * @param scopes - the scopes to request authorization for - * @param tokenCyclerOptions - optionally override default settings for the cycler - * - * @returns - a function that reliably produces a valid access token - */ -function createTokenCycler(credential, scopes, tokenCyclerOptions) { - let refreshWorker = null; - let token = null; - const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); - /** - * This little holder defines several predicates that we use to construct - * the rules of refreshing the token. - */ - const cycler = { - /** - * Produces true if a refresh job is currently in progress. 
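// What the QueryCollectionFormat values above produce for a query parameter
// named "ids" holding ["a", "b"]:
//   Csv   -> ids=a,b
//   Ssv   -> ids=a b
//   Tsv   -> ids=a\tb
//   Pipes -> ids=a|b
//   Multi -> ids=a&ids=b   (the key is repeated per value)
// A sketch of the join for the separator-based formats; Multi keeps the
// values separate so the serializer can emit repeated key/value pairs:
function joinCollection(values, format) {
    return format === "Multi" ? values.slice() : [values.join(format)];
}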
- */ - get isRefreshing() { - return refreshWorker !== null; +const StorageError = { + serializedName: "StorageError", + type: { + name: "Composite", + className: "StorageError", + modelProperties: { + message: { + serializedName: "Message", + xmlName: "Message", + type: { + name: "String", + }, + }, + code: { + serializedName: "Code", + xmlName: "Code", + type: { + name: "String", + }, + }, }, - /** - * Produces true if the cycler SHOULD refresh (we are within the refresh - * window and not already refreshing) - */ - get shouldRefresh() { - var _a; - return (!cycler.isRefreshing && - ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now()); + }, +}; +const BlobServiceStatistics = { + serializedName: "BlobServiceStatistics", + xmlName: "StorageServiceStats", + type: { + name: "Composite", + className: "BlobServiceStatistics", + modelProperties: { + geoReplication: { + serializedName: "GeoReplication", + xmlName: "GeoReplication", + type: { + name: "Composite", + className: "GeoReplication", + }, + }, }, - /** - * Produces true if the cycler MUST refresh (null or nearly-expired - * token). - */ - get mustRefresh() { - return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); + }, +}; +const GeoReplication = { + serializedName: "GeoReplication", + type: { + name: "Composite", + className: "GeoReplication", + modelProperties: { + status: { + serializedName: "Status", + required: true, + xmlName: "Status", + type: { + name: "Enum", + allowedValues: ["live", "bootstrap", "unavailable"], + }, + }, + lastSyncOn: { + serializedName: "LastSyncTime", + required: true, + xmlName: "LastSyncTime", + type: { + name: "DateTimeRfc1123", + }, + }, }, - }; - /** - * Starts a refresh job or returns the existing job if one is already - * running. - */ - function refresh(getTokenOptions) { - var _a; - if (!cycler.isRefreshing) { - // We bind `scopes` here to avoid passing it around a lot - const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); - // Take advantage of promise chaining to insert an assignment to `token` - // before the refresh can be considered done. - refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, - // If we don't have a token, then we should timeout immediately - (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) - .then((_token) => { - refreshWorker = null; - token = _token; - return token; - }) - .catch((reason) => { - // We also should reset the refresher if we enter a failed state. All - // existing awaiters will throw, but subsequent requests will start a - // new retry chain. - refreshWorker = null; - token = null; - throw reason; - }); - } - return refreshWorker; - } - return async (tokenOptions) => { - // - // Simple rules: - // - If we MUST refresh, then return the refresh task, blocking - // the pipeline until a token is available. - // - If we SHOULD refresh, then run refresh but don't return it - // (we can still use the cached token). - // - Return the token, since it's fine if we didn't return in - // step 1. - // - if (cycler.mustRefresh) - return refresh(tokenOptions); - if (cycler.shouldRefresh) { - refresh(tokenOptions); - } - return token; - }; -} -// #endregion -/** - * Creates a new factory for a RequestPolicy that applies a bearer token to - * the requests' `Authorization` headers. 
- * - * @param credential - The TokenCredential implementation that can supply the bearer token. - * @param scopes - The scopes for which the bearer token applies. - */ -function bearerTokenAuthenticationPolicy(credential, scopes) { - // This simple function encapsulates the entire process of reliably retrieving the token - const getToken = createTokenCycler(credential, scopes /* , options */); - class BearerTokenAuthenticationPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options) { - super(nextPolicy, options); - } - async sendRequest(webResource) { - if (!webResource.url.toLowerCase().startsWith("https://")) { - throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); - } - const { token } = await getToken({ - abortSignal: webResource.abortSignal, - tracingOptions: { - tracingContext: webResource.tracingContext, + }, +}; +const ListContainersSegmentResponse = { + serializedName: "ListContainersSegmentResponse", + xmlName: "EnumerationResults", + type: { + name: "Composite", + className: "ListContainersSegmentResponse", + modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, + type: { + name: "String", }, - }); - webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`); - return this._nextPolicy.sendRequest(webResource); - } - } - return { - create: (nextPolicy, options) => { - return new BearerTokenAuthenticationPolicy(nextPolicy, options); + }, + prefix: { + serializedName: "Prefix", + xmlName: "Prefix", + type: { + name: "String", + }, + }, + marker: { + serializedName: "Marker", + xmlName: "Marker", + type: { + name: "String", + }, + }, + maxPageSize: { + serializedName: "MaxResults", + xmlName: "MaxResults", + type: { + name: "Number", + }, + }, + containerItems: { + serializedName: "ContainerItems", + required: true, + xmlName: "Containers", + xmlIsWrapped: true, + xmlElementName: "Container", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ContainerItem", + }, + }, + }, + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String", + }, + }, }, - }; -} - -// Copyright (c) Microsoft Corporation. -/** - * Returns a request policy factory that can be used to create an instance of - * {@link DisableResponseDecompressionPolicy}. 
- */ -function disableResponseDecompressionPolicy() { - return { - create: (nextPolicy, options) => { - return new DisableResponseDecompressionPolicy(nextPolicy, options); + }, +}; +const ContainerItem = { + serializedName: "ContainerItem", + xmlName: "Container", + type: { + name: "Composite", + className: "ContainerItem", + modelProperties: { + name: { + serializedName: "Name", + required: true, + xmlName: "Name", + type: { + name: "String", + }, + }, + deleted: { + serializedName: "Deleted", + xmlName: "Deleted", + type: { + name: "Boolean", + }, + }, + version: { + serializedName: "Version", + xmlName: "Version", + type: { + name: "String", + }, + }, + properties: { + serializedName: "Properties", + xmlName: "Properties", + type: { + name: "Composite", + className: "ContainerProperties", + }, + }, + metadata: { + serializedName: "Metadata", + xmlName: "Metadata", + type: { + name: "Dictionary", + value: { type: { name: "String" } }, + }, + }, }, - }; -} -/** - * A policy to disable response decompression according to Accept-Encoding header - * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding - */ -class DisableResponseDecompressionPolicy extends BaseRequestPolicy { - /** - * Creates an instance of DisableResponseDecompressionPolicy. - * - * @param nextPolicy - - * @param options - - */ - // The parent constructor is protected. - /* eslint-disable-next-line @typescript-eslint/no-useless-constructor */ - constructor(nextPolicy, options) { - super(nextPolicy, options); - } - /** - * Sends out request. - * - * @param request - - * @returns - */ - async sendRequest(request) { - request.decompressResponse = false; - return this._nextPolicy.sendRequest(request); - } -} - -// Copyright (c) Microsoft Corporation. -/** - * Creates a policy that assigns a unique request id to outgoing requests. - * @param requestIdHeaderName - The name of the header to use when assigning the unique id to the request. 
- */ -function generateClientRequestIdPolicy(requestIdHeaderName = "x-ms-client-request-id") { - return { - create: (nextPolicy, options) => { - return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName); + }, +}; +const ContainerProperties = { + serializedName: "ContainerProperties", + type: { + name: "Composite", + className: "ContainerProperties", + modelProperties: { + lastModified: { + serializedName: "Last-Modified", + required: true, + xmlName: "Last-Modified", + type: { + name: "DateTimeRfc1123", + }, + }, + etag: { + serializedName: "Etag", + required: true, + xmlName: "Etag", + type: { + name: "String", + }, + }, + leaseStatus: { + serializedName: "LeaseStatus", + xmlName: "LeaseStatus", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"], + }, + }, + leaseState: { + serializedName: "LeaseState", + xmlName: "LeaseState", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken", + ], + }, + }, + leaseDuration: { + serializedName: "LeaseDuration", + xmlName: "LeaseDuration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"], + }, + }, + publicAccess: { + serializedName: "PublicAccess", + xmlName: "PublicAccess", + type: { + name: "Enum", + allowedValues: ["container", "blob"], + }, + }, + hasImmutabilityPolicy: { + serializedName: "HasImmutabilityPolicy", + xmlName: "HasImmutabilityPolicy", + type: { + name: "Boolean", + }, + }, + hasLegalHold: { + serializedName: "HasLegalHold", + xmlName: "HasLegalHold", + type: { + name: "Boolean", + }, + }, + defaultEncryptionScope: { + serializedName: "DefaultEncryptionScope", + xmlName: "DefaultEncryptionScope", + type: { + name: "String", + }, + }, + preventEncryptionScopeOverride: { + serializedName: "DenyEncryptionScopeOverride", + xmlName: "DenyEncryptionScopeOverride", + type: { + name: "Boolean", + }, + }, + deletedOn: { + serializedName: "DeletedTime", + xmlName: "DeletedTime", + type: { + name: "DateTimeRfc1123", + }, + }, + remainingRetentionDays: { + serializedName: "RemainingRetentionDays", + xmlName: "RemainingRetentionDays", + type: { + name: "Number", + }, + }, + isImmutableStorageWithVersioningEnabled: { + serializedName: "ImmutableStorageWithVersioningEnabled", + xmlName: "ImmutableStorageWithVersioningEnabled", + type: { + name: "Boolean", + }, + }, }, - }; -} -class GenerateClientRequestIdPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, _requestIdHeaderName) { - super(nextPolicy, options); - this._requestIdHeaderName = _requestIdHeaderName; - } - sendRequest(request) { - if (!request.headers.contains(this._requestIdHeaderName)) { - request.headers.set(this._requestIdHeaderName, request.requestId); - } - return this._nextPolicy.sendRequest(request); - } -} - -// Copyright (c) Microsoft Corporation. -let cachedHttpClient; -function getCachedDefaultHttpClient() { - if (!cachedHttpClient) { - cachedHttpClient = new NodeFetchHttpClient(); - } - return cachedHttpClient; -} - -// Copyright (c) Microsoft Corporation. 
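// Aside — a minimal sketch, not part of this bundle, of the RequestPolicy
// pattern that every policy removed in this hunk follows (cf. UserAgentPolicy
// and GenerateClientRequestIdPolicy above): a factory whose
// create(nextPolicy, options) returns a BaseRequestPolicy subclass that does
// its one job and then delegates to this._nextPolicy.sendRequest(). The
// exampleStampPolicy / ExampleStampPolicy names and the "x-example-stamp"
// header are hypothetical; BaseRequestPolicy is assumed to be the base class
// defined earlier in this bundle.
function exampleStampPolicy(headerName = "x-example-stamp") { // hypothetical header name
    return {
        create: (nextPolicy, options) => {
            return new ExampleStampPolicy(nextPolicy, options, headerName);
        },
    };
}
class ExampleStampPolicy extends BaseRequestPolicy {
    constructor(nextPolicy, options, headerName) {
        super(nextPolicy, options);
        this.headerName = headerName;
    }
    sendRequest(request) {
        // Stamp the outgoing request once, then hand off to the next policy.
        if (!request.headers.get(this.headerName)) {
            request.headers.set(this.headerName, new Date().toISOString());
        }
        return this._nextPolicy.sendRequest(request);
    }
}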
-function ndJsonPolicy() { - return { - create: (nextPolicy, options) => { - return new NdJsonPolicy(nextPolicy, options); + }, +}; +const KeyInfo = { + serializedName: "KeyInfo", + type: { + name: "Composite", + className: "KeyInfo", + modelProperties: { + startsOn: { + serializedName: "Start", + required: true, + xmlName: "Start", + type: { + name: "String", + }, + }, + expiresOn: { + serializedName: "Expiry", + required: true, + xmlName: "Expiry", + type: { + name: "String", + }, + }, }, - }; -} -/** - * NdJsonPolicy that formats a JSON array as newline-delimited JSON - */ -class NdJsonPolicy extends BaseRequestPolicy { - /** - * Creates an instance of KeepAlivePolicy. - */ - constructor(nextPolicy, options) { - super(nextPolicy, options); - } - /** - * Sends a request. - */ - async sendRequest(request) { - // There currently isn't a good way to bypass the serializer - if (typeof request.body === "string" && request.body.startsWith("[")) { - const body = JSON.parse(request.body); - if (Array.isArray(body)) { - request.body = body.map((item) => JSON.stringify(item) + "\n").join(""); - } - } - return this._nextPolicy.sendRequest(request); - } -} - -// Copyright (c) Microsoft Corporation. -/** - * Stores the patterns specified in NO_PROXY environment variable. - * @internal - */ -const globalNoProxyList = []; -let noProxyListLoaded = false; -/** A cache of whether a host should bypass the proxy. */ -const globalBypassedMap = new Map(); -function loadEnvironmentProxyValue() { - if (!process) { - return undefined; - } - const httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY); - const allProxy = getEnvironmentValue(Constants.ALL_PROXY); - const httpProxy = getEnvironmentValue(Constants.HTTP_PROXY); - return httpsProxy || allProxy || httpProxy; -} -/** - * Check whether the host of a given `uri` matches any pattern in the no proxy list. - * If there's a match, any request sent to the same host shouldn't have the proxy settings set. - * This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 - */ -function isBypassed(uri, noProxyList, bypassedMap) { - if (noProxyList.length === 0) { - return false; - } - const host = URLBuilder.parse(uri).getHost(); - if (bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.has(host)) { - return bypassedMap.get(host); - } - let isBypassedFlag = false; - for (const pattern of noProxyList) { - if (pattern[0] === ".") { - // This should match either domain it self or any subdomain or host - // .foo.com will match foo.com it self or *.foo.com - if (host.endsWith(pattern)) { - isBypassedFlag = true; - } - else { - if (host.length === pattern.length - 1 && host === pattern.slice(1)) { - isBypassedFlag = true; - } - } - } - else { - if (host === pattern) { - isBypassedFlag = true; - } - } - } - bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.set(host, isBypassedFlag); - return isBypassedFlag; -} -/** - * @internal - */ -function loadNoProxy() { - const noProxy = getEnvironmentValue(Constants.NO_PROXY); - noProxyListLoaded = true; - if (noProxy) { - return noProxy - .split(",") - .map((item) => item.trim()) - .filter((item) => item.length); - } - return []; -} -/** - * Converts a given URL of a proxy server into `ProxySettings` or attempts to retrieve `ProxySettings` from the current environment if one is not passed. 
- * @param proxyUrl - URL of the proxy - * @returns The default proxy settings, or undefined. - */ -function getDefaultProxySettings(proxyUrl) { - if (!proxyUrl) { - proxyUrl = loadEnvironmentProxyValue(); - if (!proxyUrl) { - return undefined; - } - } - const { username, password, urlWithoutAuth } = extractAuthFromUrl(proxyUrl); - const parsedUrl = URLBuilder.parse(urlWithoutAuth); - const schema = parsedUrl.getScheme() ? parsedUrl.getScheme() + "://" : ""; - return { - host: schema + parsedUrl.getHost(), - port: Number.parseInt(parsedUrl.getPort() || "80"), - username, - password, - }; -} -/** - * A policy that allows one to apply proxy settings to all requests. - * If not passed static settings, they will be retrieved from the HTTPS_PROXY - * or HTTP_PROXY environment variables. - * @param proxySettings - ProxySettings to use on each request. - * @param options - additional settings, for example, custom NO_PROXY patterns - */ -function proxyPolicy(proxySettings, options) { - if (!proxySettings) { - proxySettings = getDefaultProxySettings(); - } - if (!noProxyListLoaded) { - globalNoProxyList.push(...loadNoProxy()); - } - return { - create: (nextPolicy, requestPolicyOptions) => { - return new ProxyPolicy(nextPolicy, requestPolicyOptions, proxySettings, options === null || options === void 0 ? void 0 : options.customNoProxyList); + }, +}; +const UserDelegationKey = { + serializedName: "UserDelegationKey", + type: { + name: "Composite", + className: "UserDelegationKey", + modelProperties: { + signedObjectId: { + serializedName: "SignedOid", + required: true, + xmlName: "SignedOid", + type: { + name: "String", + }, + }, + signedTenantId: { + serializedName: "SignedTid", + required: true, + xmlName: "SignedTid", + type: { + name: "String", + }, + }, + signedStartsOn: { + serializedName: "SignedStart", + required: true, + xmlName: "SignedStart", + type: { + name: "String", + }, + }, + signedExpiresOn: { + serializedName: "SignedExpiry", + required: true, + xmlName: "SignedExpiry", + type: { + name: "String", + }, + }, + signedService: { + serializedName: "SignedService", + required: true, + xmlName: "SignedService", + type: { + name: "String", + }, + }, + signedVersion: { + serializedName: "SignedVersion", + required: true, + xmlName: "SignedVersion", + type: { + name: "String", + }, + }, + value: { + serializedName: "Value", + required: true, + xmlName: "Value", + type: { + name: "String", + }, + }, }, - }; -} -function extractAuthFromUrl(url) { - const atIndex = url.indexOf("@"); - if (atIndex === -1) { - return { urlWithoutAuth: url }; - } - const schemeIndex = url.indexOf("://"); - const authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0; - const auth = url.substring(authStart, atIndex); - const colonIndex = auth.indexOf(":"); - const hasPassword = colonIndex !== -1; - const username = hasPassword ? auth.substring(0, colonIndex) : auth; - const password = hasPassword ? auth.substring(colonIndex + 1) : undefined; - const urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1); - return { - username, - password, - urlWithoutAuth, - }; -} -class ProxyPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, proxySettings, customNoProxyList) { - super(nextPolicy, options); - this.proxySettings = proxySettings; - this.customNoProxyList = customNoProxyList; - } - sendRequest(request) { - var _a; - if (!request.proxySettings && - !isBypassed(request.url, (_a = this.customNoProxyList) !== null && _a !== void 0 ? _a : globalNoProxyList, this.customNoProxyList ? 
undefined : globalBypassedMap)) { - request.proxySettings = this.proxySettings; - } - return this._nextPolicy.sendRequest(request); - } -} - -// Copyright (c) Microsoft Corporation. -function rpRegistrationPolicy(retryTimeout = 30) { - return { - create: (nextPolicy, options) => { - return new RPRegistrationPolicy(nextPolicy, options, retryTimeout); + }, +}; +const FilterBlobSegment = { + serializedName: "FilterBlobSegment", + xmlName: "EnumerationResults", + type: { + name: "Composite", + className: "FilterBlobSegment", + modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, + type: { + name: "String", + }, + }, + where: { + serializedName: "Where", + required: true, + xmlName: "Where", + type: { + name: "String", + }, + }, + blobs: { + serializedName: "Blobs", + required: true, + xmlName: "Blobs", + xmlIsWrapped: true, + xmlElementName: "Blob", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "FilterBlobItem", + }, + }, + }, + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String", + }, + }, }, - }; -} -class RPRegistrationPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, _retryTimeout = 30) { - super(nextPolicy, options); - this._retryTimeout = _retryTimeout; - } - sendRequest(request) { - return this._nextPolicy - .sendRequest(request.clone()) - .then((response) => registerIfNeeded(this, request, response)); - } -} -function registerIfNeeded(policy, request, response) { - if (response.status === 409) { - const rpName = checkRPNotRegisteredError(response.bodyAsText); - if (rpName) { - const urlPrefix = extractSubscriptionUrl(request.url); - return (registerRP(policy, urlPrefix, rpName, request) - // Autoregistration of ${provider} failed for some reason. We will not return this error - // instead will return the initial response with 409 status code back to the user. - // do nothing here as we are returning the original response at the end of this method. - .catch(() => false) - .then((registrationStatus) => { - if (registrationStatus) { - // Retry the original request. We have to change the x-ms-client-request-id - // otherwise Azure endpoint will return the initial 409 (cached) response. - request.headers.set("x-ms-client-request-id", generateUuid()); - return policy._nextPolicy.sendRequest(request.clone()); - } - return response; - })); - } - } - return Promise.resolve(response); -} -/** - * Reuses the headers of the original request and url (if specified). - * @param originalRequest - The original request - * @param reuseUrlToo - Should the url from the original request be reused as well. Default false. - * @returns A new request object with desired headers. - */ -function getRequestEssentials(originalRequest, reuseUrlToo = false) { - const reqOptions = originalRequest.clone(); - if (reuseUrlToo) { - reqOptions.url = originalRequest.url; - } - // We have to change the x-ms-client-request-id otherwise Azure endpoint - // will return the initial 409 (cached) response. - reqOptions.headers.set("x-ms-client-request-id", generateUuid()); - // Set content-type to application/json - reqOptions.headers.set("Content-Type", "application/json; charset=utf-8"); - return reqOptions; -} -/** - * Validates the error code and message associated with 409 response status code. If it matches to that of - * RP not registered then it returns the name of the RP else returns undefined. 
- * @param body - The response body received after making the original request. - * @returns The name of the RP if condition is satisfied else undefined. - */ -function checkRPNotRegisteredError(body) { - let result, responseBody; - if (body) { - try { - responseBody = JSON.parse(body); - } - catch (err) { - // do nothing; - } - if (responseBody && - responseBody.error && - responseBody.error.message && - responseBody.error.code && - responseBody.error.code === "MissingSubscriptionRegistration") { - const matchRes = responseBody.error.message.match(/.*'(.*)'/i); - if (matchRes) { - result = matchRes.pop(); - } - } - } - return result; -} -/** - * Extracts the first part of the URL, just after subscription: - * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ - * @param url - The original request url - * @returns The url prefix as explained above. - */ -function extractSubscriptionUrl(url) { - let result; - const matchRes = url.match(/.*\/subscriptions\/[a-f0-9-]+\//gi); - if (matchRes && matchRes[0]) { - result = matchRes[0]; - } - else { - throw new Error(`Unable to extract subscriptionId from the given url - ${url}.`); - } - return result; -} -/** - * Registers the given provider. - * @param policy - The RPRegistrationPolicy this function is being called against. - * @param urlPrefix - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ - * @param provider - The provider name to be registered. - * @param originalRequest - The original request sent by the user that returned a 409 response - * with a message that the provider is not registered. - */ -async function registerRP(policy, urlPrefix, provider, originalRequest) { - const postUrl = `${urlPrefix}providers/${provider}/register?api-version=2016-02-01`; - const getUrl = `${urlPrefix}providers/${provider}?api-version=2016-02-01`; - const reqOptions = getRequestEssentials(originalRequest); - reqOptions.method = "POST"; - reqOptions.url = postUrl; - const response = await policy._nextPolicy.sendRequest(reqOptions); - if (response.status !== 200) { - throw new Error(`Autoregistration of ${provider} failed. Please try registering manually.`); - } - return getRegistrationStatus(policy, getUrl, originalRequest); -} -/** - * Polls the registration status of the provider that was registered. Polling happens at an interval of 30 seconds. - * Polling will happen till the registrationState property of the response body is "Registered". - * @param policy - The RPRegistrationPolicy this function is being called against. - * @param url - The request url for polling - * @param originalRequest - The original request sent by the user that returned a 409 response - * with a message that the provider is not registered. - * @returns True if RP Registration is successful. - */ -async function getRegistrationStatus(policy, url, originalRequest) { - const reqOptions = getRequestEssentials(originalRequest); - reqOptions.url = url; - reqOptions.method = "GET"; - const res = await policy._nextPolicy.sendRequest(reqOptions); - const obj = res.parsedBody; - if (res.parsedBody && obj.registrationState && obj.registrationState === "Registered") { - return true; - } - else { - await coreUtil.delay(policy._retryTimeout * 1000); - return getRegistrationStatus(policy, url, originalRequest); - } -} - -// Copyright (c) Microsoft Corporation. -/** - * Creates a policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. 
- * @param authenticationProvider - The authentication provider. - * @returns An instance of the {@link SigningPolicy}. - */ -function signingPolicy(authenticationProvider) { - return { - create: (nextPolicy, options) => { - return new SigningPolicy(nextPolicy, options, authenticationProvider); + }, +}; +const FilterBlobItem = { + serializedName: "FilterBlobItem", + xmlName: "Blob", + type: { + name: "Composite", + className: "FilterBlobItem", + modelProperties: { + name: { + serializedName: "Name", + required: true, + xmlName: "Name", + type: { + name: "String", + }, + }, + containerName: { + serializedName: "ContainerName", + required: true, + xmlName: "ContainerName", + type: { + name: "String", + }, + }, + tags: { + serializedName: "Tags", + xmlName: "Tags", + type: { + name: "Composite", + className: "BlobTags", + }, + }, }, - }; -} -/** - * A policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. - */ -class SigningPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, authenticationProvider) { - super(nextPolicy, options); - this.authenticationProvider = authenticationProvider; - } - signRequest(request) { - return this.authenticationProvider.signRequest(request); - } - sendRequest(request) { - return this.signRequest(request).then((nextRequest) => this._nextPolicy.sendRequest(nextRequest)); - } -} - -// Copyright (c) Microsoft Corporation. -/** - * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". - * @param retryCount - Maximum number of retries. - * @param retryInterval - The client retry interval, in milliseconds. - * @param minRetryInterval - The minimum retry interval, in milliseconds. - * @param maxRetryInterval - The maximum retry interval, in milliseconds. - * @returns An instance of the {@link SystemErrorRetryPolicy} - */ -function systemErrorRetryPolicy(retryCount, retryInterval, minRetryInterval, maxRetryInterval) { - return { - create: (nextPolicy, options) => { - return new SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval); + }, +}; +const BlobTags = { + serializedName: "BlobTags", + xmlName: "Tags", + type: { + name: "Composite", + className: "BlobTags", + modelProperties: { + blobTagSet: { + serializedName: "BlobTagSet", + required: true, + xmlName: "TagSet", + xmlIsWrapped: true, + xmlElementName: "Tag", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobTag", + }, + }, + }, + }, }, - }; -} -/** - * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". - * @param retryCount - The client retry count. - * @param retryInterval - The client retry interval, in milliseconds. - * @param minRetryInterval - The minimum retry interval, in milliseconds. - * @param maxRetryInterval - The maximum retry interval, in milliseconds. - */ -class SystemErrorRetryPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval) { - super(nextPolicy, options); - this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT; - this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; - this.minRetryInterval = isNumber(minRetryInterval) - ? 
minRetryInterval - : DEFAULT_CLIENT_MIN_RETRY_INTERVAL; - this.maxRetryInterval = isNumber(maxRetryInterval) - ? maxRetryInterval - : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; - } - sendRequest(request) { - return this._nextPolicy - .sendRequest(request.clone()) - .catch((error) => retry(this, request, error.response, error)); - } -} -async function retry(policy, request, operationResponse, err, retryData) { - retryData = updateRetryData(policy, retryData, err); - function shouldPolicyRetry(_response, error) { - if (error && - error.code && - (error.code === "ETIMEDOUT" || - error.code === "ESOCKETTIMEDOUT" || - error.code === "ECONNREFUSED" || - error.code === "ECONNRESET" || - error.code === "ENOENT")) { - return true; - } - return false; - } - if (shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, operationResponse, err)) { - // If previous operation ended with an error and the policy allows a retry, do that - try { - await coreUtil.delay(retryData.retryInterval); - return policy._nextPolicy.sendRequest(request.clone()); - } - catch (nestedErr) { - return retry(policy, request, operationResponse, nestedErr, retryData); - } - } - else { - if (err) { - // If the operation failed in the end, return all errors instead of just the last one - return Promise.reject(retryData.error); - } - return operationResponse; - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Maximum number of retries for the throttling retry policy - */ -const DEFAULT_CLIENT_MAX_RETRY_COUNT = 3; - -// Copyright (c) Microsoft Corporation. -const StatusCodes = Constants.HttpConstants.StatusCodes; -/** - * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. - * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. - * - * To learn more, please refer to - * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, - * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and - * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors - * @returns - */ -function throttlingRetryPolicy() { - return { - create: (nextPolicy, options) => { - return new ThrottlingRetryPolicy(nextPolicy, options); + }, +}; +const BlobTag = { + serializedName: "BlobTag", + xmlName: "Tag", + type: { + name: "Composite", + className: "BlobTag", + modelProperties: { + key: { + serializedName: "Key", + required: true, + xmlName: "Key", + type: { + name: "String", + }, + }, + value: { + serializedName: "Value", + required: true, + xmlName: "Value", + type: { + name: "String", + }, + }, }, - }; -} -const StandardAbortMessage = "The operation was aborted."; -/** - * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. - * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. 
- * - * To learn more, please refer to - * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, - * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and - * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors - */ -class ThrottlingRetryPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, _handleResponse) { - super(nextPolicy, options); - this.numberOfRetries = 0; - this._handleResponse = _handleResponse || this._defaultResponseHandler; - } - async sendRequest(httpRequest) { - const response = await this._nextPolicy.sendRequest(httpRequest.clone()); - if (response.status !== StatusCodes.TooManyRequests && - response.status !== StatusCodes.ServiceUnavailable) { - return response; - } - else { - return this._handleResponse(httpRequest, response); - } - } - async _defaultResponseHandler(httpRequest, httpResponse) { - var _a; - const retryAfterHeader = httpResponse.headers.get(Constants.HeaderConstants.RETRY_AFTER); - if (retryAfterHeader) { - const delayInMs = ThrottlingRetryPolicy.parseRetryAfterHeader(retryAfterHeader); - if (delayInMs) { - this.numberOfRetries += 1; - await coreUtil.delay(delayInMs, { - abortSignal: httpRequest.abortSignal, - abortErrorMsg: StandardAbortMessage, - }); - if ((_a = httpRequest.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { - throw new abortController.AbortError(StandardAbortMessage); - } - if (this.numberOfRetries < DEFAULT_CLIENT_MAX_RETRY_COUNT) { - return this.sendRequest(httpRequest); - } - else { - return this._nextPolicy.sendRequest(httpRequest); - } - } - } - return httpResponse; - } - static parseRetryAfterHeader(headerValue) { - const retryAfterInSeconds = Number(headerValue); - if (Number.isNaN(retryAfterInSeconds)) { - return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue); - } - else { - return retryAfterInSeconds * 1000; - } - } - static parseDateRetryAfterHeader(headerValue) { - try { - const now = Date.now(); - const date = Date.parse(headerValue); - const diff = date - now; - return Number.isNaN(diff) ? undefined : diff; - } - catch (error) { - return undefined; - } - } -} - -// Copyright (c) Microsoft Corporation. -const createSpan = coreTracing.createSpanFunction({ - packagePrefix: "", - namespace: "", -}); -/** - * Creates a policy that wraps outgoing requests with a tracing span. - * @param tracingOptions - Tracing options. - * @returns An instance of the {@link TracingPolicy} class. - */ -function tracingPolicy(tracingOptions = {}) { - return { - create(nextPolicy, options) { - return new TracingPolicy(nextPolicy, options, tracingOptions); + }, +}; +const SignedIdentifier = { + serializedName: "SignedIdentifier", + xmlName: "SignedIdentifier", + type: { + name: "Composite", + className: "SignedIdentifier", + modelProperties: { + id: { + serializedName: "Id", + required: true, + xmlName: "Id", + type: { + name: "String", + }, + }, + accessPolicy: { + serializedName: "AccessPolicy", + xmlName: "AccessPolicy", + type: { + name: "Composite", + className: "AccessPolicy", + }, + }, }, - }; -} -/** - * A policy that wraps outgoing requests with a tracing span. 
- */ -class TracingPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, tracingOptions) { - super(nextPolicy, options); - this.userAgent = tracingOptions.userAgent; - } - async sendRequest(request) { - if (!request.tracingContext) { - return this._nextPolicy.sendRequest(request); - } - const span = this.tryCreateSpan(request); - if (!span) { - return this._nextPolicy.sendRequest(request); - } - try { - const response = await this._nextPolicy.sendRequest(request); - this.tryProcessResponse(span, response); - return response; - } - catch (err) { - this.tryProcessError(span, err); - throw err; - } - } - tryCreateSpan(request) { - var _a; - try { - // Passing spanOptions as part of tracingOptions to maintain compatibility @azure/core-tracing@preview.13 and earlier. - // We can pass this as a separate parameter once we upgrade to the latest core-tracing. - const { span } = createSpan(`HTTP ${request.method}`, { - tracingOptions: { - spanOptions: Object.assign(Object.assign({}, request.spanOptions), { kind: coreTracing.SpanKind.CLIENT }), - tracingContext: request.tracingContext, + }, +}; +const AccessPolicy = { + serializedName: "AccessPolicy", + type: { + name: "Composite", + className: "AccessPolicy", + modelProperties: { + startsOn: { + serializedName: "Start", + xmlName: "Start", + type: { + name: "String", }, - }); - // If the span is not recording, don't do any more work. - if (!span.isRecording()) { - span.end(); - return undefined; - } - const namespaceFromContext = (_a = request.tracingContext) === null || _a === void 0 ? void 0 : _a.getValue(Symbol.for("az.namespace")); - if (typeof namespaceFromContext === "string") { - span.setAttribute("az.namespace", namespaceFromContext); - } - span.setAttributes({ - "http.method": request.method, - "http.url": request.url, - requestId: request.requestId, - }); - if (this.userAgent) { - span.setAttribute("http.user_agent", this.userAgent); - } - // set headers - const spanContext = span.spanContext(); - const traceParentHeader = coreTracing.getTraceParentHeader(spanContext); - if (traceParentHeader && coreTracing.isSpanContextValid(spanContext)) { - request.headers.set("traceparent", traceParentHeader); - const traceState = spanContext.traceState && spanContext.traceState.serialize(); - // if tracestate is set, traceparent MUST be set, so only set tracestate after traceparent - if (traceState) { - request.headers.set("tracestate", traceState); - } - } - return span; - } - catch (error) { - logger.warning(`Skipping creating a tracing span due to an error: ${error.message}`); - return undefined; - } - } - tryProcessError(span, err) { - try { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: err.message, - }); - if (err.statusCode) { - span.setAttribute("http.status_code", err.statusCode); - } - span.end(); - } - catch (error) { - logger.warning(`Skipping tracing span processing due to an error: ${error.message}`); - } - } - tryProcessResponse(span, response) { - try { - span.setAttribute("http.status_code", response.status); - const serviceRequestId = response.headers.get("x-ms-request-id"); - if (serviceRequestId) { - span.setAttribute("serviceRequestId", serviceRequestId); - } - span.setStatus({ - code: coreTracing.SpanStatusCode.OK, - }); - span.end(); - } - catch (error) { - logger.warning(`Skipping tracing span processing due to an error: ${error.message}`); - } - } -} - -// Copyright (c) Microsoft Corporation. -/** - * ServiceClient sends service requests and receives responses. 
- */ -class ServiceClient { - /** - * The ServiceClient constructor - * @param credentials - The credentials used for authentication with the service. - * @param options - The service client options that govern the behavior of the client. - */ - constructor(credentials, - /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options */ - options) { - if (!options) { - options = {}; - } - this._withCredentials = options.withCredentials || false; - this._httpClient = options.httpClient || getCachedDefaultHttpClient(); - this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger); - let requestPolicyFactories; - if (Array.isArray(options.requestPolicyFactories)) { - logger.info("ServiceClient: using custom request policies"); - requestPolicyFactories = options.requestPolicyFactories; - } - else { - let authPolicyFactory = undefined; - if (coreAuth.isTokenCredential(credentials)) { - logger.info("ServiceClient: creating bearer token authentication policy from provided credentials"); - // Create a wrapped RequestPolicyFactory here so that we can provide the - // correct scope to the BearerTokenAuthenticationPolicy at the first time - // one is requested. This is needed because generated ServiceClient - // implementations do not set baseUri until after ServiceClient's constructor - // is finished, leaving baseUri empty at the time when it is needed to - // build the correct scope name. - const wrappedPolicyFactory = () => { - let bearerTokenPolicyFactory = undefined; - // eslint-disable-next-line @typescript-eslint/no-this-alias - const serviceClient = this; - const serviceClientOptions = options; - return { - create(nextPolicy, createOptions) { - const credentialScopes = getCredentialScopes(serviceClientOptions, serviceClient.baseUri); - if (!credentialScopes) { - throw new Error(`When using credential, the ServiceClient must contain a baseUri or a credentialScopes in ServiceClientOptions. Unable to create a bearerTokenAuthenticationPolicy`); - } - if (bearerTokenPolicyFactory === undefined || bearerTokenPolicyFactory === null) { - bearerTokenPolicyFactory = bearerTokenAuthenticationPolicy(credentials, credentialScopes); - } - return bearerTokenPolicyFactory.create(nextPolicy, createOptions); - }, - }; - }; - authPolicyFactory = wrappedPolicyFactory(); - } - else if (credentials && typeof credentials.signRequest === "function") { - logger.info("ServiceClient: creating signing policy from provided credentials"); - authPolicyFactory = signingPolicy(credentials); - } - else if (credentials !== undefined && credentials !== null) { - throw new Error("The credentials argument must implement the TokenCredential interface"); - } - logger.info("ServiceClient: using default request policies"); - requestPolicyFactories = createDefaultRequestPolicyFactories(authPolicyFactory, options); - if (options.requestPolicyFactories) { - // options.requestPolicyFactories can also be a function that manipulates - // the default requestPolicyFactories array - const newRequestPolicyFactories = options.requestPolicyFactories(requestPolicyFactories); - if (newRequestPolicyFactories) { - requestPolicyFactories = newRequestPolicyFactories; - } - } - } - this._requestPolicyFactories = requestPolicyFactories; - } - /** - * Send the provided httpRequest. 
- */ - sendRequest(options) { - if (options === null || options === undefined || typeof options !== "object") { - throw new Error("options cannot be null or undefined and it must be of type object."); - } - let httpRequest; - try { - if (isWebResourceLike(options)) { - options.validateRequestProperties(); - httpRequest = options; - } - else { - httpRequest = new WebResource(); - httpRequest = httpRequest.prepare(options); - } - } - catch (error) { - return Promise.reject(error); - } - let httpPipeline = this._httpClient; - if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) { - for (let i = this._requestPolicyFactories.length - 1; i >= 0; --i) { - httpPipeline = this._requestPolicyFactories[i].create(httpPipeline, this._requestPolicyOptions); - } - } - return httpPipeline.sendRequest(httpRequest); - } - /** - * Send an HTTP request that is populated using the provided OperationSpec. - * @param operationArguments - The arguments that the HTTP request's templated values will be populated from. - * @param operationSpec - The OperationSpec to use to populate the httpRequest. - * @param callback - The callback to call when the response is received. - */ - async sendOperationRequest(operationArguments, operationSpec, callback) { - var _a; - if (typeof operationArguments.options === "function") { - callback = operationArguments.options; - operationArguments.options = undefined; - } - const serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions; - const httpRequest = new WebResource(); - let result; - try { - const baseUri = operationSpec.baseUrl || this.baseUri; - if (!baseUri) { - throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use."); - } - httpRequest.method = operationSpec.httpMethod; - httpRequest.operationSpec = operationSpec; - const requestUrl = URLBuilder.parse(baseUri); - if (operationSpec.path) { - requestUrl.appendPath(operationSpec.path); - } - if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) { - for (const urlParameter of operationSpec.urlParameters) { - let urlParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, urlParameter, operationSpec.serializer); - urlParameterValue = operationSpec.serializer.serialize(urlParameter.mapper, urlParameterValue, getPathStringFromParameter(urlParameter), serializerOptions); - if (!urlParameter.skipEncoding) { - urlParameterValue = encodeURIComponent(urlParameterValue); - } - requestUrl.replaceAll(`{${urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)}}`, urlParameterValue); - } - } - if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) { - for (const queryParameter of operationSpec.queryParameters) { - let queryParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, queryParameter, operationSpec.serializer); - if (queryParameterValue !== undefined && queryParameterValue !== null) { - queryParameterValue = operationSpec.serializer.serialize(queryParameter.mapper, queryParameterValue, getPathStringFromParameter(queryParameter), serializerOptions); - if (queryParameter.collectionFormat !== undefined && - queryParameter.collectionFormat !== null) { - if (queryParameter.collectionFormat === exports.QueryCollectionFormat.Multi) { - if (queryParameterValue.length === 0) { - // The collection is empty, no need to try serializing the current 
queryParam - continue; - } - else { - for (const index in queryParameterValue) { - const item = queryParameterValue[index]; - queryParameterValue[index] = - item === undefined || item === null ? "" : item.toString(); - } - } - } - else if (queryParameter.collectionFormat === exports.QueryCollectionFormat.Ssv || - queryParameter.collectionFormat === exports.QueryCollectionFormat.Tsv) { - queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); - } - } - if (!queryParameter.skipEncoding) { - if (Array.isArray(queryParameterValue)) { - for (const index in queryParameterValue) { - if (queryParameterValue[index] !== undefined && - queryParameterValue[index] !== null) { - queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]); - } - } - } - else { - queryParameterValue = encodeURIComponent(queryParameterValue); - } - } - if (queryParameter.collectionFormat !== undefined && - queryParameter.collectionFormat !== null && - queryParameter.collectionFormat !== exports.QueryCollectionFormat.Multi && - queryParameter.collectionFormat !== exports.QueryCollectionFormat.Ssv && - queryParameter.collectionFormat !== exports.QueryCollectionFormat.Tsv) { - queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); - } - requestUrl.setQueryParameter(queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter), queryParameterValue); - } - } - } - httpRequest.url = requestUrl.toString(); - const contentType = operationSpec.contentType || this.requestContentType; - if (contentType && operationSpec.requestBody) { - httpRequest.headers.set("Content-Type", contentType); - } - if (operationSpec.headerParameters) { - for (const headerParameter of operationSpec.headerParameters) { - let headerValue = getOperationArgumentValueFromParameter(this, operationArguments, headerParameter, operationSpec.serializer); - if (headerValue !== undefined && headerValue !== null) { - headerValue = operationSpec.serializer.serialize(headerParameter.mapper, headerValue, getPathStringFromParameter(headerParameter), serializerOptions); - const headerCollectionPrefix = headerParameter.mapper - .headerCollectionPrefix; - if (headerCollectionPrefix) { - for (const key of Object.keys(headerValue)) { - httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]); - } - } - else { - httpRequest.headers.set(headerParameter.mapper.serializedName || - getPathStringFromParameter(headerParameter), headerValue); - } - } - } - } - const options = operationArguments.options; - if (options) { - if (options.customHeaders) { - for (const customHeaderName in options.customHeaders) { - httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]); - } - } - if (options.abortSignal) { - httpRequest.abortSignal = options.abortSignal; - } - if (options.timeout) { - httpRequest.timeout = options.timeout; - } - if (options.onUploadProgress) { - httpRequest.onUploadProgress = options.onUploadProgress; - } - if (options.onDownloadProgress) { - httpRequest.onDownloadProgress = options.onDownloadProgress; - } - if (options.spanOptions) { - // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier. 
- httpRequest.spanOptions = options.spanOptions; - } - if (options.tracingContext) { - httpRequest.tracingContext = options.tracingContext; - } - if (options.shouldDeserialize !== undefined && options.shouldDeserialize !== null) { - httpRequest.shouldDeserialize = options.shouldDeserialize; - } - } - httpRequest.withCredentials = this._withCredentials; - serializeRequestBody(this, httpRequest, operationArguments, operationSpec); - if (httpRequest.streamResponseStatusCodes === undefined) { - httpRequest.streamResponseStatusCodes = getStreamResponseStatusCodes(operationSpec); - } - let rawResponse; - let sendRequestError; - try { - rawResponse = await this.sendRequest(httpRequest); - } - catch (error) { - sendRequestError = error; - } - if (sendRequestError) { - if (sendRequestError.response) { - sendRequestError.details = flattenResponse(sendRequestError.response, operationSpec.responses[sendRequestError.statusCode] || - operationSpec.responses["default"]); - } - result = Promise.reject(sendRequestError); - } - else { - result = Promise.resolve(flattenResponse(rawResponse, operationSpec.responses[rawResponse.status])); - } - } - catch (error) { - result = Promise.reject(error); - } - const cb = callback; - if (cb) { - result - .then((res) => cb(null, res._response.parsedBody, res._response.request, res._response)) - .catch((err) => cb(err)); - } - return result; - } -} -function serializeRequestBody(serviceClient, httpRequest, operationArguments, operationSpec) { - var _a, _b, _c, _d, _e, _f; - const serializerOptions = (_b = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions) !== null && _b !== void 0 ? _b : {}; - const updatedOptions = { - rootName: (_c = serializerOptions.rootName) !== null && _c !== void 0 ? _c : "", - includeRoot: (_d = serializerOptions.includeRoot) !== null && _d !== void 0 ? _d : false, - xmlCharKey: (_e = serializerOptions.xmlCharKey) !== null && _e !== void 0 ? _e : XML_CHARKEY, - }; - const xmlCharKey = serializerOptions.xmlCharKey; - if (operationSpec.requestBody && operationSpec.requestBody.mapper) { - httpRequest.body = getOperationArgumentValueFromParameter(serviceClient, operationArguments, operationSpec.requestBody, operationSpec.serializer); - const bodyMapper = operationSpec.requestBody.mapper; - const { required, xmlName, xmlElementName, serializedName, xmlNamespace, xmlNamespacePrefix } = bodyMapper; - const typeName = bodyMapper.type.name; - try { - if ((httpRequest.body !== undefined && httpRequest.body !== null) || required) { - const requestBodyParameterPathString = getPathStringFromParameter(operationSpec.requestBody); - httpRequest.body = operationSpec.serializer.serialize(bodyMapper, httpRequest.body, requestBodyParameterPathString, updatedOptions); - const isStream = typeName === MapperType.Stream; - if (operationSpec.isXML) { - const xmlnsKey = xmlNamespacePrefix ? `xmlns:${xmlNamespacePrefix}` : "xmlns"; - const value = getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, httpRequest.body, updatedOptions); - if (typeName === MapperType.Sequence) { - httpRequest.body = stringifyXML(prepareXMLRootList(value, xmlElementName || xmlName || serializedName, xmlnsKey, xmlNamespace), { - rootName: xmlName || serializedName, - xmlCharKey, - }); - } - else if (!isStream) { - httpRequest.body = stringifyXML(value, { - rootName: xmlName || serializedName, - xmlCharKey, - }); - } - } - else if (typeName === MapperType.String && - (((_f = operationSpec.contentType) === null || _f === void 0 ? 
void 0 : _f.match("text/plain")) || operationSpec.mediaType === "text")) { - // the String serializer has validated that request body is a string - // so just send the string. - return; - } - else if (!isStream) { - httpRequest.body = JSON.stringify(httpRequest.body); - } - } - } - catch (error) { - throw new Error(`Error "${error.message}" occurred in serializing the payload - ${JSON.stringify(serializedName, undefined, " ")}.`); - } - } - else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) { - httpRequest.formData = {}; - for (const formDataParameter of operationSpec.formDataParameters) { - const formDataParameterValue = getOperationArgumentValueFromParameter(serviceClient, operationArguments, formDataParameter, operationSpec.serializer); - if (formDataParameterValue !== undefined && formDataParameterValue !== null) { - const formDataParameterPropertyName = formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter); - httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(formDataParameter.mapper, formDataParameterValue, getPathStringFromParameter(formDataParameter), updatedOptions); - } - } - } -} -/** - * Adds an xml namespace to the xml serialized object if needed, otherwise it just returns the value itself - */ -function getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, serializedValue, options) { - // Composite and Sequence schemas already got their root namespace set during serialization - // We just need to add xmlns to the other schema types - if (xmlNamespace && !["Composite", "Sequence", "Dictionary"].includes(typeName)) { - const result = {}; - result[options.xmlCharKey] = serializedValue; - result[XML_ATTRKEY] = { [xmlnsKey]: xmlNamespace }; - return result; - } - return serializedValue; -} -function getValueOrFunctionResult(value, defaultValueCreator) { - let result; - if (typeof value === "string") { - result = value; - } - else { - result = defaultValueCreator(); - if (typeof value === "function") { - result = value(result); - } - } - return result; -} -function createDefaultRequestPolicyFactories(authPolicyFactory, options) { - const factories = []; - if (options.generateClientRequestIdHeader) { - factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName)); - } - if (authPolicyFactory) { - factories.push(authPolicyFactory); - } - const userAgentHeaderName = getValueOrFunctionResult(options.userAgentHeaderName, getDefaultUserAgentHeaderName); - const userAgentHeaderValue = getValueOrFunctionResult(options.userAgent, getDefaultUserAgentValue); - if (userAgentHeaderName && userAgentHeaderValue) { - factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue })); - } - factories.push(redirectPolicy()); - factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout)); - if (!options.noRetryPolicy) { - factories.push(exponentialRetryPolicy()); - factories.push(systemErrorRetryPolicy()); - factories.push(throttlingRetryPolicy()); - } - factories.push(deserializationPolicy(options.deserializationContentTypes)); - if (coreUtil.isNode) { - factories.push(proxyPolicy(options.proxySettings)); - } - factories.push(logPolicy({ logger: logger.info })); - return factories; -} -/** - * Creates an HTTP pipeline based on the given options. - * @param pipelineOptions - Defines options that are used to configure policies in the HTTP pipeline for an SDK client. 
- * @param authPolicyFactory - An optional authentication policy factory to use for signing requests. - * @returns A set of options that can be passed to create a new {@link ServiceClient}. - */ -function createPipelineFromOptions(pipelineOptions, authPolicyFactory) { - const requestPolicyFactories = []; - if (pipelineOptions.sendStreamingJson) { - requestPolicyFactories.push(ndJsonPolicy()); - } - let userAgentValue = undefined; - if (pipelineOptions.userAgentOptions && pipelineOptions.userAgentOptions.userAgentPrefix) { - const userAgentInfo = []; - userAgentInfo.push(pipelineOptions.userAgentOptions.userAgentPrefix); - // Add the default user agent value if it isn't already specified - // by the userAgentPrefix option. - const defaultUserAgentInfo = getDefaultUserAgentValue(); - if (userAgentInfo.indexOf(defaultUserAgentInfo) === -1) { - userAgentInfo.push(defaultUserAgentInfo); - } - userAgentValue = userAgentInfo.join(" "); - } - const keepAliveOptions = Object.assign(Object.assign({}, DefaultKeepAliveOptions), pipelineOptions.keepAliveOptions); - const retryOptions = Object.assign(Object.assign({}, DefaultRetryOptions), pipelineOptions.retryOptions); - const redirectOptions = Object.assign(Object.assign({}, DefaultRedirectOptions), pipelineOptions.redirectOptions); - if (coreUtil.isNode) { - requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions)); - } - const deserializationOptions = Object.assign(Object.assign({}, DefaultDeserializationOptions), pipelineOptions.deserializationOptions); - const loggingOptions = Object.assign({}, pipelineOptions.loggingOptions); - requestPolicyFactories.push(tracingPolicy({ userAgent: userAgentValue }), keepAlivePolicy(keepAliveOptions), userAgentPolicy({ value: userAgentValue }), generateClientRequestIdPolicy(), deserializationPolicy(deserializationOptions.expectedContentTypes), throttlingRetryPolicy(), systemErrorRetryPolicy(), exponentialRetryPolicy(retryOptions.maxRetries, retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs)); - if (redirectOptions.handleRedirects) { - requestPolicyFactories.push(redirectPolicy(redirectOptions.maxRetries)); - } - if (authPolicyFactory) { - requestPolicyFactories.push(authPolicyFactory); - } - requestPolicyFactories.push(logPolicy(loggingOptions)); - if (coreUtil.isNode && pipelineOptions.decompressResponse === false) { - requestPolicyFactories.push(disableResponseDecompressionPolicy()); - } - return { - httpClient: pipelineOptions.httpClient, - requestPolicyFactories, - }; -} -function getOperationArgumentValueFromParameter(serviceClient, operationArguments, parameter, serializer) { - return getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameter.parameterPath, parameter.mapper, serializer); -} -function getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameterPath, parameterMapper, serializer) { - var _a; - let value; - if (typeof parameterPath === "string") { - parameterPath = [parameterPath]; - } - const serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? 
void 0 : _a.serializerOptions; - if (Array.isArray(parameterPath)) { - if (parameterPath.length > 0) { - if (parameterMapper.isConstant) { - value = parameterMapper.defaultValue; - } - else { - let propertySearchResult = getPropertyFromParameterPath(operationArguments, parameterPath); - if (!propertySearchResult.propertyFound) { - propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath); - } - let useDefaultValue = false; - if (!propertySearchResult.propertyFound) { - useDefaultValue = - parameterMapper.required || - (parameterPath[0] === "options" && parameterPath.length === 2); - } - value = useDefaultValue ? parameterMapper.defaultValue : propertySearchResult.propertyValue; - } - // Serialize just for validation purposes. - const parameterPathString = getPathStringFromParameterPath(parameterPath, parameterMapper); - serializer.serialize(parameterMapper, value, parameterPathString, serializerOptions); - } - } - else { - if (parameterMapper.required) { - value = {}; - } - for (const propertyName in parameterPath) { - const propertyMapper = parameterMapper.type.modelProperties[propertyName]; - const propertyPath = parameterPath[propertyName]; - const propertyValue = getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, propertyPath, propertyMapper, serializer); - // Serialize just for validation purposes. - const propertyPathString = getPathStringFromParameterPath(propertyPath, propertyMapper); - serializer.serialize(propertyMapper, propertyValue, propertyPathString, serializerOptions); - if (propertyValue !== undefined && propertyValue !== null) { - if (!value) { - value = {}; - } - value[propertyName] = propertyValue; - } - } - } - return value; -} -function getPropertyFromParameterPath(parent, parameterPath) { - const result = { propertyFound: false }; - let i = 0; - for (; i < parameterPath.length; ++i) { - const parameterPathPart = parameterPath[i]; - // Make sure to check inherited properties too, so don't use hasOwnProperty(). - if (parent !== undefined && parent !== null && parameterPathPart in parent) { - parent = parent[parameterPathPart]; - } - else { - break; - } - } - if (i === parameterPath.length) { - result.propertyValue = parent; - result.propertyFound = true; - } - return result; -} -/** - * Parses an {@link HttpOperationResponse} into a normalized HTTP response object ({@link RestResponse}). - * @param _response - Wrapper object for http response. - * @param responseSpec - Mappers for how to parse the response properties. - * @returns - A normalized response object. 
- */ -function flattenResponse(_response, responseSpec) { - const parsedHeaders = _response.parsedHeaders; - const bodyMapper = responseSpec && responseSpec.bodyMapper; - const addOperationResponse = (obj) => { - return Object.defineProperty(obj, "_response", { - value: _response, - }); - }; - if (bodyMapper) { - const typeName = bodyMapper.type.name; - if (typeName === "Stream") { - return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), { blobBody: _response.blobBody, readableStreamBody: _response.readableStreamBody })); - } - const modelProperties = (typeName === "Composite" && bodyMapper.type.modelProperties) || {}; - const isPageableResponse = Object.keys(modelProperties).some((k) => modelProperties[k].serializedName === ""); - if (typeName === "Sequence" || isPageableResponse) { - const arrayResponse = [...(_response.parsedBody || [])]; - for (const key of Object.keys(modelProperties)) { - if (modelProperties[key].serializedName) { - arrayResponse[key] = _response.parsedBody[key]; - } - } - if (parsedHeaders) { - for (const key of Object.keys(parsedHeaders)) { - arrayResponse[key] = parsedHeaders[key]; - } - } - addOperationResponse(arrayResponse); - return arrayResponse; - } - if (typeName === "Composite" || typeName === "Dictionary") { - return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), _response.parsedBody)); - } - } - if (bodyMapper || - _response.request.method === "HEAD" || - isPrimitiveType(_response.parsedBody)) { - // primitive body types and HEAD booleans - return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), { body: _response.parsedBody })); - } - return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), _response.parsedBody)); -} -function getCredentialScopes(options, baseUri) { - if (options === null || options === void 0 ? void 0 : options.credentialScopes) { - return options.credentialScopes; - } - if (baseUri) { - return `${baseUri}/.default`; - } - return undefined; -} - -// Copyright (c) Microsoft Corporation. -/** - * This function is only here for compatibility. Use createSpanFunction in core-tracing. - * - * @deprecated This function is only here for compatibility. Use createSpanFunction in core-tracing. - * @hidden - - * @param spanConfig - The name of the operation being performed. - * @param tracingOptions - The options for the underlying http request. - */ -function createSpanFunction(args) { - return coreTracing.createSpanFunction(args); -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Defines the default token refresh buffer duration. - */ -const TokenRefreshBufferMs = 2 * 60 * 1000; // 2 Minutes -/** - * Provides an {@link AccessTokenCache} implementation which clears - * the cached {@link AccessToken}'s after the expiresOnTimestamp has - * passed. - * - * @deprecated No longer used in the bearer authorization policy. - */ -class ExpiringAccessTokenCache { - /** - * Constructs an instance of {@link ExpiringAccessTokenCache} with - * an optional expiration buffer time. - */ - constructor(tokenRefreshBufferMs = TokenRefreshBufferMs) { - this.cachedToken = undefined; - this.tokenRefreshBufferMs = tokenRefreshBufferMs; - } - /** - * Saves an access token into the internal in-memory cache. - * @param accessToken - Access token or undefined to clear the cache. 
- */ - setCachedToken(accessToken) { - this.cachedToken = accessToken; - } - /** - * Returns the cached access token, or `undefined` if one is not cached or the cached one is expiring soon. - */ - getCachedToken() { - if (this.cachedToken && - Date.now() + this.tokenRefreshBufferMs >= this.cachedToken.expiresOnTimestamp) { - this.cachedToken = undefined; - } - return this.cachedToken; - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Helps the core-http token authentication policies with requesting a new token if we're not currently waiting for a new token. - * - * @deprecated No longer used in the bearer authorization policy. - */ -class AccessTokenRefresher { - constructor(credential, scopes, requiredMillisecondsBeforeNewRefresh = 30000) { - this.credential = credential; - this.scopes = scopes; - this.requiredMillisecondsBeforeNewRefresh = requiredMillisecondsBeforeNewRefresh; - this.lastCalled = 0; - } - /** - * Returns true if the required milliseconds(defaulted to 30000) have been passed signifying - * that we are ready for a new refresh. - */ - isReady() { - // We're only ready for a new refresh if the required milliseconds have passed. - return (!this.lastCalled || Date.now() - this.lastCalled > this.requiredMillisecondsBeforeNewRefresh); - } - /** - * Stores the time in which it is called, - * then requests a new token, - * then sets this.promise to undefined, - * then returns the token. - */ - async getToken(options) { - this.lastCalled = Date.now(); - const token = await this.credential.getToken(this.scopes, options); - this.promise = undefined; - return token || undefined; - } - /** - * Requests a new token if we're not currently waiting for a new token. - * Returns null if the required time between each call hasn't been reached. - */ - refresh(options) { - if (!this.promise) { - this.promise = this.getToken(options); - } - return this.promise; - } -} - -// Copyright (c) Microsoft Corporation. -const HeaderConstants = Constants.HeaderConstants; -const DEFAULT_AUTHORIZATION_SCHEME = "Basic"; -/** - * A simple {@link ServiceClientCredential} that authenticates with a username and a password. - */ -class BasicAuthenticationCredentials { - /** - * Creates a new BasicAuthenticationCredentials object. - * - * @param userName - User name. - * @param password - Password. - * @param authorizationScheme - The authorization scheme. - */ - constructor(userName, password, authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME) { - /** - * Authorization scheme. Defaults to "Basic". - * More information about authorization schemes is available here: https://developer.mozilla.org/docs/Web/HTTP/Authentication#authentication_schemes - */ - this.authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME; - if (userName === null || userName === undefined || typeof userName.valueOf() !== "string") { - throw new Error("userName cannot be null or undefined and must be of type string."); - } - if (password === null || password === undefined || typeof password.valueOf() !== "string") { - throw new Error("password cannot be null or undefined and must be of type string."); - } - this.userName = userName; - this.password = password; - this.authorizationScheme = authorizationScheme; - } - /** - * Signs a request with the Authentication header. - * - * @param webResource - The WebResourceLike to be signed. - * @returns The signed request object. 
- */ - signRequest(webResource) { - const credentials = `${this.userName}:${this.password}`; - const encodedCredentials = `${this.authorizationScheme} ${encodeString(credentials)}`; - if (!webResource.headers) - webResource.headers = new HttpHeaders(); - webResource.headers.set(HeaderConstants.AUTHORIZATION, encodedCredentials); - return Promise.resolve(webResource); - } -} - -// Copyright (c) Microsoft Corporation. -/** - * Authenticates to a service using an API key. - */ -class ApiKeyCredentials { - /** - * @param options - Specifies the options to be provided for auth. Either header or query needs to be provided. - */ - constructor(options) { - if (!options || (options && !options.inHeader && !options.inQuery)) { - throw new Error(`options cannot be null or undefined. Either "inHeader" or "inQuery" property of the options object needs to be provided.`); - } - this.inHeader = options.inHeader; - this.inQuery = options.inQuery; - } - /** - * Signs a request with the values provided in the inHeader and inQuery parameter. - * - * @param webResource - The WebResourceLike to be signed. - * @returns The signed request object. - */ - signRequest(webResource) { - if (!webResource) { - return Promise.reject(new Error(`webResource cannot be null or undefined and must be of type "object".`)); - } - if (this.inHeader) { - if (!webResource.headers) { - webResource.headers = new HttpHeaders(); - } - for (const headerName in this.inHeader) { - webResource.headers.set(headerName, this.inHeader[headerName]); - } - } - if (this.inQuery) { - if (!webResource.url) { - return Promise.reject(new Error(`url cannot be null in the request object.`)); - } - if (webResource.url.indexOf("?") < 0) { - webResource.url += "?"; - } - for (const key in this.inQuery) { - if (!webResource.url.endsWith("?")) { - webResource.url += "&"; - } - webResource.url += `${key}=${this.inQuery[key]}`; - } - } - return Promise.resolve(webResource); - } -} - -// Copyright (c) Microsoft Corporation. -/** - * A {@link TopicCredentials} object used for Azure Event Grid. - */ -class TopicCredentials extends ApiKeyCredentials { - /** - * Creates a new EventGrid TopicCredentials object. 
- * - * @param topicKey - The EventGrid topic key - */ - constructor(topicKey) { - if (!topicKey || (topicKey && typeof topicKey !== "string")) { - throw new Error("topicKey cannot be null or undefined and must be of type string."); - } - const options = { - inHeader: { - "aeg-sas-key": topicKey, }, - }; - super(options); - } -} - -Object.defineProperty(exports, "delay", ({ - enumerable: true, - get: function () { return coreUtil.delay; } -})); -Object.defineProperty(exports, "isNode", ({ - enumerable: true, - get: function () { return coreUtil.isNode; } -})); -Object.defineProperty(exports, "isTokenCredential", ({ - enumerable: true, - get: function () { return coreAuth.isTokenCredential; } -})); -exports.AccessTokenRefresher = AccessTokenRefresher; -exports.ApiKeyCredentials = ApiKeyCredentials; -exports.BaseRequestPolicy = BaseRequestPolicy; -exports.BasicAuthenticationCredentials = BasicAuthenticationCredentials; -exports.Constants = Constants; -exports.DefaultHttpClient = NodeFetchHttpClient; -exports.ExpiringAccessTokenCache = ExpiringAccessTokenCache; -exports.HttpHeaders = HttpHeaders; -exports.MapperType = MapperType; -exports.RequestPolicyOptions = RequestPolicyOptions; -exports.RestError = RestError; -exports.Serializer = Serializer; -exports.ServiceClient = ServiceClient; -exports.TopicCredentials = TopicCredentials; -exports.URLBuilder = URLBuilder; -exports.URLQuery = URLQuery; -exports.WebResource = WebResource; -exports.XML_ATTRKEY = XML_ATTRKEY; -exports.XML_CHARKEY = XML_CHARKEY; -exports.applyMixins = applyMixins; -exports.bearerTokenAuthenticationPolicy = bearerTokenAuthenticationPolicy; -exports.createPipelineFromOptions = createPipelineFromOptions; -exports.createSpanFunction = createSpanFunction; -exports.deserializationPolicy = deserializationPolicy; -exports.deserializeResponseBody = deserializeResponseBody; -exports.disableResponseDecompressionPolicy = disableResponseDecompressionPolicy; -exports.encodeUri = encodeUri; -exports.executePromisesSequentially = executePromisesSequentially; -exports.exponentialRetryPolicy = exponentialRetryPolicy; -exports.flattenResponse = flattenResponse; -exports.generateClientRequestIdPolicy = generateClientRequestIdPolicy; -exports.generateUuid = generateUuid; -exports.getDefaultProxySettings = getDefaultProxySettings; -exports.getDefaultUserAgentValue = getDefaultUserAgentValue; -exports.isDuration = isDuration; -exports.isValidUuid = isValidUuid; -exports.keepAlivePolicy = keepAlivePolicy; -exports.logPolicy = logPolicy; -exports.operationOptionsToRequestOptionsBase = operationOptionsToRequestOptionsBase; -exports.parseXML = parseXML; -exports.promiseToCallback = promiseToCallback; -exports.promiseToServiceCallback = promiseToServiceCallback; -exports.proxyPolicy = proxyPolicy; -exports.redirectPolicy = redirectPolicy; -exports.serializeObject = serializeObject; -exports.signingPolicy = signingPolicy; -exports.stringifyXML = stringifyXML; -exports.stripRequest = stripRequest; -exports.stripResponse = stripResponse; -exports.systemErrorRetryPolicy = systemErrorRetryPolicy; -exports.throttlingRetryPolicy = throttlingRetryPolicy; -exports.tracingPolicy = tracingPolicy; -exports.userAgentPolicy = userAgentPolicy; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 51695: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var api = __nccwpck_require__(32451); - -// Copyright (c) Microsoft Corporation. 
-(function (SpanKind) { - /** Default value. Indicates that the span is used internally. */ - SpanKind[SpanKind["INTERNAL"] = 0] = "INTERNAL"; - /** - * Indicates that the span covers server-side handling of an RPC or other - * remote request. - */ - SpanKind[SpanKind["SERVER"] = 1] = "SERVER"; - /** - * Indicates that the span covers the client-side wrapper around an RPC or - * other remote request. - */ - SpanKind[SpanKind["CLIENT"] = 2] = "CLIENT"; - /** - * Indicates that the span describes producer sending a message to a - * broker. Unlike client and server, there is no direct critical path latency - * relationship between producer and consumer spans. - */ - SpanKind[SpanKind["PRODUCER"] = 3] = "PRODUCER"; - /** - * Indicates that the span describes consumer receiving a message from a - * broker. Unlike client and server, there is no direct critical path latency - * relationship between producer and consumer spans. - */ - SpanKind[SpanKind["CONSUMER"] = 4] = "CONSUMER"; -})(exports.SpanKind || (exports.SpanKind = {})); -/** - * Return the span if one exists - * - * @param context - context to get span from - */ -function getSpan(context) { - return api.trace.getSpan(context); -} -/** - * Set the span on a context - * - * @param context - context to use as parent - * @param span - span to set active - */ -function setSpan(context, span) { - return api.trace.setSpan(context, span); -} -/** - * Wrap span context in a NoopSpan and set as span in a new - * context - * - * @param context - context to set active span on - * @param spanContext - span context to be wrapped - */ -function setSpanContext(context, spanContext) { - return api.trace.setSpanContext(context, spanContext); -} -/** - * Get the span context of the span if it exists. - * - * @param context - context to get values from - */ -function getSpanContext(context) { - return api.trace.getSpanContext(context); -} -/** - * Returns true of the given {@link SpanContext} is valid. - * A valid {@link SpanContext} is one which has a valid trace ID and span ID as per the spec. - * - * @param context - the {@link SpanContext} to validate. - * - * @returns true if the {@link SpanContext} is valid, false otherwise. - */ -function isSpanContextValid(context) { - return api.trace.isSpanContextValid(context); -} -function getTracer(name, version) { - return api.trace.getTracer(name || "azure/core-tracing", version); -} -/** Entrypoint for context API */ -const context = api.context; -(function (SpanStatusCode) { - /** - * The default status. - */ - SpanStatusCode[SpanStatusCode["UNSET"] = 0] = "UNSET"; - /** - * The operation has been validated by an Application developer or - * Operator to have completed successfully. - */ - SpanStatusCode[SpanStatusCode["OK"] = 1] = "OK"; - /** - * The operation contains an error. - */ - SpanStatusCode[SpanStatusCode["ERROR"] = 2] = "ERROR"; -})(exports.SpanStatusCode || (exports.SpanStatusCode = {})); - -// Copyright (c) Microsoft Corporation. -function isTracingDisabled() { - var _a; - if (typeof process === "undefined") { - // not supported in browser for now without polyfills - return false; - } - const azureTracingDisabledValue = (_a = process.env.AZURE_TRACING_DISABLED) === null || _a === void 0 ? void 0 : _a.toLowerCase(); - if (azureTracingDisabledValue === "false" || azureTracingDisabledValue === "0") { - return false; - } - return Boolean(azureTracingDisabledValue); -} -/** - * Creates a function that can be used to create spans using the global tracer. 
- * - * Usage: - * - * ```typescript - * // once - * const createSpan = createSpanFunction({ packagePrefix: "Azure.Data.AppConfiguration", namespace: "Microsoft.AppConfiguration" }); - * - * // in each operation - * const span = createSpan("deleteConfigurationSetting", operationOptions); - * // code... - * span.end(); - * ``` - * - * @hidden - * @param args - allows configuration of the prefix for each span as well as the az.namespace field. - */ -function createSpanFunction(args) { - return function (operationName, operationOptions) { - const tracer = getTracer(); - const tracingOptions = (operationOptions === null || operationOptions === void 0 ? void 0 : operationOptions.tracingOptions) || {}; - const spanOptions = Object.assign({ kind: exports.SpanKind.INTERNAL }, tracingOptions.spanOptions); - const spanName = args.packagePrefix ? `${args.packagePrefix}.${operationName}` : operationName; - let span; - if (isTracingDisabled()) { - span = api.trace.wrapSpanContext(api.INVALID_SPAN_CONTEXT); - } - else { - span = tracer.startSpan(spanName, spanOptions, tracingOptions.tracingContext); - } - if (args.namespace) { - span.setAttribute("az.namespace", args.namespace); - } - let newSpanOptions = tracingOptions.spanOptions || {}; - if (span.isRecording() && args.namespace) { - newSpanOptions = Object.assign(Object.assign({}, tracingOptions.spanOptions), { attributes: Object.assign(Object.assign({}, spanOptions.attributes), { "az.namespace": args.namespace }) }); - } - const newTracingOptions = Object.assign(Object.assign({}, tracingOptions), { spanOptions: newSpanOptions, tracingContext: setSpan(tracingOptions.tracingContext || context.active(), span) }); - const newOperationOptions = Object.assign(Object.assign({}, operationOptions), { tracingOptions: newTracingOptions }); - return { - span, - updatedOptions: newOperationOptions - }; - }; -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const VERSION = "00"; -/** - * Generates a `SpanContext` given a `traceparent` header value. - * @param traceParent - Serialized span context data as a `traceparent` header value. - * @returns The `SpanContext` generated from the `traceparent` value. - */ -function extractSpanContextFromTraceParentHeader(traceParentHeader) { - const parts = traceParentHeader.split("-"); - if (parts.length !== 4) { - return; - } - const [version, traceId, spanId, traceOptions] = parts; - if (version !== VERSION) { - return; - } - const traceFlags = parseInt(traceOptions, 16); - const spanContext = { - spanId, - traceId, - traceFlags - }; - return spanContext; -} -/** - * Generates a `traceparent` value given a span context. - * @param spanContext - Contains context for a specific span. - * @returns The `spanContext` represented as a `traceparent` value. - */ -function getTraceParentHeader(spanContext) { - const missingFields = []; - if (!spanContext.traceId) { - missingFields.push("traceId"); - } - if (!spanContext.spanId) { - missingFields.push("spanId"); - } - if (missingFields.length) { - return; - } - const flags = spanContext.traceFlags || 0 /* NONE */; - const hexFlags = flags.toString(16); - const traceFlags = hexFlags.length === 1 ? 
`0${hexFlags}` : hexFlags; - // https://www.w3.org/TR/trace-context/#traceparent-header-field-values - return `${VERSION}-${spanContext.traceId}-${spanContext.spanId}-${traceFlags}`; -} - -exports.context = context; -exports.createSpanFunction = createSpanFunction; -exports.extractSpanContextFromTraceParentHeader = extractSpanContextFromTraceParentHeader; -exports.getSpan = getSpan; -exports.getSpanContext = getSpanContext; -exports.getTraceParentHeader = getTraceParentHeader; -exports.getTracer = getTracer; -exports.isSpanContextValid = isSpanContextValid; -exports.setSpan = setSpan; -exports.setSpanContext = setSpanContext; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 46045: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var coreHttp = __nccwpck_require__(49584); -var tslib = __nccwpck_require__(93167); -var coreTracing = __nccwpck_require__(51695); -var logger$1 = __nccwpck_require__(52316); -var abortController = __nccwpck_require__(9694); -var os = __nccwpck_require__(22037); -var crypto = __nccwpck_require__(6113); -var stream = __nccwpck_require__(12781); -__nccwpck_require__(15211); -var coreLro = __nccwpck_require__(2707); -var events = __nccwpck_require__(82361); -var fs = __nccwpck_require__(57147); -var util = __nccwpck_require__(73837); - -function _interopNamespace(e) { - if (e && e.__esModule) return e; - var n = Object.create(null); - if (e) { - Object.keys(e).forEach(function (k) { - if (k !== 'default') { - var d = Object.getOwnPropertyDescriptor(e, k); - Object.defineProperty(n, k, d.get ? d : { - enumerable: true, - get: function () { return e[k]; } - }); - } - }); - } - n["default"] = e; - return Object.freeze(n); -} - -var coreHttp__namespace = /*#__PURE__*/_interopNamespace(coreHttp); -var os__namespace = /*#__PURE__*/_interopNamespace(os); -var fs__namespace = /*#__PURE__*/_interopNamespace(fs); -var util__namespace = /*#__PURE__*/_interopNamespace(util); - -/* - * Copyright (c) Microsoft Corporation. - * Licensed under the MIT License. - * - * Code generated by Microsoft (R) AutoRest Code Generator. - * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
- */ -const BlobServiceProperties = { - serializedName: "BlobServiceProperties", - xmlName: "StorageServiceProperties", + expiresOn: { + serializedName: "Expiry", + xmlName: "Expiry", + type: { + name: "String", + }, + }, + permissions: { + serializedName: "Permission", + xmlName: "Permission", + type: { + name: "String", + }, + }, + }, + }, +}; +const ListBlobsFlatSegmentResponse = { + serializedName: "ListBlobsFlatSegmentResponse", + xmlName: "EnumerationResults", type: { name: "Composite", - className: "BlobServiceProperties", + className: "ListBlobsFlatSegmentResponse", modelProperties: { - blobAnalyticsLogging: { - serializedName: "Logging", - xmlName: "Logging", + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, type: { - name: "Composite", - className: "Logging" - } + name: "String", + }, }, - hourMetrics: { - serializedName: "HourMetrics", - xmlName: "HourMetrics", + containerName: { + serializedName: "ContainerName", + required: true, + xmlName: "ContainerName", + xmlIsAttribute: true, type: { - name: "Composite", - className: "Metrics" - } + name: "String", + }, }, - minuteMetrics: { - serializedName: "MinuteMetrics", - xmlName: "MinuteMetrics", + prefix: { + serializedName: "Prefix", + xmlName: "Prefix", type: { - name: "Composite", - className: "Metrics" - } + name: "String", + }, }, - cors: { - serializedName: "Cors", - xmlName: "Cors", - xmlIsWrapped: true, - xmlElementName: "CorsRule", + marker: { + serializedName: "Marker", + xmlName: "Marker", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "CorsRule" - } - } - } - }, - defaultServiceVersion: { - serializedName: "DefaultServiceVersion", - xmlName: "DefaultServiceVersion", - type: { - name: "String" - } + name: "String", + }, }, - deleteRetentionPolicy: { - serializedName: "DeleteRetentionPolicy", - xmlName: "DeleteRetentionPolicy", + maxPageSize: { + serializedName: "MaxResults", + xmlName: "MaxResults", type: { - name: "Composite", - className: "RetentionPolicy" - } + name: "Number", + }, }, - staticWebsite: { - serializedName: "StaticWebsite", - xmlName: "StaticWebsite", + segment: { + serializedName: "Segment", + xmlName: "Blobs", type: { name: "Composite", - className: "StaticWebsite" - } - } - } - } -}; -const Logging = { - serializedName: "Logging", - type: { - name: "Composite", - className: "Logging", - modelProperties: { - version: { - serializedName: "Version", - required: true, - xmlName: "Version", - type: { - name: "String" - } - }, - deleteProperty: { - serializedName: "Delete", - required: true, - xmlName: "Delete", - type: { - name: "Boolean" - } - }, - read: { - serializedName: "Read", - required: true, - xmlName: "Read", - type: { - name: "Boolean" - } + className: "BlobFlatListSegment", + }, }, - write: { - serializedName: "Write", - required: true, - xmlName: "Write", + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", type: { - name: "Boolean" - } + name: "String", + }, }, - retentionPolicy: { - serializedName: "RetentionPolicy", - xmlName: "RetentionPolicy", - type: { - name: "Composite", - className: "RetentionPolicy" - } - } - } - } + }, + }, }; -const RetentionPolicy = { - serializedName: "RetentionPolicy", +const BlobFlatListSegment = { + serializedName: "BlobFlatListSegment", + xmlName: "Blobs", type: { name: "Composite", - className: "RetentionPolicy", + className: "BlobFlatListSegment", modelProperties: { - enabled: { - serializedName: "Enabled", + 
blobItems: { + serializedName: "BlobItems", required: true, - xmlName: "Enabled", + xmlName: "BlobItems", + xmlElementName: "Blob", type: { - name: "Boolean" - } - }, - days: { - constraints: { - InclusiveMinimum: 1 + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobItemInternal", + }, + }, }, - serializedName: "Days", - xmlName: "Days", - type: { - name: "Number" - } - } - } - } + }, + }, + }, }; -const Metrics = { - serializedName: "Metrics", +const BlobItemInternal = { + serializedName: "BlobItemInternal", + xmlName: "Blob", type: { name: "Composite", - className: "Metrics", + className: "BlobItemInternal", modelProperties: { - version: { - serializedName: "Version", - xmlName: "Version", + name: { + serializedName: "Name", + xmlName: "Name", type: { - name: "String" - } + name: "Composite", + className: "BlobName", + }, }, - enabled: { - serializedName: "Enabled", + deleted: { + serializedName: "Deleted", required: true, - xmlName: "Enabled", - type: { - name: "Boolean" - } - }, - includeAPIs: { - serializedName: "IncludeAPIs", - xmlName: "IncludeAPIs", + xmlName: "Deleted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, - retentionPolicy: { - serializedName: "RetentionPolicy", - xmlName: "RetentionPolicy", - type: { - name: "Composite", - className: "RetentionPolicy" - } - } - } - } -}; -const CorsRule = { - serializedName: "CorsRule", - type: { - name: "Composite", - className: "CorsRule", - modelProperties: { - allowedOrigins: { - serializedName: "AllowedOrigins", + snapshot: { + serializedName: "Snapshot", required: true, - xmlName: "AllowedOrigins", + xmlName: "Snapshot", type: { - name: "String" - } + name: "String", + }, }, - allowedMethods: { - serializedName: "AllowedMethods", - required: true, - xmlName: "AllowedMethods", + versionId: { + serializedName: "VersionId", + xmlName: "VersionId", type: { - name: "String" - } + name: "String", + }, }, - allowedHeaders: { - serializedName: "AllowedHeaders", - required: true, - xmlName: "AllowedHeaders", + isCurrentVersion: { + serializedName: "IsCurrentVersion", + xmlName: "IsCurrentVersion", type: { - name: "String" - } + name: "Boolean", + }, }, - exposedHeaders: { - serializedName: "ExposedHeaders", - required: true, - xmlName: "ExposedHeaders", + properties: { + serializedName: "Properties", + xmlName: "Properties", type: { - name: "String" - } - }, - maxAgeInSeconds: { - constraints: { - InclusiveMinimum: 0 + name: "Composite", + className: "BlobPropertiesInternal", }, - serializedName: "MaxAgeInSeconds", - required: true, - xmlName: "MaxAgeInSeconds", - type: { - name: "Number" - } - } - } - } -}; -const StaticWebsite = { - serializedName: "StaticWebsite", - type: { - name: "Composite", - className: "StaticWebsite", - modelProperties: { - enabled: { - serializedName: "Enabled", - required: true, - xmlName: "Enabled", + }, + metadata: { + serializedName: "Metadata", + xmlName: "Metadata", type: { - name: "Boolean" - } + name: "Dictionary", + value: { type: { name: "String" } }, + }, }, - indexDocument: { - serializedName: "IndexDocument", - xmlName: "IndexDocument", + blobTags: { + serializedName: "BlobTags", + xmlName: "Tags", type: { - name: "String" - } + name: "Composite", + className: "BlobTags", + }, }, - errorDocument404Path: { - serializedName: "ErrorDocument404Path", - xmlName: "ErrorDocument404Path", + objectReplicationMetadata: { + serializedName: "ObjectReplicationMetadata", + xmlName: "OrMetadata", type: { - name: "String" - } + name: "Dictionary", + value: { type: { name: 
"String" } }, + }, }, - defaultIndexDocumentPath: { - serializedName: "DefaultIndexDocumentPath", - xmlName: "DefaultIndexDocumentPath", + hasVersionsOnly: { + serializedName: "HasVersionsOnly", + xmlName: "HasVersionsOnly", type: { - name: "String" - } - } - } - } + name: "Boolean", + }, + }, + }, + }, }; -const StorageError = { - serializedName: "StorageError", +const BlobName = { + serializedName: "BlobName", type: { name: "Composite", - className: "StorageError", + className: "BlobName", modelProperties: { - message: { - serializedName: "Message", - xmlName: "Message", + encoded: { + serializedName: "Encoded", + xmlName: "Encoded", + xmlIsAttribute: true, type: { - name: "String" - } + name: "Boolean", + }, }, - code: { - serializedName: "Code", - xmlName: "Code", - type: { - name: "String" - } - } - } - } -}; -const BlobServiceStatistics = { - serializedName: "BlobServiceStatistics", - xmlName: "StorageServiceStats", - type: { - name: "Composite", - className: "BlobServiceStatistics", - modelProperties: { - geoReplication: { - serializedName: "GeoReplication", - xmlName: "GeoReplication", + content: { + serializedName: "content", + xmlName: "content", + xmlIsMsText: true, type: { - name: "Composite", - className: "GeoReplication" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const GeoReplication = { - serializedName: "GeoReplication", +const BlobPropertiesInternal = { + serializedName: "BlobPropertiesInternal", + xmlName: "Properties", type: { name: "Composite", - className: "GeoReplication", + className: "BlobPropertiesInternal", modelProperties: { - status: { - serializedName: "Status", - required: true, - xmlName: "Status", + createdOn: { + serializedName: "Creation-Time", + xmlName: "Creation-Time", type: { - name: "Enum", - allowedValues: ["live", "bootstrap", "unavailable"] - } + name: "DateTimeRfc1123", + }, }, - lastSyncOn: { - serializedName: "LastSyncTime", - required: true, - xmlName: "LastSyncTime", - type: { - name: "DateTimeRfc1123" - } - } - } - } -}; -const ListContainersSegmentResponse = { - serializedName: "ListContainersSegmentResponse", - xmlName: "EnumerationResults", - type: { - name: "Composite", - className: "ListContainersSegmentResponse", - modelProperties: { - serviceEndpoint: { - serializedName: "ServiceEndpoint", + lastModified: { + serializedName: "Last-Modified", required: true, - xmlName: "ServiceEndpoint", - xmlIsAttribute: true, + xmlName: "Last-Modified", type: { - name: "String" - } + name: "DateTimeRfc1123", + }, }, - prefix: { - serializedName: "Prefix", - xmlName: "Prefix", + etag: { + serializedName: "Etag", + required: true, + xmlName: "Etag", type: { - name: "String" - } + name: "String", + }, }, - marker: { - serializedName: "Marker", - xmlName: "Marker", + contentLength: { + serializedName: "Content-Length", + xmlName: "Content-Length", type: { - name: "String" - } + name: "Number", + }, }, - maxPageSize: { - serializedName: "MaxResults", - xmlName: "MaxResults", + contentType: { + serializedName: "Content-Type", + xmlName: "Content-Type", type: { - name: "Number" - } + name: "String", + }, }, - containerItems: { - serializedName: "ContainerItems", - required: true, - xmlName: "Containers", - xmlIsWrapped: true, - xmlElementName: "Container", + contentEncoding: { + serializedName: "Content-Encoding", + xmlName: "Content-Encoding", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "ContainerItem" - } - } - } + name: "String", + }, }, - continuationToken: { - serializedName: "NextMarker", - 
xmlName: "NextMarker", - type: { - name: "String" - } - } - } - } -}; -const ContainerItem = { - serializedName: "ContainerItem", - xmlName: "Container", - type: { - name: "Composite", - className: "ContainerItem", - modelProperties: { - name: { - serializedName: "Name", - required: true, - xmlName: "Name", + contentLanguage: { + serializedName: "Content-Language", + xmlName: "Content-Language", type: { - name: "String" - } + name: "String", + }, }, - deleted: { - serializedName: "Deleted", - xmlName: "Deleted", + contentMD5: { + serializedName: "Content-MD5", + xmlName: "Content-MD5", type: { - name: "Boolean" - } + name: "ByteArray", + }, }, - version: { - serializedName: "Version", - xmlName: "Version", + contentDisposition: { + serializedName: "Content-Disposition", + xmlName: "Content-Disposition", type: { - name: "String" - } + name: "String", + }, }, - properties: { - serializedName: "Properties", - xmlName: "Properties", + cacheControl: { + serializedName: "Cache-Control", + xmlName: "Cache-Control", type: { - name: "Composite", - className: "ContainerProperties" - } + name: "String", + }, }, - metadata: { - serializedName: "Metadata", - xmlName: "Metadata", - type: { - name: "Dictionary", - value: { type: { name: "String" } } - } - } - } - } -}; -const ContainerProperties = { - serializedName: "ContainerProperties", - type: { - name: "Composite", - className: "ContainerProperties", - modelProperties: { - lastModified: { - serializedName: "Last-Modified", - required: true, - xmlName: "Last-Modified", + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", type: { - name: "DateTimeRfc1123" - } + name: "Number", + }, }, - etag: { - serializedName: "Etag", - required: true, - xmlName: "Etag", + blobType: { + serializedName: "BlobType", + xmlName: "BlobType", type: { - name: "String" - } + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"], + }, }, leaseStatus: { serializedName: "LeaseStatus", xmlName: "LeaseStatus", type: { name: "Enum", - allowedValues: ["locked", "unlocked"] - } + allowedValues: ["locked", "unlocked"], + }, }, leaseState: { serializedName: "LeaseState", @@ -12394,366 +9452,227 @@ const ContainerProperties = { "leased", "expired", "breaking", - "broken" - ] - } + "broken", + ], + }, }, leaseDuration: { serializedName: "LeaseDuration", xmlName: "LeaseDuration", type: { name: "Enum", - allowedValues: ["infinite", "fixed"] - } + allowedValues: ["infinite", "fixed"], + }, }, - publicAccess: { - serializedName: "PublicAccess", - xmlName: "PublicAccess", + copyId: { + serializedName: "CopyId", + xmlName: "CopyId", type: { - name: "Enum", - allowedValues: ["container", "blob"] - } + name: "String", + }, }, - hasImmutabilityPolicy: { - serializedName: "HasImmutabilityPolicy", - xmlName: "HasImmutabilityPolicy", + copyStatus: { + serializedName: "CopyStatus", + xmlName: "CopyStatus", type: { - name: "Boolean" - } + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"], + }, }, - hasLegalHold: { - serializedName: "HasLegalHold", - xmlName: "HasLegalHold", + copySource: { + serializedName: "CopySource", + xmlName: "CopySource", type: { - name: "Boolean" - } + name: "String", + }, }, - defaultEncryptionScope: { - serializedName: "DefaultEncryptionScope", - xmlName: "DefaultEncryptionScope", + copyProgress: { + serializedName: "CopyProgress", + xmlName: "CopyProgress", type: { - name: "String" - } + name: "String", + }, }, - preventEncryptionScopeOverride: { - serializedName: 
"DenyEncryptionScopeOverride", - xmlName: "DenyEncryptionScopeOverride", + copyCompletedOn: { + serializedName: "CopyCompletionTime", + xmlName: "CopyCompletionTime", type: { - name: "Boolean" - } + name: "DateTimeRfc1123", + }, }, - deletedOn: { - serializedName: "DeletedTime", - xmlName: "DeletedTime", + copyStatusDescription: { + serializedName: "CopyStatusDescription", + xmlName: "CopyStatusDescription", type: { - name: "DateTimeRfc1123" - } + name: "String", + }, }, - remainingRetentionDays: { - serializedName: "RemainingRetentionDays", - xmlName: "RemainingRetentionDays", + serverEncrypted: { + serializedName: "ServerEncrypted", + xmlName: "ServerEncrypted", type: { - name: "Number" - } + name: "Boolean", + }, }, - isImmutableStorageWithVersioningEnabled: { - serializedName: "ImmutableStorageWithVersioningEnabled", - xmlName: "ImmutableStorageWithVersioningEnabled", - type: { - name: "Boolean" - } - } - } - } -}; -const KeyInfo = { - serializedName: "KeyInfo", - type: { - name: "Composite", - className: "KeyInfo", - modelProperties: { - startsOn: { - serializedName: "Start", - required: true, - xmlName: "Start", + incrementalCopy: { + serializedName: "IncrementalCopy", + xmlName: "IncrementalCopy", type: { - name: "String" - } + name: "Boolean", + }, }, - expiresOn: { - serializedName: "Expiry", - required: true, - xmlName: "Expiry", - type: { - name: "String" - } - } - } - } -}; -const UserDelegationKey = { - serializedName: "UserDelegationKey", - type: { - name: "Composite", - className: "UserDelegationKey", - modelProperties: { - signedObjectId: { - serializedName: "SignedOid", - required: true, - xmlName: "SignedOid", + destinationSnapshot: { + serializedName: "DestinationSnapshot", + xmlName: "DestinationSnapshot", type: { - name: "String" - } + name: "String", + }, }, - signedTenantId: { - serializedName: "SignedTid", - required: true, - xmlName: "SignedTid", + deletedOn: { + serializedName: "DeletedTime", + xmlName: "DeletedTime", type: { - name: "String" - } + name: "DateTimeRfc1123", + }, }, - signedStartsOn: { - serializedName: "SignedStart", - required: true, - xmlName: "SignedStart", + remainingRetentionDays: { + serializedName: "RemainingRetentionDays", + xmlName: "RemainingRetentionDays", type: { - name: "String" - } + name: "Number", + }, }, - signedExpiresOn: { - serializedName: "SignedExpiry", - required: true, - xmlName: "SignedExpiry", + accessTier: { + serializedName: "AccessTier", + xmlName: "AccessTier", type: { - name: "String" - } + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive", + "Cold", + ], + }, }, - signedService: { - serializedName: "SignedService", - required: true, - xmlName: "SignedService", + accessTierInferred: { + serializedName: "AccessTierInferred", + xmlName: "AccessTierInferred", type: { - name: "String" - } + name: "Boolean", + }, }, - signedVersion: { - serializedName: "SignedVersion", - required: true, - xmlName: "SignedVersion", + archiveStatus: { + serializedName: "ArchiveStatus", + xmlName: "ArchiveStatus", type: { - name: "String" - } + name: "Enum", + allowedValues: [ + "rehydrate-pending-to-hot", + "rehydrate-pending-to-cool", + "rehydrate-pending-to-cold", + ], + }, }, - value: { - serializedName: "Value", - required: true, - xmlName: "Value", - type: { - name: "String" - } - } - } - } -}; -const FilterBlobSegment = { - serializedName: "FilterBlobSegment", - xmlName: "EnumerationResults", - type: { - name: "Composite", - 
className: "FilterBlobSegment", - modelProperties: { - serviceEndpoint: { - serializedName: "ServiceEndpoint", - required: true, - xmlName: "ServiceEndpoint", - xmlIsAttribute: true, + customerProvidedKeySha256: { + serializedName: "CustomerProvidedKeySha256", + xmlName: "CustomerProvidedKeySha256", type: { - name: "String" - } + name: "String", + }, }, - where: { - serializedName: "Where", - required: true, - xmlName: "Where", + encryptionScope: { + serializedName: "EncryptionScope", + xmlName: "EncryptionScope", type: { - name: "String" - } + name: "String", + }, }, - blobs: { - serializedName: "Blobs", - required: true, - xmlName: "Blobs", - xmlIsWrapped: true, - xmlElementName: "Blob", + accessTierChangedOn: { + serializedName: "AccessTierChangeTime", + xmlName: "AccessTierChangeTime", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "FilterBlobItem" - } - } - } + name: "DateTimeRfc1123", + }, }, - continuationToken: { - serializedName: "NextMarker", - xmlName: "NextMarker", - type: { - name: "String" - } - } - } - } -}; -const FilterBlobItem = { - serializedName: "FilterBlobItem", - xmlName: "Blob", - type: { - name: "Composite", - className: "FilterBlobItem", - modelProperties: { - name: { - serializedName: "Name", - required: true, - xmlName: "Name", + tagCount: { + serializedName: "TagCount", + xmlName: "TagCount", type: { - name: "String" - } + name: "Number", + }, }, - containerName: { - serializedName: "ContainerName", - required: true, - xmlName: "ContainerName", + expiresOn: { + serializedName: "Expiry-Time", + xmlName: "Expiry-Time", type: { - name: "String" - } + name: "DateTimeRfc1123", + }, }, - tags: { - serializedName: "Tags", - xmlName: "Tags", - type: { - name: "Composite", - className: "BlobTags" - } - } - } - } -}; -const BlobTags = { - serializedName: "BlobTags", - xmlName: "Tags", - type: { - name: "Composite", - className: "BlobTags", - modelProperties: { - blobTagSet: { - serializedName: "BlobTagSet", - required: true, - xmlName: "TagSet", - xmlIsWrapped: true, - xmlElementName: "Tag", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "BlobTag" - } - } - } - } - } - } -}; -const BlobTag = { - serializedName: "BlobTag", - xmlName: "Tag", - type: { - name: "Composite", - className: "BlobTag", - modelProperties: { - key: { - serializedName: "Key", - required: true, - xmlName: "Key", + isSealed: { + serializedName: "Sealed", + xmlName: "Sealed", type: { - name: "String" - } + name: "Boolean", + }, }, - value: { - serializedName: "Value", - required: true, - xmlName: "Value", - type: { - name: "String" - } - } - } - } -}; -const SignedIdentifier = { - serializedName: "SignedIdentifier", - xmlName: "SignedIdentifier", - type: { - name: "Composite", - className: "SignedIdentifier", - modelProperties: { - id: { - serializedName: "Id", - required: true, - xmlName: "Id", + rehydratePriority: { + serializedName: "RehydratePriority", + xmlName: "RehydratePriority", type: { - name: "String" - } + name: "Enum", + allowedValues: ["High", "Standard"], + }, }, - accessPolicy: { - serializedName: "AccessPolicy", - xmlName: "AccessPolicy", + lastAccessedOn: { + serializedName: "LastAccessTime", + xmlName: "LastAccessTime", type: { - name: "Composite", - className: "AccessPolicy" - } - } - } - } -}; -const AccessPolicy = { - serializedName: "AccessPolicy", - type: { - name: "Composite", - className: "AccessPolicy", - modelProperties: { - startsOn: { - serializedName: "Start", - xmlName: "Start", + name: 
"DateTimeRfc1123", + }, + }, + immutabilityPolicyExpiresOn: { + serializedName: "ImmutabilityPolicyUntilDate", + xmlName: "ImmutabilityPolicyUntilDate", type: { - name: "String" - } + name: "DateTimeRfc1123", + }, }, - expiresOn: { - serializedName: "Expiry", - xmlName: "Expiry", + immutabilityPolicyMode: { + serializedName: "ImmutabilityPolicyMode", + xmlName: "ImmutabilityPolicyMode", type: { - name: "String" - } + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"], + }, }, - permissions: { - serializedName: "Permission", - xmlName: "Permission", + legalHold: { + serializedName: "LegalHold", + xmlName: "LegalHold", type: { - name: "String" - } - } - } - } + name: "Boolean", + }, + }, + }, + }, }; -const ListBlobsFlatSegmentResponse = { - serializedName: "ListBlobsFlatSegmentResponse", +const ListBlobsHierarchySegmentResponse = { + serializedName: "ListBlobsHierarchySegmentResponse", xmlName: "EnumerationResults", type: { name: "Composite", - className: "ListBlobsFlatSegmentResponse", + className: "ListBlobsHierarchySegmentResponse", modelProperties: { serviceEndpoint: { serializedName: "ServiceEndpoint", @@ -12761,8 +9680,8 @@ const ListBlobsFlatSegmentResponse = { xmlName: "ServiceEndpoint", xmlIsAttribute: true, type: { - name: "String" - } + name: "String", + }, }, containerName: { serializedName: "ContainerName", @@ -12770,55 +9689,76 @@ const ListBlobsFlatSegmentResponse = { xmlName: "ContainerName", xmlIsAttribute: true, type: { - name: "String" - } + name: "String", + }, }, prefix: { serializedName: "Prefix", xmlName: "Prefix", type: { - name: "String" - } + name: "String", + }, }, marker: { serializedName: "Marker", xmlName: "Marker", type: { - name: "String" - } + name: "String", + }, }, maxPageSize: { serializedName: "MaxResults", xmlName: "MaxResults", type: { - name: "Number" - } + name: "Number", + }, + }, + delimiter: { + serializedName: "Delimiter", + xmlName: "Delimiter", + type: { + name: "String", + }, }, segment: { serializedName: "Segment", xmlName: "Blobs", type: { name: "Composite", - className: "BlobFlatListSegment" - } + className: "BlobHierarchyListSegment", + }, }, continuationToken: { serializedName: "NextMarker", xmlName: "NextMarker", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const BlobFlatListSegment = { - serializedName: "BlobFlatListSegment", +const BlobHierarchyListSegment = { + serializedName: "BlobHierarchyListSegment", xmlName: "Blobs", type: { name: "Composite", - className: "BlobFlatListSegment", + className: "BlobHierarchyListSegment", modelProperties: { + blobPrefixes: { + serializedName: "BlobPrefixes", + xmlName: "BlobPrefixes", + xmlElementName: "BlobPrefix", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobPrefix", + }, + }, + }, + }, blobItems: { serializedName: "BlobItems", required: true, @@ -12829,2455 +9769,1907 @@ const BlobFlatListSegment = { element: { type: { name: "Composite", - className: "BlobItemInternal" - } - } - } - } - } - } + className: "BlobItemInternal", + }, + }, + }, + }, + }, + }, }; -const BlobItemInternal = { - serializedName: "BlobItemInternal", - xmlName: "Blob", +const BlobPrefix = { + serializedName: "BlobPrefix", type: { name: "Composite", - className: "BlobItemInternal", + className: "BlobPrefix", modelProperties: { name: { serializedName: "Name", xmlName: "Name", type: { name: "Composite", - className: "BlobName" - } + className: "BlobName", + }, }, - deleted: { - serializedName: "Deleted", - required: true, - 
xmlName: "Deleted", + }, + }, +}; +const BlockLookupList = { + serializedName: "BlockLookupList", + xmlName: "BlockList", + type: { + name: "Composite", + className: "BlockLookupList", + modelProperties: { + committed: { + serializedName: "Committed", + xmlName: "Committed", + xmlElementName: "Committed", type: { - name: "Boolean" - } + name: "Sequence", + element: { + type: { + name: "String", + }, + }, + }, }, - snapshot: { - serializedName: "Snapshot", - required: true, - xmlName: "Snapshot", + uncommitted: { + serializedName: "Uncommitted", + xmlName: "Uncommitted", + xmlElementName: "Uncommitted", type: { - name: "String" - } + name: "Sequence", + element: { + type: { + name: "String", + }, + }, + }, }, - versionId: { - serializedName: "VersionId", - xmlName: "VersionId", + latest: { + serializedName: "Latest", + xmlName: "Latest", + xmlElementName: "Latest", type: { - name: "String" - } + name: "Sequence", + element: { + type: { + name: "String", + }, + }, + }, }, - isCurrentVersion: { - serializedName: "IsCurrentVersion", - xmlName: "IsCurrentVersion", + }, + }, +}; +const BlockList = { + serializedName: "BlockList", + type: { + name: "Composite", + className: "BlockList", + modelProperties: { + committedBlocks: { + serializedName: "CommittedBlocks", + xmlName: "CommittedBlocks", + xmlIsWrapped: true, + xmlElementName: "Block", type: { - name: "Boolean" - } + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Block", + }, + }, + }, }, - properties: { - serializedName: "Properties", - xmlName: "Properties", - type: { - name: "Composite", - className: "BlobPropertiesInternal" - } - }, - metadata: { - serializedName: "Metadata", - xmlName: "Metadata", + uncommittedBlocks: { + serializedName: "UncommittedBlocks", + xmlName: "UncommittedBlocks", + xmlIsWrapped: true, + xmlElementName: "Block", type: { - name: "Dictionary", - value: { type: { name: "String" } } - } + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Block", + }, + }, + }, }, - blobTags: { - serializedName: "BlobTags", - xmlName: "Tags", + }, + }, +}; +const Block = { + serializedName: "Block", + type: { + name: "Composite", + className: "Block", + modelProperties: { + name: { + serializedName: "Name", + required: true, + xmlName: "Name", type: { - name: "Composite", - className: "BlobTags" - } + name: "String", + }, }, - objectReplicationMetadata: { - serializedName: "ObjectReplicationMetadata", - xmlName: "OrMetadata", + size: { + serializedName: "Size", + required: true, + xmlName: "Size", type: { - name: "Dictionary", - value: { type: { name: "String" } } - } + name: "Number", + }, }, - hasVersionsOnly: { - serializedName: "HasVersionsOnly", - xmlName: "HasVersionsOnly", - type: { - name: "Boolean" - } - } - } - } + }, + }, }; -const BlobName = { - serializedName: "BlobName", +const PageList = { + serializedName: "PageList", type: { name: "Composite", - className: "BlobName", + className: "PageList", modelProperties: { - encoded: { - serializedName: "Encoded", - xmlName: "Encoded", - xmlIsAttribute: true, + pageRange: { + serializedName: "PageRange", + xmlName: "PageRange", + xmlElementName: "PageRange", type: { - name: "Boolean" - } + name: "Sequence", + element: { + type: { + name: "Composite", + className: "PageRange", + }, + }, + }, }, - content: { - serializedName: "content", - xmlName: "content", - xmlIsMsText: true, + clearRange: { + serializedName: "ClearRange", + xmlName: "ClearRange", + xmlElementName: "ClearRange", type: { - name: "String" - } - } - } - } + 
name: "Sequence", + element: { + type: { + name: "Composite", + className: "ClearRange", + }, + }, + }, + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String", + }, + }, + }, + }, }; -const BlobPropertiesInternal = { - serializedName: "BlobPropertiesInternal", - xmlName: "Properties", +const PageRange = { + serializedName: "PageRange", + xmlName: "PageRange", type: { name: "Composite", - className: "BlobPropertiesInternal", + className: "PageRange", modelProperties: { - createdOn: { - serializedName: "Creation-Time", - xmlName: "Creation-Time", - type: { - name: "DateTimeRfc1123" - } - }, - lastModified: { - serializedName: "Last-Modified", + start: { + serializedName: "Start", required: true, - xmlName: "Last-Modified", + xmlName: "Start", type: { - name: "DateTimeRfc1123" - } + name: "Number", + }, }, - etag: { - serializedName: "Etag", + end: { + serializedName: "End", required: true, - xmlName: "Etag", - type: { - name: "String" - } - }, - contentLength: { - serializedName: "Content-Length", - xmlName: "Content-Length", + xmlName: "End", type: { - name: "Number" - } + name: "Number", + }, }, - contentType: { - serializedName: "Content-Type", - xmlName: "Content-Type", + }, + }, +}; +const ClearRange = { + serializedName: "ClearRange", + xmlName: "ClearRange", + type: { + name: "Composite", + className: "ClearRange", + modelProperties: { + start: { + serializedName: "Start", + required: true, + xmlName: "Start", type: { - name: "String" - } + name: "Number", + }, }, - contentEncoding: { - serializedName: "Content-Encoding", - xmlName: "Content-Encoding", + end: { + serializedName: "End", + required: true, + xmlName: "End", type: { - name: "String" - } + name: "Number", + }, }, - contentLanguage: { - serializedName: "Content-Language", - xmlName: "Content-Language", + }, + }, +}; +const QueryRequest = { + serializedName: "QueryRequest", + xmlName: "QueryRequest", + type: { + name: "Composite", + className: "QueryRequest", + modelProperties: { + queryType: { + serializedName: "QueryType", + required: true, + xmlName: "QueryType", type: { - name: "String" - } + name: "String", + }, }, - contentMD5: { - serializedName: "Content-MD5", - xmlName: "Content-MD5", + expression: { + serializedName: "Expression", + required: true, + xmlName: "Expression", type: { - name: "ByteArray" - } + name: "String", + }, }, - contentDisposition: { - serializedName: "Content-Disposition", - xmlName: "Content-Disposition", + inputSerialization: { + serializedName: "InputSerialization", + xmlName: "InputSerialization", type: { - name: "String" - } + name: "Composite", + className: "QuerySerialization", + }, }, - cacheControl: { - serializedName: "Cache-Control", - xmlName: "Cache-Control", + outputSerialization: { + serializedName: "OutputSerialization", + xmlName: "OutputSerialization", type: { - name: "String" - } + name: "Composite", + className: "QuerySerialization", + }, }, - blobSequenceNumber: { - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", + }, + }, +}; +const QuerySerialization = { + serializedName: "QuerySerialization", + type: { + name: "Composite", + className: "QuerySerialization", + modelProperties: { + format: { + serializedName: "Format", + xmlName: "Format", type: { - name: "Number" - } + name: "Composite", + className: "QueryFormat", + }, }, - blobType: { - serializedName: "BlobType", - xmlName: "BlobType", + }, + }, +}; +const QueryFormat = { + serializedName: "QueryFormat", + type: { + name: 
"Composite", + className: "QueryFormat", + modelProperties: { + type: { + serializedName: "Type", + required: true, + xmlName: "Type", type: { name: "Enum", - allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] - } + allowedValues: ["delimited", "json", "arrow", "parquet"], + }, }, - leaseStatus: { - serializedName: "LeaseStatus", - xmlName: "LeaseStatus", + delimitedTextConfiguration: { + serializedName: "DelimitedTextConfiguration", + xmlName: "DelimitedTextConfiguration", type: { - name: "Enum", - allowedValues: ["locked", "unlocked"] - } + name: "Composite", + className: "DelimitedTextConfiguration", + }, }, - leaseState: { - serializedName: "LeaseState", - xmlName: "LeaseState", + jsonTextConfiguration: { + serializedName: "JsonTextConfiguration", + xmlName: "JsonTextConfiguration", type: { - name: "Enum", - allowedValues: [ - "available", - "leased", - "expired", - "breaking", - "broken" - ] - } + name: "Composite", + className: "JsonTextConfiguration", + }, }, - leaseDuration: { - serializedName: "LeaseDuration", - xmlName: "LeaseDuration", + arrowConfiguration: { + serializedName: "ArrowConfiguration", + xmlName: "ArrowConfiguration", type: { - name: "Enum", - allowedValues: ["infinite", "fixed"] - } + name: "Composite", + className: "ArrowConfiguration", + }, }, - copyId: { - serializedName: "CopyId", - xmlName: "CopyId", + parquetTextConfiguration: { + serializedName: "ParquetTextConfiguration", + xmlName: "ParquetTextConfiguration", type: { - name: "String" - } + name: "Dictionary", + value: { type: { name: "any" } }, + }, }, - copyStatus: { - serializedName: "CopyStatus", - xmlName: "CopyStatus", + }, + }, +}; +const DelimitedTextConfiguration = { + serializedName: "DelimitedTextConfiguration", + xmlName: "DelimitedTextConfiguration", + type: { + name: "Composite", + className: "DelimitedTextConfiguration", + modelProperties: { + columnSeparator: { + serializedName: "ColumnSeparator", + xmlName: "ColumnSeparator", type: { - name: "Enum", - allowedValues: ["pending", "success", "aborted", "failed"] - } + name: "String", + }, }, - copySource: { - serializedName: "CopySource", - xmlName: "CopySource", + fieldQuote: { + serializedName: "FieldQuote", + xmlName: "FieldQuote", type: { - name: "String" - } + name: "String", + }, }, - copyProgress: { - serializedName: "CopyProgress", - xmlName: "CopyProgress", + recordSeparator: { + serializedName: "RecordSeparator", + xmlName: "RecordSeparator", type: { - name: "String" - } + name: "String", + }, }, - copyCompletedOn: { - serializedName: "CopyCompletionTime", - xmlName: "CopyCompletionTime", + escapeChar: { + serializedName: "EscapeChar", + xmlName: "EscapeChar", type: { - name: "DateTimeRfc1123" - } + name: "String", + }, }, - copyStatusDescription: { - serializedName: "CopyStatusDescription", - xmlName: "CopyStatusDescription", + headersPresent: { + serializedName: "HeadersPresent", + xmlName: "HasHeaders", type: { - name: "String" - } + name: "Boolean", + }, }, - serverEncrypted: { - serializedName: "ServerEncrypted", - xmlName: "ServerEncrypted", + }, + }, +}; +const JsonTextConfiguration = { + serializedName: "JsonTextConfiguration", + xmlName: "JsonTextConfiguration", + type: { + name: "Composite", + className: "JsonTextConfiguration", + modelProperties: { + recordSeparator: { + serializedName: "RecordSeparator", + xmlName: "RecordSeparator", type: { - name: "Boolean" - } + name: "String", + }, }, - incrementalCopy: { - serializedName: "IncrementalCopy", - xmlName: "IncrementalCopy", + }, + }, +}; +const ArrowConfiguration = { 
+ serializedName: "ArrowConfiguration", + xmlName: "ArrowConfiguration", + type: { + name: "Composite", + className: "ArrowConfiguration", + modelProperties: { + schema: { + serializedName: "Schema", + required: true, + xmlName: "Schema", + xmlIsWrapped: true, + xmlElementName: "Field", type: { - name: "Boolean" - } + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ArrowField", + }, + }, + }, }, - destinationSnapshot: { - serializedName: "DestinationSnapshot", - xmlName: "DestinationSnapshot", + }, + }, +}; +const ArrowField = { + serializedName: "ArrowField", + xmlName: "Field", + type: { + name: "Composite", + className: "ArrowField", + modelProperties: { + type: { + serializedName: "Type", + required: true, + xmlName: "Type", type: { - name: "String" - } + name: "String", + }, }, - deletedOn: { - serializedName: "DeletedTime", - xmlName: "DeletedTime", + name: { + serializedName: "Name", + xmlName: "Name", type: { - name: "DateTimeRfc1123" - } + name: "String", + }, }, - remainingRetentionDays: { - serializedName: "RemainingRetentionDays", - xmlName: "RemainingRetentionDays", + precision: { + serializedName: "Precision", + xmlName: "Precision", type: { - name: "Number" - } + name: "Number", + }, }, - accessTier: { - serializedName: "AccessTier", - xmlName: "AccessTier", + scale: { + serializedName: "Scale", + xmlName: "Scale", type: { - name: "Enum", - allowedValues: [ - "P4", - "P6", - "P10", - "P15", - "P20", - "P30", - "P40", - "P50", - "P60", - "P70", - "P80", - "Hot", - "Cool", - "Archive", - "Cold" - ] - } + name: "Number", + }, }, - accessTierInferred: { - serializedName: "AccessTierInferred", - xmlName: "AccessTierInferred", + }, + }, +}; +const ServiceSetPropertiesHeaders = { + serializedName: "Service_setPropertiesHeaders", + type: { + name: "Composite", + className: "ServiceSetPropertiesHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", type: { - name: "Boolean" - } + name: "String", + }, }, - archiveStatus: { - serializedName: "ArchiveStatus", - xmlName: "ArchiveStatus", + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", type: { - name: "Enum", - allowedValues: [ - "rehydrate-pending-to-hot", - "rehydrate-pending-to-cool", - "rehydrate-pending-to-cold" - ] - } + name: "String", + }, }, - customerProvidedKeySha256: { - serializedName: "CustomerProvidedKeySha256", - xmlName: "CustomerProvidedKeySha256", + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, - encryptionScope: { - serializedName: "EncryptionScope", - xmlName: "EncryptionScope", + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { - name: "String" - } + name: "String", + }, }, - accessTierChangedOn: { - serializedName: "AccessTierChangeTime", - xmlName: "AccessTierChangeTime", + }, + }, +}; +const ServiceSetPropertiesExceptionHeaders = { + serializedName: "Service_setPropertiesExceptionHeaders", + type: { + name: "Composite", + className: "ServiceSetPropertiesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { - name: "DateTimeRfc1123" - } - }, - tagCount: { - serializedName: "TagCount", - xmlName: "TagCount", - type: { - name: "Number" - } - }, - expiresOn: { - serializedName: "Expiry-Time", - xmlName: "Expiry-Time", - type: { - name: "DateTimeRfc1123" - } - }, - isSealed: { - 
serializedName: "Sealed", - xmlName: "Sealed", - type: { - name: "Boolean" - } + name: "String", + }, }, - rehydratePriority: { - serializedName: "RehydratePriority", - xmlName: "RehydratePriority", + }, + }, +}; +const ServiceGetPropertiesHeaders = { + serializedName: "Service_getPropertiesHeaders", + type: { + name: "Composite", + className: "ServiceGetPropertiesHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", type: { - name: "Enum", - allowedValues: ["High", "Standard"] - } + name: "String", + }, }, - lastAccessedOn: { - serializedName: "LastAccessTime", - xmlName: "LastAccessTime", + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", type: { - name: "DateTimeRfc1123" - } + name: "String", + }, }, - immutabilityPolicyExpiresOn: { - serializedName: "ImmutabilityPolicyUntilDate", - xmlName: "ImmutabilityPolicyUntilDate", + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", type: { - name: "DateTimeRfc1123" - } + name: "String", + }, }, - immutabilityPolicyMode: { - serializedName: "ImmutabilityPolicyMode", - xmlName: "ImmutabilityPolicyMode", + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { - name: "Enum", - allowedValues: ["Mutable", "Unlocked", "Locked"] - } + name: "String", + }, }, - legalHold: { - serializedName: "LegalHold", - xmlName: "LegalHold", - type: { - name: "Boolean" - } - } - } - } + }, + }, }; -const ListBlobsHierarchySegmentResponse = { - serializedName: "ListBlobsHierarchySegmentResponse", - xmlName: "EnumerationResults", +const ServiceGetPropertiesExceptionHeaders = { + serializedName: "Service_getPropertiesExceptionHeaders", type: { name: "Composite", - className: "ListBlobsHierarchySegmentResponse", + className: "ServiceGetPropertiesExceptionHeaders", modelProperties: { - serviceEndpoint: { - serializedName: "ServiceEndpoint", - required: true, - xmlName: "ServiceEndpoint", - xmlIsAttribute: true, - type: { - name: "String" - } - }, - containerName: { - serializedName: "ContainerName", - required: true, - xmlName: "ContainerName", - xmlIsAttribute: true, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { - name: "String" - } + name: "String", + }, }, - prefix: { - serializedName: "Prefix", - xmlName: "Prefix", + }, + }, +}; +const ServiceGetStatisticsHeaders = { + serializedName: "Service_getStatisticsHeaders", + type: { + name: "Composite", + className: "ServiceGetStatisticsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, - marker: { - serializedName: "Marker", - xmlName: "Marker", + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, - maxPageSize: { - serializedName: "MaxResults", - xmlName: "MaxResults", + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", type: { - name: "Number" - } + name: "String", + }, }, - delimiter: { - serializedName: "Delimiter", - xmlName: "Delimiter", + date: { + serializedName: "date", + xmlName: "date", type: { - name: "String" - } + name: "DateTimeRfc1123", + }, }, - segment: { - serializedName: "Segment", - xmlName: "Blobs", + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { - name: "Composite", - className: "BlobHierarchyListSegment" - } + 
name: "String", + }, }, - continuationToken: { - serializedName: "NextMarker", - xmlName: "NextMarker", - type: { - name: "String" - } - } - } - } + }, + }, }; -const BlobHierarchyListSegment = { - serializedName: "BlobHierarchyListSegment", - xmlName: "Blobs", +const ServiceGetStatisticsExceptionHeaders = { + serializedName: "Service_getStatisticsExceptionHeaders", type: { name: "Composite", - className: "BlobHierarchyListSegment", + className: "ServiceGetStatisticsExceptionHeaders", modelProperties: { - blobPrefixes: { - serializedName: "BlobPrefixes", - xmlName: "BlobPrefixes", - xmlElementName: "BlobPrefix", + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "BlobPrefix" - } - } - } + name: "String", + }, }, - blobItems: { - serializedName: "BlobItems", - required: true, - xmlName: "BlobItems", - xmlElementName: "Blob", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "BlobItemInternal" - } - } - } - } - } - } + }, + }, }; -const BlobPrefix = { - serializedName: "BlobPrefix", +const ServiceListContainersSegmentHeaders = { + serializedName: "Service_listContainersSegmentHeaders", type: { name: "Composite", - className: "BlobPrefix", + className: "ServiceListContainersSegmentHeaders", modelProperties: { - name: { - serializedName: "Name", - xmlName: "Name", + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", type: { - name: "Composite", - className: "BlobName" - } - } - } - } -}; -const BlockLookupList = { - serializedName: "BlockLookupList", - xmlName: "BlockList", - type: { - name: "Composite", - className: "BlockLookupList", - modelProperties: { - committed: { - serializedName: "Committed", - xmlName: "Committed", - xmlElementName: "Committed", + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", type: { - name: "Sequence", - element: { - type: { - name: "String" - } - } - } + name: "String", + }, }, - uncommitted: { - serializedName: "Uncommitted", - xmlName: "Uncommitted", - xmlElementName: "Uncommitted", + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", type: { - name: "Sequence", - element: { - type: { - name: "String" - } - } - } + name: "String", + }, }, - latest: { - serializedName: "Latest", - xmlName: "Latest", - xmlElementName: "Latest", + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { - name: "Sequence", - element: { - type: { - name: "String" - } - } - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const BlockList = { - serializedName: "BlockList", +const ServiceListContainersSegmentExceptionHeaders = { + serializedName: "Service_listContainersSegmentExceptionHeaders", type: { name: "Composite", - className: "BlockList", + className: "ServiceListContainersSegmentExceptionHeaders", modelProperties: { - committedBlocks: { - serializedName: "CommittedBlocks", - xmlName: "CommittedBlocks", - xmlIsWrapped: true, - xmlElementName: "Block", + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "Block" - } - } - } + name: "String", + }, }, - uncommittedBlocks: { - serializedName: "UncommittedBlocks", - xmlName: "UncommittedBlocks", - xmlIsWrapped: true, - xmlElementName: "Block", - type: { - name: "Sequence", - element: { - type: { - name: 
"Composite", - className: "Block" - } - } - } - } - } - } + }, + }, }; -const Block = { - serializedName: "Block", +const ServiceGetUserDelegationKeyHeaders = { + serializedName: "Service_getUserDelegationKeyHeaders", type: { name: "Composite", - className: "Block", + className: "ServiceGetUserDelegationKeyHeaders", modelProperties: { - name: { - serializedName: "Name", - required: true, - xmlName: "Name", + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, - size: { - serializedName: "Size", - required: true, - xmlName: "Size", + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", type: { - name: "Number" - } - } - } - } -}; -const PageList = { - serializedName: "PageList", - type: { - name: "Composite", - className: "PageList", - modelProperties: { - pageRange: { - serializedName: "PageRange", - xmlName: "PageRange", - xmlElementName: "PageRange", + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "PageRange" - } - } - } + name: "String", + }, }, - clearRange: { - serializedName: "ClearRange", - xmlName: "ClearRange", - xmlElementName: "ClearRange", + date: { + serializedName: "date", + xmlName: "date", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "ClearRange" - } - } - } + name: "DateTimeRfc1123", + }, }, - continuationToken: { - serializedName: "NextMarker", - xmlName: "NextMarker", + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const PageRange = { - serializedName: "PageRange", - xmlName: "PageRange", +const ServiceGetUserDelegationKeyExceptionHeaders = { + serializedName: "Service_getUserDelegationKeyExceptionHeaders", type: { name: "Composite", - className: "PageRange", + className: "ServiceGetUserDelegationKeyExceptionHeaders", modelProperties: { - start: { - serializedName: "Start", - required: true, - xmlName: "Start", + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { - name: "Number" - } + name: "String", + }, }, - end: { - serializedName: "End", - required: true, - xmlName: "End", - type: { - name: "Number" - } - } - } - } + }, + }, }; -const ClearRange = { - serializedName: "ClearRange", - xmlName: "ClearRange", +const ServiceGetAccountInfoHeaders = { + serializedName: "Service_getAccountInfoHeaders", type: { name: "Composite", - className: "ClearRange", + className: "ServiceGetAccountInfoHeaders", modelProperties: { - start: { - serializedName: "Start", - required: true, - xmlName: "Start", + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", type: { - name: "Number" - } + name: "String", + }, }, - end: { - serializedName: "End", - required: true, - xmlName: "End", - type: { - name: "Number" - } - } - } - } -}; -const QueryRequest = { - serializedName: "QueryRequest", - xmlName: "QueryRequest", - type: { - name: "Composite", - className: "QueryRequest", - modelProperties: { - queryType: { - serializedName: "QueryType", - required: true, - xmlName: "QueryType", + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, - expression: { - serializedName: "Expression", - required: true, - xmlName: 
"Expression", + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, - inputSerialization: { - serializedName: "InputSerialization", - xmlName: "InputSerialization", + date: { + serializedName: "date", + xmlName: "date", type: { - name: "Composite", - className: "QuerySerialization" - } + name: "DateTimeRfc1123", + }, }, - outputSerialization: { - serializedName: "OutputSerialization", - xmlName: "OutputSerialization", + skuName: { + serializedName: "x-ms-sku-name", + xmlName: "x-ms-sku-name", type: { - name: "Composite", - className: "QuerySerialization" - } - } - } - } -}; -const QuerySerialization = { - serializedName: "QuerySerialization", - type: { - name: "Composite", - className: "QuerySerialization", - modelProperties: { - format: { - serializedName: "Format", - xmlName: "Format", - type: { - name: "Composite", - className: "QueryFormat" - } - } - } - } -}; -const QueryFormat = { - serializedName: "QueryFormat", - type: { - name: "Composite", - className: "QueryFormat", - modelProperties: { - type: { - serializedName: "Type", - required: true, - xmlName: "Type", + name: "Enum", + allowedValues: [ + "Standard_LRS", + "Standard_GRS", + "Standard_RAGRS", + "Standard_ZRS", + "Premium_LRS", + ], + }, + }, + accountKind: { + serializedName: "x-ms-account-kind", + xmlName: "x-ms-account-kind", type: { name: "Enum", - allowedValues: ["delimited", "json", "arrow", "parquet"] - } - }, - delimitedTextConfiguration: { - serializedName: "DelimitedTextConfiguration", - xmlName: "DelimitedTextConfiguration", - type: { - name: "Composite", - className: "DelimitedTextConfiguration" - } - }, - jsonTextConfiguration: { - serializedName: "JsonTextConfiguration", - xmlName: "JsonTextConfiguration", - type: { - name: "Composite", - className: "JsonTextConfiguration" - } - }, - arrowConfiguration: { - serializedName: "ArrowConfiguration", - xmlName: "ArrowConfiguration", - type: { - name: "Composite", - className: "ArrowConfiguration" - } - }, - parquetTextConfiguration: { - serializedName: "ParquetTextConfiguration", - xmlName: "ParquetTextConfiguration", - type: { - name: "any" - } - } - } - } -}; -const DelimitedTextConfiguration = { - serializedName: "DelimitedTextConfiguration", - xmlName: "DelimitedTextConfiguration", - type: { - name: "Composite", - className: "DelimitedTextConfiguration", - modelProperties: { - columnSeparator: { - serializedName: "ColumnSeparator", - xmlName: "ColumnSeparator", - type: { - name: "String" - } - }, - fieldQuote: { - serializedName: "FieldQuote", - xmlName: "FieldQuote", - type: { - name: "String" - } + allowedValues: [ + "Storage", + "BlobStorage", + "StorageV2", + "FileStorage", + "BlockBlobStorage", + ], + }, }, - recordSeparator: { - serializedName: "RecordSeparator", - xmlName: "RecordSeparator", + isHierarchicalNamespaceEnabled: { + serializedName: "x-ms-is-hns-enabled", + xmlName: "x-ms-is-hns-enabled", type: { - name: "String" - } + name: "Boolean", + }, }, - escapeChar: { - serializedName: "EscapeChar", - xmlName: "EscapeChar", + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { - name: "String" - } + name: "String", + }, }, - headersPresent: { - serializedName: "HeadersPresent", - xmlName: "HasHeaders", - type: { - name: "Boolean" - } - } - } - } -}; -const JsonTextConfiguration = { - serializedName: "JsonTextConfiguration", - xmlName: "JsonTextConfiguration", - type: { - name: "Composite", - className: "JsonTextConfiguration", - 
modelProperties: { - recordSeparator: { - serializedName: "RecordSeparator", - xmlName: "RecordSeparator", - type: { - name: "String" - } - } - } - } -}; -const ArrowConfiguration = { - serializedName: "ArrowConfiguration", - xmlName: "ArrowConfiguration", - type: { - name: "Composite", - className: "ArrowConfiguration", - modelProperties: { - schema: { - serializedName: "Schema", - required: true, - xmlName: "Schema", - xmlIsWrapped: true, - xmlElementName: "Field", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "ArrowField" - } - } - } - } - } - } + }, + }, }; -const ArrowField = { - serializedName: "ArrowField", - xmlName: "Field", +const ServiceGetAccountInfoExceptionHeaders = { + serializedName: "Service_getAccountInfoExceptionHeaders", type: { name: "Composite", - className: "ArrowField", + className: "ServiceGetAccountInfoExceptionHeaders", modelProperties: { - type: { - serializedName: "Type", - required: true, - xmlName: "Type", - type: { - name: "String" - } - }, - name: { - serializedName: "Name", - xmlName: "Name", - type: { - name: "String" - } - }, - precision: { - serializedName: "Precision", - xmlName: "Precision", + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { - name: "Number" - } + name: "String", + }, }, - scale: { - serializedName: "Scale", - xmlName: "Scale", - type: { - name: "Number" - } - } - } - } + }, + }, }; -const ServiceSetPropertiesHeaders = { - serializedName: "Service_setPropertiesHeaders", +const ServiceSubmitBatchHeaders = { + serializedName: "Service_submitBatchHeaders", type: { name: "Composite", - className: "ServiceSetPropertiesHeaders", + className: "ServiceSubmitBatchHeaders", modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + contentType: { + serializedName: "content-type", + xmlName: "content-type", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const ServiceSetPropertiesExceptionHeaders = { - serializedName: "Service_setPropertiesExceptionHeaders", +const ServiceSubmitBatchExceptionHeaders = { + serializedName: "Service_submitBatchExceptionHeaders", type: { name: "Composite", - className: "ServiceSetPropertiesExceptionHeaders", + className: "ServiceSubmitBatchExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const ServiceGetPropertiesHeaders = { - serializedName: "Service_getPropertiesHeaders", +const ServiceFilterBlobsHeaders = { + serializedName: "Service_filterBlobsHeaders", type: { name: "Composite", - className: "ServiceGetPropertiesHeaders", + className: "ServiceFilterBlobsHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: 
"x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const ServiceGetPropertiesExceptionHeaders = { - serializedName: "Service_getPropertiesExceptionHeaders", +const ServiceFilterBlobsExceptionHeaders = { + serializedName: "Service_filterBlobsExceptionHeaders", type: { name: "Composite", - className: "ServiceGetPropertiesExceptionHeaders", + className: "ServiceFilterBlobsExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const ServiceGetStatisticsHeaders = { - serializedName: "Service_getStatisticsHeaders", +const ContainerCreateHeaders = { + serializedName: "Container_createHeaders", type: { name: "Composite", - className: "ServiceGetStatisticsHeaders", + className: "ContainerCreateHeaders", modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const ServiceGetStatisticsExceptionHeaders = { - serializedName: "Service_getStatisticsExceptionHeaders", +const ContainerCreateExceptionHeaders = { + serializedName: "Container_createExceptionHeaders", type: { name: "Composite", - className: "ServiceGetStatisticsExceptionHeaders", + className: "ContainerCreateExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const ServiceListContainersSegmentHeaders = { - serializedName: "Service_listContainersSegmentHeaders", +const ContainerGetPropertiesHeaders = { + serializedName: "Container_getPropertiesHeaders", type: { name: "Composite", - className: "ServiceListContainersSegmentHeaders", + className: "ContainerGetPropertiesHeaders", modelProperties: { + metadata: { + serializedName: "x-ms-meta", + headerCollectionPrefix: "x-ms-meta-", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } }, + }, + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: 
"x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"], + }, + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken", + ], + }, + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"], + }, + }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + blobPublicAccess: { + serializedName: "x-ms-blob-public-access", + xmlName: "x-ms-blob-public-access", + type: { + name: "Enum", + allowedValues: ["container", "blob"], + }, + }, + hasImmutabilityPolicy: { + serializedName: "x-ms-has-immutability-policy", + xmlName: "x-ms-has-immutability-policy", + type: { + name: "Boolean", + }, + }, + hasLegalHold: { + serializedName: "x-ms-has-legal-hold", + xmlName: "x-ms-has-legal-hold", + type: { + name: "Boolean", + }, + }, + defaultEncryptionScope: { + serializedName: "x-ms-default-encryption-scope", + xmlName: "x-ms-default-encryption-scope", + type: { + name: "String", + }, + }, + denyEncryptionScopeOverride: { + serializedName: "x-ms-deny-encryption-scope-override", + xmlName: "x-ms-deny-encryption-scope-override", + type: { + name: "Boolean", + }, + }, + isImmutableStorageWithVersioningEnabled: { + serializedName: "x-ms-immutable-storage-with-versioning-enabled", + xmlName: "x-ms-immutable-storage-with-versioning-enabled", + type: { + name: "Boolean", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const ServiceListContainersSegmentExceptionHeaders = { - serializedName: "Service_listContainersSegmentExceptionHeaders", +const ContainerGetPropertiesExceptionHeaders = { + serializedName: "Container_getPropertiesExceptionHeaders", type: { name: "Composite", - className: "ServiceListContainersSegmentExceptionHeaders", + className: "ContainerGetPropertiesExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const ServiceGetUserDelegationKeyHeaders = { - serializedName: "Service_getUserDelegationKeyHeaders", +const ContainerDeleteHeaders = { + serializedName: "Container_deleteHeaders", type: { name: "Composite", - className: "ServiceGetUserDelegationKeyHeaders", + className: "ContainerDeleteHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: 
"DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const ServiceGetUserDelegationKeyExceptionHeaders = { - serializedName: "Service_getUserDelegationKeyExceptionHeaders", +const ContainerDeleteExceptionHeaders = { + serializedName: "Container_deleteExceptionHeaders", type: { name: "Composite", - className: "ServiceGetUserDelegationKeyExceptionHeaders", + className: "ContainerDeleteExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const ServiceGetAccountInfoHeaders = { - serializedName: "Service_getAccountInfoHeaders", +const ContainerSetMetadataHeaders = { + serializedName: "Container_setMetadataHeaders", type: { name: "Composite", - className: "ServiceGetAccountInfoHeaders", + className: "ContainerSetMetadataHeaders", modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, - skuName: { - serializedName: "x-ms-sku-name", - xmlName: "x-ms-sku-name", + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { - name: "Enum", - allowedValues: [ - "Standard_LRS", - "Standard_GRS", - "Standard_RAGRS", - "Standard_ZRS", - "Premium_LRS" - ] - } + name: "String", + }, }, - accountKind: { - serializedName: "x-ms-account-kind", - xmlName: "x-ms-account-kind", - type: { - name: "Enum", - allowedValues: [ - "Storage", - "BlobStorage", - "StorageV2", - "FileStorage", - "BlockBlobStorage" - ] - } - }, - isHierarchicalNamespaceEnabled: { - serializedName: "x-ms-is-hns-enabled", - xmlName: "x-ms-is-hns-enabled", - type: { - name: "Boolean" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } + }, + }, }; -const ServiceGetAccountInfoExceptionHeaders = { - serializedName: "Service_getAccountInfoExceptionHeaders", +const ContainerSetMetadataExceptionHeaders = { + serializedName: "Container_setMetadataExceptionHeaders", type: { name: "Composite", - className: "ServiceGetAccountInfoExceptionHeaders", + className: "ContainerSetMetadataExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const ServiceSubmitBatchHeaders = { - serializedName: "Service_submitBatchHeaders", +const ContainerGetAccessPolicyHeaders = { + serializedName: "Container_getAccessPolicyHeaders", type: { name: "Composite", - className: "ServiceSubmitBatchHeaders", + className: "ContainerGetAccessPolicyHeaders", modelProperties: { - contentType: { - serializedName: "content-type", - xmlName: "content-type", + 
blobPublicAccess: { + serializedName: "x-ms-blob-public-access", + xmlName: "x-ms-blob-public-access", type: { - name: "String" - } + name: "Enum", + allowedValues: ["container", "blob"], + }, + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + date: { + serializedName: "date", + xmlName: "date", type: { - name: "String" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const ServiceSubmitBatchExceptionHeaders = { - serializedName: "Service_submitBatchExceptionHeaders", +const ContainerGetAccessPolicyExceptionHeaders = { + serializedName: "Container_getAccessPolicyExceptionHeaders", type: { name: "Composite", - className: "ServiceSubmitBatchExceptionHeaders", + className: "ContainerGetAccessPolicyExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const ServiceFilterBlobsHeaders = { - serializedName: "Service_filterBlobsHeaders", +const ContainerSetAccessPolicyHeaders = { + serializedName: "Container_setAccessPolicyHeaders", type: { name: "Composite", - className: "ServiceFilterBlobsHeaders", + className: "ContainerSetAccessPolicyHeaders", modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const ServiceFilterBlobsExceptionHeaders = { - serializedName: "Service_filterBlobsExceptionHeaders", +const ContainerSetAccessPolicyExceptionHeaders = { + serializedName: "Container_setAccessPolicyExceptionHeaders", type: { name: "Composite", - className: "ServiceFilterBlobsExceptionHeaders", + className: "ContainerSetAccessPolicyExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const ContainerCreateHeaders = { - serializedName: "Container_createHeaders", 
+const ContainerRestoreHeaders = { + serializedName: "Container_restoreHeaders", type: { name: "Composite", - className: "ContainerCreateHeaders", + className: "ContainerRestoreHeaders", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const ContainerCreateExceptionHeaders = { - serializedName: "Container_createExceptionHeaders", +const ContainerRestoreExceptionHeaders = { + serializedName: "Container_restoreExceptionHeaders", type: { name: "Composite", - className: "ContainerCreateExceptionHeaders", + className: "ContainerRestoreExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const ContainerGetPropertiesHeaders = { - serializedName: "Container_getPropertiesHeaders", +const ContainerRenameHeaders = { + serializedName: "Container_renameHeaders", type: { name: "Composite", - className: "ContainerGetPropertiesHeaders", + className: "ContainerRenameHeaders", modelProperties: { - metadata: { - serializedName: "x-ms-meta", - xmlName: "x-ms-meta", - type: { - name: "Dictionary", - value: { type: { name: "String" } } - }, - headerCollectionPrefix: "x-ms-meta-" - }, - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - leaseDuration: { - serializedName: "x-ms-lease-duration", - xmlName: "x-ms-lease-duration", - type: { - name: "Enum", - allowedValues: ["infinite", "fixed"] - } - }, - leaseState: { - serializedName: "x-ms-lease-state", - xmlName: "x-ms-lease-state", - type: { - name: "Enum", - allowedValues: [ - "available", - "leased", - "expired", - "breaking", - "broken" - ] - } - }, - leaseStatus: { - serializedName: "x-ms-lease-status", - xmlName: "x-ms-lease-status", - type: { - name: "Enum", - allowedValues: ["locked", "unlocked"] - } - }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - }, - blobPublicAccess: { - serializedName: "x-ms-blob-public-access", - xmlName: "x-ms-blob-public-access", - type: { - name: "Enum", - allowedValues: ["container", "blob"] - } + name: "DateTimeRfc1123", + }, 
}, - hasImmutabilityPolicy: { - serializedName: "x-ms-has-immutability-policy", - xmlName: "x-ms-has-immutability-policy", + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { - name: "Boolean" - } + name: "String", + }, }, - hasLegalHold: { - serializedName: "x-ms-has-legal-hold", - xmlName: "x-ms-has-legal-hold", + }, + }, +}; +const ContainerRenameExceptionHeaders = { + serializedName: "Container_renameExceptionHeaders", + type: { + name: "Composite", + className: "ContainerRenameExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { - name: "Boolean" - } + name: "String", + }, }, - defaultEncryptionScope: { - serializedName: "x-ms-default-encryption-scope", - xmlName: "x-ms-default-encryption-scope", + }, + }, +}; +const ContainerSubmitBatchHeaders = { + serializedName: "Container_submitBatchHeaders", + type: { + name: "Composite", + className: "ContainerSubmitBatchHeaders", + modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", type: { - name: "String" - } + name: "String", + }, }, - denyEncryptionScopeOverride: { - serializedName: "x-ms-deny-encryption-scope-override", - xmlName: "x-ms-deny-encryption-scope-override", + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", type: { - name: "Boolean" - } + name: "String", + }, }, - isImmutableStorageWithVersioningEnabled: { - serializedName: "x-ms-immutable-storage-with-versioning-enabled", - xmlName: "x-ms-immutable-storage-with-versioning-enabled", + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", type: { - name: "Boolean" - } + name: "String", + }, }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } + }, + }, }; -const ContainerGetPropertiesExceptionHeaders = { - serializedName: "Container_getPropertiesExceptionHeaders", +const ContainerSubmitBatchExceptionHeaders = { + serializedName: "Container_submitBatchExceptionHeaders", type: { name: "Composite", - className: "ContainerGetPropertiesExceptionHeaders", + className: "ContainerSubmitBatchExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const ContainerDeleteHeaders = { - serializedName: "Container_deleteHeaders", +const ContainerFilterBlobsHeaders = { + serializedName: "Container_filterBlobsHeaders", type: { name: "Composite", - className: "ContainerDeleteHeaders", + className: "ContainerFilterBlobsHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } + }, + }, }; -const ContainerDeleteExceptionHeaders = { - serializedName: "Container_deleteExceptionHeaders", +const ContainerFilterBlobsExceptionHeaders = { + serializedName: 
"Container_filterBlobsExceptionHeaders", type: { name: "Composite", - className: "ContainerDeleteExceptionHeaders", + className: "ContainerFilterBlobsExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const ContainerSetMetadataHeaders = { - serializedName: "Container_setMetadataHeaders", +const ContainerAcquireLeaseHeaders = { + serializedName: "Container_acquireLeaseHeaders", type: { name: "Composite", - className: "ContainerSetMetadataHeaders", + className: "ContainerAcquireLeaseHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } + }, + }, }; -const ContainerSetMetadataExceptionHeaders = { - serializedName: "Container_setMetadataExceptionHeaders", +const ContainerAcquireLeaseExceptionHeaders = { + serializedName: "Container_acquireLeaseExceptionHeaders", type: { name: "Composite", - className: "ContainerSetMetadataExceptionHeaders", + className: "ContainerAcquireLeaseExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const ContainerGetAccessPolicyHeaders = { - serializedName: "Container_getAccessPolicyHeaders", +const ContainerReleaseLeaseHeaders = { + serializedName: "Container_releaseLeaseHeaders", type: { name: "Composite", - className: "ContainerGetAccessPolicyHeaders", + className: "ContainerReleaseLeaseHeaders", modelProperties: { - blobPublicAccess: { - serializedName: "x-ms-blob-public-access", - xmlName: "x-ms-blob-public-access", - type: { - name: "Enum", - allowedValues: ["container", "blob"] - } - }, etag: { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, - errorCode: { - serializedName: 
"x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } + }, + }, }; -const ContainerGetAccessPolicyExceptionHeaders = { - serializedName: "Container_getAccessPolicyExceptionHeaders", +const ContainerReleaseLeaseExceptionHeaders = { + serializedName: "Container_releaseLeaseExceptionHeaders", type: { name: "Composite", - className: "ContainerGetAccessPolicyExceptionHeaders", + className: "ContainerReleaseLeaseExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const ContainerSetAccessPolicyHeaders = { - serializedName: "Container_setAccessPolicyHeaders", +const ContainerRenewLeaseHeaders = { + serializedName: "Container_renewLeaseHeaders", type: { name: "Composite", - className: "ContainerSetAccessPolicyHeaders", + className: "ContainerRenewLeaseHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } -}; -const ContainerSetAccessPolicyExceptionHeaders = { - serializedName: "Container_setAccessPolicyExceptionHeaders", - type: { - name: "Composite", - className: "ContainerSetAccessPolicyExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } -}; -const ContainerRestoreHeaders = { - serializedName: "Container_restoreHeaders", - type: { - name: "Composite", - className: "ContainerRestoreHeaders", - modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } -}; -const ContainerRestoreExceptionHeaders = { - serializedName: "Container_restoreExceptionHeaders", - type: { - name: "Composite", - className: "ContainerRestoreExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } -}; -const ContainerRenameHeaders = { - serializedName: "Container_renameHeaders", - type: { - name: "Composite", - 
className: "ContainerRenameHeaders", - modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } -}; -const ContainerRenameExceptionHeaders = { - serializedName: "Container_renameExceptionHeaders", - type: { - name: "Composite", - className: "ContainerRenameExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } -}; -const ContainerSubmitBatchHeaders = { - serializedName: "Container_submitBatchHeaders", - type: { - name: "Composite", - className: "ContainerSubmitBatchHeaders", - modelProperties: { - contentType: { - serializedName: "content-type", - xmlName: "content-type", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - } - } - } -}; -const ContainerSubmitBatchExceptionHeaders = { - serializedName: "Container_submitBatchExceptionHeaders", - type: { - name: "Composite", - className: "ContainerSubmitBatchExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } -}; -const ContainerFilterBlobsHeaders = { - serializedName: "Container_filterBlobsHeaders", - type: { - name: "Composite", - className: "ContainerFilterBlobsHeaders", - modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - } - } - } -}; -const ContainerFilterBlobsExceptionHeaders = { - serializedName: "Container_filterBlobsExceptionHeaders", - type: { - name: "Composite", - className: "ContainerFilterBlobsExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } -}; -const ContainerAcquireLeaseHeaders = { - serializedName: "Container_acquireLeaseHeaders", - type: { - name: "Composite", - className: "ContainerAcquireLeaseHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - leaseId: { - serializedName: "x-ms-lease-id", - xmlName: "x-ms-lease-id", - type: { - name: "String" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: 
"x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - } - } - } -}; -const ContainerAcquireLeaseExceptionHeaders = { - serializedName: "Container_acquireLeaseExceptionHeaders", - type: { - name: "Composite", - className: "ContainerAcquireLeaseExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } -}; -const ContainerReleaseLeaseHeaders = { - serializedName: "Container_releaseLeaseHeaders", - type: { - name: "Composite", - className: "ContainerReleaseLeaseHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - } - } - } -}; -const ContainerReleaseLeaseExceptionHeaders = { - serializedName: "Container_releaseLeaseExceptionHeaders", - type: { - name: "Composite", - className: "ContainerReleaseLeaseExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } -}; -const ContainerRenewLeaseHeaders = { - serializedName: "Container_renewLeaseHeaders", - type: { - name: "Composite", - className: "ContainerRenewLeaseHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - leaseId: { - serializedName: "x-ms-lease-id", - xmlName: "x-ms-lease-id", - type: { - name: "String" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - } - } - } + }, + }, }; const ContainerRenewLeaseExceptionHeaders = { serializedName: "Container_renewLeaseExceptionHeaders", @@ -15289,11 +11681,11 @@ const ContainerRenewLeaseExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const ContainerBreakLeaseHeaders = { serializedName: "Container_breakLeaseHeaders", @@ -15305,53 +11697,53 @@ const ContainerBreakLeaseHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + 
name: "DateTimeRfc1123", + }, }, leaseTime: { serializedName: "x-ms-lease-time", xmlName: "x-ms-lease-time", type: { - name: "Number" - } + name: "Number", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "DateTimeRfc1123", + }, + }, + }, + }, }; const ContainerBreakLeaseExceptionHeaders = { serializedName: "Container_breakLeaseExceptionHeaders", @@ -15363,11 +11755,11 @@ const ContainerBreakLeaseExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const ContainerChangeLeaseHeaders = { serializedName: "Container_changeLeaseHeaders", @@ -15379,53 +11771,53 @@ const ContainerChangeLeaseHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, leaseId: { serializedName: "x-ms-lease-id", xmlName: "x-ms-lease-id", type: { - name: "String" - } + name: "String", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "DateTimeRfc1123", + }, + }, + }, + }, }; const ContainerChangeLeaseExceptionHeaders = { serializedName: "Container_changeLeaseExceptionHeaders", @@ -15437,11 +11829,11 @@ const ContainerChangeLeaseExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const ContainerListBlobFlatSegmentHeaders = { serializedName: "Container_listBlobFlatSegmentHeaders", @@ -15453,46 +11845,46 @@ const ContainerListBlobFlatSegmentHeaders = { serializedName: "content-type", xmlName: "content-type", type: { - name: "String" - } + name: "String", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const ContainerListBlobFlatSegmentExceptionHeaders = { serializedName: "Container_listBlobFlatSegmentExceptionHeaders", @@ -15504,11 +11896,11 @@ const 
ContainerListBlobFlatSegmentExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const ContainerListBlobHierarchySegmentHeaders = { serializedName: "Container_listBlobHierarchySegmentHeaders", @@ -15520,46 +11912,46 @@ const ContainerListBlobHierarchySegmentHeaders = { serializedName: "content-type", xmlName: "content-type", type: { - name: "String" - } + name: "String", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const ContainerListBlobHierarchySegmentExceptionHeaders = { serializedName: "Container_listBlobHierarchySegmentExceptionHeaders", @@ -15571,14 +11963,14 @@ const ContainerListBlobHierarchySegmentExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } -}; -const ContainerGetAccountInfoHeaders = { - serializedName: "Container_getAccountInfoHeaders", + name: "String", + }, + }, + }, + }, +}; +const ContainerGetAccountInfoHeaders = { + serializedName: "Container_getAccountInfoHeaders", type: { name: "Composite", className: "ContainerGetAccountInfoHeaders", @@ -15587,29 +11979,29 @@ const ContainerGetAccountInfoHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, skuName: { serializedName: "x-ms-sku-name", @@ -15621,9 +12013,9 @@ const ContainerGetAccountInfoHeaders = { "Standard_GRS", "Standard_RAGRS", "Standard_ZRS", - "Premium_LRS" - ] - } + "Premium_LRS", + ], + }, }, accountKind: { serializedName: "x-ms-account-kind", @@ -15635,12 +12027,12 @@ const ContainerGetAccountInfoHeaders = { "BlobStorage", "StorageV2", "FileStorage", - "BlockBlobStorage" - ] - } - } - } - } + "BlockBlobStorage", + ], + }, + }, + }, + }, }; const ContainerGetAccountInfoExceptionHeaders = { serializedName: "Container_getAccountInfoExceptionHeaders", @@ -15652,11 +12044,11 @@ const ContainerGetAccountInfoExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobDownloadHeaders = { serializedName: "Blob_downloadHeaders", @@ -15668,169 +12060,169 @@ const BlobDownloadHeaders = { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, createdOn: { serializedName: "x-ms-creation-time", xmlName: "x-ms-creation-time", type: { - name: 
"DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, metadata: { serializedName: "x-ms-meta", + headerCollectionPrefix: "x-ms-meta-", xmlName: "x-ms-meta", type: { name: "Dictionary", - value: { type: { name: "String" } } + value: { type: { name: "String" } }, }, - headerCollectionPrefix: "x-ms-meta-" }, objectReplicationPolicyId: { serializedName: "x-ms-or-policy-id", xmlName: "x-ms-or-policy-id", type: { - name: "String" - } + name: "String", + }, }, objectReplicationRules: { serializedName: "x-ms-or", + headerCollectionPrefix: "x-ms-or-", xmlName: "x-ms-or", type: { name: "Dictionary", - value: { type: { name: "String" } } + value: { type: { name: "String" } }, }, - headerCollectionPrefix: "x-ms-or-" }, contentLength: { serializedName: "content-length", xmlName: "content-length", type: { - name: "Number" - } + name: "Number", + }, }, contentType: { serializedName: "content-type", xmlName: "content-type", type: { - name: "String" - } + name: "String", + }, }, contentRange: { serializedName: "content-range", xmlName: "content-range", type: { - name: "String" - } + name: "String", + }, }, etag: { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, contentEncoding: { serializedName: "content-encoding", xmlName: "content-encoding", type: { - name: "String" - } + name: "String", + }, }, cacheControl: { serializedName: "cache-control", xmlName: "cache-control", type: { - name: "String" - } + name: "String", + }, }, contentDisposition: { serializedName: "content-disposition", xmlName: "content-disposition", type: { - name: "String" - } + name: "String", + }, }, contentLanguage: { serializedName: "content-language", xmlName: "content-language", type: { - name: "String" - } + name: "String", + }, }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { - name: "Number" - } + name: "Number", + }, }, blobType: { serializedName: "x-ms-blob-type", xmlName: "x-ms-blob-type", type: { name: "Enum", - allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] - } + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"], + }, }, copyCompletedOn: { serializedName: "x-ms-copy-completion-time", xmlName: "x-ms-copy-completion-time", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, copyStatusDescription: { serializedName: "x-ms-copy-status-description", xmlName: "x-ms-copy-status-description", type: { - name: "String" - } + name: "String", + }, }, copyId: { serializedName: "x-ms-copy-id", xmlName: "x-ms-copy-id", type: { - name: "String" - } + name: "String", + }, }, copyProgress: { serializedName: "x-ms-copy-progress", xmlName: "x-ms-copy-progress", type: { - name: "String" - } + name: "String", + }, }, copySource: { serializedName: "x-ms-copy-source", xmlName: "x-ms-copy-source", type: { - name: "String" - } + name: "String", + }, }, copyStatus: { serializedName: "x-ms-copy-status", xmlName: "x-ms-copy-status", type: { name: "Enum", - allowedValues: ["pending", "success", "aborted", "failed"] - } + allowedValues: ["pending", "success", "aborted", "failed"], + }, }, leaseDuration: { serializedName: "x-ms-lease-duration", xmlName: "x-ms-lease-duration", type: { name: "Enum", - allowedValues: ["infinite", "fixed"] - } + allowedValues: ["infinite", "fixed"], + }, }, leaseState: { serializedName: "x-ms-lease-state", @@ -15842,161 +12234,161 @@ const 
BlobDownloadHeaders = { "leased", "expired", "breaking", - "broken" - ] - } + "broken", + ], + }, }, leaseStatus: { serializedName: "x-ms-lease-status", xmlName: "x-ms-lease-status", type: { name: "Enum", - allowedValues: ["locked", "unlocked"] - } + allowedValues: ["locked", "unlocked"], + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { - name: "String" - } + name: "String", + }, }, isCurrentVersion: { serializedName: "x-ms-is-current-version", xmlName: "x-ms-is-current-version", type: { - name: "Boolean" - } + name: "Boolean", + }, }, acceptRanges: { serializedName: "accept-ranges", xmlName: "accept-ranges", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, blobCommittedBlockCount: { serializedName: "x-ms-blob-committed-block-count", xmlName: "x-ms-blob-committed-block-count", type: { - name: "Number" - } + name: "Number", + }, }, isServerEncrypted: { serializedName: "x-ms-server-encrypted", xmlName: "x-ms-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { - name: "String" - } + name: "String", + }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, blobContentMD5: { serializedName: "x-ms-blob-content-md5", xmlName: "x-ms-blob-content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, tagCount: { serializedName: "x-ms-tag-count", xmlName: "x-ms-tag-count", type: { - name: "Number" - } + name: "Number", + }, }, isSealed: { serializedName: "x-ms-blob-sealed", xmlName: "x-ms-blob-sealed", type: { - name: "Boolean" - } + name: "Boolean", + }, }, lastAccessed: { serializedName: "x-ms-last-access-time", xmlName: "x-ms-last-access-time", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, immutabilityPolicyExpiresOn: { serializedName: "x-ms-immutability-policy-until-date", xmlName: "x-ms-immutability-policy-until-date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, immutabilityPolicyMode: { serializedName: "x-ms-immutability-policy-mode", xmlName: "x-ms-immutability-policy-mode", type: { name: "Enum", - allowedValues: ["Mutable", "Unlocked", "Locked"] - } + allowedValues: ["Mutable", "Unlocked", "Locked"], + }, }, legalHold: { serializedName: "x-ms-legal-hold", xmlName: "x-ms-legal-hold", type: { - name: "Boolean" - } + name: "Boolean", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } + name: "String", + }, }, contentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { - name: "ByteArray" - } - } - } - } + name: "ByteArray", + }, + }, + }, + }, }; const BlobDownloadExceptionHeaders = { serializedName: "Blob_downloadExceptionHeaders", @@ -16008,11 +12400,11 @@ const BlobDownloadExceptionHeaders = { serializedName: "x-ms-error-code", 
xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobGetPropertiesHeaders = { serializedName: "Blob_getPropertiesHeaders", @@ -16024,113 +12416,113 @@ const BlobGetPropertiesHeaders = { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, createdOn: { serializedName: "x-ms-creation-time", xmlName: "x-ms-creation-time", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, metadata: { serializedName: "x-ms-meta", + headerCollectionPrefix: "x-ms-meta-", xmlName: "x-ms-meta", type: { name: "Dictionary", - value: { type: { name: "String" } } + value: { type: { name: "String" } }, }, - headerCollectionPrefix: "x-ms-meta-" }, objectReplicationPolicyId: { serializedName: "x-ms-or-policy-id", xmlName: "x-ms-or-policy-id", type: { - name: "String" - } + name: "String", + }, }, objectReplicationRules: { serializedName: "x-ms-or", + headerCollectionPrefix: "x-ms-or-", xmlName: "x-ms-or", type: { name: "Dictionary", - value: { type: { name: "String" } } + value: { type: { name: "String" } }, }, - headerCollectionPrefix: "x-ms-or-" }, blobType: { serializedName: "x-ms-blob-type", xmlName: "x-ms-blob-type", type: { name: "Enum", - allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] - } + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"], + }, }, copyCompletedOn: { serializedName: "x-ms-copy-completion-time", xmlName: "x-ms-copy-completion-time", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, copyStatusDescription: { serializedName: "x-ms-copy-status-description", xmlName: "x-ms-copy-status-description", type: { - name: "String" - } + name: "String", + }, }, copyId: { serializedName: "x-ms-copy-id", xmlName: "x-ms-copy-id", type: { - name: "String" - } + name: "String", + }, }, copyProgress: { serializedName: "x-ms-copy-progress", xmlName: "x-ms-copy-progress", type: { - name: "String" - } + name: "String", + }, }, copySource: { serializedName: "x-ms-copy-source", xmlName: "x-ms-copy-source", type: { - name: "String" - } + name: "String", + }, }, copyStatus: { serializedName: "x-ms-copy-status", xmlName: "x-ms-copy-status", type: { name: "Enum", - allowedValues: ["pending", "success", "aborted", "failed"] - } + allowedValues: ["pending", "success", "aborted", "failed"], + }, }, isIncrementalCopy: { serializedName: "x-ms-incremental-copy", xmlName: "x-ms-incremental-copy", type: { - name: "Boolean" - } + name: "Boolean", + }, }, destinationSnapshot: { serializedName: "x-ms-copy-destination-snapshot", xmlName: "x-ms-copy-destination-snapshot", type: { - name: "String" - } + name: "String", + }, }, leaseDuration: { serializedName: "x-ms-lease-duration", xmlName: "x-ms-lease-duration", type: { name: "Enum", - allowedValues: ["infinite", "fixed"] - } + allowedValues: ["infinite", "fixed"], + }, }, leaseState: { serializedName: "x-ms-lease-state", @@ -16142,253 +12534,253 @@ const BlobGetPropertiesHeaders = { "leased", "expired", "breaking", - "broken" - ] - } + "broken", + ], + }, }, leaseStatus: { serializedName: "x-ms-lease-status", xmlName: "x-ms-lease-status", type: { name: "Enum", - allowedValues: ["locked", "unlocked"] - } + allowedValues: ["locked", "unlocked"], + }, }, contentLength: { serializedName: "content-length", xmlName: "content-length", type: { - name: "Number" - } + name: "Number", + }, }, contentType: { serializedName: "content-type", xmlName: "content-type", type: { - name: "String" - } + name: 
"String", + }, }, etag: { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, contentEncoding: { serializedName: "content-encoding", xmlName: "content-encoding", type: { - name: "String" - } + name: "String", + }, }, contentDisposition: { serializedName: "content-disposition", xmlName: "content-disposition", type: { - name: "String" - } + name: "String", + }, }, contentLanguage: { serializedName: "content-language", xmlName: "content-language", type: { - name: "String" - } + name: "String", + }, }, cacheControl: { serializedName: "cache-control", xmlName: "cache-control", type: { - name: "String" - } + name: "String", + }, }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { - name: "Number" - } + name: "Number", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, acceptRanges: { serializedName: "accept-ranges", xmlName: "accept-ranges", type: { - name: "String" - } + name: "String", + }, }, blobCommittedBlockCount: { serializedName: "x-ms-blob-committed-block-count", xmlName: "x-ms-blob-committed-block-count", type: { - name: "Number" - } + name: "Number", + }, }, isServerEncrypted: { serializedName: "x-ms-server-encrypted", xmlName: "x-ms-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { - name: "String" - } + name: "String", + }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, accessTier: { serializedName: "x-ms-access-tier", xmlName: "x-ms-access-tier", type: { - name: "String" - } + name: "String", + }, }, accessTierInferred: { serializedName: "x-ms-access-tier-inferred", xmlName: "x-ms-access-tier-inferred", type: { - name: "Boolean" - } + name: "Boolean", + }, }, archiveStatus: { serializedName: "x-ms-archive-status", xmlName: "x-ms-archive-status", type: { - name: "String" - } + name: "String", + }, }, accessTierChangedOn: { serializedName: "x-ms-access-tier-change-time", xmlName: "x-ms-access-tier-change-time", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { - name: "String" - } + name: "String", + }, }, isCurrentVersion: { serializedName: "x-ms-is-current-version", xmlName: "x-ms-is-current-version", type: { - name: "Boolean" - } + name: "Boolean", + }, }, tagCount: { serializedName: "x-ms-tag-count", xmlName: "x-ms-tag-count", type: { - name: "Number" - } + name: "Number", + }, }, expiresOn: { serializedName: "x-ms-expiry-time", xmlName: "x-ms-expiry-time", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, isSealed: { serializedName: "x-ms-blob-sealed", xmlName: "x-ms-blob-sealed", type: { - name: 
"Boolean" - } + name: "Boolean", + }, }, rehydratePriority: { serializedName: "x-ms-rehydrate-priority", xmlName: "x-ms-rehydrate-priority", type: { name: "Enum", - allowedValues: ["High", "Standard"] - } + allowedValues: ["High", "Standard"], + }, }, lastAccessed: { serializedName: "x-ms-last-access-time", xmlName: "x-ms-last-access-time", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, immutabilityPolicyExpiresOn: { serializedName: "x-ms-immutability-policy-until-date", xmlName: "x-ms-immutability-policy-until-date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, immutabilityPolicyMode: { serializedName: "x-ms-immutability-policy-mode", xmlName: "x-ms-immutability-policy-mode", type: { name: "Enum", - allowedValues: ["Mutable", "Unlocked", "Locked"] - } + allowedValues: ["Mutable", "Unlocked", "Locked"], + }, }, legalHold: { serializedName: "x-ms-legal-hold", xmlName: "x-ms-legal-hold", type: { - name: "Boolean" - } + name: "Boolean", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobGetPropertiesExceptionHeaders = { serializedName: "Blob_getPropertiesExceptionHeaders", @@ -16400,11 +12792,11 @@ const BlobGetPropertiesExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobDeleteHeaders = { serializedName: "Blob_deleteHeaders", @@ -16416,39 +12808,39 @@ const BlobDeleteHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobDeleteExceptionHeaders = { serializedName: "Blob_deleteExceptionHeaders", @@ -16460,11 +12852,11 @@ const BlobDeleteExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobUndeleteHeaders = { serializedName: "Blob_undeleteHeaders", @@ -16476,39 +12868,39 @@ const BlobUndeleteHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobUndeleteExceptionHeaders = { serializedName: "Blob_undeleteExceptionHeaders", @@ -16520,11 +12912,11 @@ const BlobUndeleteExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: 
"x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobSetExpiryHeaders = { serializedName: "Blob_setExpiryHeaders", @@ -16536,46 +12928,46 @@ const BlobSetExpiryHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "DateTimeRfc1123", + }, + }, + }, + }, }; const BlobSetExpiryExceptionHeaders = { serializedName: "Blob_setExpiryExceptionHeaders", @@ -16587,11 +12979,11 @@ const BlobSetExpiryExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobSetHttpHeadersHeaders = { serializedName: "Blob_setHttpHeadersHeaders", @@ -16603,60 +12995,60 @@ const BlobSetHttpHeadersHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { - name: "Number" - } + name: "Number", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobSetHttpHeadersExceptionHeaders = { serializedName: "Blob_setHttpHeadersExceptionHeaders", @@ -16668,11 +13060,11 @@ const BlobSetHttpHeadersExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobSetImmutabilityPolicyHeaders = { serializedName: "Blob_setImmutabilityPolicyHeaders", @@ -16684,47 +13076,47 @@ const BlobSetImmutabilityPolicyHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, 
immutabilityPolicyExpiry: { serializedName: "x-ms-immutability-policy-until-date", xmlName: "x-ms-immutability-policy-until-date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, immutabilityPolicyMode: { serializedName: "x-ms-immutability-policy-mode", xmlName: "x-ms-immutability-policy-mode", type: { name: "Enum", - allowedValues: ["Mutable", "Unlocked", "Locked"] - } - } - } - } + allowedValues: ["Mutable", "Unlocked", "Locked"], + }, + }, + }, + }, }; const BlobSetImmutabilityPolicyExceptionHeaders = { serializedName: "Blob_setImmutabilityPolicyExceptionHeaders", @@ -16736,11 +13128,11 @@ const BlobSetImmutabilityPolicyExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobDeleteImmutabilityPolicyHeaders = { serializedName: "Blob_deleteImmutabilityPolicyHeaders", @@ -16752,32 +13144,32 @@ const BlobDeleteImmutabilityPolicyHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "DateTimeRfc1123", + }, + }, + }, + }, }; const BlobDeleteImmutabilityPolicyExceptionHeaders = { serializedName: "Blob_deleteImmutabilityPolicyExceptionHeaders", @@ -16789,11 +13181,11 @@ const BlobDeleteImmutabilityPolicyExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobSetLegalHoldHeaders = { serializedName: "Blob_setLegalHoldHeaders", @@ -16805,39 +13197,39 @@ const BlobSetLegalHoldHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, legalHold: { serializedName: "x-ms-legal-hold", xmlName: "x-ms-legal-hold", type: { - name: "Boolean" - } - } - } - } + name: "Boolean", + }, + }, + }, + }, }; const BlobSetLegalHoldExceptionHeaders = { serializedName: "Blob_setLegalHoldExceptionHeaders", @@ -16849,11 +13241,11 @@ const BlobSetLegalHoldExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobSetMetadataHeaders = { serializedName: "Blob_setMetadataHeaders", @@ -16865,81 +13257,81 @@ const BlobSetMetadataHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: 
"x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { - name: "String" - } + name: "String", + }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobSetMetadataExceptionHeaders = { serializedName: "Blob_setMetadataExceptionHeaders", @@ -16951,11 +13343,11 @@ const BlobSetMetadataExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobAcquireLeaseHeaders = { serializedName: "Blob_acquireLeaseHeaders", @@ -16967,53 +13359,53 @@ const BlobAcquireLeaseHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, leaseId: { serializedName: "x-ms-lease-id", xmlName: "x-ms-lease-id", type: { - name: "String" - } + name: "String", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "DateTimeRfc1123", + }, + }, + }, + }, }; const BlobAcquireLeaseExceptionHeaders = { serializedName: "Blob_acquireLeaseExceptionHeaders", @@ -17025,11 +13417,11 @@ const BlobAcquireLeaseExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobReleaseLeaseHeaders = { serializedName: "Blob_releaseLeaseHeaders", @@ -17041,46 +13433,46 @@ const BlobReleaseLeaseHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - 
} + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "DateTimeRfc1123", + }, + }, + }, + }, }; const BlobReleaseLeaseExceptionHeaders = { serializedName: "Blob_releaseLeaseExceptionHeaders", @@ -17092,11 +13484,11 @@ const BlobReleaseLeaseExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobRenewLeaseHeaders = { serializedName: "Blob_renewLeaseHeaders", @@ -17108,53 +13500,53 @@ const BlobRenewLeaseHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, leaseId: { serializedName: "x-ms-lease-id", xmlName: "x-ms-lease-id", type: { - name: "String" - } + name: "String", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "DateTimeRfc1123", + }, + }, + }, + }, }; const BlobRenewLeaseExceptionHeaders = { serializedName: "Blob_renewLeaseExceptionHeaders", @@ -17166,11 +13558,11 @@ const BlobRenewLeaseExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobChangeLeaseHeaders = { serializedName: "Blob_changeLeaseHeaders", @@ -17182,53 +13574,53 @@ const BlobChangeLeaseHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, leaseId: { serializedName: "x-ms-lease-id", xmlName: "x-ms-lease-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "DateTimeRfc1123", + }, + }, + }, + }, }; const BlobChangeLeaseExceptionHeaders = { serializedName: "Blob_changeLeaseExceptionHeaders", @@ -17240,11 +13632,11 @@ const BlobChangeLeaseExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobBreakLeaseHeaders = { serializedName: "Blob_breakLeaseHeaders", @@ -17256,53 +13648,53 @@ const BlobBreakLeaseHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + 
name: "DateTimeRfc1123", + }, }, leaseTime: { serializedName: "x-ms-lease-time", xmlName: "x-ms-lease-time", type: { - name: "Number" - } + name: "Number", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "DateTimeRfc1123", + }, + }, + }, + }, }; const BlobBreakLeaseExceptionHeaders = { serializedName: "Blob_breakLeaseExceptionHeaders", @@ -17314,11 +13706,11 @@ const BlobBreakLeaseExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobCreateSnapshotHeaders = { serializedName: "Blob_createSnapshotHeaders", @@ -17330,74 +13722,74 @@ const BlobCreateSnapshotHeaders = { serializedName: "x-ms-snapshot", xmlName: "x-ms-snapshot", type: { - name: "String" - } + name: "String", + }, }, etag: { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobCreateSnapshotExceptionHeaders = { serializedName: "Blob_createSnapshotExceptionHeaders", @@ -17409,11 +13801,11 @@ const BlobCreateSnapshotExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobStartCopyFromURLHeaders = { serializedName: "Blob_startCopyFromURLHeaders", @@ -17425,75 +13817,75 @@ const BlobStartCopyFromURLHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", 
xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, copyId: { serializedName: "x-ms-copy-id", xmlName: "x-ms-copy-id", type: { - name: "String" - } + name: "String", + }, }, copyStatus: { serializedName: "x-ms-copy-status", xmlName: "x-ms-copy-status", type: { name: "Enum", - allowedValues: ["pending", "success", "aborted", "failed"] - } + allowedValues: ["pending", "success", "aborted", "failed"], + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobStartCopyFromURLExceptionHeaders = { serializedName: "Blob_startCopyFromURLExceptionHeaders", @@ -17505,11 +13897,11 @@ const BlobStartCopyFromURLExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobCopyFromURLHeaders = { serializedName: "Blob_copyFromURLHeaders", @@ -17521,96 +13913,96 @@ const BlobCopyFromURLHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, copyId: { serializedName: "x-ms-copy-id", xmlName: "x-ms-copy-id", type: { - name: "String" - } + name: "String", + }, }, copyStatus: { defaultValue: "success", isConstant: true, serializedName: "x-ms-copy-status", type: { - name: "String" - } + name: "String", + }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, xMsContentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobCopyFromURLExceptionHeaders = { serializedName: "Blob_copyFromURLExceptionHeaders", @@ -17622,11 +14014,11 @@ const BlobCopyFromURLExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobAbortCopyFromURLHeaders = { serializedName: "Blob_abortCopyFromURLHeaders", @@ -17638,39 +14030,39 @@ const BlobAbortCopyFromURLHeaders = { serializedName: 
"x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobAbortCopyFromURLExceptionHeaders = { serializedName: "Blob_abortCopyFromURLExceptionHeaders", @@ -17682,11 +14074,11 @@ const BlobAbortCopyFromURLExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobSetTierHeaders = { serializedName: "Blob_setTierHeaders", @@ -17698,32 +14090,32 @@ const BlobSetTierHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobSetTierExceptionHeaders = { serializedName: "Blob_setTierExceptionHeaders", @@ -17735,11 +14127,11 @@ const BlobSetTierExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobGetAccountInfoHeaders = { serializedName: "Blob_getAccountInfoHeaders", @@ -17751,29 +14143,29 @@ const BlobGetAccountInfoHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, skuName: { serializedName: "x-ms-sku-name", @@ -17785,9 +14177,9 @@ const BlobGetAccountInfoHeaders = { "Standard_GRS", "Standard_RAGRS", "Standard_ZRS", - "Premium_LRS" - ] - } + "Premium_LRS", + ], + }, }, accountKind: { serializedName: "x-ms-account-kind", @@ -17799,12 +14191,12 @@ const BlobGetAccountInfoHeaders = { "BlobStorage", "StorageV2", "FileStorage", - "BlockBlobStorage" - ] - } - } - } - } + "BlockBlobStorage", + ], + }, + }, + }, + }, }; const BlobGetAccountInfoExceptionHeaders = { serializedName: "Blob_getAccountInfoExceptionHeaders", @@ -17816,11 +14208,11 @@ const BlobGetAccountInfoExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobQueryHeaders = { serializedName: "Blob_queryHeaders", @@ -17832,145 +14224,146 @@ const BlobQueryHeaders = { serializedName: "last-modified", xmlName: "last-modified", type: { - name: 
"DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, metadata: { serializedName: "x-ms-meta", + headerCollectionPrefix: "x-ms-meta-", xmlName: "x-ms-meta", type: { name: "Dictionary", - value: { type: { name: "String" } } - } + value: { type: { name: "String" } }, + }, }, contentLength: { serializedName: "content-length", xmlName: "content-length", type: { - name: "Number" - } + name: "Number", + }, }, contentType: { serializedName: "content-type", xmlName: "content-type", type: { - name: "String" - } + name: "String", + }, }, contentRange: { serializedName: "content-range", xmlName: "content-range", type: { - name: "String" - } + name: "String", + }, }, etag: { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, contentEncoding: { serializedName: "content-encoding", xmlName: "content-encoding", type: { - name: "String" - } + name: "String", + }, }, cacheControl: { serializedName: "cache-control", xmlName: "cache-control", type: { - name: "String" - } + name: "String", + }, }, contentDisposition: { serializedName: "content-disposition", xmlName: "content-disposition", type: { - name: "String" - } + name: "String", + }, }, contentLanguage: { serializedName: "content-language", xmlName: "content-language", type: { - name: "String" - } + name: "String", + }, }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { - name: "Number" - } + name: "Number", + }, }, blobType: { serializedName: "x-ms-blob-type", xmlName: "x-ms-blob-type", type: { name: "Enum", - allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] - } + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"], + }, }, copyCompletionTime: { serializedName: "x-ms-copy-completion-time", xmlName: "x-ms-copy-completion-time", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, copyStatusDescription: { serializedName: "x-ms-copy-status-description", xmlName: "x-ms-copy-status-description", type: { - name: "String" - } + name: "String", + }, }, copyId: { serializedName: "x-ms-copy-id", xmlName: "x-ms-copy-id", type: { - name: "String" - } + name: "String", + }, }, copyProgress: { serializedName: "x-ms-copy-progress", xmlName: "x-ms-copy-progress", type: { - name: "String" - } + name: "String", + }, }, copySource: { serializedName: "x-ms-copy-source", xmlName: "x-ms-copy-source", type: { - name: "String" - } + name: "String", + }, }, copyStatus: { serializedName: "x-ms-copy-status", xmlName: "x-ms-copy-status", type: { name: "Enum", - allowedValues: ["pending", "success", "aborted", "failed"] - } + allowedValues: ["pending", "success", "aborted", "failed"], + }, }, leaseDuration: { serializedName: "x-ms-lease-duration", xmlName: "x-ms-lease-duration", type: { name: "Enum", - allowedValues: ["infinite", "fixed"] - } + allowedValues: ["infinite", "fixed"], + }, }, leaseState: { serializedName: "x-ms-lease-state", @@ -17982,104 +14375,104 @@ const BlobQueryHeaders = { "leased", "expired", "breaking", - "broken" - ] - } + "broken", + ], + }, }, leaseStatus: { serializedName: "x-ms-lease-status", xmlName: "x-ms-lease-status", type: { name: "Enum", - allowedValues: ["locked", "unlocked"] - } + allowedValues: ["locked", "unlocked"], + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, 
requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, acceptRanges: { serializedName: "accept-ranges", xmlName: "accept-ranges", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, blobCommittedBlockCount: { serializedName: "x-ms-blob-committed-block-count", xmlName: "x-ms-blob-committed-block-count", type: { - name: "Number" - } + name: "Number", + }, }, isServerEncrypted: { serializedName: "x-ms-server-encrypted", xmlName: "x-ms-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { - name: "String" - } + name: "String", + }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, blobContentMD5: { serializedName: "x-ms-blob-content-md5", xmlName: "x-ms-blob-content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } + name: "String", + }, }, contentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { - name: "ByteArray" - } - } - } - } + name: "ByteArray", + }, + }, + }, + }, }; const BlobQueryExceptionHeaders = { serializedName: "Blob_queryExceptionHeaders", @@ -18091,11 +14484,11 @@ const BlobQueryExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobGetTagsHeaders = { serializedName: "Blob_getTagsHeaders", @@ -18107,39 +14500,39 @@ const BlobGetTagsHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobGetTagsExceptionHeaders = { serializedName: "Blob_getTagsExceptionHeaders", @@ -18151,11 +14544,11 @@ const BlobGetTagsExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobSetTagsHeaders = { serializedName: "Blob_setTagsHeaders", @@ -18167,39 +14560,39 @@ const BlobSetTagsHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: 
"DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlobSetTagsExceptionHeaders = { serializedName: "Blob_setTagsExceptionHeaders", @@ -18211,11 +14604,11 @@ const BlobSetTagsExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const PageBlobCreateHeaders = { serializedName: "PageBlob_createHeaders", @@ -18227,88 +14620,88 @@ const PageBlobCreateHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { - name: "String" - } + name: "String", + }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const PageBlobCreateExceptionHeaders = { serializedName: "PageBlob_createExceptionHeaders", @@ -18320,11 +14713,11 @@ const PageBlobCreateExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const PageBlobUploadPagesHeaders = { serializedName: "PageBlob_uploadPagesHeaders", @@ -18336,95 +14729,95 @@ const PageBlobUploadPagesHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, xMsContentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { - name: "Number" - } + name: "Number", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - 
name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { - name: "String" - } + name: "String", + }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const PageBlobUploadPagesExceptionHeaders = { serializedName: "PageBlob_uploadPagesExceptionHeaders", @@ -18436,11 +14829,11 @@ const PageBlobUploadPagesExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const PageBlobClearPagesHeaders = { serializedName: "PageBlob_clearPagesHeaders", @@ -18452,74 +14845,74 @@ const PageBlobClearPagesHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, xMsContentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { - name: "Number" - } + name: "Number", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const PageBlobClearPagesExceptionHeaders = { serializedName: "PageBlob_clearPagesExceptionHeaders", @@ -18531,11 +14924,11 @@ const PageBlobClearPagesExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const PageBlobUploadPagesFromURLHeaders = { serializedName: "PageBlob_uploadPagesFromURLHeaders", @@ -18547,88 +14940,88 @@ const PageBlobUploadPagesFromURLHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { 
- name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, xMsContentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { - name: "Number" - } + name: "Number", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { - name: "String" - } + name: "String", + }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const PageBlobUploadPagesFromURLExceptionHeaders = { serializedName: "PageBlob_uploadPagesFromURLExceptionHeaders", @@ -18640,11 +15033,11 @@ const PageBlobUploadPagesFromURLExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const PageBlobGetPageRangesHeaders = { serializedName: "PageBlob_getPageRangesHeaders", @@ -18656,60 +15049,60 @@ const PageBlobGetPageRangesHeaders = { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, etag: { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, blobContentLength: { serializedName: "x-ms-blob-content-length", xmlName: "x-ms-blob-content-length", type: { - name: "Number" - } + name: "Number", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const PageBlobGetPageRangesExceptionHeaders = { serializedName: "PageBlob_getPageRangesExceptionHeaders", @@ -18721,11 +15114,11 @@ const PageBlobGetPageRangesExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const PageBlobGetPageRangesDiffHeaders = { serializedName: "PageBlob_getPageRangesDiffHeaders", @@ -18737,60 +15130,60 @@ const 
PageBlobGetPageRangesDiffHeaders = { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, etag: { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, blobContentLength: { serializedName: "x-ms-blob-content-length", xmlName: "x-ms-blob-content-length", type: { - name: "Number" - } + name: "Number", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const PageBlobGetPageRangesDiffExceptionHeaders = { serializedName: "PageBlob_getPageRangesDiffExceptionHeaders", @@ -18802,11 +15195,11 @@ const PageBlobGetPageRangesDiffExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const PageBlobResizeHeaders = { serializedName: "PageBlob_resizeHeaders", @@ -18818,60 +15211,60 @@ const PageBlobResizeHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { - name: "Number" - } + name: "Number", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const PageBlobResizeExceptionHeaders = { serializedName: "PageBlob_resizeExceptionHeaders", @@ -18883,11 +15276,11 @@ const PageBlobResizeExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const PageBlobUpdateSequenceNumberHeaders = { serializedName: "PageBlob_updateSequenceNumberHeaders", @@ -18899,60 +15292,60 @@ const PageBlobUpdateSequenceNumberHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { - name: "Number" - } + name: "Number", + }, }, 
clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const PageBlobUpdateSequenceNumberExceptionHeaders = { serializedName: "PageBlob_updateSequenceNumberExceptionHeaders", @@ -18964,11 +15357,11 @@ const PageBlobUpdateSequenceNumberExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const PageBlobCopyIncrementalHeaders = { serializedName: "PageBlob_copyIncrementalHeaders", @@ -18980,68 +15373,68 @@ const PageBlobCopyIncrementalHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, copyId: { serializedName: "x-ms-copy-id", xmlName: "x-ms-copy-id", type: { - name: "String" - } + name: "String", + }, }, copyStatus: { serializedName: "x-ms-copy-status", xmlName: "x-ms-copy-status", type: { name: "Enum", - allowedValues: ["pending", "success", "aborted", "failed"] - } + allowedValues: ["pending", "success", "aborted", "failed"], + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const PageBlobCopyIncrementalExceptionHeaders = { serializedName: "PageBlob_copyIncrementalExceptionHeaders", @@ -19053,11 +15446,11 @@ const PageBlobCopyIncrementalExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const AppendBlobCreateHeaders = { serializedName: "AppendBlob_createHeaders", @@ -19069,88 +15462,88 @@ const AppendBlobCreateHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { 
- name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { - name: "String" - } + name: "String", + }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const AppendBlobCreateExceptionHeaders = { serializedName: "AppendBlob_createExceptionHeaders", @@ -19162,11 +15555,11 @@ const AppendBlobCreateExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const AppendBlobAppendBlockHeaders = { serializedName: "AppendBlob_appendBlockHeaders", @@ -19178,102 +15571,102 @@ const AppendBlobAppendBlockHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, xMsContentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, blobAppendOffset: { serializedName: "x-ms-blob-append-offset", xmlName: "x-ms-blob-append-offset", type: { - name: "String" - } + name: "String", + }, }, blobCommittedBlockCount: { serializedName: "x-ms-blob-committed-block-count", xmlName: "x-ms-blob-committed-block-count", type: { - name: "Number" - } + name: "Number", + }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { - name: "String" - } + name: "String", + }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const 
AppendBlobAppendBlockExceptionHeaders = { serializedName: "AppendBlob_appendBlockExceptionHeaders", @@ -19285,11 +15678,11 @@ const AppendBlobAppendBlockExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const AppendBlobAppendBlockFromUrlHeaders = { serializedName: "AppendBlob_appendBlockFromUrlHeaders", @@ -19301,95 +15694,95 @@ const AppendBlobAppendBlockFromUrlHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, xMsContentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, blobAppendOffset: { serializedName: "x-ms-blob-append-offset", xmlName: "x-ms-blob-append-offset", type: { - name: "String" - } + name: "String", + }, }, blobCommittedBlockCount: { serializedName: "x-ms-blob-committed-block-count", xmlName: "x-ms-blob-committed-block-count", type: { - name: "Number" - } + name: "Number", + }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { - name: "String" - } + name: "String", + }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const AppendBlobAppendBlockFromUrlExceptionHeaders = { serializedName: "AppendBlob_appendBlockFromUrlExceptionHeaders", @@ -19401,11 +15794,11 @@ const AppendBlobAppendBlockFromUrlExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const AppendBlobSealHeaders = { serializedName: "AppendBlob_sealHeaders", @@ -19417,53 +15810,53 @@ const AppendBlobSealHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: 
"DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, isSealed: { serializedName: "x-ms-blob-sealed", xmlName: "x-ms-blob-sealed", type: { - name: "Boolean" - } - } - } - } + name: "Boolean", + }, + }, + }, + }, }; const AppendBlobSealExceptionHeaders = { serializedName: "AppendBlob_sealExceptionHeaders", @@ -19475,11 +15868,11 @@ const AppendBlobSealExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlockBlobUploadHeaders = { serializedName: "BlockBlob_uploadHeaders", @@ -19491,88 +15884,88 @@ const BlockBlobUploadHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { - name: "String" - } + name: "String", + }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlockBlobUploadExceptionHeaders = { serializedName: "BlockBlob_uploadExceptionHeaders", @@ -19584,11 +15977,11 @@ const BlockBlobUploadExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlockBlobPutBlobFromUrlHeaders = { serializedName: "BlockBlob_putBlobFromUrlHeaders", @@ -19600,88 +15993,88 @@ const BlockBlobPutBlobFromUrlHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: 
"String", + }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { - name: "String" - } + name: "String", + }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlockBlobPutBlobFromUrlExceptionHeaders = { serializedName: "BlockBlob_putBlobFromUrlExceptionHeaders", @@ -19693,11 +16086,11 @@ const BlockBlobPutBlobFromUrlExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlockBlobStageBlockHeaders = { serializedName: "BlockBlob_stageBlockHeaders", @@ -19709,74 +16102,74 @@ const BlockBlobStageBlockHeaders = { serializedName: "content-md5", xmlName: "content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, xMsContentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { - name: "String" - } + name: "String", + }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlockBlobStageBlockExceptionHeaders = { serializedName: "BlockBlob_stageBlockExceptionHeaders", @@ -19788,11 +16181,11 @@ const BlockBlobStageBlockExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlockBlobStageBlockFromURLHeaders = { serializedName: "BlockBlob_stageBlockFromURLHeaders", @@ -19804,74 +16197,74 @@ const BlockBlobStageBlockFromURLHeaders = { serializedName: "content-md5", xmlName: "content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, xMsContentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { - name: "ByteArray" - } + name: 
"ByteArray", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { - name: "String" - } + name: "String", + }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlockBlobStageBlockFromURLExceptionHeaders = { serializedName: "BlockBlob_stageBlockFromURLExceptionHeaders", @@ -19883,11 +16276,11 @@ const BlockBlobStageBlockFromURLExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlockBlobCommitBlockListHeaders = { serializedName: "BlockBlob_commitBlockListHeaders", @@ -19899,95 +16292,95 @@ const BlockBlobCommitBlockListHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, xMsContentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { - name: "String" - } + name: "String", + }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const 
BlockBlobCommitBlockListExceptionHeaders = { serializedName: "BlockBlob_commitBlockListExceptionHeaders", @@ -19999,14 +16392,14 @@ const BlockBlobCommitBlockListExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } -}; -const BlockBlobGetBlockListHeaders = { - serializedName: "BlockBlob_getBlockListHeaders", + name: "String", + }, + }, + }, + }, +}; +const BlockBlobGetBlockListHeaders = { + serializedName: "BlockBlob_getBlockListHeaders", type: { name: "Composite", className: "BlockBlobGetBlockListHeaders", @@ -20015,67 +16408,67 @@ const BlockBlobGetBlockListHeaders = { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, etag: { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, contentType: { serializedName: "content-type", xmlName: "content-type", type: { - name: "String" - } + name: "String", + }, }, blobContentLength: { serializedName: "x-ms-blob-content-length", xmlName: "x-ms-blob-content-length", type: { - name: "Number" - } + name: "Number", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; const BlockBlobGetBlockListExceptionHeaders = { serializedName: "BlockBlob_getBlockListExceptionHeaders", @@ -20087,194 +16480,194 @@ const BlockBlobGetBlockListExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; var Mappers = /*#__PURE__*/Object.freeze({ __proto__: null, - BlobServiceProperties: BlobServiceProperties, - Logging: Logging, - RetentionPolicy: RetentionPolicy, - Metrics: Metrics, - CorsRule: CorsRule, - StaticWebsite: StaticWebsite, - StorageError: StorageError, - BlobServiceStatistics: BlobServiceStatistics, - GeoReplication: GeoReplication, - ListContainersSegmentResponse: ListContainersSegmentResponse, - ContainerItem: ContainerItem, - ContainerProperties: ContainerProperties, - KeyInfo: KeyInfo, - UserDelegationKey: UserDelegationKey, - FilterBlobSegment: FilterBlobSegment, - FilterBlobItem: FilterBlobItem, - BlobTags: BlobTags, - BlobTag: BlobTag, - SignedIdentifier: SignedIdentifier, AccessPolicy: AccessPolicy, - ListBlobsFlatSegmentResponse: ListBlobsFlatSegmentResponse, + AppendBlobAppendBlockExceptionHeaders: AppendBlobAppendBlockExceptionHeaders, + AppendBlobAppendBlockFromUrlExceptionHeaders: AppendBlobAppendBlockFromUrlExceptionHeaders, + AppendBlobAppendBlockFromUrlHeaders: AppendBlobAppendBlockFromUrlHeaders, + AppendBlobAppendBlockHeaders: AppendBlobAppendBlockHeaders, + AppendBlobCreateExceptionHeaders: AppendBlobCreateExceptionHeaders, + AppendBlobCreateHeaders: AppendBlobCreateHeaders, + AppendBlobSealExceptionHeaders: AppendBlobSealExceptionHeaders, + AppendBlobSealHeaders: AppendBlobSealHeaders, + ArrowConfiguration: 
ArrowConfiguration, + ArrowField: ArrowField, + BlobAbortCopyFromURLExceptionHeaders: BlobAbortCopyFromURLExceptionHeaders, + BlobAbortCopyFromURLHeaders: BlobAbortCopyFromURLHeaders, + BlobAcquireLeaseExceptionHeaders: BlobAcquireLeaseExceptionHeaders, + BlobAcquireLeaseHeaders: BlobAcquireLeaseHeaders, + BlobBreakLeaseExceptionHeaders: BlobBreakLeaseExceptionHeaders, + BlobBreakLeaseHeaders: BlobBreakLeaseHeaders, + BlobChangeLeaseExceptionHeaders: BlobChangeLeaseExceptionHeaders, + BlobChangeLeaseHeaders: BlobChangeLeaseHeaders, + BlobCopyFromURLExceptionHeaders: BlobCopyFromURLExceptionHeaders, + BlobCopyFromURLHeaders: BlobCopyFromURLHeaders, + BlobCreateSnapshotExceptionHeaders: BlobCreateSnapshotExceptionHeaders, + BlobCreateSnapshotHeaders: BlobCreateSnapshotHeaders, + BlobDeleteExceptionHeaders: BlobDeleteExceptionHeaders, + BlobDeleteHeaders: BlobDeleteHeaders, + BlobDeleteImmutabilityPolicyExceptionHeaders: BlobDeleteImmutabilityPolicyExceptionHeaders, + BlobDeleteImmutabilityPolicyHeaders: BlobDeleteImmutabilityPolicyHeaders, + BlobDownloadExceptionHeaders: BlobDownloadExceptionHeaders, + BlobDownloadHeaders: BlobDownloadHeaders, BlobFlatListSegment: BlobFlatListSegment, + BlobGetAccountInfoExceptionHeaders: BlobGetAccountInfoExceptionHeaders, + BlobGetAccountInfoHeaders: BlobGetAccountInfoHeaders, + BlobGetPropertiesExceptionHeaders: BlobGetPropertiesExceptionHeaders, + BlobGetPropertiesHeaders: BlobGetPropertiesHeaders, + BlobGetTagsExceptionHeaders: BlobGetTagsExceptionHeaders, + BlobGetTagsHeaders: BlobGetTagsHeaders, + BlobHierarchyListSegment: BlobHierarchyListSegment, BlobItemInternal: BlobItemInternal, BlobName: BlobName, - BlobPropertiesInternal: BlobPropertiesInternal, - ListBlobsHierarchySegmentResponse: ListBlobsHierarchySegmentResponse, - BlobHierarchyListSegment: BlobHierarchyListSegment, BlobPrefix: BlobPrefix, - BlockLookupList: BlockLookupList, - BlockList: BlockList, + BlobPropertiesInternal: BlobPropertiesInternal, + BlobQueryExceptionHeaders: BlobQueryExceptionHeaders, + BlobQueryHeaders: BlobQueryHeaders, + BlobReleaseLeaseExceptionHeaders: BlobReleaseLeaseExceptionHeaders, + BlobReleaseLeaseHeaders: BlobReleaseLeaseHeaders, + BlobRenewLeaseExceptionHeaders: BlobRenewLeaseExceptionHeaders, + BlobRenewLeaseHeaders: BlobRenewLeaseHeaders, + BlobServiceProperties: BlobServiceProperties, + BlobServiceStatistics: BlobServiceStatistics, + BlobSetExpiryExceptionHeaders: BlobSetExpiryExceptionHeaders, + BlobSetExpiryHeaders: BlobSetExpiryHeaders, + BlobSetHttpHeadersExceptionHeaders: BlobSetHttpHeadersExceptionHeaders, + BlobSetHttpHeadersHeaders: BlobSetHttpHeadersHeaders, + BlobSetImmutabilityPolicyExceptionHeaders: BlobSetImmutabilityPolicyExceptionHeaders, + BlobSetImmutabilityPolicyHeaders: BlobSetImmutabilityPolicyHeaders, + BlobSetLegalHoldExceptionHeaders: BlobSetLegalHoldExceptionHeaders, + BlobSetLegalHoldHeaders: BlobSetLegalHoldHeaders, + BlobSetMetadataExceptionHeaders: BlobSetMetadataExceptionHeaders, + BlobSetMetadataHeaders: BlobSetMetadataHeaders, + BlobSetTagsExceptionHeaders: BlobSetTagsExceptionHeaders, + BlobSetTagsHeaders: BlobSetTagsHeaders, + BlobSetTierExceptionHeaders: BlobSetTierExceptionHeaders, + BlobSetTierHeaders: BlobSetTierHeaders, + BlobStartCopyFromURLExceptionHeaders: BlobStartCopyFromURLExceptionHeaders, + BlobStartCopyFromURLHeaders: BlobStartCopyFromURLHeaders, + BlobTag: BlobTag, + BlobTags: BlobTags, + BlobUndeleteExceptionHeaders: BlobUndeleteExceptionHeaders, + BlobUndeleteHeaders: BlobUndeleteHeaders, Block: Block, - 
PageList: PageList, - PageRange: PageRange, + BlockBlobCommitBlockListExceptionHeaders: BlockBlobCommitBlockListExceptionHeaders, + BlockBlobCommitBlockListHeaders: BlockBlobCommitBlockListHeaders, + BlockBlobGetBlockListExceptionHeaders: BlockBlobGetBlockListExceptionHeaders, + BlockBlobGetBlockListHeaders: BlockBlobGetBlockListHeaders, + BlockBlobPutBlobFromUrlExceptionHeaders: BlockBlobPutBlobFromUrlExceptionHeaders, + BlockBlobPutBlobFromUrlHeaders: BlockBlobPutBlobFromUrlHeaders, + BlockBlobStageBlockExceptionHeaders: BlockBlobStageBlockExceptionHeaders, + BlockBlobStageBlockFromURLExceptionHeaders: BlockBlobStageBlockFromURLExceptionHeaders, + BlockBlobStageBlockFromURLHeaders: BlockBlobStageBlockFromURLHeaders, + BlockBlobStageBlockHeaders: BlockBlobStageBlockHeaders, + BlockBlobUploadExceptionHeaders: BlockBlobUploadExceptionHeaders, + BlockBlobUploadHeaders: BlockBlobUploadHeaders, + BlockList: BlockList, + BlockLookupList: BlockLookupList, ClearRange: ClearRange, - QueryRequest: QueryRequest, - QuerySerialization: QuerySerialization, - QueryFormat: QueryFormat, - DelimitedTextConfiguration: DelimitedTextConfiguration, - JsonTextConfiguration: JsonTextConfiguration, - ArrowConfiguration: ArrowConfiguration, - ArrowField: ArrowField, - ServiceSetPropertiesHeaders: ServiceSetPropertiesHeaders, - ServiceSetPropertiesExceptionHeaders: ServiceSetPropertiesExceptionHeaders, - ServiceGetPropertiesHeaders: ServiceGetPropertiesHeaders, - ServiceGetPropertiesExceptionHeaders: ServiceGetPropertiesExceptionHeaders, - ServiceGetStatisticsHeaders: ServiceGetStatisticsHeaders, - ServiceGetStatisticsExceptionHeaders: ServiceGetStatisticsExceptionHeaders, - ServiceListContainersSegmentHeaders: ServiceListContainersSegmentHeaders, - ServiceListContainersSegmentExceptionHeaders: ServiceListContainersSegmentExceptionHeaders, - ServiceGetUserDelegationKeyHeaders: ServiceGetUserDelegationKeyHeaders, - ServiceGetUserDelegationKeyExceptionHeaders: ServiceGetUserDelegationKeyExceptionHeaders, - ServiceGetAccountInfoHeaders: ServiceGetAccountInfoHeaders, - ServiceGetAccountInfoExceptionHeaders: ServiceGetAccountInfoExceptionHeaders, - ServiceSubmitBatchHeaders: ServiceSubmitBatchHeaders, - ServiceSubmitBatchExceptionHeaders: ServiceSubmitBatchExceptionHeaders, - ServiceFilterBlobsHeaders: ServiceFilterBlobsHeaders, - ServiceFilterBlobsExceptionHeaders: ServiceFilterBlobsExceptionHeaders, - ContainerCreateHeaders: ContainerCreateHeaders, - ContainerCreateExceptionHeaders: ContainerCreateExceptionHeaders, - ContainerGetPropertiesHeaders: ContainerGetPropertiesHeaders, - ContainerGetPropertiesExceptionHeaders: ContainerGetPropertiesExceptionHeaders, - ContainerDeleteHeaders: ContainerDeleteHeaders, - ContainerDeleteExceptionHeaders: ContainerDeleteExceptionHeaders, - ContainerSetMetadataHeaders: ContainerSetMetadataHeaders, - ContainerSetMetadataExceptionHeaders: ContainerSetMetadataExceptionHeaders, - ContainerGetAccessPolicyHeaders: ContainerGetAccessPolicyHeaders, - ContainerGetAccessPolicyExceptionHeaders: ContainerGetAccessPolicyExceptionHeaders, - ContainerSetAccessPolicyHeaders: ContainerSetAccessPolicyHeaders, - ContainerSetAccessPolicyExceptionHeaders: ContainerSetAccessPolicyExceptionHeaders, - ContainerRestoreHeaders: ContainerRestoreHeaders, - ContainerRestoreExceptionHeaders: ContainerRestoreExceptionHeaders, - ContainerRenameHeaders: ContainerRenameHeaders, - ContainerRenameExceptionHeaders: ContainerRenameExceptionHeaders, - ContainerSubmitBatchHeaders: ContainerSubmitBatchHeaders, - 
ContainerSubmitBatchExceptionHeaders: ContainerSubmitBatchExceptionHeaders, - ContainerFilterBlobsHeaders: ContainerFilterBlobsHeaders, - ContainerFilterBlobsExceptionHeaders: ContainerFilterBlobsExceptionHeaders, - ContainerAcquireLeaseHeaders: ContainerAcquireLeaseHeaders, ContainerAcquireLeaseExceptionHeaders: ContainerAcquireLeaseExceptionHeaders, - ContainerReleaseLeaseHeaders: ContainerReleaseLeaseHeaders, - ContainerReleaseLeaseExceptionHeaders: ContainerReleaseLeaseExceptionHeaders, - ContainerRenewLeaseHeaders: ContainerRenewLeaseHeaders, - ContainerRenewLeaseExceptionHeaders: ContainerRenewLeaseExceptionHeaders, - ContainerBreakLeaseHeaders: ContainerBreakLeaseHeaders, + ContainerAcquireLeaseHeaders: ContainerAcquireLeaseHeaders, ContainerBreakLeaseExceptionHeaders: ContainerBreakLeaseExceptionHeaders, - ContainerChangeLeaseHeaders: ContainerChangeLeaseHeaders, + ContainerBreakLeaseHeaders: ContainerBreakLeaseHeaders, ContainerChangeLeaseExceptionHeaders: ContainerChangeLeaseExceptionHeaders, - ContainerListBlobFlatSegmentHeaders: ContainerListBlobFlatSegmentHeaders, + ContainerChangeLeaseHeaders: ContainerChangeLeaseHeaders, + ContainerCreateExceptionHeaders: ContainerCreateExceptionHeaders, + ContainerCreateHeaders: ContainerCreateHeaders, + ContainerDeleteExceptionHeaders: ContainerDeleteExceptionHeaders, + ContainerDeleteHeaders: ContainerDeleteHeaders, + ContainerFilterBlobsExceptionHeaders: ContainerFilterBlobsExceptionHeaders, + ContainerFilterBlobsHeaders: ContainerFilterBlobsHeaders, + ContainerGetAccessPolicyExceptionHeaders: ContainerGetAccessPolicyExceptionHeaders, + ContainerGetAccessPolicyHeaders: ContainerGetAccessPolicyHeaders, + ContainerGetAccountInfoExceptionHeaders: ContainerGetAccountInfoExceptionHeaders, + ContainerGetAccountInfoHeaders: ContainerGetAccountInfoHeaders, + ContainerGetPropertiesExceptionHeaders: ContainerGetPropertiesExceptionHeaders, + ContainerGetPropertiesHeaders: ContainerGetPropertiesHeaders, + ContainerItem: ContainerItem, ContainerListBlobFlatSegmentExceptionHeaders: ContainerListBlobFlatSegmentExceptionHeaders, - ContainerListBlobHierarchySegmentHeaders: ContainerListBlobHierarchySegmentHeaders, + ContainerListBlobFlatSegmentHeaders: ContainerListBlobFlatSegmentHeaders, ContainerListBlobHierarchySegmentExceptionHeaders: ContainerListBlobHierarchySegmentExceptionHeaders, - ContainerGetAccountInfoHeaders: ContainerGetAccountInfoHeaders, - ContainerGetAccountInfoExceptionHeaders: ContainerGetAccountInfoExceptionHeaders, - BlobDownloadHeaders: BlobDownloadHeaders, - BlobDownloadExceptionHeaders: BlobDownloadExceptionHeaders, - BlobGetPropertiesHeaders: BlobGetPropertiesHeaders, - BlobGetPropertiesExceptionHeaders: BlobGetPropertiesExceptionHeaders, - BlobDeleteHeaders: BlobDeleteHeaders, - BlobDeleteExceptionHeaders: BlobDeleteExceptionHeaders, - BlobUndeleteHeaders: BlobUndeleteHeaders, - BlobUndeleteExceptionHeaders: BlobUndeleteExceptionHeaders, - BlobSetExpiryHeaders: BlobSetExpiryHeaders, - BlobSetExpiryExceptionHeaders: BlobSetExpiryExceptionHeaders, - BlobSetHttpHeadersHeaders: BlobSetHttpHeadersHeaders, - BlobSetHttpHeadersExceptionHeaders: BlobSetHttpHeadersExceptionHeaders, - BlobSetImmutabilityPolicyHeaders: BlobSetImmutabilityPolicyHeaders, - BlobSetImmutabilityPolicyExceptionHeaders: BlobSetImmutabilityPolicyExceptionHeaders, - BlobDeleteImmutabilityPolicyHeaders: BlobDeleteImmutabilityPolicyHeaders, - BlobDeleteImmutabilityPolicyExceptionHeaders: BlobDeleteImmutabilityPolicyExceptionHeaders, - BlobSetLegalHoldHeaders: 
BlobSetLegalHoldHeaders, - BlobSetLegalHoldExceptionHeaders: BlobSetLegalHoldExceptionHeaders, - BlobSetMetadataHeaders: BlobSetMetadataHeaders, - BlobSetMetadataExceptionHeaders: BlobSetMetadataExceptionHeaders, - BlobAcquireLeaseHeaders: BlobAcquireLeaseHeaders, - BlobAcquireLeaseExceptionHeaders: BlobAcquireLeaseExceptionHeaders, - BlobReleaseLeaseHeaders: BlobReleaseLeaseHeaders, - BlobReleaseLeaseExceptionHeaders: BlobReleaseLeaseExceptionHeaders, - BlobRenewLeaseHeaders: BlobRenewLeaseHeaders, - BlobRenewLeaseExceptionHeaders: BlobRenewLeaseExceptionHeaders, - BlobChangeLeaseHeaders: BlobChangeLeaseHeaders, - BlobChangeLeaseExceptionHeaders: BlobChangeLeaseExceptionHeaders, - BlobBreakLeaseHeaders: BlobBreakLeaseHeaders, - BlobBreakLeaseExceptionHeaders: BlobBreakLeaseExceptionHeaders, - BlobCreateSnapshotHeaders: BlobCreateSnapshotHeaders, - BlobCreateSnapshotExceptionHeaders: BlobCreateSnapshotExceptionHeaders, - BlobStartCopyFromURLHeaders: BlobStartCopyFromURLHeaders, - BlobStartCopyFromURLExceptionHeaders: BlobStartCopyFromURLExceptionHeaders, - BlobCopyFromURLHeaders: BlobCopyFromURLHeaders, - BlobCopyFromURLExceptionHeaders: BlobCopyFromURLExceptionHeaders, - BlobAbortCopyFromURLHeaders: BlobAbortCopyFromURLHeaders, - BlobAbortCopyFromURLExceptionHeaders: BlobAbortCopyFromURLExceptionHeaders, - BlobSetTierHeaders: BlobSetTierHeaders, - BlobSetTierExceptionHeaders: BlobSetTierExceptionHeaders, - BlobGetAccountInfoHeaders: BlobGetAccountInfoHeaders, - BlobGetAccountInfoExceptionHeaders: BlobGetAccountInfoExceptionHeaders, - BlobQueryHeaders: BlobQueryHeaders, - BlobQueryExceptionHeaders: BlobQueryExceptionHeaders, - BlobGetTagsHeaders: BlobGetTagsHeaders, - BlobGetTagsExceptionHeaders: BlobGetTagsExceptionHeaders, - BlobSetTagsHeaders: BlobSetTagsHeaders, - BlobSetTagsExceptionHeaders: BlobSetTagsExceptionHeaders, - PageBlobCreateHeaders: PageBlobCreateHeaders, - PageBlobCreateExceptionHeaders: PageBlobCreateExceptionHeaders, - PageBlobUploadPagesHeaders: PageBlobUploadPagesHeaders, - PageBlobUploadPagesExceptionHeaders: PageBlobUploadPagesExceptionHeaders, - PageBlobClearPagesHeaders: PageBlobClearPagesHeaders, + ContainerListBlobHierarchySegmentHeaders: ContainerListBlobHierarchySegmentHeaders, + ContainerProperties: ContainerProperties, + ContainerReleaseLeaseExceptionHeaders: ContainerReleaseLeaseExceptionHeaders, + ContainerReleaseLeaseHeaders: ContainerReleaseLeaseHeaders, + ContainerRenameExceptionHeaders: ContainerRenameExceptionHeaders, + ContainerRenameHeaders: ContainerRenameHeaders, + ContainerRenewLeaseExceptionHeaders: ContainerRenewLeaseExceptionHeaders, + ContainerRenewLeaseHeaders: ContainerRenewLeaseHeaders, + ContainerRestoreExceptionHeaders: ContainerRestoreExceptionHeaders, + ContainerRestoreHeaders: ContainerRestoreHeaders, + ContainerSetAccessPolicyExceptionHeaders: ContainerSetAccessPolicyExceptionHeaders, + ContainerSetAccessPolicyHeaders: ContainerSetAccessPolicyHeaders, + ContainerSetMetadataExceptionHeaders: ContainerSetMetadataExceptionHeaders, + ContainerSetMetadataHeaders: ContainerSetMetadataHeaders, + ContainerSubmitBatchExceptionHeaders: ContainerSubmitBatchExceptionHeaders, + ContainerSubmitBatchHeaders: ContainerSubmitBatchHeaders, + CorsRule: CorsRule, + DelimitedTextConfiguration: DelimitedTextConfiguration, + FilterBlobItem: FilterBlobItem, + FilterBlobSegment: FilterBlobSegment, + GeoReplication: GeoReplication, + JsonTextConfiguration: JsonTextConfiguration, + KeyInfo: KeyInfo, + ListBlobsFlatSegmentResponse: ListBlobsFlatSegmentResponse, 
+ ListBlobsHierarchySegmentResponse: ListBlobsHierarchySegmentResponse, + ListContainersSegmentResponse: ListContainersSegmentResponse, + Logging: Logging, + Metrics: Metrics, PageBlobClearPagesExceptionHeaders: PageBlobClearPagesExceptionHeaders, - PageBlobUploadPagesFromURLHeaders: PageBlobUploadPagesFromURLHeaders, - PageBlobUploadPagesFromURLExceptionHeaders: PageBlobUploadPagesFromURLExceptionHeaders, - PageBlobGetPageRangesHeaders: PageBlobGetPageRangesHeaders, - PageBlobGetPageRangesExceptionHeaders: PageBlobGetPageRangesExceptionHeaders, - PageBlobGetPageRangesDiffHeaders: PageBlobGetPageRangesDiffHeaders, + PageBlobClearPagesHeaders: PageBlobClearPagesHeaders, + PageBlobCopyIncrementalExceptionHeaders: PageBlobCopyIncrementalExceptionHeaders, + PageBlobCopyIncrementalHeaders: PageBlobCopyIncrementalHeaders, + PageBlobCreateExceptionHeaders: PageBlobCreateExceptionHeaders, + PageBlobCreateHeaders: PageBlobCreateHeaders, PageBlobGetPageRangesDiffExceptionHeaders: PageBlobGetPageRangesDiffExceptionHeaders, - PageBlobResizeHeaders: PageBlobResizeHeaders, + PageBlobGetPageRangesDiffHeaders: PageBlobGetPageRangesDiffHeaders, + PageBlobGetPageRangesExceptionHeaders: PageBlobGetPageRangesExceptionHeaders, + PageBlobGetPageRangesHeaders: PageBlobGetPageRangesHeaders, PageBlobResizeExceptionHeaders: PageBlobResizeExceptionHeaders, - PageBlobUpdateSequenceNumberHeaders: PageBlobUpdateSequenceNumberHeaders, + PageBlobResizeHeaders: PageBlobResizeHeaders, PageBlobUpdateSequenceNumberExceptionHeaders: PageBlobUpdateSequenceNumberExceptionHeaders, - PageBlobCopyIncrementalHeaders: PageBlobCopyIncrementalHeaders, - PageBlobCopyIncrementalExceptionHeaders: PageBlobCopyIncrementalExceptionHeaders, - AppendBlobCreateHeaders: AppendBlobCreateHeaders, - AppendBlobCreateExceptionHeaders: AppendBlobCreateExceptionHeaders, - AppendBlobAppendBlockHeaders: AppendBlobAppendBlockHeaders, - AppendBlobAppendBlockExceptionHeaders: AppendBlobAppendBlockExceptionHeaders, - AppendBlobAppendBlockFromUrlHeaders: AppendBlobAppendBlockFromUrlHeaders, - AppendBlobAppendBlockFromUrlExceptionHeaders: AppendBlobAppendBlockFromUrlExceptionHeaders, - AppendBlobSealHeaders: AppendBlobSealHeaders, - AppendBlobSealExceptionHeaders: AppendBlobSealExceptionHeaders, - BlockBlobUploadHeaders: BlockBlobUploadHeaders, - BlockBlobUploadExceptionHeaders: BlockBlobUploadExceptionHeaders, - BlockBlobPutBlobFromUrlHeaders: BlockBlobPutBlobFromUrlHeaders, - BlockBlobPutBlobFromUrlExceptionHeaders: BlockBlobPutBlobFromUrlExceptionHeaders, - BlockBlobStageBlockHeaders: BlockBlobStageBlockHeaders, - BlockBlobStageBlockExceptionHeaders: BlockBlobStageBlockExceptionHeaders, - BlockBlobStageBlockFromURLHeaders: BlockBlobStageBlockFromURLHeaders, - BlockBlobStageBlockFromURLExceptionHeaders: BlockBlobStageBlockFromURLExceptionHeaders, - BlockBlobCommitBlockListHeaders: BlockBlobCommitBlockListHeaders, - BlockBlobCommitBlockListExceptionHeaders: BlockBlobCommitBlockListExceptionHeaders, - BlockBlobGetBlockListHeaders: BlockBlobGetBlockListHeaders, - BlockBlobGetBlockListExceptionHeaders: BlockBlobGetBlockListExceptionHeaders + PageBlobUpdateSequenceNumberHeaders: PageBlobUpdateSequenceNumberHeaders, + PageBlobUploadPagesExceptionHeaders: PageBlobUploadPagesExceptionHeaders, + PageBlobUploadPagesFromURLExceptionHeaders: PageBlobUploadPagesFromURLExceptionHeaders, + PageBlobUploadPagesFromURLHeaders: PageBlobUploadPagesFromURLHeaders, + PageBlobUploadPagesHeaders: PageBlobUploadPagesHeaders, + PageList: PageList, + PageRange: PageRange, + 
QueryFormat: QueryFormat, + QueryRequest: QueryRequest, + QuerySerialization: QuerySerialization, + RetentionPolicy: RetentionPolicy, + ServiceFilterBlobsExceptionHeaders: ServiceFilterBlobsExceptionHeaders, + ServiceFilterBlobsHeaders: ServiceFilterBlobsHeaders, + ServiceGetAccountInfoExceptionHeaders: ServiceGetAccountInfoExceptionHeaders, + ServiceGetAccountInfoHeaders: ServiceGetAccountInfoHeaders, + ServiceGetPropertiesExceptionHeaders: ServiceGetPropertiesExceptionHeaders, + ServiceGetPropertiesHeaders: ServiceGetPropertiesHeaders, + ServiceGetStatisticsExceptionHeaders: ServiceGetStatisticsExceptionHeaders, + ServiceGetStatisticsHeaders: ServiceGetStatisticsHeaders, + ServiceGetUserDelegationKeyExceptionHeaders: ServiceGetUserDelegationKeyExceptionHeaders, + ServiceGetUserDelegationKeyHeaders: ServiceGetUserDelegationKeyHeaders, + ServiceListContainersSegmentExceptionHeaders: ServiceListContainersSegmentExceptionHeaders, + ServiceListContainersSegmentHeaders: ServiceListContainersSegmentHeaders, + ServiceSetPropertiesExceptionHeaders: ServiceSetPropertiesExceptionHeaders, + ServiceSetPropertiesHeaders: ServiceSetPropertiesHeaders, + ServiceSubmitBatchExceptionHeaders: ServiceSubmitBatchExceptionHeaders, + ServiceSubmitBatchHeaders: ServiceSubmitBatchHeaders, + SignedIdentifier: SignedIdentifier, + StaticWebsite: StaticWebsite, + StorageError: StorageError, + UserDelegationKey: UserDelegationKey }); /* @@ -20291,13 +16684,13 @@ const contentType = { isConstant: true, serializedName: "Content-Type", type: { - name: "String" - } - } + name: "String", + }, + }, }; const blobServiceProperties = { parameterPath: "blobServiceProperties", - mapper: BlobServiceProperties + mapper: BlobServiceProperties, }; const accept = { parameterPath: "accept", @@ -20306,9 +16699,9 @@ const accept = { isConstant: true, serializedName: "Accept", type: { - name: "String" - } - } + name: "String", + }, + }, }; const url = { parameterPath: "url", @@ -20317,10 +16710,10 @@ const url = { required: true, xmlName: "url", type: { - name: "String" - } + name: "String", + }, }, - skipEncoding: true + skipEncoding: true, }; const restype = { parameterPath: "restype", @@ -20329,9 +16722,9 @@ const restype = { isConstant: true, serializedName: "restype", type: { - name: "String" - } - } + name: "String", + }, + }, }; const comp = { parameterPath: "comp", @@ -20340,22 +16733,22 @@ const comp = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const timeoutInSeconds = { parameterPath: ["options", "timeoutInSeconds"], mapper: { constraints: { - InclusiveMinimum: 0 + InclusiveMinimum: 0, }, serializedName: "timeout", xmlName: "timeout", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; const version = { parameterPath: "version", @@ -20364,9 +16757,9 @@ const version = { isConstant: true, serializedName: "x-ms-version", type: { - name: "String" - } - } + name: "String", + }, + }, }; const requestId = { parameterPath: ["options", "requestId"], @@ -20374,9 +16767,9 @@ const requestId = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } - } + name: "String", + }, + }, }; const accept1 = { parameterPath: "accept", @@ -20385,9 +16778,9 @@ const accept1 = { isConstant: true, serializedName: "Accept", type: { - name: "String" - } - } + name: "String", + }, + }, }; const comp1 = { parameterPath: "comp", @@ -20396,9 +16789,9 @@ const comp1 = { isConstant: true, serializedName: "comp", type: { - name: 
"String" - } - } + name: "String", + }, + }, }; const comp2 = { parameterPath: "comp", @@ -20407,9 +16800,9 @@ const comp2 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const prefix = { parameterPath: ["options", "prefix"], @@ -20417,9 +16810,9 @@ const prefix = { serializedName: "prefix", xmlName: "prefix", type: { - name: "String" - } - } + name: "String", + }, + }, }; const marker = { parameterPath: ["options", "marker"], @@ -20427,22 +16820,22 @@ const marker = { serializedName: "marker", xmlName: "marker", type: { - name: "String" - } - } + name: "String", + }, + }, }; const maxPageSize = { parameterPath: ["options", "maxPageSize"], mapper: { constraints: { - InclusiveMinimum: 1 + InclusiveMinimum: 1, }, serializedName: "maxresults", xmlName: "maxresults", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; const include = { parameterPath: ["options", "include"], @@ -20455,16 +16848,16 @@ const include = { element: { type: { name: "Enum", - allowedValues: ["metadata", "deleted", "system"] - } - } - } + allowedValues: ["metadata", "deleted", "system"], + }, + }, + }, }, - collectionFormat: coreHttp.QueryCollectionFormat.Csv + collectionFormat: "CSV", }; const keyInfo = { parameterPath: "keyInfo", - mapper: KeyInfo + mapper: KeyInfo, }; const comp3 = { parameterPath: "comp", @@ -20473,9 +16866,9 @@ const comp3 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const restype1 = { parameterPath: "restype", @@ -20484,9 +16877,9 @@ const restype1 = { isConstant: true, serializedName: "restype", type: { - name: "String" - } - } + name: "String", + }, + }, }; const body = { parameterPath: "body", @@ -20495,9 +16888,9 @@ const body = { required: true, xmlName: "body", type: { - name: "Stream" - } - } + name: "Stream", + }, + }, }; const comp4 = { parameterPath: "comp", @@ -20506,9 +16899,9 @@ const comp4 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const contentLength = { parameterPath: "contentLength", @@ -20517,9 +16910,9 @@ const contentLength = { required: true, xmlName: "Content-Length", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; const multipartContentType = { parameterPath: "multipartContentType", @@ -20528,9 +16921,9 @@ const multipartContentType = { required: true, xmlName: "Content-Type", type: { - name: "String" - } - } + name: "String", + }, + }, }; const comp5 = { parameterPath: "comp", @@ -20539,9 +16932,9 @@ const comp5 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const where = { parameterPath: ["options", "where"], @@ -20549,9 +16942,9 @@ const where = { serializedName: "where", xmlName: "where", type: { - name: "String" - } - } + name: "String", + }, + }, }; const restype2 = { parameterPath: "restype", @@ -20560,21 +16953,21 @@ const restype2 = { isConstant: true, serializedName: "restype", type: { - name: "String" - } - } + name: "String", + }, + }, }; const metadata = { parameterPath: ["options", "metadata"], mapper: { serializedName: "x-ms-meta", xmlName: "x-ms-meta", + headerCollectionPrefix: "x-ms-meta-", type: { name: "Dictionary", - value: { type: { name: "String" } } + value: { type: { name: "String" } }, }, - headerCollectionPrefix: "x-ms-meta-" - } + }, }; const access = { parameterPath: ["options", "access"], @@ -20583,37 +16976,37 @@ const access = { xmlName: "x-ms-blob-public-access", 
type: { name: "Enum", - allowedValues: ["container", "blob"] - } - } + allowedValues: ["container", "blob"], + }, + }, }; const defaultEncryptionScope = { parameterPath: [ "options", "containerEncryptionScope", - "defaultEncryptionScope" + "defaultEncryptionScope", ], mapper: { serializedName: "x-ms-default-encryption-scope", xmlName: "x-ms-default-encryption-scope", type: { - name: "String" - } - } + name: "String", + }, + }, }; const preventEncryptionScopeOverride = { parameterPath: [ "options", "containerEncryptionScope", - "preventEncryptionScopeOverride" + "preventEncryptionScopeOverride", ], mapper: { serializedName: "x-ms-deny-encryption-scope-override", xmlName: "x-ms-deny-encryption-scope-override", type: { - name: "Boolean" - } - } + name: "Boolean", + }, + }, }; const leaseId = { parameterPath: ["options", "leaseAccessConditions", "leaseId"], @@ -20621,9 +17014,9 @@ const leaseId = { serializedName: "x-ms-lease-id", xmlName: "x-ms-lease-id", type: { - name: "String" - } - } + name: "String", + }, + }, }; const ifModifiedSince = { parameterPath: ["options", "modifiedAccessConditions", "ifModifiedSince"], @@ -20631,9 +17024,9 @@ const ifModifiedSince = { serializedName: "If-Modified-Since", xmlName: "If-Modified-Since", type: { - name: "DateTimeRfc1123" - } - } + name: "DateTimeRfc1123", + }, + }, }; const ifUnmodifiedSince = { parameterPath: ["options", "modifiedAccessConditions", "ifUnmodifiedSince"], @@ -20641,9 +17034,9 @@ const ifUnmodifiedSince = { serializedName: "If-Unmodified-Since", xmlName: "If-Unmodified-Since", type: { - name: "DateTimeRfc1123" - } - } + name: "DateTimeRfc1123", + }, + }, }; const comp6 = { parameterPath: "comp", @@ -20652,9 +17045,9 @@ const comp6 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const comp7 = { parameterPath: "comp", @@ -20663,9 +17056,9 @@ const comp7 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const containerAcl = { parameterPath: ["options", "containerAcl"], @@ -20679,11 +17072,11 @@ const containerAcl = { element: { type: { name: "Composite", - className: "SignedIdentifier" - } - } - } - } + className: "SignedIdentifier", + }, + }, + }, + }, }; const comp8 = { parameterPath: "comp", @@ -20692,9 +17085,9 @@ const comp8 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const deletedContainerName = { parameterPath: ["options", "deletedContainerName"], @@ -20702,9 +17095,9 @@ const deletedContainerName = { serializedName: "x-ms-deleted-container-name", xmlName: "x-ms-deleted-container-name", type: { - name: "String" - } - } + name: "String", + }, + }, }; const deletedContainerVersion = { parameterPath: ["options", "deletedContainerVersion"], @@ -20712,9 +17105,9 @@ const deletedContainerVersion = { serializedName: "x-ms-deleted-container-version", xmlName: "x-ms-deleted-container-version", type: { - name: "String" - } - } + name: "String", + }, + }, }; const comp9 = { parameterPath: "comp", @@ -20723,9 +17116,9 @@ const comp9 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const sourceContainerName = { parameterPath: "sourceContainerName", @@ -20734,9 +17127,9 @@ const sourceContainerName = { required: true, xmlName: "x-ms-source-container-name", type: { - name: "String" - } - } + name: "String", + }, + }, }; const sourceLeaseId = { parameterPath: ["options", "sourceLeaseId"], @@ -20744,9 
+17137,9 @@ const sourceLeaseId = { serializedName: "x-ms-source-lease-id", xmlName: "x-ms-source-lease-id", type: { - name: "String" - } - } + name: "String", + }, + }, }; const comp10 = { parameterPath: "comp", @@ -20755,9 +17148,9 @@ const comp10 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const action = { parameterPath: "action", @@ -20766,9 +17159,9 @@ const action = { isConstant: true, serializedName: "x-ms-lease-action", type: { - name: "String" - } - } + name: "String", + }, + }, }; const duration = { parameterPath: ["options", "duration"], @@ -20776,9 +17169,9 @@ const duration = { serializedName: "x-ms-lease-duration", xmlName: "x-ms-lease-duration", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; const proposedLeaseId = { parameterPath: ["options", "proposedLeaseId"], @@ -20786,9 +17179,9 @@ const proposedLeaseId = { serializedName: "x-ms-proposed-lease-id", xmlName: "x-ms-proposed-lease-id", type: { - name: "String" - } - } + name: "String", + }, + }, }; const action1 = { parameterPath: "action", @@ -20797,9 +17190,9 @@ const action1 = { isConstant: true, serializedName: "x-ms-lease-action", type: { - name: "String" - } - } + name: "String", + }, + }, }; const leaseId1 = { parameterPath: "leaseId", @@ -20808,9 +17201,9 @@ const leaseId1 = { required: true, xmlName: "x-ms-lease-id", type: { - name: "String" - } - } + name: "String", + }, + }, }; const action2 = { parameterPath: "action", @@ -20819,9 +17212,9 @@ const action2 = { isConstant: true, serializedName: "x-ms-lease-action", type: { - name: "String" - } - } + name: "String", + }, + }, }; const action3 = { parameterPath: "action", @@ -20830,9 +17223,9 @@ const action3 = { isConstant: true, serializedName: "x-ms-lease-action", type: { - name: "String" - } - } + name: "String", + }, + }, }; const breakPeriod = { parameterPath: ["options", "breakPeriod"], @@ -20840,9 +17233,9 @@ const breakPeriod = { serializedName: "x-ms-lease-break-period", xmlName: "x-ms-lease-break-period", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; const action4 = { parameterPath: "action", @@ -20851,9 +17244,9 @@ const action4 = { isConstant: true, serializedName: "x-ms-lease-action", type: { - name: "String" - } - } + name: "String", + }, + }, }; const proposedLeaseId1 = { parameterPath: "proposedLeaseId", @@ -20862,9 +17255,9 @@ const proposedLeaseId1 = { required: true, xmlName: "x-ms-proposed-lease-id", type: { - name: "String" - } - } + name: "String", + }, + }, }; const include1 = { parameterPath: ["options", "include"], @@ -20887,13 +17280,13 @@ const include1 = { "tags", "immutabilitypolicy", "legalhold", - "deletedwithversions" - ] - } - } - } + "deletedwithversions", + ], + }, + }, + }, }, - collectionFormat: coreHttp.QueryCollectionFormat.Csv + collectionFormat: "CSV", }; const delimiter = { parameterPath: "delimiter", @@ -20902,9 +17295,9 @@ const delimiter = { required: true, xmlName: "delimiter", type: { - name: "String" - } - } + name: "String", + }, + }, }; const snapshot = { parameterPath: ["options", "snapshot"], @@ -20912,9 +17305,9 @@ const snapshot = { serializedName: "snapshot", xmlName: "snapshot", type: { - name: "String" - } - } + name: "String", + }, + }, }; const versionId = { parameterPath: ["options", "versionId"], @@ -20922,9 +17315,9 @@ const versionId = { serializedName: "versionid", xmlName: "versionid", type: { - name: "String" - } - } + name: "String", + }, + }, }; const range = { parameterPath: ["options", "range"], 
@@ -20932,9 +17325,9 @@ const range = { serializedName: "x-ms-range", xmlName: "x-ms-range", type: { - name: "String" - } - } + name: "String", + }, + }, }; const rangeGetContentMD5 = { parameterPath: ["options", "rangeGetContentMD5"], @@ -20942,9 +17335,9 @@ const rangeGetContentMD5 = { serializedName: "x-ms-range-get-content-md5", xmlName: "x-ms-range-get-content-md5", type: { - name: "Boolean" - } - } + name: "Boolean", + }, + }, }; const rangeGetContentCRC64 = { parameterPath: ["options", "rangeGetContentCRC64"], @@ -20952,9 +17345,9 @@ const rangeGetContentCRC64 = { serializedName: "x-ms-range-get-content-crc64", xmlName: "x-ms-range-get-content-crc64", type: { - name: "Boolean" - } - } + name: "Boolean", + }, + }, }; const encryptionKey = { parameterPath: ["options", "cpkInfo", "encryptionKey"], @@ -20962,9 +17355,9 @@ const encryptionKey = { serializedName: "x-ms-encryption-key", xmlName: "x-ms-encryption-key", type: { - name: "String" - } - } + name: "String", + }, + }, }; const encryptionKeySha256 = { parameterPath: ["options", "cpkInfo", "encryptionKeySha256"], @@ -20972,9 +17365,9 @@ const encryptionKeySha256 = { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { - name: "String" - } - } + name: "String", + }, + }, }; const encryptionAlgorithm = { parameterPath: ["options", "cpkInfo", "encryptionAlgorithm"], @@ -20982,9 +17375,9 @@ const encryptionAlgorithm = { serializedName: "x-ms-encryption-algorithm", xmlName: "x-ms-encryption-algorithm", type: { - name: "String" - } - } + name: "String", + }, + }, }; const ifMatch = { parameterPath: ["options", "modifiedAccessConditions", "ifMatch"], @@ -20992,9 +17385,9 @@ const ifMatch = { serializedName: "If-Match", xmlName: "If-Match", type: { - name: "String" - } - } + name: "String", + }, + }, }; const ifNoneMatch = { parameterPath: ["options", "modifiedAccessConditions", "ifNoneMatch"], @@ -21002,9 +17395,9 @@ const ifNoneMatch = { serializedName: "If-None-Match", xmlName: "If-None-Match", type: { - name: "String" - } - } + name: "String", + }, + }, }; const ifTags = { parameterPath: ["options", "modifiedAccessConditions", "ifTags"], @@ -21012,9 +17405,9 @@ const ifTags = { serializedName: "x-ms-if-tags", xmlName: "x-ms-if-tags", type: { - name: "String" - } - } + name: "String", + }, + }, }; const deleteSnapshots = { parameterPath: ["options", "deleteSnapshots"], @@ -21023,9 +17416,9 @@ const deleteSnapshots = { xmlName: "x-ms-delete-snapshots", type: { name: "Enum", - allowedValues: ["include", "only"] - } - } + allowedValues: ["include", "only"], + }, + }, }; const blobDeleteType = { parameterPath: ["options", "blobDeleteType"], @@ -21033,9 +17426,9 @@ const blobDeleteType = { serializedName: "deletetype", xmlName: "deletetype", type: { - name: "String" - } - } + name: "String", + }, + }, }; const comp11 = { parameterPath: "comp", @@ -21044,9 +17437,9 @@ const comp11 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const expiryOptions = { parameterPath: "expiryOptions", @@ -21055,9 +17448,9 @@ const expiryOptions = { required: true, xmlName: "x-ms-expiry-option", type: { - name: "String" - } - } + name: "String", + }, + }, }; const expiresOn = { parameterPath: ["options", "expiresOn"], @@ -21065,9 +17458,9 @@ const expiresOn = { serializedName: "x-ms-expiry-time", xmlName: "x-ms-expiry-time", type: { - name: "String" - } - } + name: "String", + }, + }, }; const blobCacheControl = { parameterPath: ["options", "blobHttpHeaders", 
"blobCacheControl"], @@ -21075,9 +17468,9 @@ const blobCacheControl = { serializedName: "x-ms-blob-cache-control", xmlName: "x-ms-blob-cache-control", type: { - name: "String" - } - } + name: "String", + }, + }, }; const blobContentType = { parameterPath: ["options", "blobHttpHeaders", "blobContentType"], @@ -21085,9 +17478,9 @@ const blobContentType = { serializedName: "x-ms-blob-content-type", xmlName: "x-ms-blob-content-type", type: { - name: "String" - } - } + name: "String", + }, + }, }; const blobContentMD5 = { parameterPath: ["options", "blobHttpHeaders", "blobContentMD5"], @@ -21095,9 +17488,9 @@ const blobContentMD5 = { serializedName: "x-ms-blob-content-md5", xmlName: "x-ms-blob-content-md5", type: { - name: "ByteArray" - } - } + name: "ByteArray", + }, + }, }; const blobContentEncoding = { parameterPath: ["options", "blobHttpHeaders", "blobContentEncoding"], @@ -21105,9 +17498,9 @@ const blobContentEncoding = { serializedName: "x-ms-blob-content-encoding", xmlName: "x-ms-blob-content-encoding", type: { - name: "String" - } - } + name: "String", + }, + }, }; const blobContentLanguage = { parameterPath: ["options", "blobHttpHeaders", "blobContentLanguage"], @@ -21115,9 +17508,9 @@ const blobContentLanguage = { serializedName: "x-ms-blob-content-language", xmlName: "x-ms-blob-content-language", type: { - name: "String" - } - } + name: "String", + }, + }, }; const blobContentDisposition = { parameterPath: ["options", "blobHttpHeaders", "blobContentDisposition"], @@ -21125,9 +17518,9 @@ const blobContentDisposition = { serializedName: "x-ms-blob-content-disposition", xmlName: "x-ms-blob-content-disposition", type: { - name: "String" - } - } + name: "String", + }, + }, }; const comp12 = { parameterPath: "comp", @@ -21136,9 +17529,9 @@ const comp12 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const immutabilityPolicyExpiry = { parameterPath: ["options", "immutabilityPolicyExpiry"], @@ -21146,9 +17539,9 @@ const immutabilityPolicyExpiry = { serializedName: "x-ms-immutability-policy-until-date", xmlName: "x-ms-immutability-policy-until-date", type: { - name: "DateTimeRfc1123" - } - } + name: "DateTimeRfc1123", + }, + }, }; const immutabilityPolicyMode = { parameterPath: ["options", "immutabilityPolicyMode"], @@ -21157,9 +17550,9 @@ const immutabilityPolicyMode = { xmlName: "x-ms-immutability-policy-mode", type: { name: "Enum", - allowedValues: ["Mutable", "Unlocked", "Locked"] - } - } + allowedValues: ["Mutable", "Unlocked", "Locked"], + }, + }, }; const comp13 = { parameterPath: "comp", @@ -21168,9 +17561,9 @@ const comp13 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const legalHold = { parameterPath: "legalHold", @@ -21179,9 +17572,9 @@ const legalHold = { required: true, xmlName: "x-ms-legal-hold", type: { - name: "Boolean" - } - } + name: "Boolean", + }, + }, }; const encryptionScope = { parameterPath: ["options", "encryptionScope"], @@ -21189,9 +17582,9 @@ const encryptionScope = { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } - } + name: "String", + }, + }, }; const comp14 = { parameterPath: "comp", @@ -21200,9 +17593,9 @@ const comp14 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const tier = { parameterPath: ["options", "tier"], @@ -21226,10 +17619,10 @@ const tier = { "Hot", "Cool", "Archive", - "Cold" - ] - } - } + "Cold", + ], + 
}, + }, }; const rehydratePriority = { parameterPath: ["options", "rehydratePriority"], @@ -21238,37 +17631,37 @@ const rehydratePriority = { xmlName: "x-ms-rehydrate-priority", type: { name: "Enum", - allowedValues: ["High", "Standard"] - } - } + allowedValues: ["High", "Standard"], + }, + }, }; const sourceIfModifiedSince = { parameterPath: [ "options", "sourceModifiedAccessConditions", - "sourceIfModifiedSince" + "sourceIfModifiedSince", ], mapper: { serializedName: "x-ms-source-if-modified-since", xmlName: "x-ms-source-if-modified-since", type: { - name: "DateTimeRfc1123" - } - } + name: "DateTimeRfc1123", + }, + }, }; const sourceIfUnmodifiedSince = { parameterPath: [ "options", "sourceModifiedAccessConditions", - "sourceIfUnmodifiedSince" + "sourceIfUnmodifiedSince", ], mapper: { serializedName: "x-ms-source-if-unmodified-since", xmlName: "x-ms-source-if-unmodified-since", type: { - name: "DateTimeRfc1123" - } - } + name: "DateTimeRfc1123", + }, + }, }; const sourceIfMatch = { parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfMatch"], @@ -21276,23 +17669,23 @@ const sourceIfMatch = { serializedName: "x-ms-source-if-match", xmlName: "x-ms-source-if-match", type: { - name: "String" - } - } + name: "String", + }, + }, }; const sourceIfNoneMatch = { parameterPath: [ "options", "sourceModifiedAccessConditions", - "sourceIfNoneMatch" + "sourceIfNoneMatch", ], mapper: { serializedName: "x-ms-source-if-none-match", xmlName: "x-ms-source-if-none-match", type: { - name: "String" - } - } + name: "String", + }, + }, }; const sourceIfTags = { parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfTags"], @@ -21300,9 +17693,9 @@ const sourceIfTags = { serializedName: "x-ms-source-if-tags", xmlName: "x-ms-source-if-tags", type: { - name: "String" - } - } + name: "String", + }, + }, }; const copySource = { parameterPath: "copySource", @@ -21311,9 +17704,9 @@ const copySource = { required: true, xmlName: "x-ms-copy-source", type: { - name: "String" - } - } + name: "String", + }, + }, }; const blobTagsString = { parameterPath: ["options", "blobTagsString"], @@ -21321,9 +17714,9 @@ const blobTagsString = { serializedName: "x-ms-tags", xmlName: "x-ms-tags", type: { - name: "String" - } - } + name: "String", + }, + }, }; const sealBlob = { parameterPath: ["options", "sealBlob"], @@ -21331,9 +17724,9 @@ const sealBlob = { serializedName: "x-ms-seal-blob", xmlName: "x-ms-seal-blob", type: { - name: "Boolean" - } - } + name: "Boolean", + }, + }, }; const legalHold1 = { parameterPath: ["options", "legalHold"], @@ -21341,9 +17734,9 @@ const legalHold1 = { serializedName: "x-ms-legal-hold", xmlName: "x-ms-legal-hold", type: { - name: "Boolean" - } - } + name: "Boolean", + }, + }, }; const xMsRequiresSync = { parameterPath: "xMsRequiresSync", @@ -21352,9 +17745,9 @@ const xMsRequiresSync = { isConstant: true, serializedName: "x-ms-requires-sync", type: { - name: "String" - } - } + name: "String", + }, + }, }; const sourceContentMD5 = { parameterPath: ["options", "sourceContentMD5"], @@ -21362,9 +17755,9 @@ const sourceContentMD5 = { serializedName: "x-ms-source-content-md5", xmlName: "x-ms-source-content-md5", type: { - name: "ByteArray" - } - } + name: "ByteArray", + }, + }, }; const copySourceAuthorization = { parameterPath: ["options", "copySourceAuthorization"], @@ -21372,9 +17765,9 @@ const copySourceAuthorization = { serializedName: "x-ms-copy-source-authorization", xmlName: "x-ms-copy-source-authorization", type: { - name: "String" - } - } + name: "String", + }, + }, }; 
const copySourceTags = { parameterPath: ["options", "copySourceTags"], @@ -21383,9 +17776,9 @@ const copySourceTags = { xmlName: "x-ms-copy-source-tag-option", type: { name: "Enum", - allowedValues: ["REPLACE", "COPY"] - } - } + allowedValues: ["REPLACE", "COPY"], + }, + }, }; const comp15 = { parameterPath: "comp", @@ -21394,9 +17787,9 @@ const comp15 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const copyActionAbortConstant = { parameterPath: "copyActionAbortConstant", @@ -21405,9 +17798,9 @@ const copyActionAbortConstant = { isConstant: true, serializedName: "x-ms-copy-action", type: { - name: "String" - } - } + name: "String", + }, + }, }; const copyId = { parameterPath: "copyId", @@ -21416,9 +17809,9 @@ const copyId = { required: true, xmlName: "copyid", type: { - name: "String" - } - } + name: "String", + }, + }, }; const comp16 = { parameterPath: "comp", @@ -21427,9 +17820,9 @@ const comp16 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const tier1 = { parameterPath: "tier", @@ -21454,14 +17847,14 @@ const tier1 = { "Hot", "Cool", "Archive", - "Cold" - ] - } - } + "Cold", + ], + }, + }, }; const queryRequest = { parameterPath: ["options", "queryRequest"], - mapper: QueryRequest + mapper: QueryRequest, }; const comp17 = { parameterPath: "comp", @@ -21470,9 +17863,9 @@ const comp17 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const comp18 = { parameterPath: "comp", @@ -21481,13 +17874,13 @@ const comp18 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const tags = { parameterPath: ["options", "tags"], - mapper: BlobTags + mapper: BlobTags, }; const transactionalContentMD5 = { parameterPath: ["options", "transactionalContentMD5"], @@ -21495,9 +17888,9 @@ const transactionalContentMD5 = { serializedName: "Content-MD5", xmlName: "Content-MD5", type: { - name: "ByteArray" - } - } + name: "ByteArray", + }, + }, }; const transactionalContentCrc64 = { parameterPath: ["options", "transactionalContentCrc64"], @@ -21505,9 +17898,9 @@ const transactionalContentCrc64 = { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { - name: "ByteArray" - } - } + name: "ByteArray", + }, + }, }; const blobType = { parameterPath: "blobType", @@ -21516,9 +17909,9 @@ const blobType = { isConstant: true, serializedName: "x-ms-blob-type", type: { - name: "String" - } - } + name: "String", + }, + }, }; const blobContentLength = { parameterPath: "blobContentLength", @@ -21527,19 +17920,20 @@ const blobContentLength = { required: true, xmlName: "x-ms-blob-content-length", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; const blobSequenceNumber = { parameterPath: ["options", "blobSequenceNumber"], mapper: { + defaultValue: 0, serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; const contentType1 = { parameterPath: ["options", "contentType"], @@ -21548,9 +17942,9 @@ const contentType1 = { isConstant: true, serializedName: "Content-Type", type: { - name: "String" - } - } + name: "String", + }, + }, }; const body1 = { parameterPath: "body", @@ -21559,9 +17953,9 @@ const body1 = { required: true, xmlName: "body", type: { - name: "Stream" - } - } + name: "Stream", + }, + }, }; const accept2 = { parameterPath: "accept", @@ -21570,9 +17964,9 
@@ const accept2 = { isConstant: true, serializedName: "Accept", type: { - name: "String" - } - } + name: "String", + }, + }, }; const comp19 = { parameterPath: "comp", @@ -21581,9 +17975,9 @@ const comp19 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const pageWrite = { parameterPath: "pageWrite", @@ -21592,51 +17986,51 @@ const pageWrite = { isConstant: true, serializedName: "x-ms-page-write", type: { - name: "String" - } - } + name: "String", + }, + }, }; const ifSequenceNumberLessThanOrEqualTo = { parameterPath: [ "options", "sequenceNumberAccessConditions", - "ifSequenceNumberLessThanOrEqualTo" + "ifSequenceNumberLessThanOrEqualTo", ], mapper: { serializedName: "x-ms-if-sequence-number-le", xmlName: "x-ms-if-sequence-number-le", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; const ifSequenceNumberLessThan = { parameterPath: [ "options", "sequenceNumberAccessConditions", - "ifSequenceNumberLessThan" + "ifSequenceNumberLessThan", ], mapper: { serializedName: "x-ms-if-sequence-number-lt", xmlName: "x-ms-if-sequence-number-lt", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; const ifSequenceNumberEqualTo = { parameterPath: [ "options", "sequenceNumberAccessConditions", - "ifSequenceNumberEqualTo" + "ifSequenceNumberEqualTo", ], mapper: { serializedName: "x-ms-if-sequence-number-eq", xmlName: "x-ms-if-sequence-number-eq", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; const pageWrite1 = { parameterPath: "pageWrite", @@ -21645,9 +18039,9 @@ const pageWrite1 = { isConstant: true, serializedName: "x-ms-page-write", type: { - name: "String" - } - } + name: "String", + }, + }, }; const sourceUrl = { parameterPath: "sourceUrl", @@ -21656,9 +18050,9 @@ const sourceUrl = { required: true, xmlName: "x-ms-copy-source", type: { - name: "String" - } - } + name: "String", + }, + }, }; const sourceRange = { parameterPath: "sourceRange", @@ -21667,9 +18061,9 @@ const sourceRange = { required: true, xmlName: "x-ms-source-range", type: { - name: "String" - } - } + name: "String", + }, + }, }; const sourceContentCrc64 = { parameterPath: ["options", "sourceContentCrc64"], @@ -21677,9 +18071,9 @@ const sourceContentCrc64 = { serializedName: "x-ms-source-content-crc64", xmlName: "x-ms-source-content-crc64", type: { - name: "ByteArray" - } - } + name: "ByteArray", + }, + }, }; const range1 = { parameterPath: "range", @@ -21688,9 +18082,9 @@ const range1 = { required: true, xmlName: "x-ms-range", type: { - name: "String" - } - } + name: "String", + }, + }, }; const comp20 = { parameterPath: "comp", @@ -21699,9 +18093,9 @@ const comp20 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const prevsnapshot = { parameterPath: ["options", "prevsnapshot"], @@ -21709,9 +18103,9 @@ const prevsnapshot = { serializedName: "prevsnapshot", xmlName: "prevsnapshot", type: { - name: "String" - } - } + name: "String", + }, + }, }; const prevSnapshotUrl = { parameterPath: ["options", "prevSnapshotUrl"], @@ -21719,9 +18113,9 @@ const prevSnapshotUrl = { serializedName: "x-ms-previous-snapshot-url", xmlName: "x-ms-previous-snapshot-url", type: { - name: "String" - } - } + name: "String", + }, + }, }; const sequenceNumberAction = { parameterPath: "sequenceNumberAction", @@ -21731,9 +18125,9 @@ const sequenceNumberAction = { xmlName: "x-ms-sequence-number-action", type: { name: "Enum", - allowedValues: ["max", "update", "increment"] - } - } + allowedValues: 
["max", "update", "increment"], + }, + }, }; const comp21 = { parameterPath: "comp", @@ -21742,9 +18136,9 @@ const comp21 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const blobType1 = { parameterPath: "blobType", @@ -21753,9 +18147,9 @@ const blobType1 = { isConstant: true, serializedName: "x-ms-blob-type", type: { - name: "String" - } - } + name: "String", + }, + }, }; const comp22 = { parameterPath: "comp", @@ -21764,9 +18158,9 @@ const comp22 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const maxSize = { parameterPath: ["options", "appendPositionAccessConditions", "maxSize"], @@ -21774,23 +18168,23 @@ const maxSize = { serializedName: "x-ms-blob-condition-maxsize", xmlName: "x-ms-blob-condition-maxsize", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; const appendPosition = { parameterPath: [ "options", "appendPositionAccessConditions", - "appendPosition" + "appendPosition", ], mapper: { serializedName: "x-ms-blob-condition-appendpos", xmlName: "x-ms-blob-condition-appendpos", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; const sourceRange1 = { parameterPath: ["options", "sourceRange"], @@ -21798,9 +18192,9 @@ const sourceRange1 = { serializedName: "x-ms-source-range", xmlName: "x-ms-source-range", type: { - name: "String" - } - } + name: "String", + }, + }, }; const comp23 = { parameterPath: "comp", @@ -21809,9 +18203,9 @@ const comp23 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const blobType2 = { parameterPath: "blobType", @@ -21820,9 +18214,9 @@ const blobType2 = { isConstant: true, serializedName: "x-ms-blob-type", type: { - name: "String" - } - } + name: "String", + }, + }, }; const copySourceBlobProperties = { parameterPath: ["options", "copySourceBlobProperties"], @@ -21830,9 +18224,9 @@ const copySourceBlobProperties = { serializedName: "x-ms-copy-source-blob-properties", xmlName: "x-ms-copy-source-blob-properties", type: { - name: "Boolean" - } - } + name: "Boolean", + }, + }, }; const comp24 = { parameterPath: "comp", @@ -21841,9 +18235,9 @@ const comp24 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const blockId = { parameterPath: "blockId", @@ -21852,13 +18246,13 @@ const blockId = { required: true, xmlName: "blockid", type: { - name: "String" - } - } + name: "String", + }, + }, }; const blocks = { parameterPath: "blocks", - mapper: BlockLookupList + mapper: BlockLookupList, }; const comp25 = { parameterPath: "comp", @@ -21867,9 +18261,9 @@ const comp25 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; const listType = { parameterPath: "listType", @@ -21880,9 +18274,9 @@ const listType = { xmlName: "blocklisttype", type: { name: "Enum", - allowedValues: ["committed", "uncommitted", "all"] - } - } + allowedValues: ["committed", "uncommitted", "all"], + }, + }, }; /* @@ -21892,8 +18286,8 @@ const listType = { * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -/** Class representing a Service. */ -class Service { +/** Class containing Service operations. */ +class ServiceImpl { /** * Initialize a new instance of the class Service class. 
* @param client Reference to the service client @@ -21908,11 +18302,7 @@ class Service { * @param options The options parameters. */ setProperties(blobServiceProperties, options) { - const operationArguments = { - blobServiceProperties, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setPropertiesOperationSpec); + return this.client.sendOperationRequest({ blobServiceProperties, options }, setPropertiesOperationSpec); } /** * gets the properties of a storage account's Blob service, including properties for Storage Analytics @@ -21920,10 +18310,7 @@ class Service { * @param options The options parameters. */ getProperties(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec$2); + return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec$2); } /** * Retrieves statistics related to replication for the Blob service. It is only available on the @@ -21932,20 +18319,14 @@ class Service { * @param options The options parameters. */ getStatistics(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getStatisticsOperationSpec); + return this.client.sendOperationRequest({ options }, getStatisticsOperationSpec); } /** * The List Containers Segment operation returns a list of the containers under the specified account * @param options The options parameters. */ listContainersSegment(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, listContainersSegmentOperationSpec); + return this.client.sendOperationRequest({ options }, listContainersSegmentOperationSpec); } /** * Retrieves a user delegation key for the Blob service. This is only a valid operation when using @@ -21954,21 +18335,14 @@ class Service { * @param options The options parameters. */ getUserDelegationKey(keyInfo, options) { - const operationArguments = { - keyInfo, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getUserDelegationKeyOperationSpec); + return this.client.sendOperationRequest({ keyInfo, options }, getUserDelegationKeyOperationSpec); } /** * Returns the sku name and account kind * @param options The options parameters. */ getAccountInfo(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec$2); + return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec$2); } /** * The Batch operation allows multiple API calls to be embedded into a single HTTP request. @@ -21979,13 +18353,7 @@ class Service { * @param options The options parameters. 
*/ submitBatch(contentLength, multipartContentType, body, options) { - const operationArguments = { - contentLength, - multipartContentType, - body, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec$1); + return this.client.sendOperationRequest({ contentLength, multipartContentType, body, options }, submitBatchOperationSpec$1); } /** * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a @@ -21994,43 +18362,40 @@ class Service { * @param options The options parameters. */ filterBlobs(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec$1); + return this.client.sendOperationRequest({ options }, filterBlobsOperationSpec$1); } } // Operation Specifications -const xmlSerializer$5 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const xmlSerializer$5 = coreClient__namespace.createSerializer(Mappers, /* isXml */ true); const setPropertiesOperationSpec = { path: "/", httpMethod: "PUT", responses: { 202: { - headersMapper: ServiceSetPropertiesHeaders + headersMapper: ServiceSetPropertiesHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ServiceSetPropertiesExceptionHeaders - } + headersMapper: ServiceSetPropertiesExceptionHeaders, + }, }, requestBody: blobServiceProperties, queryParameters: [ restype, comp, - timeoutInSeconds + timeoutInSeconds, ], urlParameters: [url], headerParameters: [ contentType, accept, version, - requestId + requestId, ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer$5 + serializer: xmlSerializer$5, }; const getPropertiesOperationSpec$2 = { path: "/", @@ -22038,26 +18403,26 @@ const getPropertiesOperationSpec$2 = { responses: { 200: { bodyMapper: BlobServiceProperties, - headersMapper: ServiceGetPropertiesHeaders + headersMapper: ServiceGetPropertiesHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ServiceGetPropertiesExceptionHeaders - } + headersMapper: ServiceGetPropertiesExceptionHeaders, + }, }, queryParameters: [ restype, comp, - timeoutInSeconds + timeoutInSeconds, ], urlParameters: [url], headerParameters: [ version, requestId, - accept1 + accept1, ], isXML: true, - serializer: xmlSerializer$5 + serializer: xmlSerializer$5, }; const getStatisticsOperationSpec = { path: "/", @@ -22065,26 +18430,26 @@ const getStatisticsOperationSpec = { responses: { 200: { bodyMapper: BlobServiceStatistics, - headersMapper: ServiceGetStatisticsHeaders + headersMapper: ServiceGetStatisticsHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ServiceGetStatisticsExceptionHeaders - } + headersMapper: ServiceGetStatisticsExceptionHeaders, + }, }, queryParameters: [ restype, timeoutInSeconds, - comp1 + comp1, ], urlParameters: [url], headerParameters: [ version, requestId, - accept1 + accept1, ], isXML: true, - serializer: xmlSerializer$5 + serializer: xmlSerializer$5, }; const listContainersSegmentOperationSpec = { path: "/", @@ -22092,12 +18457,12 @@ const listContainersSegmentOperationSpec = { responses: { 200: { bodyMapper: ListContainersSegmentResponse, - headersMapper: ServiceListContainersSegmentHeaders + headersMapper: ServiceListContainersSegmentHeaders, }, default: { bodyMapper: StorageError, - headersMapper: 
ServiceListContainersSegmentExceptionHeaders - } + headersMapper: ServiceListContainersSegmentExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, @@ -22105,16 +18470,16 @@ const listContainersSegmentOperationSpec = { prefix, marker, maxPageSize, - include + include, ], urlParameters: [url], headerParameters: [ version, requestId, - accept1 + accept1, ], isXML: true, - serializer: xmlSerializer$5 + serializer: xmlSerializer$5, }; const getUserDelegationKeyOperationSpec = { path: "/", @@ -22122,48 +18487,48 @@ const getUserDelegationKeyOperationSpec = { responses: { 200: { bodyMapper: UserDelegationKey, - headersMapper: ServiceGetUserDelegationKeyHeaders + headersMapper: ServiceGetUserDelegationKeyHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ServiceGetUserDelegationKeyExceptionHeaders - } + headersMapper: ServiceGetUserDelegationKeyExceptionHeaders, + }, }, requestBody: keyInfo, queryParameters: [ restype, timeoutInSeconds, - comp3 + comp3, ], urlParameters: [url], headerParameters: [ contentType, accept, version, - requestId + requestId, ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer$5 + serializer: xmlSerializer$5, }; const getAccountInfoOperationSpec$2 = { path: "/", httpMethod: "GET", responses: { 200: { - headersMapper: ServiceGetAccountInfoHeaders + headersMapper: ServiceGetAccountInfoHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ServiceGetAccountInfoExceptionHeaders - } + headersMapper: ServiceGetAccountInfoExceptionHeaders, + }, }, queryParameters: [comp, restype1], urlParameters: [url], headerParameters: [version, accept1], isXML: true, - serializer: xmlSerializer$5 + serializer: xmlSerializer$5, }; const submitBatchOperationSpec$1 = { path: "/", @@ -22172,30 +18537,29 @@ const submitBatchOperationSpec$1 = { 202: { bodyMapper: { type: { name: "Stream" }, - serializedName: "parsedResponse" + serializedName: "parsedResponse", }, - headersMapper: ServiceSubmitBatchHeaders + headersMapper: ServiceSubmitBatchHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ServiceSubmitBatchExceptionHeaders - } + headersMapper: ServiceSubmitBatchExceptionHeaders, + }, }, requestBody: body, queryParameters: [timeoutInSeconds, comp4], urlParameters: [url], headerParameters: [ - contentType, accept, version, requestId, contentLength, - multipartContentType + multipartContentType, ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer$5 + serializer: xmlSerializer$5, }; const filterBlobsOperationSpec$1 = { path: "/", @@ -22203,28 +18567,28 @@ const filterBlobsOperationSpec$1 = { responses: { 200: { bodyMapper: FilterBlobSegment, - headersMapper: ServiceFilterBlobsHeaders + headersMapper: ServiceFilterBlobsHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ServiceFilterBlobsExceptionHeaders - } + headersMapper: ServiceFilterBlobsExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, marker, maxPageSize, comp5, - where + where, ], urlParameters: [url], headerParameters: [ version, requestId, - accept1 + accept1, ], isXML: true, - serializer: xmlSerializer$5 + serializer: xmlSerializer$5, }; /* @@ -22234,8 +18598,8 @@ const filterBlobsOperationSpec$1 = { * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -/** Class representing a Container. */ -class Container { +/** Class containing Container operations. 
*/ +class ContainerImpl { /** * Initialize a new instance of the class Container class. * @param client Reference to the service client @@ -22249,10 +18613,7 @@ class Container { * @param options The options parameters. */ create(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, createOperationSpec$2); + return this.client.sendOperationRequest({ options }, createOperationSpec$2); } /** * returns all user-defined metadata and system properties for the specified container. The data @@ -22260,10 +18621,7 @@ class Container { * @param options The options parameters. */ getProperties(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec$1); + return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec$1); } /** * operation marks the specified container for deletion. The container and any blobs contained within @@ -22271,20 +18629,14 @@ class Container { * @param options The options parameters. */ delete(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, deleteOperationSpec$1); + return this.client.sendOperationRequest({ options }, deleteOperationSpec$1); } /** * operation sets one or more user-defined name-value pairs for the specified container. * @param options The options parameters. */ setMetadata(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec$1); + return this.client.sendOperationRequest({ options }, setMetadataOperationSpec$1); } /** * gets the permissions for the specified container. The permissions indicate whether container data @@ -22292,10 +18644,7 @@ class Container { * @param options The options parameters. */ getAccessPolicy(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getAccessPolicyOperationSpec); + return this.client.sendOperationRequest({ options }, getAccessPolicyOperationSpec); } /** * sets the permissions for the specified container. The permissions indicate whether blobs in a @@ -22303,20 +18652,14 @@ class Container { * @param options The options parameters. */ setAccessPolicy(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setAccessPolicyOperationSpec); + return this.client.sendOperationRequest({ options }, setAccessPolicyOperationSpec); } /** * Restores a previously-deleted container. * @param options The options parameters. */ restore(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, restoreOperationSpec); + return this.client.sendOperationRequest({ options }, restoreOperationSpec); } /** * Renames an existing container. @@ -22324,11 +18667,7 @@ class Container { * @param options The options parameters. 
*/ rename(sourceContainerName, options) { - const operationArguments = { - sourceContainerName, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, renameOperationSpec); + return this.client.sendOperationRequest({ sourceContainerName, options }, renameOperationSpec); } /** * The Batch operation allows multiple API calls to be embedded into a single HTTP request. @@ -22339,13 +18678,7 @@ class Container { * @param options The options parameters. */ submitBatch(contentLength, multipartContentType, body, options) { - const operationArguments = { - contentLength, - multipartContentType, - body, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec); + return this.client.sendOperationRequest({ contentLength, multipartContentType, body, options }, submitBatchOperationSpec); } /** * The Filter Blobs operation enables callers to list blobs in a container whose tags match a given @@ -22353,10 +18686,7 @@ class Container { * @param options The options parameters. */ filterBlobs(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec); + return this.client.sendOperationRequest({ options }, filterBlobsOperationSpec); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can @@ -22364,10 +18694,7 @@ class Container { * @param options The options parameters. */ acquireLease(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec$1); + return this.client.sendOperationRequest({ options }, acquireLeaseOperationSpec$1); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can @@ -22376,11 +18703,7 @@ class Container { * @param options The options parameters. */ releaseLease(leaseId, options) { - const operationArguments = { - leaseId, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec$1); + return this.client.sendOperationRequest({ leaseId, options }, releaseLeaseOperationSpec$1); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can @@ -22389,11 +18712,7 @@ class Container { * @param options The options parameters. */ renewLease(leaseId, options) { - const operationArguments = { - leaseId, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec$1); + return this.client.sendOperationRequest({ leaseId, options }, renewLeaseOperationSpec$1); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can @@ -22401,10 +18720,7 @@ class Container { * @param options The options parameters. 
*/ breakLease(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec$1); + return this.client.sendOperationRequest({ options }, breakLeaseOperationSpec$1); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can @@ -22416,22 +18732,14 @@ class Container { * @param options The options parameters. */ changeLease(leaseId, proposedLeaseId, options) { - const operationArguments = { - leaseId, - proposedLeaseId, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec$1); + return this.client.sendOperationRequest({ leaseId, proposedLeaseId, options }, changeLeaseOperationSpec$1); } /** * [Update] The List Blobs operation returns a list of the blobs under the specified container * @param options The options parameters. */ listBlobFlatSegment(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, listBlobFlatSegmentOperationSpec); + return this.client.sendOperationRequest({ options }, listBlobFlatSegmentOperationSpec); } /** * [Update] The List Blobs operation returns a list of the blobs under the specified container @@ -22442,36 +18750,29 @@ class Container { * @param options The options parameters. */ listBlobHierarchySegment(delimiter, options) { - const operationArguments = { - delimiter, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, listBlobHierarchySegmentOperationSpec); + return this.client.sendOperationRequest({ delimiter, options }, listBlobHierarchySegmentOperationSpec); } /** * Returns the sku name and account kind * @param options The options parameters. 
*/ getAccountInfo(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec$1); + return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec$1); } } // Operation Specifications -const xmlSerializer$4 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const xmlSerializer$4 = coreClient__namespace.createSerializer(Mappers, /* isXml */ true); const createOperationSpec$2 = { path: "/{containerName}", httpMethod: "PUT", responses: { 201: { - headersMapper: ContainerCreateHeaders + headersMapper: ContainerCreateHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ContainerCreateExceptionHeaders - } + headersMapper: ContainerCreateExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, restype2], urlParameters: [url], @@ -22482,22 +18783,22 @@ const createOperationSpec$2 = { metadata, access, defaultEncryptionScope, - preventEncryptionScopeOverride + preventEncryptionScopeOverride, ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$4, }; const getPropertiesOperationSpec$1 = { path: "/{containerName}", httpMethod: "GET", responses: { 200: { - headersMapper: ContainerGetPropertiesHeaders + headersMapper: ContainerGetPropertiesHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ContainerGetPropertiesExceptionHeaders - } + headersMapper: ContainerGetPropertiesExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, restype2], urlParameters: [url], @@ -22505,22 +18806,22 @@ const getPropertiesOperationSpec$1 = { version, requestId, accept1, - leaseId + leaseId, ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$4, }; const deleteOperationSpec$1 = { path: "/{containerName}", httpMethod: "DELETE", responses: { 202: { - headersMapper: ContainerDeleteHeaders + headersMapper: ContainerDeleteHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ContainerDeleteExceptionHeaders - } + headersMapper: ContainerDeleteExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, restype2], urlParameters: [url], @@ -22530,27 +18831,27 @@ const deleteOperationSpec$1 = { accept1, leaseId, ifModifiedSince, - ifUnmodifiedSince + ifUnmodifiedSince, ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$4, }; const setMetadataOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { 200: { - headersMapper: ContainerSetMetadataHeaders + headersMapper: ContainerSetMetadataHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ContainerSetMetadataExceptionHeaders - } + headersMapper: ContainerSetMetadataExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, restype2, - comp6 + comp6, ], urlParameters: [url], headerParameters: [ @@ -22559,10 +18860,10 @@ const setMetadataOperationSpec$1 = { accept1, metadata, leaseId, - ifModifiedSince + ifModifiedSince, ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$4, }; const getAccessPolicyOperationSpec = { path: "/{containerName}", @@ -22573,53 +18874,53 @@ const getAccessPolicyOperationSpec = { type: { name: "Sequence", element: { - type: { name: "Composite", className: "SignedIdentifier" } - } + type: { name: "Composite", className: "SignedIdentifier" }, + }, }, serializedName: "SignedIdentifiers", xmlName: "SignedIdentifiers", xmlIsWrapped: true, - xmlElementName: "SignedIdentifier" + xmlElementName: 
"SignedIdentifier", }, - headersMapper: ContainerGetAccessPolicyHeaders + headersMapper: ContainerGetAccessPolicyHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ContainerGetAccessPolicyExceptionHeaders - } + headersMapper: ContainerGetAccessPolicyExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, restype2, - comp7 + comp7, ], urlParameters: [url], headerParameters: [ version, requestId, accept1, - leaseId + leaseId, ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$4, }; const setAccessPolicyOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { 200: { - headersMapper: ContainerSetAccessPolicyHeaders + headersMapper: ContainerSetAccessPolicyHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ContainerSetAccessPolicyExceptionHeaders - } + headersMapper: ContainerSetAccessPolicyExceptionHeaders, + }, }, requestBody: containerAcl, queryParameters: [ timeoutInSeconds, restype2, - comp7 + comp7, ], urlParameters: [url], headerParameters: [ @@ -22630,29 +18931,29 @@ const setAccessPolicyOperationSpec = { access, leaseId, ifModifiedSince, - ifUnmodifiedSince + ifUnmodifiedSince, ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer$4 + serializer: xmlSerializer$4, }; const restoreOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { 201: { - headersMapper: ContainerRestoreHeaders + headersMapper: ContainerRestoreHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ContainerRestoreExceptionHeaders - } + headersMapper: ContainerRestoreExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, restype2, - comp8 + comp8, ], urlParameters: [url], headerParameters: [ @@ -22660,27 +18961,27 @@ const restoreOperationSpec = { requestId, accept1, deletedContainerName, - deletedContainerVersion + deletedContainerVersion, ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$4, }; const renameOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { 200: { - headersMapper: ContainerRenameHeaders + headersMapper: ContainerRenameHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ContainerRenameExceptionHeaders - } + headersMapper: ContainerRenameExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, restype2, - comp9 + comp9, ], urlParameters: [url], headerParameters: [ @@ -22688,10 +18989,10 @@ const renameOperationSpec = { requestId, accept1, sourceContainerName, - sourceLeaseId + sourceLeaseId, ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$4, }; const submitBatchOperationSpec = { path: "/{containerName}", @@ -22700,34 +19001,33 @@ const submitBatchOperationSpec = { 202: { bodyMapper: { type: { name: "Stream" }, - serializedName: "parsedResponse" + serializedName: "parsedResponse", }, - headersMapper: ContainerSubmitBatchHeaders + headersMapper: ContainerSubmitBatchHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ContainerSubmitBatchExceptionHeaders - } + headersMapper: ContainerSubmitBatchExceptionHeaders, + }, }, requestBody: body, queryParameters: [ timeoutInSeconds, comp4, - restype2 + restype2, ], urlParameters: [url], headerParameters: [ - contentType, accept, version, requestId, contentLength, - multipartContentType + multipartContentType, ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer$4 + serializer: xmlSerializer$4, }; const filterBlobsOperationSpec = { 
path: "/{containerName}", @@ -22735,12 +19035,12 @@ const filterBlobsOperationSpec = { responses: { 200: { bodyMapper: FilterBlobSegment, - headersMapper: ContainerFilterBlobsHeaders + headersMapper: ContainerFilterBlobsHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ContainerFilterBlobsExceptionHeaders - } + headersMapper: ContainerFilterBlobsExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, @@ -22748,33 +19048,33 @@ const filterBlobsOperationSpec = { maxPageSize, comp5, where, - restype2 + restype2, ], urlParameters: [url], headerParameters: [ version, requestId, - accept1 + accept1, ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$4, }; const acquireLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { 201: { - headersMapper: ContainerAcquireLeaseHeaders + headersMapper: ContainerAcquireLeaseHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ContainerAcquireLeaseExceptionHeaders - } + headersMapper: ContainerAcquireLeaseExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, restype2, - comp10 + comp10, ], urlParameters: [url], headerParameters: [ @@ -22785,27 +19085,27 @@ const acquireLeaseOperationSpec$1 = { ifUnmodifiedSince, action, duration, - proposedLeaseId + proposedLeaseId, ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$4, }; const releaseLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { 200: { - headersMapper: ContainerReleaseLeaseHeaders + headersMapper: ContainerReleaseLeaseHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ContainerReleaseLeaseExceptionHeaders - } + headersMapper: ContainerReleaseLeaseExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, restype2, - comp10 + comp10, ], urlParameters: [url], headerParameters: [ @@ -22815,27 +19115,27 @@ const releaseLeaseOperationSpec$1 = { ifModifiedSince, ifUnmodifiedSince, action1, - leaseId1 + leaseId1, ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$4, }; const renewLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { 200: { - headersMapper: ContainerRenewLeaseHeaders + headersMapper: ContainerRenewLeaseHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ContainerRenewLeaseExceptionHeaders - } + headersMapper: ContainerRenewLeaseExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, restype2, - comp10 + comp10, ], urlParameters: [url], headerParameters: [ @@ -22845,27 +19145,27 @@ const renewLeaseOperationSpec$1 = { ifModifiedSince, ifUnmodifiedSince, leaseId1, - action2 + action2, ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$4, }; const breakLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { 202: { - headersMapper: ContainerBreakLeaseHeaders + headersMapper: ContainerBreakLeaseHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ContainerBreakLeaseExceptionHeaders - } + headersMapper: ContainerBreakLeaseExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, restype2, - comp10 + comp10, ], urlParameters: [url], headerParameters: [ @@ -22875,27 +19175,27 @@ const breakLeaseOperationSpec$1 = { ifModifiedSince, ifUnmodifiedSince, action3, - breakPeriod + breakPeriod, ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$4, }; const changeLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { 200: { - headersMapper: 
ContainerChangeLeaseHeaders + headersMapper: ContainerChangeLeaseHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ContainerChangeLeaseExceptionHeaders - } + headersMapper: ContainerChangeLeaseExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, restype2, - comp10 + comp10, ], urlParameters: [url], headerParameters: [ @@ -22906,10 +19206,10 @@ const changeLeaseOperationSpec$1 = { ifUnmodifiedSince, leaseId1, action4, - proposedLeaseId1 + proposedLeaseId1, ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$4, }; const listBlobFlatSegmentOperationSpec = { path: "/{containerName}", @@ -22917,12 +19217,12 @@ const listBlobFlatSegmentOperationSpec = { responses: { 200: { bodyMapper: ListBlobsFlatSegmentResponse, - headersMapper: ContainerListBlobFlatSegmentHeaders + headersMapper: ContainerListBlobFlatSegmentHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ContainerListBlobFlatSegmentExceptionHeaders - } + headersMapper: ContainerListBlobFlatSegmentExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, @@ -22931,16 +19231,16 @@ const listBlobFlatSegmentOperationSpec = { marker, maxPageSize, restype2, - include1 + include1, ], urlParameters: [url], headerParameters: [ version, requestId, - accept1 + accept1, ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$4, }; const listBlobHierarchySegmentOperationSpec = { path: "/{containerName}", @@ -22948,12 +19248,12 @@ const listBlobHierarchySegmentOperationSpec = { responses: { 200: { bodyMapper: ListBlobsHierarchySegmentResponse, - headersMapper: ContainerListBlobHierarchySegmentHeaders + headersMapper: ContainerListBlobHierarchySegmentHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ContainerListBlobHierarchySegmentExceptionHeaders - } + headersMapper: ContainerListBlobHierarchySegmentExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, @@ -22963,34 +19263,34 @@ const listBlobHierarchySegmentOperationSpec = { maxPageSize, restype2, include1, - delimiter + delimiter, ], urlParameters: [url], headerParameters: [ version, requestId, - accept1 + accept1, ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$4, }; const getAccountInfoOperationSpec$1 = { path: "/{containerName}", httpMethod: "GET", responses: { 200: { - headersMapper: ContainerGetAccountInfoHeaders + headersMapper: ContainerGetAccountInfoHeaders, }, default: { bodyMapper: StorageError, - headersMapper: ContainerGetAccountInfoExceptionHeaders - } + headersMapper: ContainerGetAccountInfoExceptionHeaders, + }, }, queryParameters: [comp, restype1], urlParameters: [url], headerParameters: [version, accept1], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$4, }; /* @@ -23000,8 +19300,8 @@ const getAccountInfoOperationSpec$1 = { * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -/** Class representing a Blob. */ -class Blob$1 { +/** Class containing Blob operations. */ +class BlobImpl { /** * Initialize a new instance of the class Blob class. * @param client Reference to the service client @@ -23015,10 +19315,7 @@ class Blob$1 { * @param options The options parameters. 
*/ download(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, downloadOperationSpec); + return this.client.sendOperationRequest({ options }, downloadOperationSpec); } /** * The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system @@ -23026,10 +19323,7 @@ class Blob$1 { * @param options The options parameters. */ getProperties(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec); + return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec); } /** * If the storage account's soft delete feature is disabled then, when a blob is deleted, it is @@ -23047,20 +19341,14 @@ class Blob$1 { * @param options The options parameters. */ delete(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, deleteOperationSpec); + return this.client.sendOperationRequest({ options }, deleteOperationSpec); } /** * Undelete a blob that was previously soft deleted * @param options The options parameters. */ undelete(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, undeleteOperationSpec); + return this.client.sendOperationRequest({ options }, undeleteOperationSpec); } /** * Sets the time a blob will expire and be deleted. @@ -23068,41 +19356,28 @@ class Blob$1 { * @param options The options parameters. */ setExpiry(expiryOptions, options) { - const operationArguments = { - expiryOptions, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setExpiryOperationSpec); + return this.client.sendOperationRequest({ expiryOptions, options }, setExpiryOperationSpec); } /** * The Set HTTP Headers operation sets system properties on the blob * @param options The options parameters. */ setHttpHeaders(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setHttpHeadersOperationSpec); + return this.client.sendOperationRequest({ options }, setHttpHeadersOperationSpec); } /** * The Set Immutability Policy operation sets the immutability policy on the blob * @param options The options parameters. */ setImmutabilityPolicy(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setImmutabilityPolicyOperationSpec); + return this.client.sendOperationRequest({ options }, setImmutabilityPolicyOperationSpec); } /** * The Delete Immutability Policy operation deletes the immutability policy on the blob * @param options The options parameters. 
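// The method hunks above and below all apply one mechanical rewrite: with
// @azure/core-client, sendOperationRequest resolves OperationOptions itself,
// so the per-call coreHttp__namespace.operationOptionsToRequestOptionsBase
// wrapper and the intermediate operationArguments object are dropped. The
// shape of the change, condensed from the diff (illustrative only):
//
//   // before (@azure/core-http)
//   download(options) {
//     const operationArguments = {
//       options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}),
//     };
//     return this.client.sendOperationRequest(operationArguments, downloadOperationSpec);
//   }
//
//   // after (@azure/core-client)
//   download(options) {
//     return this.client.sendOperationRequest({ options }, downloadOperationSpec);
//   }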
*/ deleteImmutabilityPolicy(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, deleteImmutabilityPolicyOperationSpec); + return this.client.sendOperationRequest({ options }, deleteImmutabilityPolicyOperationSpec); } /** * The Set Legal Hold operation sets a legal hold on the blob. @@ -23110,11 +19385,7 @@ class Blob$1 { * @param options The options parameters. */ setLegalHold(legalHold, options) { - const operationArguments = { - legalHold, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setLegalHoldOperationSpec); + return this.client.sendOperationRequest({ legalHold, options }, setLegalHoldOperationSpec); } /** * The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more @@ -23122,10 +19393,7 @@ class Blob$1 { * @param options The options parameters. */ setMetadata(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec); + return this.client.sendOperationRequest({ options }, setMetadataOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete @@ -23133,10 +19401,7 @@ class Blob$1 { * @param options The options parameters. */ acquireLease(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec); + return this.client.sendOperationRequest({ options }, acquireLeaseOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete @@ -23145,11 +19410,7 @@ class Blob$1 { * @param options The options parameters. */ releaseLease(leaseId, options) { - const operationArguments = { - leaseId, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec); + return this.client.sendOperationRequest({ leaseId, options }, releaseLeaseOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete @@ -23158,11 +19419,7 @@ class Blob$1 { * @param options The options parameters. */ renewLease(leaseId, options) { - const operationArguments = { - leaseId, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec); + return this.client.sendOperationRequest({ leaseId, options }, renewLeaseOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete @@ -23174,12 +19431,7 @@ class Blob$1 { * @param options The options parameters. 
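// The generated lease methods above back the SDK's public BlobLeaseClient.
// A minimal sketch against the public @azure/storage-blob API (Node.js);
// the connection-string env var and container/blob names are placeholders:
const { BlobServiceClient } = require("@azure/storage-blob");

async function leaseRoundTrip() {
  const service = BlobServiceClient.fromConnectionString(process.env.AZURE_STORAGE_CONNECTION_STRING);
  const blob = service.getContainerClient("demo").getBlobClient("data.bin");
  const lease = blob.getBlobLeaseClient();
  // Lease duration must be 15-60 seconds, or -1 for an infinite lease.
  await lease.acquireLease(30);
  try {
    // While leased, writes must present the lease id as a request condition.
    await blob.setMetadata({ locked: "true" }, { conditions: { leaseId: lease.leaseId } });
  } finally {
    await lease.releaseLease();
  }
}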
*/ changeLease(leaseId, proposedLeaseId, options) { - const operationArguments = { - leaseId, - proposedLeaseId, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec); + return this.client.sendOperationRequest({ leaseId, proposedLeaseId, options }, changeLeaseOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete @@ -23187,20 +19439,14 @@ class Blob$1 { * @param options The options parameters. */ breakLease(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec); + return this.client.sendOperationRequest({ options }, breakLeaseOperationSpec); } /** * The Create Snapshot operation creates a read-only snapshot of a blob * @param options The options parameters. */ createSnapshot(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, createSnapshotOperationSpec); + return this.client.sendOperationRequest({ options }, createSnapshotOperationSpec); } /** * The Start Copy From URL operation copies a blob or an internet resource to a new blob. @@ -23211,11 +19457,7 @@ class Blob$1 { * @param options The options parameters. */ startCopyFromURL(copySource, options) { - const operationArguments = { - copySource, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, startCopyFromURLOperationSpec); + return this.client.sendOperationRequest({ copySource, options }, startCopyFromURLOperationSpec); } /** * The Copy From URL operation copies a blob or an internet resource to a new blob. It will not return @@ -23227,11 +19469,7 @@ class Blob$1 { * @param options The options parameters. */ copyFromURL(copySource, options) { - const operationArguments = { - copySource, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, copyFromURLOperationSpec); + return this.client.sendOperationRequest({ copySource, options }, copyFromURLOperationSpec); } /** * The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination @@ -23241,11 +19479,7 @@ class Blob$1 { * @param options The options parameters. */ abortCopyFromURL(copyId, options) { - const operationArguments = { - copyId, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, abortCopyFromURLOperationSpec); + return this.client.sendOperationRequest({ copyId, options }, abortCopyFromURLOperationSpec); } /** * The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a premium @@ -23257,21 +19491,14 @@ class Blob$1 { * @param options The options parameters. 
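// startCopyFromURL/abortCopyFromURL above surface publicly as
// BlobClient.beginCopyFromURL (a poller) and BlobClient.abortCopyFromURL.
// A hedged sketch (names and the env var are placeholders):
const { BlobServiceClient } = require("@azure/storage-blob");

async function copyThenMaybeAbort(sourceUrl) {
  const service = BlobServiceClient.fromConnectionString(process.env.AZURE_STORAGE_CONNECTION_STRING);
  const dest = service.getContainerClient("demo").getBlobClient("copy-target.bin");
  const poller = await dest.beginCopyFromURL(sourceUrl);
  const props = await dest.getProperties();
  if (props.copyStatus === "pending") {
    // Aborting requires the copy id the service assigned to this operation.
    await dest.abortCopyFromURL(props.copyId);
  } else {
    await poller.pollUntilDone();
  }
}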
*/ setTier(tier, options) { - const operationArguments = { - tier, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setTierOperationSpec); + return this.client.sendOperationRequest({ tier, options }, setTierOperationSpec); } /** * Returns the sku name and account kind * @param options The options parameters. */ getAccountInfo(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec); + return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec); } /** * The Query operation enables users to select/project on blob data by providing simple query @@ -23279,34 +19506,25 @@ class Blob$1 { * @param options The options parameters. */ query(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, queryOperationSpec); + return this.client.sendOperationRequest({ options }, queryOperationSpec); } /** * The Get Tags operation enables users to get the tags associated with a blob. * @param options The options parameters. */ getTags(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getTagsOperationSpec); + return this.client.sendOperationRequest({ options }, getTagsOperationSpec); } /** * The Set Tags operation enables users to set tags on a blob. * @param options The options parameters. */ setTags(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setTagsOperationSpec); + return this.client.sendOperationRequest({ options }, setTagsOperationSpec); } } // Operation Specifications -const xmlSerializer$3 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const xmlSerializer$3 = coreClient__namespace.createSerializer(Mappers, /* isXml */ true); const downloadOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "GET", @@ -23314,26 +19532,26 @@ const downloadOperationSpec = { 200: { bodyMapper: { type: { name: "Stream" }, - serializedName: "parsedResponse" + serializedName: "parsedResponse", }, - headersMapper: BlobDownloadHeaders + headersMapper: BlobDownloadHeaders, }, 206: { bodyMapper: { type: { name: "Stream" }, - serializedName: "parsedResponse" + serializedName: "parsedResponse", }, - headersMapper: BlobDownloadHeaders + headersMapper: BlobDownloadHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobDownloadExceptionHeaders - } + headersMapper: BlobDownloadExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, snapshot, - versionId + versionId, ], urlParameters: [url], headerParameters: [ @@ -23351,27 +19569,27 @@ const downloadOperationSpec = { encryptionAlgorithm, ifMatch, ifNoneMatch, - ifTags + ifTags, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const getPropertiesOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "HEAD", responses: { 200: { - headersMapper: BlobGetPropertiesHeaders + headersMapper: BlobGetPropertiesHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobGetPropertiesExceptionHeaders 
- } + headersMapper: BlobGetPropertiesExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, snapshot, - versionId + versionId, ], urlParameters: [url], headerParameters: [ @@ -23386,28 +19604,28 @@ const getPropertiesOperationSpec = { encryptionAlgorithm, ifMatch, ifNoneMatch, - ifTags + ifTags, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const deleteOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "DELETE", responses: { 202: { - headersMapper: BlobDeleteHeaders + headersMapper: BlobDeleteHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobDeleteExceptionHeaders - } + headersMapper: BlobDeleteExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, snapshot, versionId, - blobDeleteType + blobDeleteType, ], urlParameters: [url], headerParameters: [ @@ -23420,44 +19638,44 @@ const deleteOperationSpec = { ifMatch, ifNoneMatch, ifTags, - deleteSnapshots + deleteSnapshots, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const undeleteOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { - headersMapper: BlobUndeleteHeaders + headersMapper: BlobUndeleteHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobUndeleteExceptionHeaders - } + headersMapper: BlobUndeleteExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, comp8], urlParameters: [url], headerParameters: [ version, requestId, - accept1 + accept1, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const setExpiryOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { - headersMapper: BlobSetExpiryHeaders + headersMapper: BlobSetExpiryHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobSetExpiryExceptionHeaders - } + headersMapper: BlobSetExpiryExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, comp11], urlParameters: [url], @@ -23466,22 +19684,22 @@ const setExpiryOperationSpec = { requestId, accept1, expiryOptions, - expiresOn + expiresOn, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const setHttpHeadersOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { - headersMapper: BlobSetHttpHeadersHeaders + headersMapper: BlobSetHttpHeadersHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobSetHttpHeadersExceptionHeaders - } + headersMapper: BlobSetHttpHeadersExceptionHeaders, + }, }, queryParameters: [comp, timeoutInSeconds], urlParameters: [url], @@ -23500,22 +19718,22 @@ const setHttpHeadersOperationSpec = { blobContentMD5, blobContentEncoding, blobContentLanguage, - blobContentDisposition + blobContentDisposition, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const setImmutabilityPolicyOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { - headersMapper: BlobSetImmutabilityPolicyHeaders + headersMapper: BlobSetImmutabilityPolicyHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobSetImmutabilityPolicyExceptionHeaders - } + headersMapper: BlobSetImmutabilityPolicyExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, comp12], urlParameters: [url], @@ -23525,44 +19743,44 @@ const setImmutabilityPolicyOperationSpec = { accept1, ifUnmodifiedSince, immutabilityPolicyExpiry, - immutabilityPolicyMode + immutabilityPolicyMode, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; 
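// Each *OperationSpec in this file is declarative: sendOperationRequest
// builds the HTTP request from it and picks a mapper by status code. A
// stripped-down hypothetical spec showing the moving parts (the identifiers
// are the shared parameter/mapper objects defined elsewhere in this bundle):
const exampleOperationSpec = {
  path: "/{containerName}/{blob}",   // appended to the "{url}" endpoint template
  httpMethod: "GET",
  responses: {
    200: { headersMapper: BlobGetPropertiesHeaders },  // success: parse headers
    default: {                                         // any other status: error shape
      bodyMapper: StorageError,
      headersMapper: BlobGetPropertiesExceptionHeaders,
    },
  },
  queryParameters: [timeoutInSeconds],  // shared OperationParameter objects
  urlParameters: [url],
  headerParameters: [version, accept1],
  isXML: true,                          // storage error bodies are XML
  serializer: xmlSerializer$3,          // coreClient.createSerializer(Mappers, true)
};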
const deleteImmutabilityPolicyOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "DELETE", responses: { 200: { - headersMapper: BlobDeleteImmutabilityPolicyHeaders + headersMapper: BlobDeleteImmutabilityPolicyHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobDeleteImmutabilityPolicyExceptionHeaders - } + headersMapper: BlobDeleteImmutabilityPolicyExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, comp12], urlParameters: [url], headerParameters: [ version, requestId, - accept1 + accept1, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const setLegalHoldOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { - headersMapper: BlobSetLegalHoldHeaders + headersMapper: BlobSetLegalHoldHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobSetLegalHoldExceptionHeaders - } + headersMapper: BlobSetLegalHoldExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, comp13], urlParameters: [url], @@ -23570,22 +19788,22 @@ const setLegalHoldOperationSpec = { version, requestId, accept1, - legalHold + legalHold, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const setMetadataOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { - headersMapper: BlobSetMetadataHeaders + headersMapper: BlobSetMetadataHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobSetMetadataExceptionHeaders - } + headersMapper: BlobSetMetadataExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, comp6], urlParameters: [url], @@ -23603,22 +19821,22 @@ const setMetadataOperationSpec = { ifMatch, ifNoneMatch, ifTags, - encryptionScope + encryptionScope, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const acquireLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: BlobAcquireLeaseHeaders + headersMapper: BlobAcquireLeaseHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobAcquireLeaseExceptionHeaders - } + headersMapper: BlobAcquireLeaseExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, comp10], urlParameters: [url], @@ -23633,22 +19851,22 @@ const acquireLeaseOperationSpec = { proposedLeaseId, ifMatch, ifNoneMatch, - ifTags + ifTags, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const releaseLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { - headersMapper: BlobReleaseLeaseHeaders + headersMapper: BlobReleaseLeaseHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobReleaseLeaseExceptionHeaders - } + headersMapper: BlobReleaseLeaseExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, comp10], urlParameters: [url], @@ -23662,22 +19880,22 @@ const releaseLeaseOperationSpec = { leaseId1, ifMatch, ifNoneMatch, - ifTags + ifTags, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const renewLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { - headersMapper: BlobRenewLeaseHeaders + headersMapper: BlobRenewLeaseHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobRenewLeaseExceptionHeaders - } + headersMapper: BlobRenewLeaseExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, comp10], urlParameters: [url], @@ -23691,22 +19909,22 @@ const renewLeaseOperationSpec = { action2, ifMatch, ifNoneMatch, - ifTags + 
ifTags, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const changeLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { - headersMapper: BlobChangeLeaseHeaders + headersMapper: BlobChangeLeaseHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobChangeLeaseExceptionHeaders - } + headersMapper: BlobChangeLeaseExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, comp10], urlParameters: [url], @@ -23721,22 +19939,22 @@ const changeLeaseOperationSpec = { proposedLeaseId1, ifMatch, ifNoneMatch, - ifTags + ifTags, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const breakLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 202: { - headersMapper: BlobBreakLeaseHeaders + headersMapper: BlobBreakLeaseHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobBreakLeaseExceptionHeaders - } + headersMapper: BlobBreakLeaseExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, comp10], urlParameters: [url], @@ -23750,22 +19968,22 @@ const breakLeaseOperationSpec = { breakPeriod, ifMatch, ifNoneMatch, - ifTags + ifTags, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const createSnapshotOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: BlobCreateSnapshotHeaders + headersMapper: BlobCreateSnapshotHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobCreateSnapshotExceptionHeaders - } + headersMapper: BlobCreateSnapshotExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, comp14], urlParameters: [url], @@ -23783,22 +20001,22 @@ const createSnapshotOperationSpec = { ifMatch, ifNoneMatch, ifTags, - encryptionScope + encryptionScope, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const startCopyFromURLOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 202: { - headersMapper: BlobStartCopyFromURLHeaders + headersMapper: BlobStartCopyFromURLHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobStartCopyFromURLExceptionHeaders - } + headersMapper: BlobStartCopyFromURLExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds], urlParameters: [url], @@ -23825,22 +20043,22 @@ const startCopyFromURLOperationSpec = { copySource, blobTagsString, sealBlob, - legalHold1 + legalHold1, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const copyFromURLOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 202: { - headersMapper: BlobCopyFromURLHeaders + headersMapper: BlobCopyFromURLHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobCopyFromURLExceptionHeaders - } + headersMapper: BlobCopyFromURLExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds], urlParameters: [url], @@ -23869,27 +20087,27 @@ const copyFromURLOperationSpec = { xMsRequiresSync, sourceContentMD5, copySourceAuthorization, - copySourceTags + copySourceTags, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const abortCopyFromURLOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 204: { - headersMapper: BlobAbortCopyFromURLHeaders + headersMapper: BlobAbortCopyFromURLHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobAbortCopyFromURLExceptionHeaders - } + headersMapper: BlobAbortCopyFromURLExceptionHeaders, + }, }, 
queryParameters: [ timeoutInSeconds, comp15, - copyId + copyId, ], urlParameters: [url], headerParameters: [ @@ -23897,31 +20115,31 @@ const abortCopyFromURLOperationSpec = { requestId, accept1, leaseId, - copyActionAbortConstant + copyActionAbortConstant, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const setTierOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { - headersMapper: BlobSetTierHeaders + headersMapper: BlobSetTierHeaders, }, 202: { - headersMapper: BlobSetTierHeaders + headersMapper: BlobSetTierHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobSetTierExceptionHeaders - } + headersMapper: BlobSetTierExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, snapshot, versionId, - comp16 + comp16, ], urlParameters: [url], headerParameters: [ @@ -23931,28 +20149,28 @@ const setTierOperationSpec = { leaseId, ifTags, rehydratePriority, - tier1 + tier1, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const getAccountInfoOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "GET", responses: { 200: { - headersMapper: BlobGetAccountInfoHeaders + headersMapper: BlobGetAccountInfoHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobGetAccountInfoExceptionHeaders - } + headersMapper: BlobGetAccountInfoExceptionHeaders, + }, }, queryParameters: [comp, restype1], urlParameters: [url], headerParameters: [version, accept1], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const queryOperationSpec = { path: "/{containerName}/{blob}", @@ -23961,27 +20179,27 @@ const queryOperationSpec = { 200: { bodyMapper: { type: { name: "Stream" }, - serializedName: "parsedResponse" + serializedName: "parsedResponse", }, - headersMapper: BlobQueryHeaders + headersMapper: BlobQueryHeaders, }, 206: { bodyMapper: { type: { name: "Stream" }, - serializedName: "parsedResponse" + serializedName: "parsedResponse", }, - headersMapper: BlobQueryHeaders + headersMapper: BlobQueryHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobQueryExceptionHeaders - } + headersMapper: BlobQueryExceptionHeaders, + }, }, requestBody: queryRequest, queryParameters: [ timeoutInSeconds, snapshot, - comp17 + comp17, ], urlParameters: [url], headerParameters: [ @@ -23997,12 +20215,12 @@ const queryOperationSpec = { encryptionAlgorithm, ifMatch, ifNoneMatch, - ifTags + ifTags, ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const getTagsOperationSpec = { path: "/{containerName}/{blob}", @@ -24010,18 +20228,18 @@ const getTagsOperationSpec = { responses: { 200: { bodyMapper: BlobTags, - headersMapper: BlobGetTagsHeaders + headersMapper: BlobGetTagsHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlobGetTagsExceptionHeaders - } + headersMapper: BlobGetTagsExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, snapshot, versionId, - comp18 + comp18, ], urlParameters: [url], headerParameters: [ @@ -24029,28 +20247,28 @@ const getTagsOperationSpec = { requestId, accept1, leaseId, - ifTags + ifTags, ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; const setTagsOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 204: { - headersMapper: BlobSetTagsHeaders + headersMapper: BlobSetTagsHeaders, }, default: { bodyMapper: StorageError, - headersMapper: 
BlobSetTagsExceptionHeaders - } + headersMapper: BlobSetTagsExceptionHeaders, + }, }, requestBody: tags, queryParameters: [ timeoutInSeconds, versionId, - comp18 + comp18, ], urlParameters: [url], headerParameters: [ @@ -24061,12 +20279,12 @@ const setTagsOperationSpec = { leaseId, ifTags, transactionalContentMD5, - transactionalContentCrc64 + transactionalContentCrc64, ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer$3 + serializer: xmlSerializer$3, }; /* @@ -24076,8 +20294,8 @@ const setTagsOperationSpec = { * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -/** Class representing a PageBlob. */ -class PageBlob { +/** Class containing PageBlob operations. */ +class PageBlobImpl { /** * Initialize a new instance of the class PageBlob class. * @param client Reference to the service client @@ -24093,12 +20311,7 @@ class PageBlob { * @param options The options parameters. */ create(contentLength, blobContentLength, options) { - const operationArguments = { - contentLength, - blobContentLength, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, createOperationSpec$1); + return this.client.sendOperationRequest({ contentLength, blobContentLength, options }, createOperationSpec$1); } /** * The Upload Pages operation writes a range of pages to a page blob @@ -24107,12 +20320,7 @@ class PageBlob { * @param options The options parameters. */ uploadPages(contentLength, body, options) { - const operationArguments = { - contentLength, - body, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, uploadPagesOperationSpec); + return this.client.sendOperationRequest({ contentLength, body, options }, uploadPagesOperationSpec); } /** * The Clear Pages operation clears a set of pages from a page blob @@ -24120,11 +20328,7 @@ class PageBlob { * @param options The options parameters. */ clearPages(contentLength, options) { - const operationArguments = { - contentLength, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, clearPagesOperationSpec); + return this.client.sendOperationRequest({ contentLength, options }, clearPagesOperationSpec); } /** * The Upload Pages operation writes a range of pages to a page blob where the contents are read from a @@ -24138,14 +20342,7 @@ class PageBlob { * @param options The options parameters. */ uploadPagesFromURL(sourceUrl, sourceRange, contentLength, range, options) { - const operationArguments = { - sourceUrl, - sourceRange, - contentLength, - range, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, uploadPagesFromURLOperationSpec); + return this.client.sendOperationRequest({ sourceUrl, sourceRange, contentLength, range, options }, uploadPagesFromURLOperationSpec); } /** * The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a @@ -24153,10 +20350,7 @@ class PageBlob { * @param options The options parameters. 
*/ getPageRanges(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getPageRangesOperationSpec); + return this.client.sendOperationRequest({ options }, getPageRangesOperationSpec); } /** * The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were @@ -24164,10 +20358,7 @@ class PageBlob { * @param options The options parameters. */ getPageRangesDiff(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getPageRangesDiffOperationSpec); + return this.client.sendOperationRequest({ options }, getPageRangesDiffOperationSpec); } /** * Resize the Blob @@ -24176,11 +20367,7 @@ class PageBlob { * @param options The options parameters. */ resize(blobContentLength, options) { - const operationArguments = { - blobContentLength, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, resizeOperationSpec); + return this.client.sendOperationRequest({ blobContentLength, options }, resizeOperationSpec); } /** * Update the sequence number of the blob @@ -24190,11 +20377,7 @@ class PageBlob { * @param options The options parameters. */ updateSequenceNumber(sequenceNumberAction, options) { - const operationArguments = { - sequenceNumberAction, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, updateSequenceNumberOperationSpec); + return this.client.sendOperationRequest({ sequenceNumberAction, options }, updateSequenceNumberOperationSpec); } /** * The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob. @@ -24209,444 +20392,423 @@ class PageBlob { * @param options The options parameters. 
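// PageBlobImpl backs the public PageBlobClient. A minimal sketch against the
// public API (Node.js; names and the env var are placeholders). Page blob
// sizes, offsets, and counts must be 512-byte aligned:
const { BlobServiceClient } = require("@azure/storage-blob");

async function pageBlobSketch() {
  const service = BlobServiceClient.fromConnectionString(process.env.AZURE_STORAGE_CONNECTION_STRING);
  const pageBlob = service.getContainerClient("demo").getPageBlobClient("disk.bin");
  await pageBlob.create(4096);                       // fixed-size, sparse blob
  const page = Buffer.alloc(512, 1);
  await pageBlob.uploadPages(page, 0, page.length);  // write one 512-byte page
  await pageBlob.resize(8192);                       // grow without rewriting pages
  const ranges = await pageBlob.getPageRanges(0, 8192);
  console.log(ranges.pageRange);                     // ranges that hold written data
}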
*/ copyIncremental(copySource, options) { - const operationArguments = { - copySource, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, copyIncrementalOperationSpec); + return this.client.sendOperationRequest({ copySource, options }, copyIncrementalOperationSpec); } } // Operation Specifications -const xmlSerializer$2 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); -const serializer$2 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ false); +const xmlSerializer$2 = coreClient__namespace.createSerializer(Mappers, /* isXml */ true); const createOperationSpec$1 = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: PageBlobCreateHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobCreateExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - contentLength, - metadata, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - blobCacheControl, - blobContentType, - blobContentMD5, - blobContentEncoding, - blobContentLanguage, - blobContentDisposition, - immutabilityPolicyExpiry, - immutabilityPolicyMode, - encryptionScope, - tier, - blobTagsString, - legalHold1, - blobType, - blobContentLength, - blobSequenceNumber - ], - isXML: true, - serializer: xmlSerializer$2 -}; -const uploadPagesOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: PageBlobUploadPagesHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobUploadPagesExceptionHeaders - } - }, - requestBody: body1, - queryParameters: [timeoutInSeconds, comp19], - urlParameters: [url], - headerParameters: [ - version, - requestId, - contentLength, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - range, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - encryptionScope, - transactionalContentMD5, - transactionalContentCrc64, - contentType1, - accept2, - pageWrite, - ifSequenceNumberLessThanOrEqualTo, - ifSequenceNumberLessThan, - ifSequenceNumberEqualTo - ], - mediaType: "binary", - serializer: serializer$2 -}; -const clearPagesOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: PageBlobClearPagesHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobClearPagesExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds, comp19], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - contentLength, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - range, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - encryptionScope, - ifSequenceNumberLessThanOrEqualTo, - ifSequenceNumberLessThan, - ifSequenceNumberEqualTo, - pageWrite1 - ], - isXML: true, - serializer: xmlSerializer$2 -}; -const uploadPagesFromURLOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: PageBlobUploadPagesFromURLHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobUploadPagesFromURLExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds, comp19], - urlParameters: [url], - headerParameters: [ - version, - 
requestId, - accept1, - contentLength, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - encryptionScope, - sourceIfModifiedSince, - sourceIfUnmodifiedSince, - sourceIfMatch, - sourceIfNoneMatch, - sourceContentMD5, - copySourceAuthorization, - pageWrite, - ifSequenceNumberLessThanOrEqualTo, - ifSequenceNumberLessThan, - ifSequenceNumberEqualTo, - sourceUrl, - sourceRange, - sourceContentCrc64, - range1 - ], - isXML: true, - serializer: xmlSerializer$2 -}; -const getPageRangesOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: PageList, - headersMapper: PageBlobGetPageRangesHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobGetPageRangesExceptionHeaders - } - }, - queryParameters: [ - timeoutInSeconds, - marker, - maxPageSize, - snapshot, - comp20 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - range, - ifMatch, - ifNoneMatch, - ifTags - ], - isXML: true, - serializer: xmlSerializer$2 -}; -const getPageRangesDiffOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: PageList, - headersMapper: PageBlobGetPageRangesDiffHeaders + headersMapper: PageBlobCreateHeaders, }, default: { bodyMapper: StorageError, - headersMapper: PageBlobGetPageRangesDiffExceptionHeaders - } - }, - queryParameters: [ - timeoutInSeconds, - marker, - maxPageSize, - snapshot, - comp20, - prevsnapshot - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - range, - ifMatch, - ifNoneMatch, - ifTags, - prevSnapshotUrl - ], - isXML: true, - serializer: xmlSerializer$2 -}; -const resizeOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: PageBlobResizeHeaders + headersMapper: PageBlobCreateExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + tier, + blobTagsString, + legalHold1, + blobType, + blobContentLength, + blobSequenceNumber, + ], + isXML: true, + serializer: xmlSerializer$2, +}; +const uploadPagesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: PageBlobUploadPagesHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobUploadPagesExceptionHeaders, + }, + }, + requestBody: body1, + queryParameters: [timeoutInSeconds, comp19], + urlParameters: [url], + headerParameters: [ + version, + requestId, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + transactionalContentMD5, + transactionalContentCrc64, + contentType1, + accept2, + pageWrite, + ifSequenceNumberLessThanOrEqualTo, + ifSequenceNumberLessThan, + ifSequenceNumberEqualTo, + ], + isXML: 
true, + contentType: "application/xml; charset=utf-8", + mediaType: "binary", + serializer: xmlSerializer$2, +}; +const clearPagesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: PageBlobClearPagesHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobClearPagesExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp19], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + ifSequenceNumberLessThanOrEqualTo, + ifSequenceNumberLessThan, + ifSequenceNumberEqualTo, + pageWrite1, + ], + isXML: true, + serializer: xmlSerializer$2, +}; +const uploadPagesFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: PageBlobUploadPagesFromURLHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobUploadPagesFromURLExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp19], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceContentMD5, + copySourceAuthorization, + pageWrite, + ifSequenceNumberLessThanOrEqualTo, + ifSequenceNumberLessThan, + ifSequenceNumberEqualTo, + sourceUrl, + sourceRange, + sourceContentCrc64, + range1, + ], + isXML: true, + serializer: xmlSerializer$2, +}; +const getPageRangesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: PageList, + headersMapper: PageBlobGetPageRangesHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobGetPageRangesExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + marker, + maxPageSize, + snapshot, + comp20, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + ifMatch, + ifNoneMatch, + ifTags, + ], + isXML: true, + serializer: xmlSerializer$2, +}; +const getPageRangesDiffOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: PageList, + headersMapper: PageBlobGetPageRangesDiffHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobGetPageRangesDiffExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + marker, + maxPageSize, + snapshot, + comp20, + prevsnapshot, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + ifMatch, + ifNoneMatch, + ifTags, + prevSnapshotUrl, + ], + isXML: true, + serializer: xmlSerializer$2, +}; +const resizeOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: PageBlobResizeHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobResizeExceptionHeaders, + }, + }, + queryParameters: [comp, timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + 
ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + blobContentLength, + ], + isXML: true, + serializer: xmlSerializer$2, +}; +const updateSequenceNumberOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: PageBlobUpdateSequenceNumberHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobUpdateSequenceNumberExceptionHeaders, + }, + }, + queryParameters: [comp, timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + blobSequenceNumber, + sequenceNumberAction, + ], + isXML: true, + serializer: xmlSerializer$2, +}; +const copyIncrementalOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: PageBlobCopyIncrementalHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobCopyIncrementalExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp21], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + copySource, + ], + isXML: true, + serializer: xmlSerializer$2, +}; + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class containing AppendBlob operations. */ +class AppendBlobImpl { + /** + * Initialize a new instance of the class AppendBlob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Create Append Blob operation creates a new append blob. + * @param contentLength The length of the request. + * @param options The options parameters. + */ + create(contentLength, options) { + return this.client.sendOperationRequest({ contentLength, options }, createOperationSpec); + } + /** + * The Append Block operation commits a new block of data to the end of an existing append blob. The + * Append Block operation is permitted only if the blob was created with x-ms-blob-type set to + * AppendBlob. Append Block is supported only on version 2015-02-21 version or later. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + appendBlock(contentLength, body, options) { + return this.client.sendOperationRequest({ contentLength, body, options }, appendBlockOperationSpec); + } + /** + * The Append Block operation commits a new block of data to the end of an existing append blob where + * the contents are read from a source url. The Append Block operation is permitted only if the blob + * was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version + * 2015-02-21 version or later. + * @param sourceUrl Specify a URL to the copy source. + * @param contentLength The length of the request. + * @param options The options parameters. + */ + appendBlockFromUrl(sourceUrl, contentLength, options) { + return this.client.sendOperationRequest({ sourceUrl, contentLength, options }, appendBlockFromUrlOperationSpec); + } + /** + * The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version + * 2019-12-12 version or later. 
+ * @param options The options parameters. + */ + seal(options) { + return this.client.sendOperationRequest({ options }, sealOperationSpec); + } +} +// Operation Specifications +const xmlSerializer$1 = coreClient__namespace.createSerializer(Mappers, /* isXml */ true); +const createOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: AppendBlobCreateHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: AppendBlobCreateExceptionHeaders, }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobResizeExceptionHeaders - } - }, - queryParameters: [comp, timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - encryptionScope, - blobContentLength - ], - isXML: true, - serializer: xmlSerializer$2 -}; -const updateSequenceNumberOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: PageBlobUpdateSequenceNumberHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobUpdateSequenceNumberExceptionHeaders - } - }, - queryParameters: [comp, timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - ifMatch, - ifNoneMatch, - ifTags, - blobSequenceNumber, - sequenceNumberAction - ], - isXML: true, - serializer: xmlSerializer$2 -}; -const copyIncrementalOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 202: { - headersMapper: PageBlobCopyIncrementalHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobCopyIncrementalExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds, comp21], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - ifMatch, - ifNoneMatch, - ifTags, - copySource - ], - isXML: true, - serializer: xmlSerializer$2 -}; - -/* - * Copyright (c) Microsoft Corporation. - * Licensed under the MIT License. - * - * Code generated by Microsoft (R) AutoRest Code Generator. - * Changes may cause incorrect behavior and will be lost if the code is regenerated. - */ -/** Class representing a AppendBlob. */ -class AppendBlob { - /** - * Initialize a new instance of the class AppendBlob class. - * @param client Reference to the service client - */ - constructor(client) { - this.client = client; - } - /** - * The Create Append Blob operation creates a new append blob. - * @param contentLength The length of the request. - * @param options The options parameters. - */ - create(contentLength, options) { - const operationArguments = { - contentLength, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, createOperationSpec); - } - /** - * The Append Block operation commits a new block of data to the end of an existing append blob. The - * Append Block operation is permitted only if the blob was created with x-ms-blob-type set to - * AppendBlob. Append Block is supported only on version 2015-02-21 version or later. - * @param contentLength The length of the request. - * @param body Initial data - * @param options The options parameters. 
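// AppendBlobImpl backs the public AppendBlobClient; as the comments above
// note, Seal needs service version 2019-12-12 or later. A minimal sketch
// (names and the env var are placeholders):
const { BlobServiceClient } = require("@azure/storage-blob");

async function appendLogSketch() {
  const service = BlobServiceClient.fromConnectionString(process.env.AZURE_STORAGE_CONNECTION_STRING);
  const log = service.getContainerClient("demo").getAppendBlobClient("app.log");
  await log.createIfNotExists();
  const line = Buffer.from("request handled\n");
  await log.appendBlock(line, line.length);  // commits a block at the end
  await log.seal();                          // blob becomes read-only
}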
- */ - appendBlock(contentLength, body, options) { - const operationArguments = { - contentLength, - body, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, appendBlockOperationSpec); - } - /** - * The Append Block operation commits a new block of data to the end of an existing append blob where - * the contents are read from a source url. The Append Block operation is permitted only if the blob - * was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version - * 2015-02-21 version or later. - * @param sourceUrl Specify a URL to the copy source. - * @param contentLength The length of the request. - * @param options The options parameters. - */ - appendBlockFromUrl(sourceUrl, contentLength, options) { - const operationArguments = { - sourceUrl, - contentLength, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, appendBlockFromUrlOperationSpec); - } - /** - * The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version - * 2019-12-12 version or later. - * @param options The options parameters. - */ - seal(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, sealOperationSpec); - } -} -// Operation Specifications -const xmlSerializer$1 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); -const serializer$1 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ false); -const createOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: AppendBlobCreateHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: AppendBlobCreateExceptionHeaders - } }, queryParameters: [timeoutInSeconds], urlParameters: [url], @@ -24676,22 +20838,22 @@ const createOperationSpec = { encryptionScope, blobTagsString, legalHold1, - blobType1 + blobType1, ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$1, }; const appendBlockOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: AppendBlobAppendBlockHeaders + headersMapper: AppendBlobAppendBlockHeaders, }, default: { bodyMapper: StorageError, - headersMapper: AppendBlobAppendBlockExceptionHeaders - } + headersMapper: AppendBlobAppendBlockExceptionHeaders, + }, }, requestBody: body1, queryParameters: [timeoutInSeconds, comp22], @@ -24715,22 +20877,24 @@ const appendBlockOperationSpec = { contentType1, accept2, maxSize, - appendPosition + appendPosition, ], + isXML: true, + contentType: "application/xml; charset=utf-8", mediaType: "binary", - serializer: serializer$1 + serializer: xmlSerializer$1, }; const appendBlockFromUrlOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: AppendBlobAppendBlockFromUrlHeaders + headersMapper: AppendBlobAppendBlockFromUrlHeaders, }, default: { bodyMapper: StorageError, - headersMapper: AppendBlobAppendBlockFromUrlExceptionHeaders - } + headersMapper: AppendBlobAppendBlockFromUrlExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, comp22], urlParameters: [url], @@ -24760,22 +20924,22 @@ const appendBlockFromUrlOperationSpec = { sourceContentCrc64, maxSize, appendPosition, - sourceRange1 + sourceRange1, ], isXML: 
true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$1, }; const sealOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { - headersMapper: AppendBlobSealHeaders + headersMapper: AppendBlobSealHeaders, }, default: { bodyMapper: StorageError, - headersMapper: AppendBlobSealExceptionHeaders - } + headersMapper: AppendBlobSealExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds, comp23], urlParameters: [url], @@ -24788,10 +20952,10 @@ const sealOperationSpec = { ifUnmodifiedSince, ifMatch, ifNoneMatch, - appendPosition + appendPosition, ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$1, }; /* @@ -24801,8 +20965,8 @@ const sealOperationSpec = { * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -/** Class representing a BlockBlob. */ -class BlockBlob { +/** Class containing BlockBlob operations. */ +class BlockBlobImpl { /** * Initialize a new instance of the class BlockBlob class. * @param client Reference to the service client @@ -24820,12 +20984,7 @@ class BlockBlob { * @param options The options parameters. */ upload(contentLength, body, options) { - const operationArguments = { - contentLength, - body, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, uploadOperationSpec); + return this.client.sendOperationRequest({ contentLength, body, options }, uploadOperationSpec); } /** * The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read @@ -24841,12 +21000,7 @@ class BlockBlob { * @param options The options parameters. */ putBlobFromUrl(contentLength, copySource, options) { - const operationArguments = { - contentLength, - copySource, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, putBlobFromUrlOperationSpec); + return this.client.sendOperationRequest({ contentLength, copySource, options }, putBlobFromUrlOperationSpec); } /** * The Stage Block operation creates a new block to be committed as part of a blob @@ -24858,13 +21012,7 @@ class BlockBlob { * @param options The options parameters. */ stageBlock(blockId, contentLength, body, options) { - const operationArguments = { - blockId, - contentLength, - body, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, stageBlockOperationSpec); + return this.client.sendOperationRequest({ blockId, contentLength, body, options }, stageBlockOperationSpec); } /** * The Stage Block operation creates a new block to be committed as part of a blob where the contents @@ -24877,13 +21025,7 @@ class BlockBlob { * @param options The options parameters. 
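// stageBlock/commitBlockList above implement the two-phase block upload that
// BlockBlobClient exposes publicly. A minimal sketch (names and the env var
// are placeholders); block ids must be base64 strings of equal length:
const { BlobServiceClient } = require("@azure/storage-blob");

const blockId = (i) => Buffer.from(String(i).padStart(6, "0")).toString("base64");

async function stagedUpload(chunks) {
  const service = BlobServiceClient.fromConnectionString(process.env.AZURE_STORAGE_CONNECTION_STRING);
  const blockBlob = service.getContainerClient("demo").getBlockBlobClient("large.bin");
  const ids = [];
  for (let i = 0; i < chunks.length; i++) {
    const id = blockId(i);
    await blockBlob.stageBlock(id, chunks[i], chunks[i].length); // uncommitted block
    ids.push(id);
  }
  await blockBlob.commitBlockList(ids); // atomically publishes the blob
}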
*/ stageBlockFromURL(blockId, contentLength, sourceUrl, options) { - const operationArguments = { - blockId, - contentLength, - sourceUrl, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, stageBlockFromURLOperationSpec); + return this.client.sendOperationRequest({ blockId, contentLength, sourceUrl, options }, stageBlockFromURLOperationSpec); } /** * The Commit Block List operation writes a blob by specifying the list of block IDs that make up the @@ -24897,11 +21039,7 @@ class BlockBlob { * @param options The options parameters. */ commitBlockList(blocks, options) { - const operationArguments = { - blocks, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, commitBlockListOperationSpec); + return this.client.sendOperationRequest({ blocks, options }, commitBlockListOperationSpec); } /** * The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block @@ -24911,27 +21049,22 @@ class BlockBlob { * @param options The options parameters. */ getBlockList(listType, options) { - const operationArguments = { - listType, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getBlockListOperationSpec); + return this.client.sendOperationRequest({ listType, options }, getBlockListOperationSpec); } } // Operation Specifications -const xmlSerializer = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); -const serializer = new coreHttp__namespace.Serializer(Mappers, /* isXml */ false); +const xmlSerializer = coreClient__namespace.createSerializer(Mappers, /* isXml */ true); const uploadOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: BlockBlobUploadHeaders + headersMapper: BlockBlobUploadHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlockBlobUploadExceptionHeaders - } + headersMapper: BlockBlobUploadExceptionHeaders, + }, }, requestBody: body1, queryParameters: [timeoutInSeconds], @@ -24966,22 +21099,24 @@ const uploadOperationSpec = { transactionalContentCrc64, contentType1, accept2, - blobType2 + blobType2, ], + isXML: true, + contentType: "application/xml; charset=utf-8", mediaType: "binary", - serializer + serializer: xmlSerializer, }; const putBlobFromUrlOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: BlockBlobPutBlobFromUrlHeaders + headersMapper: BlockBlobPutBlobFromUrlHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlockBlobPutBlobFromUrlExceptionHeaders - } + headersMapper: BlockBlobPutBlobFromUrlExceptionHeaders, + }, }, queryParameters: [timeoutInSeconds], urlParameters: [url], @@ -25020,28 +21155,28 @@ const putBlobFromUrlOperationSpec = { copySourceTags, transactionalContentMD5, blobType2, - copySourceBlobProperties + copySourceBlobProperties, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const stageBlockOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: BlockBlobStageBlockHeaders + headersMapper: BlockBlobStageBlockHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlockBlobStageBlockExceptionHeaders - } + headersMapper: BlockBlobStageBlockExceptionHeaders, + }, }, requestBody: body1, queryParameters: [ 
timeoutInSeconds, comp24, - blockId + blockId, ], urlParameters: [url], headerParameters: [ @@ -25056,27 +21191,29 @@ const stageBlockOperationSpec = { transactionalContentMD5, transactionalContentCrc64, contentType1, - accept2 + accept2, ], + isXML: true, + contentType: "application/xml; charset=utf-8", mediaType: "binary", - serializer + serializer: xmlSerializer, }; const stageBlockFromURLOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: BlockBlobStageBlockFromURLHeaders + headersMapper: BlockBlobStageBlockFromURLHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlockBlobStageBlockFromURLExceptionHeaders - } + headersMapper: BlockBlobStageBlockFromURLExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, comp24, - blockId + blockId, ], urlParameters: [url], headerParameters: [ @@ -25097,22 +21234,22 @@ const stageBlockFromURLOperationSpec = { copySourceAuthorization, sourceUrl, sourceContentCrc64, - sourceRange1 + sourceRange1, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const commitBlockListOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: BlockBlobCommitBlockListHeaders + headersMapper: BlockBlobCommitBlockListHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlockBlobCommitBlockListExceptionHeaders - } + headersMapper: BlockBlobCommitBlockListExceptionHeaders, + }, }, requestBody: blocks, queryParameters: [timeoutInSeconds, comp25], @@ -25145,12 +21282,12 @@ const commitBlockListOperationSpec = { blobTagsString, legalHold1, transactionalContentMD5, - transactionalContentCrc64 + transactionalContentCrc64, ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer + serializer: xmlSerializer, }; const getBlockListOperationSpec = { path: "/{containerName}/{blob}", @@ -25158,18 +21295,18 @@ const getBlockListOperationSpec = { responses: { 200: { bodyMapper: BlockList, - headersMapper: BlockBlobGetBlockListHeaders + headersMapper: BlockBlobGetBlockListHeaders, }, default: { bodyMapper: StorageError, - headersMapper: BlockBlobGetBlockListExceptionHeaders - } + headersMapper: BlockBlobGetBlockListExceptionHeaders, + }, }, queryParameters: [ timeoutInSeconds, snapshot, comp25, - listType + listType, ], urlParameters: [url], headerParameters: [ @@ -25177,3624 +21314,2827 @@ const getBlockListOperationSpec = { requestId, accept1, leaseId, - ifTags + ifTags, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, +}; + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +let StorageClient$1 = class StorageClient extends coreHttpCompat__namespace.ExtendedServiceClient { + /** + * Initializes a new instance of the StorageClient class. + * @param url The URL of the service account, container, or blob that is the target of the desired + * operation. 
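// [annotation, not part of the generated bundle] The operation-spec hunks above show the
// recurring migration pattern in this diff: @azure/core-http's hand-rolled argument wrapping
// is removed, and specs are serialized via coreClient__namespace.createSerializer instead.
// A minimal before/after sketch, with names mirroring the surrounding hunks:
//
//   // removed (core-http): options converted explicitly before every call
//   const operationArguments = {
//       blockId,
//       contentLength,
//       options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}),
//   };
//   return this.client.sendOperationRequest(operationArguments, stageBlockOperationSpec);
//
//   // added (core-client): raw parameters plus the untouched options bag go straight through
//   return this.client.sendOperationRequest({ blockId, contentLength, options }, stageBlockOperationSpec);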
+ * @param options The parameter options + */ + constructor(url, options) { + var _a, _b; + if (url === undefined) { + throw new Error("'url' cannot be null"); + } + // Initializing default values for options + if (!options) { + options = {}; + } + const defaults = { + requestContentType: "application/json; charset=utf-8", + }; + const packageDetails = `azsdk-js-azure-storage-blob/12.23.0`; + const userAgentPrefix = options.userAgentOptions && options.userAgentOptions.userAgentPrefix + ? `${options.userAgentOptions.userAgentPrefix} ${packageDetails}` + : `${packageDetails}`; + const optionsWithDefaults = Object.assign(Object.assign(Object.assign({}, defaults), options), { userAgentOptions: { + userAgentPrefix, + }, endpoint: (_b = (_a = options.endpoint) !== null && _a !== void 0 ? _a : options.baseUri) !== null && _b !== void 0 ? _b : "{url}" }); + super(optionsWithDefaults); + // Parameter assignments + this.url = url; + // Assigning values to Constant parameters + this.version = options.version || "2023-11-03"; + this.service = new ServiceImpl(this); + this.container = new ContainerImpl(this); + this.blob = new BlobImpl(this); + this.pageBlob = new PageBlobImpl(this); + this.appendBlob = new AppendBlobImpl(this); + this.blockBlob = new BlockBlobImpl(this); + } }; // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * The `@azure/logger` configuration for this package. + * @internal */ -const logger = logger$1.createClientLogger("storage-blob"); +class StorageContextClient extends StorageClient$1 { + async sendOperationRequest(operationArguments, operationSpec) { + const operationSpecToSend = Object.assign({}, operationSpec); + if (operationSpecToSend.path === "/{containerName}" || + operationSpecToSend.path === "/{containerName}/{blob}") { + operationSpecToSend.path = ""; + } + return super.sendOperationRequest(operationArguments, operationSpecToSend); + } +} // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -const SDK_VERSION = "12.18.0"; -const SERVICE_VERSION = "2024-05-04"; -const BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; // 256MB -const BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4000 * 1024 * 1024; // 4000MB -const BLOCK_BLOB_MAX_BLOCKS = 50000; -const DEFAULT_BLOCK_BUFFER_SIZE_BYTES = 8 * 1024 * 1024; // 8MB -const DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = 4 * 1024 * 1024; // 4MB -const DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = 5; -const REQUEST_TIMEOUT = 100 * 1000; // In ms /** - * The OAuth scope to use with Azure Storage. + * A StorageClient represents a base URL class for {@link BlobServiceClient}, {@link ContainerClient} + * and so on.
*/ -const StorageOAuthScopes = "https://storage.azure.com/.default"; -const URLConstants = { - Parameters: { - FORCE_BROWSER_NO_CACHE: "_", - SIGNATURE: "sig", - SNAPSHOT: "snapshot", - VERSIONID: "versionid", - TIMEOUT: "timeout", - }, -}; -const HTTPURLConnection = { - HTTP_ACCEPTED: 202, - HTTP_CONFLICT: 409, - HTTP_NOT_FOUND: 404, - HTTP_PRECON_FAILED: 412, - HTTP_RANGE_NOT_SATISFIABLE: 416, -}; -const HeaderConstants = { - AUTHORIZATION: "Authorization", - AUTHORIZATION_SCHEME: "Bearer", - CONTENT_ENCODING: "Content-Encoding", - CONTENT_ID: "Content-ID", - CONTENT_LANGUAGE: "Content-Language", - CONTENT_LENGTH: "Content-Length", - CONTENT_MD5: "Content-Md5", - CONTENT_TRANSFER_ENCODING: "Content-Transfer-Encoding", - CONTENT_TYPE: "Content-Type", - COOKIE: "Cookie", - DATE: "date", - IF_MATCH: "if-match", - IF_MODIFIED_SINCE: "if-modified-since", - IF_NONE_MATCH: "if-none-match", - IF_UNMODIFIED_SINCE: "if-unmodified-since", - PREFIX_FOR_STORAGE: "x-ms-", - RANGE: "Range", - USER_AGENT: "User-Agent", - X_MS_CLIENT_REQUEST_ID: "x-ms-client-request-id", - X_MS_COPY_SOURCE: "x-ms-copy-source", - X_MS_DATE: "x-ms-date", - X_MS_ERROR_CODE: "x-ms-error-code", - X_MS_VERSION: "x-ms-version", -}; -const ETagNone = ""; -const ETagAny = "*"; -const SIZE_1_MB = 1 * 1024 * 1024; -const BATCH_MAX_REQUEST = 256; -const BATCH_MAX_PAYLOAD_IN_BYTES = 4 * SIZE_1_MB; -const HTTP_LINE_ENDING = "\r\n"; -const HTTP_VERSION_1_1 = "HTTP/1.1"; -const EncryptionAlgorithmAES25 = "AES256"; -const DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`; -const StorageBlobLoggingAllowedHeaderNames = [ - "Access-Control-Allow-Origin", - "Cache-Control", - "Content-Length", - "Content-Type", - "Date", - "Request-Id", - "traceparent", - "Transfer-Encoding", - "User-Agent", - "x-ms-client-request-id", - "x-ms-date", - "x-ms-error-code", - "x-ms-request-id", - "x-ms-return-client-request-id", - "x-ms-version", - "Accept-Ranges", - "Content-Disposition", - "Content-Encoding", - "Content-Language", - "Content-MD5", - "Content-Range", - "ETag", - "Last-Modified", - "Server", - "Vary", - "x-ms-content-crc64", - "x-ms-copy-action", - "x-ms-copy-completion-time", - "x-ms-copy-id", - "x-ms-copy-progress", - "x-ms-copy-status", - "x-ms-has-immutability-policy", - "x-ms-has-legal-hold", - "x-ms-lease-state", - "x-ms-lease-status", - "x-ms-range", - "x-ms-request-server-encrypted", - "x-ms-server-encrypted", - "x-ms-snapshot", - "x-ms-source-range", - "If-Match", - "If-Modified-Since", - "If-None-Match", - "If-Unmodified-Since", - "x-ms-access-tier", - "x-ms-access-tier-change-time", - "x-ms-access-tier-inferred", - "x-ms-account-kind", - "x-ms-archive-status", - "x-ms-blob-append-offset", - "x-ms-blob-cache-control", - "x-ms-blob-committed-block-count", - "x-ms-blob-condition-appendpos", - "x-ms-blob-condition-maxsize", - "x-ms-blob-content-disposition", - "x-ms-blob-content-encoding", - "x-ms-blob-content-language", - "x-ms-blob-content-length", - "x-ms-blob-content-md5", - "x-ms-blob-content-type", - "x-ms-blob-public-access", - "x-ms-blob-sequence-number", - "x-ms-blob-type", - "x-ms-copy-destination-snapshot", - "x-ms-creation-time", - "x-ms-default-encryption-scope", - "x-ms-delete-snapshots", - "x-ms-delete-type-permanent", - "x-ms-deny-encryption-scope-override", - "x-ms-encryption-algorithm", - "x-ms-if-sequence-number-eq", - 
"x-ms-if-sequence-number-le", - "x-ms-if-sequence-number-lt", - "x-ms-incremental-copy", - "x-ms-lease-action", - "x-ms-lease-break-period", - "x-ms-lease-duration", - "x-ms-lease-id", - "x-ms-lease-time", - "x-ms-page-write", - "x-ms-proposed-lease-id", - "x-ms-range-get-content-md5", - "x-ms-rehydrate-priority", - "x-ms-sequence-number-action", - "x-ms-sku-name", - "x-ms-source-content-md5", - "x-ms-source-if-match", - "x-ms-source-if-modified-since", - "x-ms-source-if-none-match", - "x-ms-source-if-unmodified-since", - "x-ms-tag-count", - "x-ms-encryption-key-sha256", - "x-ms-if-tags", - "x-ms-source-if-tags", -]; -const StorageBlobLoggingAllowedQueryParameters = [ - "comp", - "maxresults", - "rscc", - "rscd", - "rsce", - "rscl", - "rsct", - "se", - "si", - "sip", - "sp", - "spr", - "sr", - "srt", - "ss", - "st", - "sv", - "include", - "marker", - "prefix", - "copyid", - "restype", - "blockid", - "blocklisttype", - "delimiter", - "prevsnapshot", - "ske", - "skoid", - "sks", - "skt", - "sktid", - "skv", - "snapshot", -]; -const BlobUsesCustomerSpecifiedEncryptionMsg = "BlobUsesCustomerSpecifiedEncryption"; -const BlobDoesNotUseCustomerSpecifiedEncryption = "BlobDoesNotUseCustomerSpecifiedEncryption"; -/// List of ports used for path style addressing. -/// Path style addressing means that storage account is put in URI's Path segment in instead of in host. -const PathStylePorts = [ - "10000", - "10001", - "10002", - "10003", - "10004", - "10100", - "10101", - "10102", - "10103", - "10104", - "11000", - "11001", - "11002", - "11003", - "11004", - "11100", - "11101", - "11102", - "11103", - "11104", -]; +class StorageClient { + /** + * Creates an instance of StorageClient. + * @param url - url to resource + * @param pipeline - request policy pipeline. + */ + constructor(url, pipeline) { + // URL should be encoded and only once, protocol layer shouldn't encode URL again + this.url = escapeURLPath(url); + this.accountName = getAccountNameFromUrl(url); + this.pipeline = pipeline; + this.storageClientContext = new StorageContextClient(this.url, getCoreClientOptions(pipeline)); + this.isHttps = iEqual(getURLScheme(this.url) || "", "https"); + this.credential = getCredentialFromPipeline(pipeline); + // Override protocol layer's default content-type + const storageClientContext = this.storageClientContext; + storageClientContext.requestContentType = undefined; + } +} // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * Reserved URL characters must be properly escaped for Storage services like Blob or File. - * - * ## URL encode and escape strategy for JS SDKs - * - * When customers pass a URL string into XxxClient classes constructor, the URL string may already be URL encoded or not. - * But before sending to Azure Storage server, the URL must be encoded. However, it's hard for a SDK to guess whether the URL - * string has been encoded or not. We have 2 potential strategies, and chose strategy two for the XxxClient constructors. - * - * ### Strategy One: Assume the customer URL string is not encoded, and always encode URL string in SDK. - * - * This is what legacy V2 SDK does, simple and works for most of the cases. - * - When customer URL string is "http://account.blob.core.windows.net/con/b:", - * SDK will encode it to "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created. 
- * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A", - * SDK will encode it to "http://account.blob.core.windows.net/con/b%253A" and send to server. A blob named "b%3A" will be created. - * - * But this strategy will make it not possible to create a blob with "?" in it's name. Because when customer URL string is - * "http://account.blob.core.windows.net/con/blob?name", the "?name" will be treated as URL paramter instead of blob name. - * If customer URL string is "http://account.blob.core.windows.net/con/blob%3Fname", a blob named "blob%3Fname" will be created. - * V2 SDK doesn't have this issue because it doesn't allow customer pass in a full URL, it accepts a separate blob name and encodeURIComponent for it. - * We cannot accept a SDK cannot create a blob name with "?". So we implement strategy two: - * - * ### Strategy Two: SDK doesn't assume the URL has been encoded or not. It will just escape the special characters. - * - * This is what V10 Blob Go SDK does. It accepts a URL type in Go, and call url.EscapedPath() to escape the special chars unescaped. - * - When customer URL string is "http://account.blob.core.windows.net/con/b:", - * SDK will escape ":" like "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created. - * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A", - * There is no special characters, so send "http://account.blob.core.windows.net/con/b%3A" to server. A blob named "b:" will be created. - * - When customer URL string is "http://account.blob.core.windows.net/con/b%253A", - * There is no special characters, so send "http://account.blob.core.windows.net/con/b%253A" to server. A blob named "b%3A" will be created. - * - * This strategy gives us flexibility to create with any special characters. But "%" will be treated as a special characters, if the URL string - * is not encoded, there shouldn't a "%" in the URL string, otherwise the URL is not a valid URL. - * If customer needs to create a blob with "%" in it's blob name, use "%25" instead of "%". Just like above 3rd sample. - * And following URL strings are invalid: - * - "http://account.blob.core.windows.net/con/b%" - * - "http://account.blob.core.windows.net/con/b%2" - * - "http://account.blob.core.windows.net/con/b%G" - * - * Another special character is "?", use "%2F" to represent a blob name with "?" in a URL string. - * - * ### Strategy for containerName, blobName or other specific XXXName parameters in methods such as `containerClient.getBlobClient(blobName)` - * - * We will apply strategy one, and call encodeURIComponent for these parameters like blobName. Because what customers passes in is a plain name instead of a URL. - * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-shares--directories--files--and-metadata - * - * @param url - + * Creates a span using the global tracer. 
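// [annotation] A sketch of the "Strategy Two" escaping described in the comment above, reusing
// its own documented examples; escapeURLPath/escape are the helpers whose old copies are removed
// in this hunk:
//
//   escapeURLPath("http://account.blob.core.windows.net/con/b:");
//   //=> "http://account.blob.core.windows.net/con/b%3A"    (":" gets escaped)
//   escapeURLPath("http://account.blob.core.windows.net/con/b%3A");
//   //=> "http://account.blob.core.windows.net/con/b%3A"    (already-encoded input passes through)
//   escapeURLPath("http://account.blob.core.windows.net/con/b%253A");
//   //=> "http://account.blob.core.windows.net/con/b%253A"  (creates a blob literally named "b%3A")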
+ * @internal */ -function escapeURLPath(url) { - const urlParsed = coreHttp.URLBuilder.parse(url); - let path = urlParsed.getPath(); - path = path || "/"; - path = escape(path); - urlParsed.setPath(path); - return urlParsed.toString(); -} -function getProxyUriFromDevConnString(connectionString) { - // Development Connection String - // https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string#connect-to-the-emulator-account-using-the-well-known-account-name-and-key - let proxyUri = ""; - if (connectionString.search("DevelopmentStorageProxyUri=") !== -1) { - // CONNECTION_STRING=UseDevelopmentStorage=true;DevelopmentStorageProxyUri=http://myProxyUri - const matchCredentials = connectionString.split(";"); - for (const element of matchCredentials) { - if (element.trim().startsWith("DevelopmentStorageProxyUri=")) { - proxyUri = element.trim().match("DevelopmentStorageProxyUri=(.*)")[1]; - } - } - } - return proxyUri; -} -function getValueInConnString(connectionString, argument) { - const elements = connectionString.split(";"); - for (const element of elements) { - if (element.trim().startsWith(argument)) { - return element.trim().match(argument + "=(.*)")[1]; - } - } - return ""; -} +const tracingClient = coreTracing.createTracingClient({ + packageName: "@azure/storage-blob", + packageVersion: SDK_VERSION, + namespace: "Microsoft.Storage", +}); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * Extracts the parts of an Azure Storage account connection string. + * ONLY AVAILABLE IN NODE.JS RUNTIME. * - * @param connectionString - Connection string. - * @returns String key value pairs of the storage account's url and credentials. + * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a blob. Setting + * a value to true means that any SAS which uses these permissions will grant permissions for that operation. Once all + * the values are set, this should be serialized with toString and set as the permissions field on a + * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. */ -function extractConnectionStringParts(connectionString) { - let proxyUri = ""; - if (connectionString.startsWith("UseDevelopmentStorage=true")) { - // Development connection string - proxyUri = getProxyUriFromDevConnString(connectionString); - connectionString = DevelopmentConnectionString; +class BlobSASPermissions { + constructor() { + /** + * Specifies Read access granted. + */ + this.read = false; + /** + * Specifies Add access granted. + */ + this.add = false; + /** + * Specifies Create access granted. + */ + this.create = false; + /** + * Specifies Write access granted. + */ + this.write = false; + /** + * Specifies Delete access granted. + */ + this.delete = false; + /** + * Specifies Delete version access granted. + */ + this.deleteVersion = false; + /** + * Specifies Tag access granted. + */ + this.tag = false; + /** + * Specifies Move access granted. + */ + this.move = false; + /** + * Specifies Execute access granted. + */ + this.execute = false; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted.
+ */ + this.permanentDelete = false; } - // Matching BlobEndpoint in the Account connection string - let blobEndpoint = getValueInConnString(connectionString, "BlobEndpoint"); - // Slicing off '/' at the end if exists - // (The methods that use `extractConnectionStringParts` expect the url to not have `/` at the end) - blobEndpoint = blobEndpoint.endsWith("/") ? blobEndpoint.slice(0, -1) : blobEndpoint; - if (connectionString.search("DefaultEndpointsProtocol=") !== -1 && - connectionString.search("AccountKey=") !== -1) { - // Account connection string - let defaultEndpointsProtocol = ""; - let accountName = ""; - let accountKey = Buffer.from("accountKey", "base64"); - let endpointSuffix = ""; - // Get account name and key - accountName = getValueInConnString(connectionString, "AccountName"); - accountKey = Buffer.from(getValueInConnString(connectionString, "AccountKey"), "base64"); - if (!blobEndpoint) { - // BlobEndpoint is not present in the Account connection string - // Can be obtained from `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}` - defaultEndpointsProtocol = getValueInConnString(connectionString, "DefaultEndpointsProtocol"); - const protocol = defaultEndpointsProtocol.toLowerCase(); - if (protocol !== "https" && protocol !== "http") { - throw new Error("Invalid DefaultEndpointsProtocol in the provided Connection String. Expecting 'https' or 'http'"); - } - endpointSuffix = getValueInConnString(connectionString, "EndpointSuffix"); - if (!endpointSuffix) { - throw new Error("Invalid EndpointSuffix in the provided Connection String"); + /** + * Creates a {@link BlobSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. + * + * @param permissions - + */ + static parse(permissions) { + const blobSASPermissions = new BlobSASPermissions(); + for (const char of permissions) { + switch (char) { + case "r": + blobSASPermissions.read = true; + break; + case "a": + blobSASPermissions.add = true; + break; + case "c": + blobSASPermissions.create = true; + break; + case "w": + blobSASPermissions.write = true; + break; + case "d": + blobSASPermissions.delete = true; + break; + case "x": + blobSASPermissions.deleteVersion = true; + break; + case "t": + blobSASPermissions.tag = true; + break; + case "m": + blobSASPermissions.move = true; + break; + case "e": + blobSASPermissions.execute = true; + break; + case "i": + blobSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + blobSASPermissions.permanentDelete = true; + break; + default: + throw new RangeError(`Invalid permission: ${char}`); } - blobEndpoint = `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; } - if (!accountName) { - throw new Error("Invalid AccountName in the provided Connection String"); + return blobSASPermissions; + } + /** + * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. 
+ * + * @param permissionLike - + */ + static from(permissionLike) { + const blobSASPermissions = new BlobSASPermissions(); + if (permissionLike.read) { + blobSASPermissions.read = true; } - else if (accountKey.length === 0) { - throw new Error("Invalid AccountKey in the provided Connection String"); + if (permissionLike.add) { + blobSASPermissions.add = true; } - return { - kind: "AccountConnString", - url: blobEndpoint, - accountName, - accountKey, - proxyUri, - }; + if (permissionLike.create) { + blobSASPermissions.create = true; + } + if (permissionLike.write) { + blobSASPermissions.write = true; + } + if (permissionLike.delete) { + blobSASPermissions.delete = true; + } + if (permissionLike.deleteVersion) { + blobSASPermissions.deleteVersion = true; + } + if (permissionLike.tag) { + blobSASPermissions.tag = true; + } + if (permissionLike.move) { + blobSASPermissions.move = true; + } + if (permissionLike.execute) { + blobSASPermissions.execute = true; + } + if (permissionLike.setImmutabilityPolicy) { + blobSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + blobSASPermissions.permanentDelete = true; + } + return blobSASPermissions; } - else { - // SAS connection string - const accountSas = getValueInConnString(connectionString, "SharedAccessSignature"); - let accountName = getValueInConnString(connectionString, "AccountName"); - // if accountName is empty, try to read it from BlobEndpoint - if (!accountName) { - accountName = getAccountNameFromUrl(blobEndpoint); + /** + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * @returns A string which represents the BlobSASPermissions + */ + toString() { + const permissions = []; + if (this.read) { + permissions.push("r"); } - if (!blobEndpoint) { - throw new Error("Invalid BlobEndpoint in the provided SAS Connection String"); + if (this.add) { + permissions.push("a"); } - else if (!accountSas) { - throw new Error("Invalid SharedAccessSignature in the provided SAS Connection String"); + if (this.create) { + permissions.push("c"); } - return { kind: "SASConnString", url: blobEndpoint, accountName, accountSas }; + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.move) { + permissions.push("m"); + } + if (this.execute) { + permissions.push("e"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + return permissions.join(""); } } + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * Internal escape method implemented Strategy Two mentioned in escapeURL() description. - * - * @param text - - */ -function escape(text) { - return encodeURIComponent(text) - .replace(/%2F/g, "/") // Don't escape for "/" - .replace(/'/g, "%27") // Escape for "'" - .replace(/\+/g, "%20") - .replace(/%25/g, "%"); // Revert encoded "%" -} -/** - * Append a string to URL path. Will remove duplicated "/" in front of the string - * when URL path ends with a "/". - * - * @param url - Source URL string - * @param name - String to be appended to URL - * @returns An updated URL string + * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a container. 
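// [annotation] Hedged usage sketch for BlobSASPermissions above (values are hypothetical):
// parse() validates each character and toString() re-emits the flags in the service-mandated
// order, which is the main reason to build permission strings through this class.
//
//   const perms = BlobSASPermissions.parse("racwd");
//   perms.tag = true;               // flip an extra flag after parsing
//   perms.toString();               // "racwdt" (canonical order, regardless of input order)
//   BlobSASPermissions.parse("q");  // throws RangeError: Invalid permission: q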
+ * Setting a value to true means that any SAS which uses these permissions will grant permissions for that operation. + * Once all the values are set, this should be serialized with toString and set as the permissions field on a + * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. */ -function appendToURLPath(url, name) { - const urlParsed = coreHttp.URLBuilder.parse(url); - let path = urlParsed.getPath(); - path = path ? (path.endsWith("/") ? `${path}${name}` : `${path}/${name}`) : name; - urlParsed.setPath(path); - const normalizedUrl = new URL(urlParsed.toString()); - return normalizedUrl.toString(); -} /** - * Set URL parameter name and value. If name exists in URL parameters, old value - * will be replaced by name key. If not provide value, the parameter will be deleted. - * - * @param url - Source URL string - * @param name - Parameter name - * @param value - Parameter value - * @returns An updated URL string - */ -function setURLParameter(url, name, value) { - const urlParsed = coreHttp.URLBuilder.parse(url); - urlParsed.setQueryParameter(name, value); - return urlParsed.toString(); -} /** - * Get URL parameter by name. - * - * @param url - - * @param name - - */ -function getURLParameter(url, name) { - const urlParsed = coreHttp.URLBuilder.parse(url); - return urlParsed.getQueryParameterValue(name); -} /** - * Set URL host. - * - * @param url - Source URL string - * @param host - New host string - * @returns An updated URL string - */ -function setURLHost(url, host) { - const urlParsed = coreHttp.URLBuilder.parse(url); - urlParsed.setHost(host); - return urlParsed.toString(); -} /** - * Get URL path from an URL string. - * - * @param url - Source URL string - */ -function getURLPath(url) { - const urlParsed = coreHttp.URLBuilder.parse(url); - return urlParsed.getPath(); -} /** - * Get URL scheme from an URL string. - * - * @param url - Source URL string - */ -function getURLScheme(url) { - const urlParsed = coreHttp.URLBuilder.parse(url); - return urlParsed.getScheme(); -} /** - * Get URL path and query from an URL string. - * - * @param url - Source URL string - */ -function getURLPathAndQuery(url) { - const urlParsed = coreHttp.URLBuilder.parse(url); - const pathString = urlParsed.getPath(); - if (!pathString) { - throw new RangeError("Invalid url without valid path."); +class ContainerSASPermissions { + constructor() { + /** + * Specifies Read access granted. + */ + this.read = false; + /** + * Specifies Add access granted. + */ + this.add = false; + /** + * Specifies Create access granted. + */ + this.create = false; + /** + * Specifies Write access granted. + */ + this.write = false; + /** + * Specifies Delete access granted. + */ + this.delete = false; + /** + * Specifies Delete version access granted. + */ + this.deleteVersion = false; + /** + * Specifies List access granted. + */ + this.list = false; + /** + * Specifies Tag access granted. + */ + this.tag = false; + /** + * Specifies Move access granted. + */ + this.move = false; + /** + * Specifies Execute access granted. + */ + this.execute = false; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; + /** + * Specifies that Filter Blobs by Tags is permitted.
+ */ + this.filterByTags = false; } - let queryString = urlParsed.getQuery() || ""; - queryString = queryString.trim(); - if (queryString !== "") { - queryString = queryString.startsWith("?") ? queryString : `?${queryString}`; // Ensure query string start with '?' + /** + * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. + * + * @param permissions - + */ + static parse(permissions) { + const containerSASPermissions = new ContainerSASPermissions(); + for (const char of permissions) { + switch (char) { + case "r": + containerSASPermissions.read = true; + break; + case "a": + containerSASPermissions.add = true; + break; + case "c": + containerSASPermissions.create = true; + break; + case "w": + containerSASPermissions.write = true; + break; + case "d": + containerSASPermissions.delete = true; + break; + case "l": + containerSASPermissions.list = true; + break; + case "t": + containerSASPermissions.tag = true; + break; + case "x": + containerSASPermissions.deleteVersion = true; + break; + case "m": + containerSASPermissions.move = true; + break; + case "e": + containerSASPermissions.execute = true; + break; + case "i": + containerSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + containerSASPermissions.permanentDelete = true; + break; + case "f": + containerSASPermissions.filterByTags = true; + break; + default: + throw new RangeError(`Invalid permission ${char}`); + } + } + return containerSASPermissions; } - return `${pathString}${queryString}`; -} -/** - * Get URL query key value pairs from an URL string. - * - * @param url - - */ -function getURLQueries(url) { - let queryString = coreHttp.URLBuilder.parse(url).getQuery(); - if (!queryString) { - return {}; + /** + * Creates a {@link ContainerSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike) { + const containerSASPermissions = new ContainerSASPermissions(); + if (permissionLike.read) { + containerSASPermissions.read = true; + } + if (permissionLike.add) { + containerSASPermissions.add = true; + } + if (permissionLike.create) { + containerSASPermissions.create = true; + } + if (permissionLike.write) { + containerSASPermissions.write = true; + } + if (permissionLike.delete) { + containerSASPermissions.delete = true; + } + if (permissionLike.list) { + containerSASPermissions.list = true; + } + if (permissionLike.deleteVersion) { + containerSASPermissions.deleteVersion = true; + } + if (permissionLike.tag) { + containerSASPermissions.tag = true; + } + if (permissionLike.move) { + containerSASPermissions.move = true; + } + if (permissionLike.execute) { + containerSASPermissions.execute = true; + } + if (permissionLike.setImmutabilityPolicy) { + containerSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + containerSASPermissions.permanentDelete = true; + } + if (permissionLike.filterByTags) { + containerSASPermissions.filterByTags = true; + } + return containerSASPermissions; } - queryString = queryString.trim(); - queryString = queryString.startsWith("?") ? 
queryString.substr(1) : queryString; - let querySubStrings = queryString.split("&"); - querySubStrings = querySubStrings.filter((value) => { - const indexOfEqual = value.indexOf("="); - const lastIndexOfEqual = value.lastIndexOf("="); - return (indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1); - }); - const queries = {}; - for (const querySubString of querySubStrings) { - const splitResults = querySubString.split("="); - const key = splitResults[0]; - const value = splitResults[1]; - queries[key] = value; + /** + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * The order of the characters should be as specified here to ensure correctness. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + */ + toString() { + const permissions = []; + if (this.read) { + permissions.push("r"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.list) { + permissions.push("l"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.move) { + permissions.push("m"); + } + if (this.execute) { + permissions.push("e"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + if (this.filterByTags) { + permissions.push("f"); + } + return permissions.join(""); } - return queries; } + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * Append a string to URL query. + * ONLY AVAILABLE IN NODE.JS RUNTIME. * - * @param url - Source URL string. - * @param queryParts - String to be appended to the URL query. - * @returns An updated URL string. + * UserDelegationKeyCredential is only used for generation of user delegation SAS. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas */ -function appendToURLQuery(url, queryParts) { - const urlParsed = coreHttp.URLBuilder.parse(url); - let query = urlParsed.getQuery(); - if (query) { - query += "&" + queryParts; +class UserDelegationKeyCredential { + /** + * Creates an instance of UserDelegationKeyCredential. + * @param accountName - + * @param userDelegationKey - + */ + constructor(accountName, userDelegationKey) { + this.accountName = accountName; + this.userDelegationKey = userDelegationKey; + this.key = Buffer.from(userDelegationKey.value, "base64"); } - else { - query = queryParts; + /** + * Generates a hash signature for an HTTP request or for a SAS. + * + * @param stringToSign - + */ + computeHMACSHA256(stringToSign) { + // console.log(`stringToSign: ${JSON.stringify(stringToSign)}`); + return crypto.createHmac("sha256", this.key).update(stringToSign, "utf8").digest("base64"); } - urlParsed.setQuery(query); - return urlParsed.toString(); -} -/** - * Rounds a date off to seconds. - * - * @param date - - * @param withMilliseconds - If true, YYYY-MM-DDThh:mm:ss.fffffffZ will be returned; - * If false, YYYY-MM-DDThh:mm:ssZ will be returned. 
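// [annotation] The container-level counterpart just completed above works the same way
// (hypothetical values); it adds "l" (list) and "f" (filterByTags) to the blob-level flag set:
//
//   const perms = ContainerSASPermissions.parse("racwdl");
//   perms.filterByTags = true;
//   perms.toString();   // "racwdlf" (ordering per the constructing-a-service-sas doc cited above)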
- * @returns Date string in ISO8061 format, with or without 7 milliseconds component - */ -function truncatedISO8061Date(date, withMilliseconds = true) { - // Date.toISOString() will return like "2018-10-29T06:34:36.139Z" - const dateString = date.toISOString(); - return withMilliseconds - ? dateString.substring(0, dateString.length - 1) + "0000" + "Z" - : dateString.substring(0, dateString.length - 5) + "Z"; } + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * Base64 encode. + * Generate SasIPRange format string. For example: * - * @param content - - */ -function base64encode(content) { - return !coreHttp.isNode ? btoa(content) : Buffer.from(content).toString("base64"); -} -/** - * Generate a 64 bytes base64 block ID string. + * "8.8.8.8" or "1.1.1.1-255.255.255.255" * - * @param blockIndex - + * @param ipRange - */ -function generateBlockID(blockIDPrefix, blockIndex) { - // To generate a 64 bytes base64 string, source string should be 48 - const maxSourceStringLength = 48; - // A blob can have a maximum of 100,000 uncommitted blocks at any given time - const maxBlockIndexLength = 6; - const maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength; - if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) { - blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength); - } - const res = blockIDPrefix + - padStart(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, "0"); - return base64encode(res); +function ipRangeToString(ipRange) { + return ipRange.end ? `${ipRange.start}-${ipRange.end}` : ipRange.start; } + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * Delay specified time interval. - * - * @param timeInMs - - * @param aborter - - * @param abortError - + * Protocols for generated SAS. */ -async function delay(timeInMs, aborter, abortError) { - return new Promise((resolve, reject) => { - /* eslint-disable-next-line prefer-const */ - let timeout; - const abortHandler = () => { - if (timeout !== undefined) { - clearTimeout(timeout); - } - reject(abortError); - }; - const resolveHandler = () => { - if (aborter !== undefined) { - aborter.removeEventListener("abort", abortHandler); - } - resolve(); - }; - timeout = setTimeout(resolveHandler, timeInMs); - if (aborter !== undefined) { - aborter.addEventListener("abort", abortHandler); - } - }); -} +exports.SASProtocol = void 0; +(function (SASProtocol) { + /** + * Protocol that allows HTTPS only + */ + SASProtocol["Https"] = "https"; + /** + * Protocol that allows both HTTPS and HTTP + */ + SASProtocol["HttpsAndHttp"] = "https,http"; +})(exports.SASProtocol || (exports.SASProtocol = {})); /** - * String.prototype.padStart() + * Represents the components that make up an Azure Storage SAS' query parameters. This type is not constructed directly + * by the user; it is only generated by the {@link AccountSASSignatureValues} and {@link BlobSASSignatureValues} + * types. Once generated, it can be encoded into a {@link String} and appended to a URL directly (though caution should + * be taken here in case there are existing query parameters, which might affect the appropriate means of appending + * these query parameters). * - * @param currentString - - * @param targetLength - - * @param padString - + * NOTE: Instances of this class are immutable. 
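// [annotation] The SAS code below keeps calling truncatedISO8061Date; only its old copy is
// removed in this hunk, so the helper is evidently still defined elsewhere in the new bundle.
// Behavior per that removed implementation, with a hypothetical date:
//
//   const d = new Date("2018-10-29T06:34:36.139Z");
//   truncatedISO8061Date(d);         // "2018-10-29T06:34:36.1390000Z" (7-digit fractional seconds)
//   truncatedISO8061Date(d, false);  // "2018-10-29T06:34:36Z" (the form used for "st"/"se" below)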
*/ -function padStart(currentString, targetLength, padString = " ") { - // @ts-expect-error: TS doesn't know this code needs to run downlevel sometimes - if (String.prototype.padStart) { - return currentString.padStart(targetLength, padString); - } - padString = padString || " "; - if (currentString.length > targetLength) { - return currentString; - } - else { - targetLength = targetLength - currentString.length; - if (targetLength > padString.length) { - padString += padString.repeat(targetLength / padString.length); +class SASQueryParameters { + /** + * Optional. IP range allowed for this SAS. + * + * @readonly + */ + get ipRange() { + if (this.ipRangeInner) { + return { + end: this.ipRangeInner.end, + start: this.ipRangeInner.start, + }; } - return padString.slice(0, targetLength) + currentString; + return undefined; } -} -/** - * If two strings are equal when compared case insensitive. - * - * @param str1 - - * @param str2 - - */ -function iEqual(str1, str2) { - return str1.toLocaleLowerCase() === str2.toLocaleLowerCase(); -} -/** - * Extracts account name from the url - * @param url - url to extract the account name from - * @returns with the account name - */ -function getAccountNameFromUrl(url) { - const parsedUrl = coreHttp.URLBuilder.parse(url); - let accountName; - try { - if (parsedUrl.getHost().split(".")[1] === "blob") { - // `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; - accountName = parsedUrl.getHost().split(".")[0]; - } - else if (isIpEndpointStyle(parsedUrl)) { - // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/ - // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/ - // .getPath() -> /devstoreaccount1/ - accountName = parsedUrl.getPath().split("/")[1]; + constructor(version, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey, preauthorizedAgentObjectId, correlationId, encryptionScope) { + this.version = version; + this.signature = signature; + if (permissionsOrOptions !== undefined && typeof permissionsOrOptions !== "string") { + // SASQueryParametersOptions + this.permissions = permissionsOrOptions.permissions; + this.services = permissionsOrOptions.services; + this.resourceTypes = permissionsOrOptions.resourceTypes; + this.protocol = permissionsOrOptions.protocol; + this.startsOn = permissionsOrOptions.startsOn; + this.expiresOn = permissionsOrOptions.expiresOn; + this.ipRangeInner = permissionsOrOptions.ipRange; + this.identifier = permissionsOrOptions.identifier; + this.encryptionScope = permissionsOrOptions.encryptionScope; + this.resource = permissionsOrOptions.resource; + this.cacheControl = permissionsOrOptions.cacheControl; + this.contentDisposition = permissionsOrOptions.contentDisposition; + this.contentEncoding = permissionsOrOptions.contentEncoding; + this.contentLanguage = permissionsOrOptions.contentLanguage; + this.contentType = permissionsOrOptions.contentType; + if (permissionsOrOptions.userDelegationKey) { + this.signedOid = permissionsOrOptions.userDelegationKey.signedObjectId; + this.signedTenantId = permissionsOrOptions.userDelegationKey.signedTenantId; + this.signedStartsOn = permissionsOrOptions.userDelegationKey.signedStartsOn; + this.signedExpiresOn = permissionsOrOptions.userDelegationKey.signedExpiresOn; + this.signedService = 
permissionsOrOptions.userDelegationKey.signedService; + this.signedVersion = permissionsOrOptions.userDelegationKey.signedVersion; + this.preauthorizedAgentObjectId = permissionsOrOptions.preauthorizedAgentObjectId; + this.correlationId = permissionsOrOptions.correlationId; + } } else { - // Custom domain case: "https://customdomain.com/containername/blob". - accountName = ""; - } - return accountName; - } - catch (error) { - throw new Error("Unable to extract accountName with provided information."); - } -} -function isIpEndpointStyle(parsedUrl) { - if (parsedUrl.getHost() === undefined) { - return false; - } - const host = parsedUrl.getHost() + (parsedUrl.getPort() === undefined ? "" : ":" + parsedUrl.getPort()); - // Case 1: Ipv6, use a broad regex to find out candidates whose host contains two ':'. - // Case 2: localhost(:port) or host.docker.internal, use broad regex to match port part. - // Case 3: Ipv4, use broad regex which just check if host contains Ipv4. - // For valid host please refer to https://man7.org/linux/man-pages/man7/hostname.7.html. - return (/^.*:.*:.*$|^(localhost|host.docker.internal)(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/.test(host) || - (parsedUrl.getPort() !== undefined && PathStylePorts.includes(parsedUrl.getPort()))); -} -/** - * Convert Tags to encoded string. - * - * @param tags - - */ -function toBlobTagsString(tags) { - if (tags === undefined) { - return undefined; - } - const tagPairs = []; - for (const key in tags) { - if (Object.prototype.hasOwnProperty.call(tags, key)) { - const value = tags[key]; - tagPairs.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`); + this.services = services; + this.resourceTypes = resourceTypes; + this.expiresOn = expiresOn; + this.permissions = permissionsOrOptions; + this.protocol = protocol; + this.startsOn = startsOn; + this.ipRangeInner = ipRange; + this.encryptionScope = encryptionScope; + this.identifier = identifier; + this.resource = resource; + this.cacheControl = cacheControl; + this.contentDisposition = contentDisposition; + this.contentEncoding = contentEncoding; + this.contentLanguage = contentLanguage; + this.contentType = contentType; + if (userDelegationKey) { + this.signedOid = userDelegationKey.signedObjectId; + this.signedTenantId = userDelegationKey.signedTenantId; + this.signedStartsOn = userDelegationKey.signedStartsOn; + this.signedExpiresOn = userDelegationKey.signedExpiresOn; + this.signedService = userDelegationKey.signedService; + this.signedVersion = userDelegationKey.signedVersion; + this.preauthorizedAgentObjectId = preauthorizedAgentObjectId; + this.correlationId = correlationId; + } } } - return tagPairs.join("&"); -} -/** - * Convert Tags type to BlobTags. - * - * @param tags - - */ -function toBlobTags(tags) { - if (tags === undefined) { - return undefined; - } - const res = { - blobTagSet: [], - }; - for (const key in tags) { - if (Object.prototype.hasOwnProperty.call(tags, key)) { - const value = tags[key]; - res.blobTagSet.push({ - key, - value, - }); + /** + * Encodes all SAS query parameters into a string that can be appended to a URL. 
+ * + */ + toString() { + const params = [ + "sv", + "ss", + "srt", + "spr", + "st", + "se", + "sip", + "si", + "ses", + "skoid", // Signed object ID + "sktid", // Signed tenant ID + "skt", // Signed key start time + "ske", // Signed key expiry time + "sks", // Signed key service + "skv", // Signed key version + "sr", + "sp", + "sig", + "rscc", + "rscd", + "rsce", + "rscl", + "rsct", + "saoid", + "scid", + ]; + const queries = []; + for (const param of params) { + switch (param) { + case "sv": + this.tryAppendQueryParameter(queries, param, this.version); + break; + case "ss": + this.tryAppendQueryParameter(queries, param, this.services); + break; + case "srt": + this.tryAppendQueryParameter(queries, param, this.resourceTypes); + break; + case "spr": + this.tryAppendQueryParameter(queries, param, this.protocol); + break; + case "st": + this.tryAppendQueryParameter(queries, param, this.startsOn ? truncatedISO8061Date(this.startsOn, false) : undefined); + break; + case "se": + this.tryAppendQueryParameter(queries, param, this.expiresOn ? truncatedISO8061Date(this.expiresOn, false) : undefined); + break; + case "sip": + this.tryAppendQueryParameter(queries, param, this.ipRange ? ipRangeToString(this.ipRange) : undefined); + break; + case "si": + this.tryAppendQueryParameter(queries, param, this.identifier); + break; + case "ses": + this.tryAppendQueryParameter(queries, param, this.encryptionScope); + break; + case "skoid": // Signed object ID + this.tryAppendQueryParameter(queries, param, this.signedOid); + break; + case "sktid": // Signed tenant ID + this.tryAppendQueryParameter(queries, param, this.signedTenantId); + break; + case "skt": // Signed key start time + this.tryAppendQueryParameter(queries, param, this.signedStartsOn ? truncatedISO8061Date(this.signedStartsOn, false) : undefined); + break; + case "ske": // Signed key expiry time + this.tryAppendQueryParameter(queries, param, this.signedExpiresOn ? truncatedISO8061Date(this.signedExpiresOn, false) : undefined); + break; + case "sks": // Signed key service + this.tryAppendQueryParameter(queries, param, this.signedService); + break; + case "skv": // Signed key version + this.tryAppendQueryParameter(queries, param, this.signedVersion); + break; + case "sr": + this.tryAppendQueryParameter(queries, param, this.resource); + break; + case "sp": + this.tryAppendQueryParameter(queries, param, this.permissions); + break; + case "sig": + this.tryAppendQueryParameter(queries, param, this.signature); + break; + case "rscc": + this.tryAppendQueryParameter(queries, param, this.cacheControl); + break; + case "rscd": + this.tryAppendQueryParameter(queries, param, this.contentDisposition); + break; + case "rsce": + this.tryAppendQueryParameter(queries, param, this.contentEncoding); + break; + case "rscl": + this.tryAppendQueryParameter(queries, param, this.contentLanguage); + break; + case "rsct": + this.tryAppendQueryParameter(queries, param, this.contentType); + break; + case "saoid": + this.tryAppendQueryParameter(queries, param, this.preauthorizedAgentObjectId); + break; + case "scid": + this.tryAppendQueryParameter(queries, param, this.correlationId); + break; + } } + return queries.join("&"); } - return res; -} -/** - * Covert BlobTags to Tags type. - * - * @param tags - - */ -function toTags(tags) { - if (tags === undefined) { - return undefined; - } - const res = {}; - for (const blobTag of tags.blobTagSet) { - res[blobTag.key] = blobTag.value; + /** + * A private helper method used to filter and append query key/value pairs into an array. 
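// [annotation] Illustrative toString() output with entirely hypothetical values: only parameters
// that are actually set get appended (tryAppendQueryParameter below skips empty ones), always in
// the fixed sv, ss, srt, spr, st, se, sip, si, ses, sk*, sr, sp, sig, rsc*, saoid, scid order:
//
//   "sv=2023-11-03&se=2024-01-01T00%3A00%3A00Z&sr=b&sp=r&sig=<URL-encoded base64 HMAC>"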
+ * + * @param queries - + * @param key - + * @param value - + */ + tryAppendQueryParameter(queries, key, value) { + if (!value) { + return; + } + key = encodeURIComponent(key); + value = encodeURIComponent(value); + if (key.length > 0 && value.length > 0) { + queries.push(`${key}=${value}`); + } } - return res; } -/** - * Convert BlobQueryTextConfiguration to QuerySerialization type. - * - * @param textConfiguration - - */ -function toQuerySerialization(textConfiguration) { - if (textConfiguration === undefined) { - return undefined; - } - switch (textConfiguration.kind) { - case "csv": - return { - format: { - type: "delimited", - delimitedTextConfiguration: { - columnSeparator: textConfiguration.columnSeparator || ",", - fieldQuote: textConfiguration.fieldQuote || "", - recordSeparator: textConfiguration.recordSeparator, - escapeChar: textConfiguration.escapeCharacter || "", - headersPresent: textConfiguration.hasHeaders || false, - }, - }, - }; - case "json": - return { - format: { - type: "json", - jsonTextConfiguration: { - recordSeparator: textConfiguration.recordSeparator, - }, - }, - }; - case "arrow": - return { - format: { - type: "arrow", - arrowConfiguration: { - schema: textConfiguration.schema, - }, - }, - }; - case "parquet": - return { - format: { - type: "parquet", - }, - }; - default: - throw Error("Invalid BlobQueryTextConfiguration."); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +function generateBlobSASQueryParameters(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { + const version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION; + const sharedKeyCredential = sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential + ? sharedKeyCredentialOrUserDelegationKey + : undefined; + let userDelegationKeyCredential; + if (sharedKeyCredential === undefined && accountName !== undefined) { + userDelegationKeyCredential = new UserDelegationKeyCredential(accountName, sharedKeyCredentialOrUserDelegationKey); } -} -function parseObjectReplicationRecord(objectReplicationRecord) { - if (!objectReplicationRecord) { - return undefined; + if (sharedKeyCredential === undefined && userDelegationKeyCredential === undefined) { + throw TypeError("Invalid sharedKeyCredential, userDelegationKey or accountName."); } - if ("policy-id" in objectReplicationRecord) { - // If the dictionary contains a key with policy id, we are not required to do any parsing since - // the policy id should already be stored in the ObjectReplicationDestinationPolicyId. - return undefined; + // Version 2020-12-06 adds support for encryptionscope in SAS. + if (version >= "2020-12-06") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential); + } + else { + return generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential); + } } - const orProperties = []; - for (const key in objectReplicationRecord) { - const ids = key.split("_"); - const policyPrefix = "or-"; - if (ids[0].startsWith(policyPrefix)) { - ids[0] = ids[0].substring(policyPrefix.length); + // Version 2019-12-12 adds support for the blob tags permission. + // Version 2018-11-09 adds support for the signed resource and signed blob snapshot time fields. 
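// [annotation] A compact map of the version dispatch implemented here; the string comparisons
// are lexicographic, which is safe because service versions are zero-padded dates:
//   version >= "2020-12-06": 20201206 paths (add "ses"/encryptionScope; shared-key or user delegation)
//   version >= "2018-11-09": 20181109 shared-key path, or user-delegation paths
//                            (>= "2020-02-10" additionally signs saoid/scid)
//   version >= "2015-04-05": 20150405 shared-key path only; user delegation requires >= "2018-11-09"
//   anything older:          RangeError("'version' must be >= '2015-04-05'.")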
+ // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas#constructing-the-signature-string + if (version >= "2018-11-09") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential); } + else { + // Version 2020-02-10 delegation SAS signature construction includes preauthorizedAgentObjectId, agentObjectId, correlationId. + if (version >= "2020-02-10") { + return generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential); + } + else { + return generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential); + } + } + } + if (version >= "2015-04-05") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential); } + else { + throw new RangeError("'version' must be >= '2018-11-09' when generating user delegation SAS using user delegation key."); } + } + throw new RangeError("'version' must be >= '2015-04-05'."); } /** - * Attach a TokenCredential to an object. + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2015-04-05 AND BEFORE 2018-11-09. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. * - * @param thing - - * @param credential - + * WARNING: When identifier is not provided, permissions and expiresOn are required. + * You MUST assign value to identifier or expiresOn & permissions manually if you initialize with + * this constructor. + * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - */ -function attachCredential(thing, credential) { - thing.credential = credential; - return thing; -} -function httpAuthorizationToString(httpAuthorization) { - return httpAuthorization ?
httpAuthorization.scheme + " " + httpAuthorization.value : undefined; -} -function BlobNameToString(name) { - if (name.encoded) { - return decodeURIComponent(name.content); +function generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); } - else { - return name.content; + let resource = "c"; + if (blobSASSignatureValues.blobName) { + resource = "b"; } -} -function ConvertInternalResponseOfListBlobFlat(internalResponse) { - return Object.assign(Object.assign({}, internalResponse), { segment: { - blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { - const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); - return blobItem; - }), - } }); -} -function ConvertInternalResponseOfListBlobHierarchy(internalResponse) { - var _a; - return Object.assign(Object.assign({}, internalResponse), { segment: { - blobPrefixes: (_a = internalResponse.segment.blobPrefixes) === null || _a === void 0 ? void 0 : _a.map((blobPrefixInternal) => { - const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) }); - return blobPrefix; - }), - blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { - const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); - return blobItem; - }), - } }); -} -function* ExtractPageRangeInfoItems(getPageRangesSegment) { - let pageRange = []; - let clearRange = []; - if (getPageRangesSegment.pageRange) - pageRange = getPageRangesSegment.pageRange; - if (getPageRangesSegment.clearRange) - clearRange = getPageRangesSegment.clearRange; - let pageRangeIndex = 0; - let clearRangeIndex = 0; - while (pageRangeIndex < pageRange.length && clearRangeIndex < clearRange.length) { - if (pageRange[pageRangeIndex].start < clearRange[clearRangeIndex].start) { - yield { - start: pageRange[pageRangeIndex].start, - end: pageRange[pageRangeIndex].end, - isClear: false, - }; - ++pageRangeIndex; + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } else { - yield { - start: clearRange[clearRangeIndex].start, - end: clearRange[clearRangeIndex].end, - isClear: true, - }; - ++clearRangeIndex; + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } } - for (; pageRangeIndex < pageRange.length; ++pageRangeIndex) { - yield { - start: pageRange[pageRangeIndex].start, - end: pageRange[pageRangeIndex].end, - isClear: false, - }; - } - for (; clearRangeIndex < clearRange.length; ++clearRangeIndex) { - yield { - start: clearRange[clearRangeIndex].start, - end: clearRange[clearRangeIndex].end, - isClear: true, - }; - } -} -/** - * Escape the blobName but keep path separator ('/'). 
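// [annotation] What the 2015-04-05 shared-key path below actually signs: a "\n"-joined list of
// the raw, un-URL-encoded fields, HMAC-SHA256'd with the account key and base64-encoded into
// "sig". Values are hypothetical; the canonical-name shape follows the Azure service-SAS docs:
//
//   r                                       (verified permissions)
//                                           (startsOn, empty when unset)
//   2024-01-01T00:00:00Z                    (expiresOn)
//   /blob/myaccount/mycontainer/blob.txt    (getCanonicalName(...))
//   ...                                     (identifier, ipRange, protocol, version, rsc* overrides)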
- */
-function EscapePath(blobName) {
- const split = blobName.split("/");
- for (let i = 0; i < split.length; i++) {
- split[i] = encodeURIComponent(split[i]);
- }
- return split.join("/");
+ // Signature is generated on the un-url-encoded values.
+ const stringToSign = [
+ verifiedPermissions ? verifiedPermissions : "",
+ blobSASSignatureValues.startsOn
+ ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)
+ : "",
+ blobSASSignatureValues.expiresOn
+ ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)
+ : "",
+ getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName),
+ blobSASSignatureValues.identifier,
+ blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "",
+ blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "",
+ blobSASSignatureValues.version,
+ blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "",
+ blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "",
+ blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "",
+ blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "",
+ blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "",
+ ].join("\n");
+ const signature = sharedKeyCredential.computeHMACSHA256(stringToSign);
+ return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType);
}
-
-// Copyright (c) Microsoft Corporation.
/**
- * StorageBrowserPolicy will handle differences between Node.js and browser runtime, including:
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ * IMPLEMENTATION FOR API VERSION FROM 2018-11-09.
*
- * 1. Browsers cache GET/HEAD requests by adding conditional headers such as 'IF_MODIFIED_SINCE'.
- * StorageBrowserPolicy is a policy used to add a timestamp query to GET/HEAD request URL
- * thus avoid the browser cache.
+ * Creates an instance of SASQueryParameters.
*
- * 2. Remove cookie header for security
+ * Only accepts required settings needed to create a SAS. For optional settings please
+ * set corresponding properties directly, such as permissions, startsOn and identifier.
*
- * 3. Remove content-length header to avoid browsers warning
+ * WARNING: When identifier is not provided, permissions and expiresOn are required.
+ * You MUST assign value to identifier or expiresOn & permissions manually if you initialize with
+ * this constructor.
+ *
+ * @param blobSASSignatureValues -
+ * @param sharedKeyCredential -
*/
-class StorageBrowserPolicy extends coreHttp.BaseRequestPolicy {
- /**
- * Creates an instance of StorageBrowserPolicy.
- * @param nextPolicy -
- * @param options -
- */
- // The base class has a protected constructor. Adding a public one to enable constructing of this class.
- /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ - constructor(nextPolicy, options) { - super(nextPolicy, options); +function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); } - /** - * Sends out request. - * - * @param request - - */ - async sendRequest(request) { - if (coreHttp.isNode) { - return this._nextPolicy.sendRequest(request); + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; } - if (request.method.toUpperCase() === "GET" || request.method.toUpperCase() === "HEAD") { - request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, new Date().getTime().toString()); + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; } - request.headers.remove(HeaderConstants.COOKIE); - // According to XHR standards, content-length should be fully controlled by browsers - request.headers.remove(HeaderConstants.CONTENT_LENGTH); - return this._nextPolicy.sendRequest(request); - } -} - -// Copyright (c) Microsoft Corporation. -/** - * StorageBrowserPolicyFactory is a factory class helping generating StorageBrowserPolicy objects. - */ -class StorageBrowserPolicyFactory { - /** - * Creates a StorageBrowserPolicyFactory object. - * - * @param nextPolicy - - * @param options - - */ - create(nextPolicy, options) { - return new StorageBrowserPolicy(nextPolicy, options); } -} - -// Copyright (c) Microsoft Corporation. -/** - * RetryPolicy types. - */ -exports.StorageRetryPolicyType = void 0; -(function (StorageRetryPolicyType) { - /** - * Exponential retry. Retry time delay grows exponentially. - */ - StorageRetryPolicyType[StorageRetryPolicyType["EXPONENTIAL"] = 0] = "EXPONENTIAL"; - /** - * Linear retry. Retry time delay grows linearly. - */ - StorageRetryPolicyType[StorageRetryPolicyType["FIXED"] = 1] = "FIXED"; -})(exports.StorageRetryPolicyType || (exports.StorageRetryPolicyType = {})); -// Default values of StorageRetryOptions -const DEFAULT_RETRY_OPTIONS = { - maxRetryDelayInMs: 120 * 1000, - maxTries: 4, - retryDelayInMs: 4 * 1000, - retryPolicyType: exports.StorageRetryPolicyType.EXPONENTIAL, - secondaryHost: "", - tryTimeoutInMs: undefined, // Use server side default timeout strategy -}; -const RETRY_ABORT_ERROR = new abortController.AbortError("The operation was aborted."); + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? 
truncatedISO8061Date(blobSASSignatureValues.startsOn, false)
+ : "",
+ blobSASSignatureValues.expiresOn
+ ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)
+ : "",
+ getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName),
+ blobSASSignatureValues.identifier,
+ blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "",
+ blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "",
+ blobSASSignatureValues.version,
+ resource,
+ timestamp,
+ blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "",
+ blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "",
+ blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "",
+ blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "",
+ blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "",
+ ].join("\n");
+ const signature = sharedKeyCredential.computeHMACSHA256(stringToSign);
+ return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType);
+}
/**
- * Retry policy with exponential retry and linear retry implemented.
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ * IMPLEMENTATION FOR API VERSION FROM 2020-12-06.
+ *
+ * Creates an instance of SASQueryParameters.
+ *
+ * Only accepts required settings needed to create a SAS. For optional settings please
+ * set corresponding properties directly, such as permissions, startsOn and identifier.
+ *
+ * WARNING: When identifier is not provided, permissions and expiresOn are required.
+ * You MUST assign value to identifier or expiresOn & permissions manually if you initialize with
+ * this constructor.
+ *
+ * @param blobSASSignatureValues -
+ * @param sharedKeyCredential -
 */
-class StorageRetryPolicy extends coreHttp.BaseRequestPolicy {
- /**
- * Creates an instance of RetryPolicy.
- *
- * @param nextPolicy -
- * @param options -
- * @param retryOptions -
- */
- constructor(nextPolicy, options, retryOptions = DEFAULT_RETRY_OPTIONS) {
- super(nextPolicy, options);
- // Initialize retry options
- this.retryOptions = {
- retryPolicyType: retryOptions.retryPolicyType
- ? retryOptions.retryPolicyType
- : DEFAULT_RETRY_OPTIONS.retryPolicyType,
- maxTries: retryOptions.maxTries && retryOptions.maxTries >= 1
- ? Math.floor(retryOptions.maxTries)
- : DEFAULT_RETRY_OPTIONS.maxTries,
- tryTimeoutInMs: retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0
- ? retryOptions.tryTimeoutInMs
- : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs,
- retryDelayInMs: retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0
- ? Math.min(retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs
- ? retryOptions.maxRetryDelayInMs
- : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs)
- : DEFAULT_RETRY_OPTIONS.retryDelayInMs,
- maxRetryDelayInMs: retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0
- ? retryOptions.maxRetryDelayInMs
- : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs,
- secondaryHost: retryOptions.secondaryHost
- ?
retryOptions.secondaryHost - : DEFAULT_RETRY_OPTIONS.secondaryHost, - }; - } - /** - * Sends request. - * - * @param request - - */ - async sendRequest(request) { - return this.attemptSendRequest(request, false, 1); +function generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); } - /** - * Decide and perform next retry. Won't mutate request parameter. - * - * @param request - - * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then - * the resource was not found. This may be due to replication delay. So, in this - * case, we'll never try the secondary again for this operation. - * @param attempt - How many retries has been attempted to performed, starting from 1, which includes - * the attempt will be performed by this method call. - */ - async attemptSendRequest(request, secondaryHas404, attempt) { - const newRequest = request.clone(); - const isPrimaryRetry = secondaryHas404 || - !this.retryOptions.secondaryHost || - !(request.method === "GET" || request.method === "HEAD" || request.method === "OPTIONS") || - attempt % 2 === 1; - if (!isPrimaryRetry) { - newRequest.url = setURLHost(newRequest.url, this.retryOptions.secondaryHost); - } - // Set the server-side timeout query parameter "timeout=[seconds]" - if (this.retryOptions.tryTimeoutInMs) { - newRequest.url = setURLParameter(newRequest.url, URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1000).toString()); - } - let response; - try { - logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); - response = await this._nextPolicy.sendRequest(newRequest); - if (!this.shouldRetry(isPrimaryRetry, attempt, response)) { - return response; - } - secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404); + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; } - catch (err) { - logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`); - if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) { - throw err; - } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; } - await this.delay(isPrimaryRetry, attempt, request.abortSignal); - return this.attemptSendRequest(request, secondaryHas404, ++attempt); } - /** - * Decide whether to retry according to last HTTP response and retry counters. - * - * @param isPrimaryRetry - - * @param attempt - - * @param response - - * @param err - - */ - shouldRetry(isPrimaryRetry, attempt, response, err) { - if (attempt >= this.retryOptions.maxTries) { - logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions - .maxTries}, no further try.`); - return false; + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. 
+ let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } - // Handle network failures, you may need to customize the list when you implement - // your own http client - const retriableErrors = [ - "ETIMEDOUT", - "ESOCKETTIMEDOUT", - "ECONNREFUSED", - "ECONNRESET", - "ENOENT", - "ENOTFOUND", - "TIMEOUT", - "EPIPE", - "REQUEST_SEND_ERROR", // For default xhr based http client provided in ms-rest-js - ]; - if (err) { - for (const retriableError of retriableErrors) { - if (err.name.toUpperCase().includes(retriableError) || - err.message.toUpperCase().includes(retriableError) || - (err.code && err.code.toString().toUpperCase() === retriableError)) { - logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); - return true; - } - } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } - // If attempt was against the secondary & it returned a StatusNotFound (404), then - // the resource was not found. This may be due to replication delay. So, in this - // case, we'll never try the secondary again for this operation. - if (response || err) { - const statusCode = response ? response.status : err ? err.statusCode : 0; - if (!isPrimaryRetry && statusCode === 404) { - logger.info(`RetryPolicy: Secondary access with 404, will retry.`); - return true; - } - // Server internal error or server timeout - if (statusCode === 503 || statusCode === 500) { - logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); - return true; - } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? 
blobSASSignatureValues.contentType : "", + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, undefined, undefined, undefined, blobSASSignatureValues.encryptionScope); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2018-11-09. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. + * + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - + */ +function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; } - if ((err === null || err === void 0 ? void 0 : err.code) === "PARSE_ERROR" && (err === null || err === void 0 ? void 0 : err.message.startsWith(`Error "Error: Unclosed root tag`))) { - logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); - return true; + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; } - return false; } - /** - * Delay a calculated time between retries. - * - * @param isPrimaryRetry - - * @param attempt - - * @param abortSignal - - */ - async delay(isPrimaryRetry, attempt, abortSignal) { - let delayTimeInMs = 0; - if (isPrimaryRetry) { - switch (this.retryOptions.retryPolicyType) { - case exports.StorageRetryPolicyType.EXPONENTIAL: - delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs); - break; - case exports.StorageRetryPolicyType.FIXED: - delayTimeInMs = this.retryOptions.retryDelayInMs; - break; - } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. 
+ let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } else { - delayTimeInMs = Math.random() * 1000; + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } - logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); - return delay(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR); } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey); } - -// Copyright (c) Microsoft Corporation. /** - * StorageRetryPolicyFactory is a factory class helping generating {@link StorageRetryPolicy} objects. + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-02-10. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. + * + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - */ -class StorageRetryPolicyFactory { - /** - * Creates an instance of StorageRetryPolicyFactory. - * @param retryOptions - - */ - constructor(retryOptions) { - this.retryOptions = retryOptions; - } - /** - * Creates a StorageRetryPolicy object. 
- * - * @param nextPolicy - - * @param options - - */ - create(nextPolicy, options) { - return new StorageRetryPolicy(nextPolicy, options, this.retryOptions); +function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); } -} - -// Copyright (c) Microsoft Corporation. -/** - * Credential policy used to sign HTTP(S) requests before sending. This is an - * abstract class. - */ -class CredentialPolicy extends coreHttp.BaseRequestPolicy { - /** - * Sends out request. - * - * @param request - - */ - sendRequest(request) { - return this._nextPolicy.sendRequest(this.signRequest(request)); + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } } - /** - * Child classes must implement this method with request signing. This method - * will be executed in {@link sendRequest}. - * - * @param request - - */ - signRequest(request) { - // Child classes must override this method with request signing. This method - // will be executed in sendRequest(). - return request; + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + undefined, // agentObjectId + blobSASSignatureValues.correlationId, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId); } - -// Copyright (c) Microsoft Corporation. /** - * AnonymousCredentialPolicy is used with HTTP(S) requests that read public resources - * or for use with Shared Access Signatures (SAS). + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. + * + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - */ -class AnonymousCredentialPolicy extends CredentialPolicy { - /** - * Creates an instance of AnonymousCredentialPolicy. - * @param nextPolicy - - * @param options - - */ - // The base class has a protected constructor. Adding a public one to enable constructing of this class. - /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ - constructor(nextPolicy, options) { - super(nextPolicy, options); +function generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Credential is an abstract class for Azure Storage HTTP requests signing. This - * class will host an credentialPolicyCreator factory which generates CredentialPolicy. - */ -class Credential { - /** - * Creates a RequestPolicy object. - * - * @param _nextPolicy - - * @param _options - - */ - create(_nextPolicy, _options) { - throw new Error("Method should be implemented in children classes."); + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } } -} - -// Copyright (c) Microsoft Corporation. 
-/** - * AnonymousCredential provides a credentialPolicyCreator member used to create - * AnonymousCredentialPolicy objects. AnonymousCredentialPolicy is used with - * HTTP(S) requests that read public resources or for use with Shared Access - * Signatures (SAS). - */ -class AnonymousCredential extends Credential { - /** - * Creates an {@link AnonymousCredentialPolicy} object. - * - * @param nextPolicy - - * @param options - - */ - create(nextPolicy, options) { - return new AnonymousCredentialPolicy(nextPolicy, options); + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + undefined, // agentObjectId + blobSASSignatureValues.correlationId, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId, blobSASSignatureValues.encryptionScope); } - -// Copyright (c) Microsoft Corporation. -/** - * TelemetryPolicy is a policy used to tag user-agent header for every requests. 
- */ -class TelemetryPolicy extends coreHttp.BaseRequestPolicy { - /** - * Creates an instance of TelemetryPolicy. - * @param nextPolicy - - * @param options - - * @param telemetry - - */ - constructor(nextPolicy, options, telemetry) { - super(nextPolicy, options); - this.telemetry = telemetry; +function getCanonicalName(accountName, containerName, blobName) { + // Container: "/blob/account/containerName" + // Blob: "/blob/account/containerName/blobName" + const elements = [`/blob/${accountName}/${containerName}`]; + if (blobName) { + elements.push(`/${blobName}`); } - /** - * Sends out request. - * - * @param request - - */ - async sendRequest(request) { - if (coreHttp.isNode) { - if (!request.headers) { - request.headers = new coreHttp.HttpHeaders(); - } - if (!request.headers.get(HeaderConstants.USER_AGENT)) { - request.headers.set(HeaderConstants.USER_AGENT, this.telemetry); - } - } - return this._nextPolicy.sendRequest(request); + return elements.join(""); +} +function SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { + const version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION; + if (blobSASSignatureValues.snapshotTime && version < "2018-11-09") { + throw RangeError("'version' must be >= '2018-11-09' when providing 'snapshotTime'."); + } + if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.snapshotTime) { + throw RangeError("Must provide 'blobName' when providing 'snapshotTime'."); + } + if (blobSASSignatureValues.versionId && version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'versionId'."); + } + if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.versionId) { + throw RangeError("Must provide 'blobName' when providing 'versionId'."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.setImmutabilityPolicy && + version < "2020-08-04") { + throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.deleteVersion && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'x' permission."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.permanentDelete && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'y' permission."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.tag && + version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when providing 't' permission."); + } + if (version < "2020-02-10" && + blobSASSignatureValues.permissions && + (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)) { + throw RangeError("'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission."); + } + if (version < "2021-04-10" && + blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.filterByTags) { + throw RangeError("'version' must be >= '2021-04-10' when providing the 'f' permission."); + } + if (version < "2020-02-10" && + (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)) { + throw RangeError("'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'."); + } + if (blobSASSignatureValues.encryptionScope && version < "2020-12-06") { + throw RangeError("'version' must be >= 
'2020-12-06' when provided 'encryptionScope' in SAS."); } + blobSASSignatureValues.version = version; + return blobSASSignatureValues; } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * TelemetryPolicyFactory is a factory class helping generating {@link TelemetryPolicy} objects. + * A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}. */ -class TelemetryPolicyFactory { +class BlobLeaseClient { /** - * Creates an instance of TelemetryPolicyFactory. - * @param telemetry - + * Gets the lease Id. + * + * @readonly */ - constructor(telemetry) { - const userAgentInfo = []; - if (coreHttp.isNode) { - if (telemetry) { - const telemetryString = telemetry.userAgentPrefix || ""; - if (telemetryString.length > 0 && userAgentInfo.indexOf(telemetryString) === -1) { - userAgentInfo.push(telemetryString); - } - } - // e.g. azsdk-js-storageblob/10.0.0 - const libInfo = `azsdk-js-storageblob/${SDK_VERSION}`; - if (userAgentInfo.indexOf(libInfo) === -1) { - userAgentInfo.push(libInfo); - } - // e.g. (NODE-VERSION 4.9.1; Windows_NT 10.0.16299) - let runtimeInfo = `(NODE-VERSION ${process.version})`; - if (os__namespace) { - runtimeInfo = `(NODE-VERSION ${process.version}; ${os__namespace.type()} ${os__namespace.release()})`; - } - if (userAgentInfo.indexOf(runtimeInfo) === -1) { - userAgentInfo.push(runtimeInfo); - } - } - this.telemetryString = userAgentInfo.join(" "); + get leaseId() { + return this._leaseId; } /** - * Creates a TelemetryPolicy object. + * Gets the url. * - * @param nextPolicy - - * @param options - + * @readonly */ - create(nextPolicy, options) { - return new TelemetryPolicy(nextPolicy, options, this.telemetryString); + get url() { + return this._url; } -} - -// Copyright (c) Microsoft Corporation. -const _defaultHttpClient = new coreHttp.DefaultHttpClient(); -function getCachedDefaultHttpClient() { - return _defaultHttpClient; -} - -// Copyright (c) Microsoft Corporation. -/** - * A set of constants used internally when processing requests. - */ -const Constants = { - DefaultScope: "/.default", /** - * Defines constants for use with HTTP headers. + * Creates an instance of BlobLeaseClient. + * @param client - The client to make the lease operation requests. + * @param leaseId - Initial proposed lease id. */ - HeaderConstants: { - /** - * The Authorization header. - */ - AUTHORIZATION: "authorization", - }, -}; -// Default options for the cycler if none are provided -const DEFAULT_CYCLER_OPTIONS = { - forcedRefreshWindowInMs: 1000, - retryIntervalInMs: 3000, - refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry -}; -/** - * Converts an an unreliable access token getter (which may resolve with null) - * into an AccessTokenGetter by retrying the unreliable getter in a regular - * interval. - * - * @param getAccessToken - a function that produces a promise of an access - * token that may fail by returning null - * @param retryIntervalInMs - the time (in milliseconds) to wait between retry - * attempts - * @param timeoutInMs - the timestamp after which the refresh attempt will fail, - * throwing an exception - * @returns - a promise that, if it resolves, will resolve with an access token - */ -async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { - // This wrapper handles exceptions gracefully as long as we haven't exceeded - // the timeout. 
- async function tryGetAccessToken() { - if (Date.now() < timeoutInMs) { - try { - return await getAccessToken(); - } - catch (_a) { - return null; - } + constructor(client, leaseId) { + const clientContext = client.storageClientContext; + this._url = client.url; + if (client.name === undefined) { + this._isContainer = true; + this._containerOrBlobOperation = clientContext.container; } else { - const finalToken = await getAccessToken(); - // Timeout is up, so throw if it's still null - if (finalToken === null) { - throw new Error("Failed to refresh access token."); - } - return finalToken; + this._isContainer = false; + this._containerOrBlobOperation = clientContext.blob; } + if (!leaseId) { + leaseId = coreUtil.randomUUID(); + } + this._leaseId = leaseId; } - let token = await tryGetAccessToken(); - while (token === null) { - await coreHttp.delay(retryIntervalInMs); - token = await tryGetAccessToken(); - } - return token; -} -/** - * Creates a token cycler from a credential, scopes, and optional settings. - * - * A token cycler represents a way to reliably retrieve a valid access token - * from a TokenCredential. It will handle initializing the token, refreshing it - * when it nears expiration, and synchronizes refresh attempts to avoid - * concurrency hazards. - * - * @param credential - the underlying TokenCredential that provides the access - * token - * @param scopes - the scopes to request authorization for - * @param tokenCyclerOptions - optionally override default settings for the cycler - * - * @returns - a function that reliably produces a valid access token - */ -function createTokenCycler(credential, scopes, tokenCyclerOptions) { - let refreshWorker = null; - let token = null; - const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); /** - * This little holder defines several predicates that we use to construct - * the rules of refreshing the token. + * Establishes and manages a lock on a container for delete operations, or on a blob + * for write and delete operations. + * The lock duration can be 15 to 60 seconds, or can be infinite. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param duration - Must be between 15 to 60 seconds, or infinite (-1) + * @param options - option to configure lease management operations. + * @returns Response data for acquire lease operation. */ - const cycler = { - /** - * Produces true if a refresh job is currently in progress. - */ - get isRefreshing() { - return refreshWorker !== null; - }, - /** - * Produces true if the cycler SHOULD refresh (we are within the refresh - * window and not already refreshing) - */ - get shouldRefresh() { + async acquireLease(duration, options = {}) { + var _a, _b, _c, _d, _e; + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. 
Values other than undefined or their default values are not acceptable."); + } + return tracingClient.withSpan("BlobLeaseClient-acquireLease", options, async (updatedOptions) => { var _a; - return (!cycler.isRefreshing && - ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now()); - }, - /** - * Produces true if the cycler MUST refresh (null or nearly-expired - * token). - */ - get mustRefresh() { - return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); - }, - }; + return assertResponse(await this._containerOrBlobOperation.acquireLease({ + abortSignal: options.abortSignal, + duration, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + proposedLeaseId: this._leaseId, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } /** - * Starts a refresh job or returns the existing job if one is already - * running. + * To change the ID of the lease. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param proposedLeaseId - the proposed new lease Id. + * @param options - option to configure lease management operations. + * @returns Response data for change lease operation. */ - function refresh(getTokenOptions) { - var _a; - if (!cycler.isRefreshing) { - // We bind `scopes` here to avoid passing it around a lot - const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); - // Take advantage of promise chaining to insert an assignment to `token` - // before the refresh can be considered done. - refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, - // If we don't have a token, then we should timeout immediately - (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) - .then((_token) => { - refreshWorker = null; - token = _token; - return token; - }) - .catch((reason) => { - // We also should reset the refresher if we enter a failed state. All - // existing awaiters will throw, but subsequent requests will start a - // new retry chain. - refreshWorker = null; - token = null; - throw reason; - }); - } - return refreshWorker; - } - return async (tokenOptions) => { - // - // Simple rules: - // - If we MUST refresh, then return the refresh task, blocking - // the pipeline until a token is available. - // - If we SHOULD refresh, then run refresh but don't return it - // (we can still use the cached token). - // - Return the token, since it's fine if we didn't return in - // step 1. - // - if (cycler.mustRefresh) - return refresh(tokenOptions); - if (cycler.shouldRefresh) { - refresh(tokenOptions); - } - return token; - }; -} -/** - * We will retrieve the challenge only if the response status code was 401, - * and if the response contained the header "WWW-Authenticate" with a non-empty value. - */ -function getChallenge(response) { - const challenge = response.headers.get("WWW-Authenticate"); - if (response.status === 401 && challenge) { - return challenge; - } - return; -} -/** - * Converts: `Bearer a="b" c="d"`. - * Into: `[ { a: 'b', c: 'd' }]`. 
- * - * @internal - */ -function parseChallenge(challenge) { - const bearerChallenge = challenge.slice("Bearer ".length); - const challengeParts = `${bearerChallenge.trim()} `.split(" ").filter((x) => x); - const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split("="))); - // Key-value pairs to plain object: - return keyValuePairs.reduce((a, b) => (Object.assign(Object.assign({}, a), b)), {}); -} -// #endregion -/** - * Creates a new factory for a RequestPolicy that applies a bearer token to - * the requests' `Authorization` headers. - * - * @param credential - The TokenCredential implementation that can supply the bearer token. - * @param scopes - The scopes for which the bearer token applies. - */ -function storageBearerTokenChallengeAuthenticationPolicy(credential, scopes) { - // This simple function encapsulates the entire process of reliably retrieving the token - let getToken = createTokenCycler(credential, scopes); - class StorageBearerTokenChallengeAuthenticationPolicy extends coreHttp.BaseRequestPolicy { - constructor(nextPolicy, options) { - super(nextPolicy, options); + async changeLease(proposedLeaseId, options = {}) { + var _a, _b, _c, _d, _e; + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); } - async sendRequest(webResource) { - if (!webResource.url.toLowerCase().startsWith("https://")) { - throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); - } - const getTokenInternal = getToken; - const token = (await getTokenInternal({ - abortSignal: webResource.abortSignal, - tracingOptions: { - tracingContext: webResource.tracingContext, - }, - })).token; - webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`); - const response = await this._nextPolicy.sendRequest(webResource); - if ((response === null || response === void 0 ? 
void 0 : response.status) === 401) { - const challenge = getChallenge(response); - if (challenge) { - const challengeInfo = parseChallenge(challenge); - const challengeScopes = challengeInfo.resource_id + Constants.DefaultScope; - const parsedAuthUri = coreHttp.URLBuilder.parse(challengeInfo.authorization_uri); - const pathSegments = parsedAuthUri.getPath().split("/"); - const tenantId = pathSegments[1]; - const getTokenForChallenge = createTokenCycler(credential, challengeScopes); - const tokenForChallenge = (await getTokenForChallenge({ - abortSignal: webResource.abortSignal, - tracingOptions: { - tracingContext: webResource.tracingContext, - }, - tenantId: tenantId, - })).token; - getToken = getTokenForChallenge; - webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${tokenForChallenge}`); - return this._nextPolicy.sendRequest(webResource); - } - } + return tracingClient.withSpan("BlobLeaseClient-changeLease", options, async (updatedOptions) => { + var _a; + const response = assertResponse(await this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId, { + abortSignal: options.abortSignal, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); + this._leaseId = proposedLeaseId; return response; - } - } - return { - create: (nextPolicy, options) => { - return new StorageBearerTokenChallengeAuthenticationPolicy(nextPolicy, options); - }, - }; -} - -// Copyright (c) Microsoft Corporation. -/** - * A helper to decide if a given argument satisfies the Pipeline contract - * @param pipeline - An argument that may be a Pipeline - * @returns true when the argument satisfies the Pipeline contract - */ -function isPipelineLike(pipeline) { - if (!pipeline || typeof pipeline !== "object") { - return false; + }); } - const castPipeline = pipeline; - return (Array.isArray(castPipeline.factories) && - typeof castPipeline.options === "object" && - typeof castPipeline.toServiceClientOptions === "function"); -} -/** - * A Pipeline class containing HTTP request policies. - * You can create a default Pipeline by calling {@link newPipeline}. - * Or you can create a Pipeline with your own policies by the constructor of Pipeline. - * - * Refer to {@link newPipeline} and provided policies before implementing your - * customized Pipeline. - */ -class Pipeline { /** - * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface. + * To free the lease if it is no longer needed so that another client may + * immediately acquire a lease against the container or the blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob * - * @param factories - - * @param options - + * @param options - option to configure lease management operations. + * @returns Response data for release lease operation. */ - constructor(factories, options = {}) { - this.factories = factories; - // when options.httpClient is not specified, passing in a DefaultHttpClient instance to - // avoid each client creating its own http client. 
- this.options = Object.assign(Object.assign({}, options), { httpClient: options.httpClient || getCachedDefaultHttpClient() }); + async releaseLease(options = {}) { + var _a, _b, _c, _d, _e; + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + return tracingClient.withSpan("BlobLeaseClient-releaseLease", options, async (updatedOptions) => { + var _a; + return assertResponse(await this._containerOrBlobOperation.releaseLease(this._leaseId, { + abortSignal: options.abortSignal, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** - * Transfer Pipeline object to ServiceClientOptions object which is required by - * ServiceClient constructor. + * To renew the lease. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob * - * @returns The ServiceClientOptions object from this Pipeline. + * @param options - Optional option to configure lease management operations. + * @returns Response data for renew lease operation. */ - toServiceClientOptions() { - return { - httpClient: this.options.httpClient, - requestPolicyFactories: this.factories, - }; - } -} -/** - * Creates a new Pipeline object with Credential provided. - * - * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. - * @param pipelineOptions - Optional. Options. - * @returns A new Pipeline object. - */ -function newPipeline(credential, pipelineOptions = {}) { - var _a; - if (credential === undefined) { - credential = new AnonymousCredential(); + async renewLease(options = {}) { + var _a, _b, _c, _d, _e; + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. 
Values other than undefined or their default values are not acceptable."); + } + return tracingClient.withSpan("BlobLeaseClient-renewLease", options, async (updatedOptions) => { + var _a; + return this._containerOrBlobOperation.renewLease(this._leaseId, { + abortSignal: options.abortSignal, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + }); + }); } - // Order is important. Closer to the API at the top & closer to the network at the bottom. - // The credential's policy factory must appear close to the wire so it can sign any - // changes made by other factories (like UniqueRequestIDPolicyFactory) - const telemetryPolicy = new TelemetryPolicyFactory(pipelineOptions.userAgentOptions); - const factories = [ - coreHttp.tracingPolicy({ userAgent: telemetryPolicy.telemetryString }), - coreHttp.keepAlivePolicy(pipelineOptions.keepAliveOptions), - telemetryPolicy, - coreHttp.generateClientRequestIdPolicy(), - new StorageBrowserPolicyFactory(), - new StorageRetryPolicyFactory(pipelineOptions.retryOptions), - // Default deserializationPolicy is provided by protocol layer - // Use customized XML char key of "#" so we could deserialize metadata - // with "_" key - coreHttp.deserializationPolicy(undefined, { xmlCharKey: "#" }), - coreHttp.logPolicy({ - logger: logger.info, - allowedHeaderNames: StorageBlobLoggingAllowedHeaderNames, - allowedQueryParameters: StorageBlobLoggingAllowedQueryParameters, - }), - ]; - if (coreHttp.isNode) { - // policies only available in Node.js runtime, not in browsers - factories.push(coreHttp.proxyPolicy(pipelineOptions.proxyOptions)); - factories.push(coreHttp.disableResponseDecompressionPolicy()); + /** + * To end the lease but ensure that another client cannot acquire a new lease + * until the current lease period has expired. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param breakPeriod - Break period + * @param options - Optional options to configure lease management operations. + * @returns Response data for break lease operation. + */ + async breakLease(breakPeriod, options = {}) { + var _a, _b, _c, _d, _e; + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + return tracingClient.withSpan("BlobLeaseClient-breakLease", options, async (updatedOptions) => { + var _a; + const operationOptions = { + abortSignal: options.abortSignal, + breakPeriod, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + }; + return assertResponse(await this._containerOrBlobOperation.breakLease(operationOptions)); + }); } - factories.push(coreHttp.isTokenCredential(credential) - ? attachCredential(storageBearerTokenChallengeAuthenticationPolicy(credential, (_a = pipelineOptions.audience) !== null && _a !== void 0 ? _a : StorageOAuthScopes), credential) - : credential); - return new Pipeline(factories, pipelineOptions); } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP request with a shared key. + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * A Node.js ReadableStream that will internally retry when the internal ReadableStream unexpectedly ends. */ -class StorageSharedKeyCredentialPolicy extends CredentialPolicy { +class RetriableReadableStream extends stream.Readable { /** - * Creates an instance of StorageSharedKeyCredentialPolicy. - * @param nextPolicy - - * @param options - - * @param factory - - */ - constructor(nextPolicy, options, factory) { - super(nextPolicy, options); - this.factory = factory; - } - /** - * Signs request. - * - * @param request - - */ - signRequest(request) { - request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString()); - if (request.body && - (typeof request.body === "string" || request.body !== undefined) && - request.body.length > 0) { - request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); - } - const stringToSign = [ - request.method.toUpperCase(), - this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE), - this.getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING), - this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH), - this.getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5), - this.getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE), - this.getHeaderValueToSign(request, HeaderConstants.DATE), - this.getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE), - this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH), - this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH), - this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), - this.getHeaderValueToSign(request, HeaderConstants.RANGE), - ].join("\n") + - "\n" + - this.getCanonicalizedHeadersString(request) + - this.getCanonicalizedResourceString(request); - const signature = this.factory.computeHMACSHA256(stringToSign); - request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${this.factory.accountName}:${signature}`); - // console.log(`[URL]:${request.url}`); - // console.log(`[HEADERS]:${request.headers.toString()}`); - // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`); - // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`); - return request; - } - /** - * Retrieve header value according to shared key sign rules. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key - * - * @param request - - * @param headerName - - */ - getHeaderValueToSign(request, headerName) { - const value = request.headers.get(headerName); - if (!value) { - return ""; - } - // When using version 2015-02-21 or later, if Content-Length is zero, then - // set the Content-Length part of the StringToSign to an empty string.
- // https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key - if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") { - return ""; - } - return value; - } - /** - * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: - * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. - * 2. Convert each HTTP header name to lowercase. - * 3. Sort the headers lexicographically by header name, in ascending order. - * Each header may appear only once in the string. - * 4. Replace any linear whitespace in the header value with a single space. - * 5. Trim any whitespace around the colon in the header. - * 6. Finally, append a new-line character to each canonicalized header in the resulting list. - * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. + * Creates an instance of RetriableReadableStream. * - * @param request - + * @param source - The current ReadableStream returned from getter + * @param getter - A method calling downloading request returning + * a new ReadableStream from specified offset + * @param offset - Offset position in original data source to read + * @param count - How much data in original data source to read + * @param options - */ - getCanonicalizedHeadersString(request) { - let headersArray = request.headers.headersArray().filter((value) => { - return value.name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE); - }); - headersArray.sort((a, b) => { - return a.name.toLowerCase().localeCompare(b.name.toLowerCase()); - }); - // Remove duplicate headers - headersArray = headersArray.filter((value, index, array) => { - if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { - return false; + constructor(source, getter, offset, count, options = {}) { + super({ highWaterMark: options.highWaterMark }); + this.retries = 0; + this.sourceDataHandler = (data) => { + if (this.options.doInjectErrorOnce) { + this.options.doInjectErrorOnce = undefined; + this.source.pause(); + this.sourceErrorOrEndHandler(); + this.source.destroy(); + return; } - return true; - }); - let canonicalizedHeadersStringToSign = ""; - headersArray.forEach((header) => { - canonicalizedHeadersStringToSign += `${header.name - .toLowerCase() - .trimRight()}:${header.value.trimLeft()}\n`; - }); - return canonicalizedHeadersStringToSign; - } - /** - * Retrieves the webResource canonicalized resource string. 
- * - * @param request - - */ - getCanonicalizedResourceString(request) { - const path = getURLPath(request.url) || "/"; - let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${this.factory.accountName}${path}`; - const queries = getURLQueries(request.url); - const lowercaseQueries = {}; - if (queries) { - const queryKeys = []; - for (const key in queries) { - if (Object.prototype.hasOwnProperty.call(queries, key)) { - const lowercaseKey = key.toLowerCase(); - lowercaseQueries[lowercaseKey] = queries[key]; - queryKeys.push(lowercaseKey); + // console.log( + // `Offset: ${this.offset}, Received ${data.length} from internal stream` + // ); + this.offset += data.length; + if (this.onProgress) { + this.onProgress({ loadedBytes: this.offset - this.start }); + } + if (!this.push(data)) { + this.source.pause(); + } + }; + this.sourceAbortedHandler = () => { + const abortError = new abortController.AbortError("The operation was aborted."); + this.destroy(abortError); + }; + this.sourceErrorOrEndHandler = (err) => { + if (err && err.name === "AbortError") { + this.destroy(err); + return; + } + // console.log( + // `Source stream emits end or error, offset: ${ + // this.offset + // }, dest end : ${this.end}` + // ); + this.removeSourceEventHandlers(); + if (this.offset - 1 === this.end) { + this.push(null); + } + else if (this.offset <= this.end) { + // console.log( + // `retries: ${this.retries}, max retries: ${this.maxRetries}` + // ); + if (this.retries < this.maxRetryRequests) { + this.retries += 1; + this.getter(this.offset) + .then((newSource) => { + this.source = newSource; + this.setSourceEventHandlers(); + return; + }) + .catch((error) => { + this.destroy(error); + }); + } + else { + this.destroy(new Error(`Data corruption failure: received less data than required and reached maxRetries limitation. Received data offset: ${this.offset - 1}, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${this.maxRetryRequests}`)); } } - queryKeys.sort(); - for (const key of queryKeys) { - canonicalizedResourceString += `\n${key}:${decodeURIComponent(lowercaseQueries[key])}`; + else { + this.destroy(new Error(`Data corruption failure: Received more data than original request, data needed offset is ${this.end}, received offset: ${this.offset - 1}`)); } - } - return canonicalizedResourceString; + }; + this.getter = getter; + this.source = source; + this.start = offset; + this.offset = offset; + this.end = offset + count - 1; + this.maxRetryRequests = + options.maxRetryRequests && options.maxRetryRequests >= 0 ? options.maxRetryRequests : 0; + this.onProgress = options.onProgress; + this.options = options; + this.setSourceEventHandlers(); + } + _read() { + this.source.resume(); + } + setSourceEventHandlers() { + this.source.on("data", this.sourceDataHandler); + this.source.on("end", this.sourceErrorOrEndHandler); + this.source.on("error", this.sourceErrorOrEndHandler); + // needed for Node14 + this.source.on("aborted", this.sourceAbortedHandler); + } + removeSourceEventHandlers() { + this.source.removeListener("data", this.sourceDataHandler); + this.source.removeListener("end", this.sourceErrorOrEndHandler); + this.source.removeListener("error", this.sourceErrorOrEndHandler); + this.source.removeListener("aborted", this.sourceAbortedHandler); + } + _destroy(error, callback) { + // remove listener from source and release source + this.removeSourceEventHandlers(); + this.source.destroy(); + callback(error === null ?
undefined : error); } } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * - * StorageSharedKeyCredential for account key authorization of Azure Storage service. + * BlobDownloadResponse implements the BlobDownloadResponseParsed interface, and in the Node.js runtime it will + * automatically retry when the internal read stream unexpectedly ends. (This kind of unexpected end cannot + * trigger the retries defined in the pipeline retry policy.) + * + * The {@link readableStreamBody} stream will retry under the hood, so you can use it as a normal Node.js + * Readable stream. */ -class StorageSharedKeyCredential extends Credential { +class BlobDownloadResponse { /** - * Creates an instance of StorageSharedKeyCredential. - * @param accountName - - * @param accountKey - + * Indicates that the service supports + * requests for partial file content. + * + * @readonly */ - constructor(accountName, accountKey) { - super(); - this.accountName = accountName; - this.accountKey = Buffer.from(accountKey, "base64"); + get acceptRanges() { + return this.originalResponse.acceptRanges; } /** - * Creates a StorageSharedKeyCredentialPolicy object. + * Returns the 'Cache-Control' value if it was previously specified + * for the file. * - * @param nextPolicy - - * @param options - + * @readonly */ - create(nextPolicy, options) { - return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this); + get cacheControl() { + return this.originalResponse.cacheControl; } /** - * Generates a hash signature for an HTTP request or for a SAS. + * Returns the value that was specified + * for the 'x-ms-content-disposition' header and specifies how to process the + * response. * - * @param stringToSign - + * @readonly */ - computeHMACSHA256(stringToSign) { - return crypto.createHmac("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); + get contentDisposition() { + return this.originalResponse.contentDisposition; } -} - -/* - * Copyright (c) Microsoft Corporation. - * Licensed under the MIT License. - * - * Code generated by Microsoft (R) AutoRest Code Generator. - * Changes may cause incorrect behavior and will be lost if the code is regenerated. - */ -const packageName = "azure-storage-blob"; -const packageVersion = "12.18.0"; -class StorageClientContext extends coreHttp__namespace.ServiceClient { /** - * Initializes a new instance of the StorageClientContext class. - * @param url The URL of the service account, container, or blob that is the target of the desired - * operation. - * @param options The parameter options + * Returns the value that was specified + * for the Content-Encoding request header. + * + * @readonly */ - constructor(url, options) { - if (url === undefined) { - throw new Error("'url' cannot be null"); - } - // Initializing default values for options - if (!options) { - options = {}; - } - if (!options.userAgent) { - const defaultUserAgent = coreHttp__namespace.getDefaultUserAgentValue(); - options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`; - } - super(undefined, options); - this.requestContentType = "application/json; charset=utf-8"; - this.baseUri = options.endpoint || "{url}"; - // Parameter assignments - this.url = url; - // Assigning values to Constant parameters - this.version = options.version || "2024-05-04"; + get contentEncoding() { + return this.originalResponse.contentEncoding; } -} - -// Copyright (c) Microsoft Corporation.
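// A minimal usage sketch of the RetriableReadableStream above (illustrative only;
// the `downloadFrom(offset)` helper is hypothetical and stands in for any ranged
// re-download that resolves to a fresh Node.js Readable):
//
//   const retriable = new RetriableReadableStream(
//     firstResponse.readableStreamBody,    // initial source stream
//     (offset) => downloadFrom(offset),    // getter: re-request from the failed offset
//     0,                                   // start offset in the original data
//     totalBytes,                          // count of bytes expected
//     { maxRetryRequests: 3, onProgress: (ev) => console.log(ev.loadedBytes) }
//   );
//   retriable.pipe(destination);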
-/** - * A StorageClient represents a based URL class for {@link BlobServiceClient}, {@link ContainerClient} - * and etc. - */ -class StorageClient { /** - * Creates an instance of StorageClient. - * @param url - url to resource - * @param pipeline - request policy pipeline. + * Returns the value that was specified + * for the Content-Language request header. + * + * @readonly */ - constructor(url, pipeline) { - // URL should be encoded and only once, protocol layer shouldn't encode URL again - this.url = escapeURLPath(url); - this.accountName = getAccountNameFromUrl(url); - this.pipeline = pipeline; - this.storageClientContext = new StorageClientContext(this.url, pipeline.toServiceClientOptions()); - this.isHttps = iEqual(getURLScheme(this.url) || "", "https"); - this.credential = new AnonymousCredential(); - for (const factory of this.pipeline.factories) { - if ((coreHttp.isNode && factory instanceof StorageSharedKeyCredential) || - factory instanceof AnonymousCredential) { - this.credential = factory; - } - else if (coreHttp.isTokenCredential(factory.credential)) { - // Only works if the factory has been attached a "credential" property. - // We do that in newPipeline() when using TokenCredential. - this.credential = factory.credential; - } - } - // Override protocol layer's default content-type - const storageClientContext = this.storageClientContext; - storageClientContext.requestContentType = undefined; + get contentLanguage() { + return this.originalResponse.contentLanguage; } -} - -// Copyright (c) Microsoft Corporation. -/** - * Creates a span using the global tracer. - * @internal - */ -const createSpan = coreTracing.createSpanFunction({ - packagePrefix: "Azure.Storage.Blob", - namespace: "Microsoft.Storage", -}); -/** - * @internal - * - * Adapt the tracing options from OperationOptions to what they need to be for - * RequestOptionsBase (when we update to later OpenTelemetry versions this is now - * two separate fields, not just one). - */ -function convertTracingToRequestOptionsBase(options) { - var _a, _b; - return { - // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier. - spanOptions: (_a = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _a === void 0 ? void 0 : _a.spanOptions, - tracingContext: (_b = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _b === void 0 ? void 0 : _b.tracingContext, - }; -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * - * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a blob. Setting - * a value to true means that any SAS which uses these permissions will grant permissions for that operation. Once all - * the values are set, this should be serialized with toString and set as the permissions field on a - * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but - * the order of the permissions is particular and this class guarantees correctness. - */ -class BlobSASPermissions { - constructor() { - /** - * Specifies Read access granted. - */ - this.read = false; - /** - * Specifies Add access granted. - */ - this.add = false; - /** - * Specifies Create access granted. - */ - this.create = false; - /** - * Specifies Write access granted. - */ - this.write = false; - /** - * Specifies Delete access granted. 
- */ - this.delete = false; - /** - * Specifies Delete version access granted. - */ - this.deleteVersion = false; - /** - * Specfies Tag access granted. - */ - this.tag = false; - /** - * Specifies Move access granted. - */ - this.move = false; - /** - * Specifies Execute access granted. - */ - this.execute = false; - /** - * Specifies SetImmutabilityPolicy access granted. - */ - this.setImmutabilityPolicy = false; - /** - * Specifies that Permanent Delete is permitted. - */ - this.permanentDelete = false; + /** + * The current sequence number for a + * page blob. This header is not returned for block blobs or append blobs. + * + * @readonly + */ + get blobSequenceNumber() { + return this.originalResponse.blobSequenceNumber; } /** - * Creates a {@link BlobSASPermissions} from the specified permissions string. This method will throw an - * Error if it encounters a character that does not correspond to a valid permission. + * The blob's type. Possible values include: + * 'BlockBlob', 'PageBlob', 'AppendBlob'. * - * @param permissions - + * @readonly */ - static parse(permissions) { - const blobSASPermissions = new BlobSASPermissions(); - for (const char of permissions) { - switch (char) { - case "r": - blobSASPermissions.read = true; - break; - case "a": - blobSASPermissions.add = true; - break; - case "c": - blobSASPermissions.create = true; - break; - case "w": - blobSASPermissions.write = true; - break; - case "d": - blobSASPermissions.delete = true; - break; - case "x": - blobSASPermissions.deleteVersion = true; - break; - case "t": - blobSASPermissions.tag = true; - break; - case "m": - blobSASPermissions.move = true; - break; - case "e": - blobSASPermissions.execute = true; - break; - case "i": - blobSASPermissions.setImmutabilityPolicy = true; - break; - case "y": - blobSASPermissions.permanentDelete = true; - break; - default: - throw new RangeError(`Invalid permission: ${char}`); - } - } - return blobSASPermissions; + get blobType() { + return this.originalResponse.blobType; } /** - * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it - * and boolean values for them. + * The number of bytes present in the + * response body. * - * @param permissionLike - + * @readonly */ - static from(permissionLike) { - const blobSASPermissions = new BlobSASPermissions(); - if (permissionLike.read) { - blobSASPermissions.read = true; - } - if (permissionLike.add) { - blobSASPermissions.add = true; - } - if (permissionLike.create) { - blobSASPermissions.create = true; - } - if (permissionLike.write) { - blobSASPermissions.write = true; - } - if (permissionLike.delete) { - blobSASPermissions.delete = true; - } - if (permissionLike.deleteVersion) { - blobSASPermissions.deleteVersion = true; - } - if (permissionLike.tag) { - blobSASPermissions.tag = true; - } - if (permissionLike.move) { - blobSASPermissions.move = true; - } - if (permissionLike.execute) { - blobSASPermissions.execute = true; - } - if (permissionLike.setImmutabilityPolicy) { - blobSASPermissions.setImmutabilityPolicy = true; - } - if (permissionLike.permanentDelete) { - blobSASPermissions.permanentDelete = true; - } - return blobSASPermissions; + get contentLength() { + return this.originalResponse.contentLength; } /** - * Converts the given permissions to a string. Using this method will guarantee the permissions are in an - * order accepted by the service. 
+ * If the file has an MD5 hash and the + * request is to read the full file, this response header is returned so that + * the client can check for message content integrity. If the request is to + * read a specified range and the 'x-ms-range-get-content-md5' is set to + * true, then the request returns an MD5 hash for the range, as long as the + * range size is less than or equal to 4 MB. If neither of these sets of + * conditions is true, then no value is returned for the 'Content-MD5' + * header. * - * @returns A string which represents the BlobSASPermissions + * @readonly */ - toString() { - const permissions = []; - if (this.read) { - permissions.push("r"); - } - if (this.add) { - permissions.push("a"); - } - if (this.create) { - permissions.push("c"); - } - if (this.write) { - permissions.push("w"); - } - if (this.delete) { - permissions.push("d"); - } - if (this.deleteVersion) { - permissions.push("x"); - } - if (this.tag) { - permissions.push("t"); - } - if (this.move) { - permissions.push("m"); - } - if (this.execute) { - permissions.push("e"); - } - if (this.setImmutabilityPolicy) { - permissions.push("i"); - } - if (this.permanentDelete) { - permissions.push("y"); - } - return permissions.join(""); + get contentMD5() { + return this.originalResponse.contentMD5; } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a container. - * Setting a value to true means that any SAS which uses these permissions will grant permissions for that operation. - * Once all the values are set, this should be serialized with toString and set as the permissions field on a - * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but - * the order of the permissions is particular and this class guarantees correctness. - */ -class ContainerSASPermissions { - constructor() { - /** - * Specifies Read access granted. - */ - this.read = false; - /** - * Specifies Add access granted. - */ - this.add = false; - /** - * Specifies Create access granted. - */ - this.create = false; - /** - * Specifies Write access granted. - */ - this.write = false; - /** - * Specifies Delete access granted. - */ - this.delete = false; - /** - * Specifies Delete version access granted. - */ - this.deleteVersion = false; - /** - * Specifies List access granted. - */ - this.list = false; - /** - * Specfies Tag access granted. - */ - this.tag = false; - /** - * Specifies Move access granted. - */ - this.move = false; - /** - * Specifies Execute access granted. - */ - this.execute = false; - /** - * Specifies SetImmutabilityPolicy access granted. - */ - this.setImmutabilityPolicy = false; - /** - * Specifies that Permanent Delete is permitted. - */ - this.permanentDelete = false; - /** - * Specifies that Filter Blobs by Tags is permitted. - */ - this.filterByTags = false; + /** + * Indicates the range of bytes returned if + * the client requested a subset of the file by setting the Range request + * header. + * + * @readonly + */ + get contentRange() { + return this.originalResponse.contentRange; } /** - * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an - * Error if it encounters a character that does not correspond to a valid permission. + * The content type specified for the file. 
+ * The default content type is 'application/octet-stream' * - * @param permissions - + * @readonly */ - static parse(permissions) { - const containerSASPermissions = new ContainerSASPermissions(); - for (const char of permissions) { - switch (char) { - case "r": - containerSASPermissions.read = true; - break; - case "a": - containerSASPermissions.add = true; - break; - case "c": - containerSASPermissions.create = true; - break; - case "w": - containerSASPermissions.write = true; - break; - case "d": - containerSASPermissions.delete = true; - break; - case "l": - containerSASPermissions.list = true; - break; - case "t": - containerSASPermissions.tag = true; - break; - case "x": - containerSASPermissions.deleteVersion = true; - break; - case "m": - containerSASPermissions.move = true; - break; - case "e": - containerSASPermissions.execute = true; - break; - case "i": - containerSASPermissions.setImmutabilityPolicy = true; - break; - case "y": - containerSASPermissions.permanentDelete = true; - break; - case "f": - containerSASPermissions.filterByTags = true; - break; - default: - throw new RangeError(`Invalid permission ${char}`); - } - } - return containerSASPermissions; + get contentType() { + return this.originalResponse.contentType; } /** - * Creates a {@link ContainerSASPermissions} from a raw object which contains same keys as it - * and boolean values for them. + * Conclusion time of the last attempted + * Copy File operation where this file was the destination file. This value + * can specify the time of a completed, aborted, or failed copy attempt. * - * @param permissionLike - + * @readonly */ - static from(permissionLike) { - const containerSASPermissions = new ContainerSASPermissions(); - if (permissionLike.read) { - containerSASPermissions.read = true; - } - if (permissionLike.add) { - containerSASPermissions.add = true; - } - if (permissionLike.create) { - containerSASPermissions.create = true; - } - if (permissionLike.write) { - containerSASPermissions.write = true; - } - if (permissionLike.delete) { - containerSASPermissions.delete = true; - } - if (permissionLike.list) { - containerSASPermissions.list = true; - } - if (permissionLike.deleteVersion) { - containerSASPermissions.deleteVersion = true; - } - if (permissionLike.tag) { - containerSASPermissions.tag = true; - } - if (permissionLike.move) { - containerSASPermissions.move = true; - } - if (permissionLike.execute) { - containerSASPermissions.execute = true; - } - if (permissionLike.setImmutabilityPolicy) { - containerSASPermissions.setImmutabilityPolicy = true; - } - if (permissionLike.permanentDelete) { - containerSASPermissions.permanentDelete = true; - } - if (permissionLike.filterByTags) { - containerSASPermissions.filterByTags = true; - } - return containerSASPermissions; + get copyCompletedOn() { + return this.originalResponse.copyCompletedOn; } /** - * Converts the given permissions to a string. Using this method will guarantee the permissions are in an - * order accepted by the service. + * String identifier for the last attempted Copy + * File operation where this file was the destination file. * - * The order of the characters should be as specified here to ensure correctness. 
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * @readonly + */ + get copyId() { + return this.originalResponse.copyId; + } + /** + * Contains the number of bytes copied and + * the total bytes in the source in the last attempted Copy File operation + * where this file was the destination file. Can show between 0 and + * Content-Length bytes copied. * + * @readonly */ - toString() { - const permissions = []; - if (this.read) { - permissions.push("r"); - } - if (this.add) { - permissions.push("a"); - } - if (this.create) { - permissions.push("c"); - } - if (this.write) { - permissions.push("w"); - } - if (this.delete) { - permissions.push("d"); - } - if (this.deleteVersion) { - permissions.push("x"); - } - if (this.list) { - permissions.push("l"); - } - if (this.tag) { - permissions.push("t"); - } - if (this.move) { - permissions.push("m"); - } - if (this.execute) { - permissions.push("e"); - } - if (this.setImmutabilityPolicy) { - permissions.push("i"); - } - if (this.permanentDelete) { - permissions.push("y"); - } - if (this.filterByTags) { - permissions.push("f"); - } - return permissions.join(""); + get copyProgress() { + return this.originalResponse.copyProgress; } -} - -// Copyright (c) Microsoft Corporation. -/** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * - * UserDelegationKeyCredential is only used for generation of user delegation SAS. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas - */ -class UserDelegationKeyCredential { /** - * Creates an instance of UserDelegationKeyCredential. - * @param accountName - - * @param userDelegationKey - + * URL up to 2KB in length that specifies the + * source file used in the last attempted Copy File operation where this file + * was the destination file. + * + * @readonly */ - constructor(accountName, userDelegationKey) { - this.accountName = accountName; - this.userDelegationKey = userDelegationKey; - this.key = Buffer.from(userDelegationKey.value, "base64"); + get copySource() { + return this.originalResponse.copySource; } /** - * Generates a hash signature for an HTTP request or for a SAS. + * State of the copy operation + * identified by 'x-ms-copy-id'. Possible values include: 'pending', + * 'success', 'aborted', 'failed' * - * @param stringToSign - + * @readonly */ - computeHMACSHA256(stringToSign) { - // console.log(`stringToSign: ${JSON.stringify(stringToSign)}`); - return crypto.createHmac("sha256", this.key).update(stringToSign, "utf8").digest("base64"); + get copyStatus() { + return this.originalResponse.copyStatus; } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Generate SasIPRange format string. For example: - * - * "8.8.8.8" or "1.1.1.1-255.255.255.255" - * - * @param ipRange - - */ -function ipRangeToString(ipRange) { - return ipRange.end ? `${ipRange.start}-${ipRange.end}` : ipRange.start; -} - -// Copyright (c) Microsoft Corporation. -/** - * Protocols for generated SAS. - */ -exports.SASProtocol = void 0; -(function (SASProtocol) { /** - * Protocol that allows HTTPS only + * Only appears when + * x-ms-copy-status is failed or pending. Describes cause of fatal or + * non-fatal copy operation failure. + * + * @readonly */ - SASProtocol["Https"] = "https"; + get copyStatusDescription() { + return this.originalResponse.copyStatusDescription; + } /** - * Protocol that allows both HTTPS and HTTP + * When a blob is leased, + * specifies whether the lease is of infinite or fixed duration. 
Possible + * values include: 'infinite', 'fixed'. + * + * @readonly */ - SASProtocol["HttpsAndHttp"] = "https,http"; -})(exports.SASProtocol || (exports.SASProtocol = {})); -/** - * Represents the components that make up an Azure Storage SAS' query parameters. This type is not constructed directly - * by the user; it is only generated by the {@link AccountSASSignatureValues} and {@link BlobSASSignatureValues} - * types. Once generated, it can be encoded into a {@link String} and appended to a URL directly (though caution should - * be taken here in case there are existing query parameters, which might affect the appropriate means of appending - * these query parameters). - * - * NOTE: Instances of this class are immutable. - */ -class SASQueryParameters { - constructor(version, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey, preauthorizedAgentObjectId, correlationId, encryptionScope) { - this.version = version; - this.signature = signature; - if (permissionsOrOptions !== undefined && typeof permissionsOrOptions !== "string") { - // SASQueryParametersOptions - this.permissions = permissionsOrOptions.permissions; - this.services = permissionsOrOptions.services; - this.resourceTypes = permissionsOrOptions.resourceTypes; - this.protocol = permissionsOrOptions.protocol; - this.startsOn = permissionsOrOptions.startsOn; - this.expiresOn = permissionsOrOptions.expiresOn; - this.ipRangeInner = permissionsOrOptions.ipRange; - this.identifier = permissionsOrOptions.identifier; - this.encryptionScope = permissionsOrOptions.encryptionScope; - this.resource = permissionsOrOptions.resource; - this.cacheControl = permissionsOrOptions.cacheControl; - this.contentDisposition = permissionsOrOptions.contentDisposition; - this.contentEncoding = permissionsOrOptions.contentEncoding; - this.contentLanguage = permissionsOrOptions.contentLanguage; - this.contentType = permissionsOrOptions.contentType; - if (permissionsOrOptions.userDelegationKey) { - this.signedOid = permissionsOrOptions.userDelegationKey.signedObjectId; - this.signedTenantId = permissionsOrOptions.userDelegationKey.signedTenantId; - this.signedStartsOn = permissionsOrOptions.userDelegationKey.signedStartsOn; - this.signedExpiresOn = permissionsOrOptions.userDelegationKey.signedExpiresOn; - this.signedService = permissionsOrOptions.userDelegationKey.signedService; - this.signedVersion = permissionsOrOptions.userDelegationKey.signedVersion; - this.preauthorizedAgentObjectId = permissionsOrOptions.preauthorizedAgentObjectId; - this.correlationId = permissionsOrOptions.correlationId; - } - } - else { - this.services = services; - this.resourceTypes = resourceTypes; - this.expiresOn = expiresOn; - this.permissions = permissionsOrOptions; - this.protocol = protocol; - this.startsOn = startsOn; - this.ipRangeInner = ipRange; - this.encryptionScope = encryptionScope; - this.identifier = identifier; - this.resource = resource; - this.cacheControl = cacheControl; - this.contentDisposition = contentDisposition; - this.contentEncoding = contentEncoding; - this.contentLanguage = contentLanguage; - this.contentType = contentType; - if (userDelegationKey) { - this.signedOid = userDelegationKey.signedObjectId; - this.signedTenantId = userDelegationKey.signedTenantId; - this.signedStartsOn = userDelegationKey.signedStartsOn; - this.signedExpiresOn = userDelegationKey.signedExpiresOn; - 
this.signedService = userDelegationKey.signedService; - this.signedVersion = userDelegationKey.signedVersion; - this.preauthorizedAgentObjectId = preauthorizedAgentObjectId; - this.correlationId = correlationId; - } - } + get leaseDuration() { + return this.originalResponse.leaseDuration; } /** - * Optional. IP range allowed for this SAS. + * Lease state of the blob. Possible + * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. * * @readonly */ - get ipRange() { - if (this.ipRangeInner) { - return { - end: this.ipRangeInner.end, - start: this.ipRangeInner.start, - }; - } - return undefined; + get leaseState() { + return this.originalResponse.leaseState; } /** - * Encodes all SAS query parameters into a string that can be appended to a URL. + * The current lease status of the + * blob. Possible values include: 'locked', 'unlocked'. * + * @readonly */ - toString() { - const params = [ - "sv", - "ss", - "srt", - "spr", - "st", - "se", - "sip", - "si", - "ses", - "skoid", - "sktid", - "skt", - "ske", - "sks", - "skv", - "sr", - "sp", - "sig", - "rscc", - "rscd", - "rsce", - "rscl", - "rsct", - "saoid", - "scid", - ]; - const queries = []; - for (const param of params) { - switch (param) { - case "sv": - this.tryAppendQueryParameter(queries, param, this.version); - break; - case "ss": - this.tryAppendQueryParameter(queries, param, this.services); - break; - case "srt": - this.tryAppendQueryParameter(queries, param, this.resourceTypes); - break; - case "spr": - this.tryAppendQueryParameter(queries, param, this.protocol); - break; - case "st": - this.tryAppendQueryParameter(queries, param, this.startsOn ? truncatedISO8061Date(this.startsOn, false) : undefined); - break; - case "se": - this.tryAppendQueryParameter(queries, param, this.expiresOn ? truncatedISO8061Date(this.expiresOn, false) : undefined); - break; - case "sip": - this.tryAppendQueryParameter(queries, param, this.ipRange ? ipRangeToString(this.ipRange) : undefined); - break; - case "si": - this.tryAppendQueryParameter(queries, param, this.identifier); - break; - case "ses": - this.tryAppendQueryParameter(queries, param, this.encryptionScope); - break; - case "skoid": // Signed object ID - this.tryAppendQueryParameter(queries, param, this.signedOid); - break; - case "sktid": // Signed tenant ID - this.tryAppendQueryParameter(queries, param, this.signedTenantId); - break; - case "skt": // Signed key start time - this.tryAppendQueryParameter(queries, param, this.signedStartsOn ? truncatedISO8061Date(this.signedStartsOn, false) : undefined); - break; - case "ske": // Signed key expiry time - this.tryAppendQueryParameter(queries, param, this.signedExpiresOn ? 
truncatedISO8061Date(this.signedExpiresOn, false) : undefined); - break; - case "sks": // Signed key service - this.tryAppendQueryParameter(queries, param, this.signedService); - break; - case "skv": // Signed key version - this.tryAppendQueryParameter(queries, param, this.signedVersion); - break; - case "sr": - this.tryAppendQueryParameter(queries, param, this.resource); - break; - case "sp": - this.tryAppendQueryParameter(queries, param, this.permissions); - break; - case "sig": - this.tryAppendQueryParameter(queries, param, this.signature); - break; - case "rscc": - this.tryAppendQueryParameter(queries, param, this.cacheControl); - break; - case "rscd": - this.tryAppendQueryParameter(queries, param, this.contentDisposition); - break; - case "rsce": - this.tryAppendQueryParameter(queries, param, this.contentEncoding); - break; - case "rscl": - this.tryAppendQueryParameter(queries, param, this.contentLanguage); - break; - case "rsct": - this.tryAppendQueryParameter(queries, param, this.contentType); - break; - case "saoid": - this.tryAppendQueryParameter(queries, param, this.preauthorizedAgentObjectId); - break; - case "scid": - this.tryAppendQueryParameter(queries, param, this.correlationId); - break; - } - } - return queries.join("&"); + get leaseStatus() { + return this.originalResponse.leaseStatus; } /** - * A private helper method used to filter and append query key/value pairs into an array. + * A UTC date/time value generated by the service that + * indicates the time at which the response was initiated. * - * @param queries - - * @param key - - * @param value - + * @readonly */ - tryAppendQueryParameter(queries, key, value) { - if (!value) { - return; - } - key = encodeURIComponent(key); - value = encodeURIComponent(value); - if (key.length > 0 && value.length > 0) { - queries.push(`${key}=${value}`); - } + get date() { + return this.originalResponse.date; + } + /** + * The number of committed blocks + * present in the blob. This header is returned only for append blobs. + * + * @readonly + */ + get blobCommittedBlockCount() { + return this.originalResponse.blobCommittedBlockCount; + } + /** + * The ETag contains a value that you can use to + * perform operations conditionally, in quotes. + * + * @readonly + */ + get etag() { + return this.originalResponse.etag; + } + /** + * The number of tags associated with the blob + * + * @readonly + */ + get tagCount() { + return this.originalResponse.tagCount; + } + /** + * The error code. + * + * @readonly + */ + get errorCode() { + return this.originalResponse.errorCode; + } + /** + * The value of this header is set to + * true if the file data and application metadata are completely encrypted + * using the specified algorithm. Otherwise, the value is set to false (when + * the file is unencrypted, or if only parts of the file/application metadata + * are encrypted). + * + * @readonly + */ + get isServerEncrypted() { + return this.originalResponse.isServerEncrypted; + } + /** + * If the blob has a MD5 hash, and if + * request contains range header (Range or x-ms-range), this response header + * is returned with the value of the whole blob's MD5 value. This value may + * or may not be equal to the value returned in Content-MD5 header, with the + * latter calculated from the requested range. + * + * @readonly + */ + get blobContentMD5() { + return this.originalResponse.blobContentMD5; + } + /** + * Returns the date and time the file was last + * modified. 
Any operation that modifies the file or its properties updates + * the last modified time. + * + * @readonly + */ + get lastModified() { + return this.originalResponse.lastModified; + } + /** + * Returns the UTC date and time generated by the service that indicates the time at which the blob was + * last read or written to. + * + * @readonly + */ + get lastAccessed() { + return this.originalResponse.lastAccessed; + } + /** + * Returns the date and time the blob was created. + * + * @readonly + */ + get createdOn() { + return this.originalResponse.createdOn; + } + /** + * A name-value pair + * to associate with a file storage object. + * + * @readonly + */ + get metadata() { + return this.originalResponse.metadata; + } + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. + * + * @readonly + */ + get requestId() { + return this.originalResponse.requestId; + } + /** + * If a client request id header is sent in the request, this header will be present in the + * response with the same value. + * + * @readonly + */ + get clientRequestId() { + return this.originalResponse.clientRequestId; + } + /** + * Indicates the version of the Blob service used + * to execute the request. + * + * @readonly + */ + get version() { + return this.originalResponse.version; + } + /** + * Indicates the versionId of the downloaded blob version. + * + * @readonly + */ + get versionId() { + return this.originalResponse.versionId; + } + /** + * Indicates whether version of this blob is a current version. + * + * @readonly + */ + get isCurrentVersion() { + return this.originalResponse.isCurrentVersion; + } + /** + * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned + * when the blob was encrypted with a customer-provided key. + * + * @readonly + */ + get encryptionKeySha256() { + return this.originalResponse.encryptionKeySha256; + } + /** + * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to + * true, then the request returns a crc64 for the range, as long as the range size is less than + * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is + * specified in the same request, it will fail with 400(Bad Request) + */ + get contentCrc64() { + return this.originalResponse.contentCrc64; + } + /** + * Object Replication Policy Id of the destination blob. + * + * @readonly + */ + get objectReplicationDestinationPolicyId() { + return this.originalResponse.objectReplicationDestinationPolicyId; + } + /** + * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. + * + * @readonly + */ + get objectReplicationSourceProperties() { + return this.originalResponse.objectReplicationSourceProperties; + } + /** + * If this blob has been sealed. + * + * @readonly + */ + get isSealed() { + return this.originalResponse.isSealed; + } + /** + * UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. + * + * @readonly + */ + get immutabilityPolicyExpiresOn() { + return this.originalResponse.immutabilityPolicyExpiresOn; + } + /** + * Indicates immutability policy mode. + * + * @readonly + */ + get immutabilityPolicyMode() { + return this.originalResponse.immutabilityPolicyMode; + } + /** + * Indicates if a legal hold is present on the blob. + * + * @readonly + */ + get legalHold() { + return this.originalResponse.legalHold; + } + /** + * The response body as a browser Blob. 
+ * Always undefined in node.js. + * + * @readonly + */ + get contentAsBlob() { + return this.originalResponse.blobBody; + } + /** + * The response body as a node.js Readable stream. + * Always undefined in the browser. + * + * It will automatically retry when internal read stream unexpected ends. + * + * @readonly + */ + get readableStreamBody() { + return coreUtil.isNode ? this.blobDownloadStream : undefined; + } + /** + * The HTTP response. + */ + get _response() { + return this.originalResponse._response; + } + /** + * Creates an instance of BlobDownloadResponse. + * + * @param originalResponse - + * @param getter - + * @param offset - + * @param count - + * @param options - + */ + constructor(originalResponse, getter, offset, count, options = {}) { + this.originalResponse = originalResponse; + this.blobDownloadStream = new RetriableReadableStream(this.originalResponse.readableStreamBody, getter, offset, count, options); } } // Copyright (c) Microsoft Corporation. -function generateBlobSASQueryParameters(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { - const version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION; - const sharedKeyCredential = sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential - ? sharedKeyCredentialOrUserDelegationKey - : undefined; - let userDelegationKeyCredential; - if (sharedKeyCredential === undefined && accountName !== undefined) { - userDelegationKeyCredential = new UserDelegationKeyCredential(accountName, sharedKeyCredentialOrUserDelegationKey); +// Licensed under the MIT license. +const AVRO_SYNC_MARKER_SIZE = 16; +const AVRO_INIT_BYTES = new Uint8Array([79, 98, 106, 1]); +const AVRO_CODEC_KEY = "avro.codec"; +const AVRO_SCHEMA_KEY = "avro.schema"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +class AvroParser { + /** + * Reads a fixed number of bytes from the stream. + * + * @param stream - + * @param length - + * @param options - + */ + static async readFixedBytes(stream, length, options = {}) { + const bytes = await stream.read(length, { abortSignal: options.abortSignal }); + if (bytes.length !== length) { + throw new Error("Hit stream end."); + } + return bytes; } - if (sharedKeyCredential === undefined && userDelegationKeyCredential === undefined) { - throw TypeError("Invalid sharedKeyCredential, userDelegationKey or accountName."); + /** + * Reads a single byte from the stream. + * + * @param stream - + * @param options - + */ + static async readByte(stream, options = {}) { + const buf = await AvroParser.readFixedBytes(stream, 1, options); + return buf[0]; } - // Version 2020-12-06 adds support for encryptionscope in SAS. - if (version >= "2020-12-06") { - if (sharedKeyCredential !== undefined) { - return generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential); + // int and long are stored in variable-length zig-zag coding. 
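+ // (Worked example of the decoding below: zig-zag maps 0, 1, 2, 3, 4, 5, ...
+ // back to 0, -1, 1, -2, 2, -3, ..., so a one-byte varint payload of 5 decodes
+ // as (5 >> 1) ^ -(5 & 1) === 2 ^ -1 === -3.)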
+ // variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt + // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types + static async readZigZagLong(stream, options = {}) { + let zigZagEncoded = 0; + let significanceInBit = 0; + let byte, haveMoreByte, significanceInFloat; + do { + byte = await AvroParser.readByte(stream, options); + haveMoreByte = byte & 0x80; + zigZagEncoded |= (byte & 0x7f) << significanceInBit; + significanceInBit += 7; + } while (haveMoreByte && significanceInBit < 28); // bitwise operation only works for 32-bit integers + if (haveMoreByte) { + // Switch to float arithmetic + // eslint-disable-next-line no-self-assign + zigZagEncoded = zigZagEncoded; + significanceInFloat = 268435456; // 2 ** 28. + do { + byte = await AvroParser.readByte(stream, options); + zigZagEncoded += (byte & 0x7f) * significanceInFloat; + significanceInFloat *= 128; // 2 ** 7 + } while (byte & 0x80); + const res = (zigZagEncoded % 2 ? -(zigZagEncoded + 1) : zigZagEncoded) / 2; + if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) { + throw new Error("Integer overflow."); + } + return res; + } + return (zigZagEncoded >> 1) ^ -(zigZagEncoded & 1); + } + static async readLong(stream, options = {}) { + return AvroParser.readZigZagLong(stream, options); + } + static async readInt(stream, options = {}) { + return AvroParser.readZigZagLong(stream, options); + } + static async readNull() { + return null; + } + static async readBoolean(stream, options = {}) { + const b = await AvroParser.readByte(stream, options); + if (b === 1) { + return true; + } + else if (b === 0) { + return false; } else { - return generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential); + throw new Error("Byte was not a boolean."); } } - // Version 2019-12-12 adds support for the blob tags permission. - // Version 2018-11-09 adds support for the signed resource and signed blob snapshot time fields. - // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas#constructing-the-signature-string - if (version >= "2018-11-09") { - if (sharedKeyCredential !== undefined) { - return generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential); + static async readFloat(stream, options = {}) { + const u8arr = await AvroParser.readFixedBytes(stream, 4, options); + const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); + return view.getFloat32(0, true); // littleEndian = true + } + static async readDouble(stream, options = {}) { + const u8arr = await AvroParser.readFixedBytes(stream, 8, options); + const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); + return view.getFloat64(0, true); // littleEndian = true + } + static async readBytes(stream, options = {}) { + const size = await AvroParser.readLong(stream, options); + if (size < 0) { + throw new Error("Bytes size was negative."); } - else { - // Version 2020-02-10 delegation SAS signature construction includes preauthorizedAgentObjectId, agentObjectId, correlationId. 
- if (version >= "2020-02-10") { - return generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential); + return stream.read(size, { abortSignal: options.abortSignal }); + } + static async readString(stream, options = {}) { + const u8arr = await AvroParser.readBytes(stream, options); + const utf8decoder = new TextDecoder(); + return utf8decoder.decode(u8arr); + } + static async readMapPair(stream, readItemMethod, options = {}) { + const key = await AvroParser.readString(stream, options); + // FUTURE: this won't work with readFixed (currently not supported) which needs a length as the parameter. + const value = await readItemMethod(stream, options); + return { key, value }; + } + static async readMap(stream, readItemMethod, options = {}) { + const readPairMethod = (s, opts = {}) => { + return AvroParser.readMapPair(s, readItemMethod, opts); + }; + const pairs = await AvroParser.readArray(stream, readPairMethod, options); + const dict = {}; + for (const pair of pairs) { + dict[pair.key] = pair.value; + } + return dict; + } + static async readArray(stream, readItemMethod, options = {}) { + const items = []; + for (let count = await AvroParser.readLong(stream, options); count !== 0; count = await AvroParser.readLong(stream, options)) { + if (count < 0) { + // Ignore block sizes + await AvroParser.readLong(stream, options); + count = -count; } - else { - return generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential); + while (count--) { + const item = await readItemMethod(stream, options); + items.push(item); } } + return items; } - if (version >= "2015-04-05") { - if (sharedKeyCredential !== undefined) { - return generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential); +} +var AvroComplex; +(function (AvroComplex) { + AvroComplex["RECORD"] = "record"; + AvroComplex["ENUM"] = "enum"; + AvroComplex["ARRAY"] = "array"; + AvroComplex["MAP"] = "map"; + AvroComplex["UNION"] = "union"; + AvroComplex["FIXED"] = "fixed"; +})(AvroComplex || (AvroComplex = {})); +var AvroPrimitive; +(function (AvroPrimitive) { + AvroPrimitive["NULL"] = "null"; + AvroPrimitive["BOOLEAN"] = "boolean"; + AvroPrimitive["INT"] = "int"; + AvroPrimitive["LONG"] = "long"; + AvroPrimitive["FLOAT"] = "float"; + AvroPrimitive["DOUBLE"] = "double"; + AvroPrimitive["BYTES"] = "bytes"; + AvroPrimitive["STRING"] = "string"; +})(AvroPrimitive || (AvroPrimitive = {})); +class AvroType { + /** + * Determines the AvroType from the Avro Schema. + */ + static fromSchema(schema) { + if (typeof schema === "string") { + return AvroType.fromStringSchema(schema); + } + else if (Array.isArray(schema)) { + return AvroType.fromArraySchema(schema); } else { - throw new RangeError("'version' must be >= '2018-11-09' when generating user delegation SAS using user delegation key."); + return AvroType.fromObjectSchema(schema); } } - throw new RangeError("'version' must be >= '2015-04-05'."); -} -/** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * IMPLEMENTATION FOR API VERSION FROM 2015-04-05 AND BEFORE 2018-11-09. - * - * Creates an instance of SASQueryParameters. - * - * Only accepts required settings needed to create a SAS. For optional settings please - * set corresponding properties directly, such as permissions, startsOn and identifier. - * - * WARNING: When identifier is not provided, permissions and expiresOn are required. - * You MUST assign value to identifier or expiresOn & permissions manually if you initial with - * this constructor. 
- * - * @param blobSASSignatureValues - - * @param sharedKeyCredential - - */ -function generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - if (!blobSASSignatureValues.identifier && - !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + static fromStringSchema(schema) { + switch (schema) { + case AvroPrimitive.NULL: + case AvroPrimitive.BOOLEAN: + case AvroPrimitive.INT: + case AvroPrimitive.LONG: + case AvroPrimitive.FLOAT: + case AvroPrimitive.DOUBLE: + case AvroPrimitive.BYTES: + case AvroPrimitive.STRING: + return new AvroPrimitiveType(schema); + default: + throw new Error(`Unexpected Avro type ${schema}`); + } } - let resource = "c"; - if (blobSASSignatureValues.blobName) { - resource = "b"; + static fromArraySchema(schema) { + return new AvroUnionType(schema.map(AvroType.fromSchema)); } - // Calling parse and toString guarantees the proper ordering and throws on invalid characters. - let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + static fromObjectSchema(schema) { + const type = schema.type; + // Primitives can be defined as strings or objects + try { + return AvroType.fromStringSchema(type); } - else { - verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + catch (err) { + // eslint-disable-line no-empty + } + switch (type) { + case AvroComplex.RECORD: + if (schema.aliases) { + throw new Error(`aliases currently is not supported, schema: ${schema}`); + } + if (!schema.name) { + throw new Error(`Required attribute 'name' doesn't exist on schema: ${schema}`); + } + // eslint-disable-next-line no-case-declarations + const fields = {}; + if (!schema.fields) { + throw new Error(`Required attribute 'fields' doesn't exist on schema: ${schema}`); + } + for (const field of schema.fields) { + fields[field.name] = AvroType.fromSchema(field.type); + } + return new AvroRecordType(fields, schema.name); + case AvroComplex.ENUM: + if (schema.aliases) { + throw new Error(`aliases currently is not supported, schema: ${schema}`); + } + if (!schema.symbols) { + throw new Error(`Required attribute 'symbols' doesn't exist on schema: ${schema}`); + } + return new AvroEnumType(schema.symbols); + case AvroComplex.MAP: + if (!schema.values) { + throw new Error(`Required attribute 'values' doesn't exist on schema: ${schema}`); + } + return new AvroMapType(AvroType.fromSchema(schema.values)); + case AvroComplex.ARRAY: // Unused today + case AvroComplex.FIXED: // Unused today + default: + throw new Error(`Unexpected Avro type ${type} in ${schema}`); } } - // Signature is generated on the un-url-encoded values. - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn - ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) - : "", - blobSASSignatureValues.expiresOn - ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) - : "", - getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - blobSASSignatureValues.identifier, - blobSASSignatureValues.ipRange ? 
ipRangeToString(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", - blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", - blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", - blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", - blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", - ].join("\n"); - const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); - return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType); } -/** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * IMPLEMENTATION FOR API VERSION FROM 2018-11-09. - * - * Creates an instance of SASQueryParameters. - * - * Only accepts required settings needed to create a SAS. For optional settings please - * set corresponding properties directly, such as permissions, startsOn and identifier. - * - * WARNING: When identifier is not provided, permissions and expiresOn are required. - * You MUST assign value to identifier or expiresOn & permissions manually if you initial with - * this constructor. - * - * @param blobSASSignatureValues - - * @param sharedKeyCredential - - */ -function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - if (!blobSASSignatureValues.identifier && - !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); +class AvroPrimitiveType extends AvroType { + constructor(primitive) { + super(); + this._primitive = primitive; } - let resource = "c"; - let timestamp = blobSASSignatureValues.snapshotTime; - if (blobSASSignatureValues.blobName) { - resource = "b"; - if (blobSASSignatureValues.snapshotTime) { - resource = "bs"; + read(stream, options = {}) { + switch (this._primitive) { + case AvroPrimitive.NULL: + return AvroParser.readNull(); + case AvroPrimitive.BOOLEAN: + return AvroParser.readBoolean(stream, options); + case AvroPrimitive.INT: + return AvroParser.readInt(stream, options); + case AvroPrimitive.LONG: + return AvroParser.readLong(stream, options); + case AvroPrimitive.FLOAT: + return AvroParser.readFloat(stream, options); + case AvroPrimitive.DOUBLE: + return AvroParser.readDouble(stream, options); + case AvroPrimitive.BYTES: + return AvroParser.readBytes(stream, options); + case AvroPrimitive.STRING: + return AvroParser.readString(stream, options); + default: + throw new Error("Unknown Avro Primitive"); } - else if (blobSASSignatureValues.versionId) { - resource = "bv"; - timestamp = blobSASSignatureValues.versionId; + } +} +class AvroEnumType extends AvroType { + constructor(symbols) { + super(); + this._symbols = symbols; + } + async read(stream, options = {}) { + const value = await 
AvroParser.readInt(stream, options); + return this._symbols[value]; + } +} +class AvroUnionType extends AvroType { + constructor(types) { + super(); + this._types = types; + } + async read(stream, options = {}) { + // eslint-disable-line @typescript-eslint/ban-types + const typeIndex = await AvroParser.readInt(stream, options); + return this._types[typeIndex].read(stream, options); + } +} +class AvroMapType extends AvroType { + constructor(itemType) { + super(); + this._itemType = itemType; + } + read(stream, options = {}) { + const readItemMethod = (s, opts) => { + return this._itemType.read(s, opts); + }; + return AvroParser.readMap(stream, readItemMethod, options); + } +} +class AvroRecordType extends AvroType { + constructor(fields, name) { + super(); + this._fields = fields; + this._name = name; + } + async read(stream, options = {}) { + const record = {}; + record["$schema"] = this._name; + for (const key in this._fields) { + if (Object.prototype.hasOwnProperty.call(this._fields, key)) { + record[key] = await this._fields[key].read(stream, options); + } } + return record; } - // Calling parse and toString guarantees the proper ordering and throws on invalid characters. - let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +function arraysEqual(a, b) { + if (a === b) + return true; + // eslint-disable-next-line eqeqeq + if (a == null || b == null) + return false; + if (a.length !== b.length) + return false; + for (let i = 0; i < a.length; ++i) { + if (a[i] !== b[i]) + return false; + } + return true; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +class AvroReader { + get blockOffset() { + return this._blockOffset; + } + get objectIndex() { + return this._objectIndex; + } + constructor(dataStream, headerStream, currentBlockOffset, indexWithinCurrentBlock) { + this._dataStream = dataStream; + this._headerStream = headerStream || dataStream; + this._initialized = false; + this._blockOffset = currentBlockOffset || 0; + this._objectIndex = indexWithinCurrentBlock || 0; + this._initialBlockOffset = currentBlockOffset || 0; + } + async initialize(options = {}) { + const header = await AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, { + abortSignal: options.abortSignal, + }); + if (!arraysEqual(header, AVRO_INIT_BYTES)) { + throw new Error("Stream is not an Avro file."); } - else { - verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + // File metadata is written as if defined by the following map schema: + // { "type": "map", "values": "bytes"} + this._metadata = await AvroParser.readMap(this._headerStream, AvroParser.readString, { + abortSignal: options.abortSignal, + }); + // Validate codec + const codec = this._metadata[AVRO_CODEC_KEY]; + if (!(codec === undefined || codec === null || codec === "null")) { + throw new Error("Codecs are not supported"); + } + // The 16-byte, randomly-generated sync marker for this file. 
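+        // For reference, the Avro object container file layout this method walks
+        // (summarized from the Avro spec; the field names below are the spec's, not the bundle's):
+        //   4 bytes   magic "Obj\x01"   -> AVRO_INIT_BYTES, validated above
+        //   map       file metadata     -> avro.schema, avro.codec, ... (bytes values)
+        //   16 bytes  sync marker       -> read next, random per file
+        // followed by data blocks, each carrying an object count (long), a byte
+        // size (long), the serialized objects, and a repeat of the sync marker.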
+ this._syncMarker = await AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, { + abortSignal: options.abortSignal, + }); + // Parse the schema + const schema = JSON.parse(this._metadata[AVRO_SCHEMA_KEY]); + this._itemType = AvroType.fromSchema(schema); + if (this._blockOffset === 0) { + this._blockOffset = this._initialBlockOffset + this._dataStream.position; + } + this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, { + abortSignal: options.abortSignal, + }); + // skip block length + await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }); + this._initialized = true; + if (this._objectIndex && this._objectIndex > 0) { + for (let i = 0; i < this._objectIndex; i++) { + await this._itemType.read(this._dataStream, { abortSignal: options.abortSignal }); + this._itemsRemainingInBlock--; + } } } - // Signature is generated on the un-url-encoded values. - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn - ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) - : "", - blobSASSignatureValues.expiresOn - ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) - : "", - getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - blobSASSignatureValues.identifier, - blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - resource, - timestamp, - blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", - blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", - blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", - blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", - blobSASSignatureValues.contentType ? 
blobSASSignatureValues.contentType : "", - ].join("\n"); - const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); - return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType); + hasNext() { + return !this._initialized || this._itemsRemainingInBlock > 0; + } + parseObjects() { + return tslib.__asyncGenerator(this, arguments, function* parseObjects_1(options = {}) { + if (!this._initialized) { + yield tslib.__await(this.initialize(options)); + } + while (this.hasNext()) { + const result = yield tslib.__await(this._itemType.read(this._dataStream, { + abortSignal: options.abortSignal, + })); + this._itemsRemainingInBlock--; + this._objectIndex++; + if (this._itemsRemainingInBlock === 0) { + const marker = yield tslib.__await(AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, { + abortSignal: options.abortSignal, + })); + this._blockOffset = this._initialBlockOffset + this._dataStream.position; + this._objectIndex = 0; + if (!arraysEqual(this._syncMarker, marker)) { + throw new Error("Stream is not a valid Avro file."); + } + try { + this._itemsRemainingInBlock = yield tslib.__await(AvroParser.readLong(this._dataStream, { + abortSignal: options.abortSignal, + })); + } + catch (err) { + // We hit the end of the stream. + this._itemsRemainingInBlock = 0; + } + if (this._itemsRemainingInBlock > 0) { + // Ignore block size + yield tslib.__await(AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal })); + } + } + yield yield tslib.__await(result); + } + }); + } } -/** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. - * - * Creates an instance of SASQueryParameters. - * - * Only accepts required settings needed to create a SAS. For optional settings please - * set corresponding properties directly, such as permissions, startsOn and identifier. - * - * WARNING: When identifier is not provided, permissions and expiresOn are required. - * You MUST assign value to identifier or expiresOn & permissions manually if you initial with - * this constructor. - * - * @param blobSASSignatureValues - - * @param sharedKeyCredential - - */ -function generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - if (!blobSASSignatureValues.identifier && - !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +class AvroReadable { +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
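+// A minimal sketch of how the Avro pieces above fit together; the helper name is
+// illustrative only, and the AvroReadable implementation it uses
+// (AvroReadableFromStream) is the one defined just below.
+async function readAvroObjectsSketch(nodeReadable) {
+    const reader = new AvroReader(new AvroReadableFromStream(nodeReadable));
+    const records = [];
+    // parseObjects() is an async generator, so for-await consumes one record at a
+    // time; records produced by AvroRecordType carry their type name in "$schema".
+    for await (const record of reader.parseObjects()) {
+        records.push(record);
+    }
+    return records;
+}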
+const ABORT_ERROR = new abortController.AbortError("Reading from the avro stream was aborted."); +class AvroReadableFromStream extends AvroReadable { + toUint8Array(data) { + if (typeof data === "string") { + return Buffer.from(data); + } + return data; } - let resource = "c"; - let timestamp = blobSASSignatureValues.snapshotTime; - if (blobSASSignatureValues.blobName) { - resource = "b"; - if (blobSASSignatureValues.snapshotTime) { - resource = "bs"; + constructor(readable) { + super(); + this._readable = readable; + this._position = 0; + } + get position() { + return this._position; + } + async read(size, options = {}) { + var _a; + if ((_a = options.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { + throw ABORT_ERROR; } - else if (blobSASSignatureValues.versionId) { - resource = "bv"; - timestamp = blobSASSignatureValues.versionId; + if (size < 0) { + throw new Error(`size parameter should be positive: ${size}`); } - } - // Calling parse and toString guarantees the proper ordering and throws on invalid characters. - let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + if (size === 0) { + return new Uint8Array(); + } + if (!this._readable.readable) { + throw new Error("Stream no longer readable."); + } + // See if there is already enough data. + const chunk = this._readable.read(size); + if (chunk) { + this._position += chunk.length; + // chunk.length maybe less than desired size if the stream ends. + return this.toUint8Array(chunk); } else { - verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + // register callback to wait for enough data to read + return new Promise((resolve, reject) => { + /* eslint-disable @typescript-eslint/no-use-before-define */ + const cleanUp = () => { + this._readable.removeListener("readable", readableCallback); + this._readable.removeListener("error", rejectCallback); + this._readable.removeListener("end", rejectCallback); + this._readable.removeListener("close", rejectCallback); + if (options.abortSignal) { + options.abortSignal.removeEventListener("abort", abortHandler); + } + }; + const readableCallback = () => { + const callbackChunk = this._readable.read(size); + if (callbackChunk) { + this._position += callbackChunk.length; + cleanUp(); + // callbackChunk.length maybe less than desired size if the stream ends. + resolve(this.toUint8Array(callbackChunk)); + } + }; + const rejectCallback = () => { + cleanUp(); + reject(); + }; + const abortHandler = () => { + cleanUp(); + reject(ABORT_ERROR); + }; + this._readable.on("readable", readableCallback); + this._readable.once("error", rejectCallback); + this._readable.once("end", rejectCallback); + this._readable.once("close", rejectCallback); + if (options.abortSignal) { + options.abortSignal.addEventListener("abort", abortHandler); + } + /* eslint-enable @typescript-eslint/no-use-before-define */ + }); } } - // Signature is generated on the un-url-encoded values. - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn - ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) - : "", - blobSASSignatureValues.expiresOn - ? 
truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) - : "", - getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - blobSASSignatureValues.identifier, - blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - resource, - timestamp, - blobSASSignatureValues.encryptionScope, - blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", - blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", - blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", - blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", - blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", - ].join("\n"); - const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); - return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, undefined, undefined, undefined, blobSASSignatureValues.encryptionScope); } + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** * ONLY AVAILABLE IN NODE.JS RUNTIME. - * IMPLEMENTATION FOR API VERSION FROM 2018-11-09. - * - * Creates an instance of SASQueryParameters. - * - * Only accepts required settings needed to create a SAS. For optional settings please - * set corresponding properties directly, such as permissions, startsOn. - * - * WARNING: identifier will be ignored, permissions and expiresOn are required. * - * @param blobSASSignatureValues - - * @param userDelegationKeyCredential - + * A Node.js BlobQuickQueryStream will internally parse avro data stream for blob query. */ -function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - // Stored access policies are not supported for a user delegation SAS. - if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); +class BlobQuickQueryStream extends stream.Readable { + /** + * Creates an instance of BlobQuickQueryStream. 
+ * + * @param source - The current ReadableStream returned from getter + * @param options - + */ + constructor(source, options = {}) { + super(); + this.avroPaused = true; + this.source = source; + this.onProgress = options.onProgress; + this.onError = options.onError; + this.avroReader = new AvroReader(new AvroReadableFromStream(this.source)); + this.avroIter = this.avroReader.parseObjects({ abortSignal: options.abortSignal }); } - let resource = "c"; - let timestamp = blobSASSignatureValues.snapshotTime; - if (blobSASSignatureValues.blobName) { - resource = "b"; - if (blobSASSignatureValues.snapshotTime) { - resource = "bs"; - } - else if (blobSASSignatureValues.versionId) { - resource = "bv"; - timestamp = blobSASSignatureValues.versionId; + _read() { + if (this.avroPaused) { + this.readInternal().catch((err) => { + this.emit("error", err); + }); } } - // Calling parse and toString guarantees the proper ordering and throws on invalid characters. - let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } - else { - verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } - } - // Signature is generated on the un-url-encoded values. - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn - ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) - : "", - blobSASSignatureValues.expiresOn - ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) - : "", - getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - userDelegationKeyCredential.userDelegationKey.signedObjectId, - userDelegationKeyCredential.userDelegationKey.signedTenantId, - userDelegationKeyCredential.userDelegationKey.signedStartsOn - ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) - : "", - userDelegationKeyCredential.userDelegationKey.signedExpiresOn - ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) - : "", - userDelegationKeyCredential.userDelegationKey.signedService, - userDelegationKeyCredential.userDelegationKey.signedVersion, - blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - resource, - timestamp, - blobSASSignatureValues.cacheControl, - blobSASSignatureValues.contentDisposition, - blobSASSignatureValues.contentEncoding, - blobSASSignatureValues.contentLanguage, - blobSASSignatureValues.contentType, - ].join("\n"); - const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); - return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey); -} -/** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * IMPLEMENTATION FOR API VERSION FROM 2020-02-10. 
- * - * Creates an instance of SASQueryParameters. - * - * Only accepts required settings needed to create a SAS. For optional settings please - * set corresponding properties directly, such as permissions, startsOn. - * - * WARNING: identifier will be ignored, permissions and expiresOn are required. - * - * @param blobSASSignatureValues - - * @param userDelegationKeyCredential - - */ -function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - // Stored access policies are not supported for a user delegation SAS. - if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); - } - let resource = "c"; - let timestamp = blobSASSignatureValues.snapshotTime; - if (blobSASSignatureValues.blobName) { - resource = "b"; - if (blobSASSignatureValues.snapshotTime) { - resource = "bs"; - } - else if (blobSASSignatureValues.versionId) { - resource = "bv"; - timestamp = blobSASSignatureValues.versionId; - } - } - // Calling parse and toString guarantees the proper ordering and throws on invalid characters. - let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } - else { - verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } - } - // Signature is generated on the un-url-encoded values. - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn - ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) - : "", - blobSASSignatureValues.expiresOn - ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) - : "", - getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - userDelegationKeyCredential.userDelegationKey.signedObjectId, - userDelegationKeyCredential.userDelegationKey.signedTenantId, - userDelegationKeyCredential.userDelegationKey.signedStartsOn - ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) - : "", - userDelegationKeyCredential.userDelegationKey.signedExpiresOn - ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) - : "", - userDelegationKeyCredential.userDelegationKey.signedService, - userDelegationKeyCredential.userDelegationKey.signedVersion, - blobSASSignatureValues.preauthorizedAgentObjectId, - undefined, - blobSASSignatureValues.correlationId, - blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - resource, - timestamp, - blobSASSignatureValues.cacheControl, - blobSASSignatureValues.contentDisposition, - blobSASSignatureValues.contentEncoding, - blobSASSignatureValues.contentLanguage, - blobSASSignatureValues.contentType, - ].join("\n"); - const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); - return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId); -} -/** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. - * - * Creates an instance of SASQueryParameters. - * - * Only accepts required settings needed to create a SAS. For optional settings please - * set corresponding properties directly, such as permissions, startsOn. - * - * WARNING: identifier will be ignored, permissions and expiresOn are required. - * - * @param blobSASSignatureValues - - * @param userDelegationKeyCredential - - */ -function generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - // Stored access policies are not supported for a user delegation SAS. - if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); - } - let resource = "c"; - let timestamp = blobSASSignatureValues.snapshotTime; - if (blobSASSignatureValues.blobName) { - resource = "b"; - if (blobSASSignatureValues.snapshotTime) { - resource = "bs"; - } - else if (blobSASSignatureValues.versionId) { - resource = "bv"; - timestamp = blobSASSignatureValues.versionId; - } - } - // Calling parse and toString guarantees the proper ordering and throws on invalid characters. - let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } - else { - verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } - } - // Signature is generated on the un-url-encoded values. - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn - ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) - : "", - blobSASSignatureValues.expiresOn - ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) - : "", - getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - userDelegationKeyCredential.userDelegationKey.signedObjectId, - userDelegationKeyCredential.userDelegationKey.signedTenantId, - userDelegationKeyCredential.userDelegationKey.signedStartsOn - ? 
truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) - : "", - userDelegationKeyCredential.userDelegationKey.signedExpiresOn - ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) - : "", - userDelegationKeyCredential.userDelegationKey.signedService, - userDelegationKeyCredential.userDelegationKey.signedVersion, - blobSASSignatureValues.preauthorizedAgentObjectId, - undefined, - blobSASSignatureValues.correlationId, - blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - resource, - timestamp, - blobSASSignatureValues.encryptionScope, - blobSASSignatureValues.cacheControl, - blobSASSignatureValues.contentDisposition, - blobSASSignatureValues.contentEncoding, - blobSASSignatureValues.contentLanguage, - blobSASSignatureValues.contentType, - ].join("\n"); - const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); - return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId, blobSASSignatureValues.encryptionScope); -} -function getCanonicalName(accountName, containerName, blobName) { - // Container: "/blob/account/containerName" - // Blob: "/blob/account/containerName/blobName" - const elements = [`/blob/${accountName}/${containerName}`]; - if (blobName) { - elements.push(`/${blobName}`); - } - return elements.join(""); -} -function SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { - const version = blobSASSignatureValues.version ? 
blobSASSignatureValues.version : SERVICE_VERSION; - if (blobSASSignatureValues.snapshotTime && version < "2018-11-09") { - throw RangeError("'version' must be >= '2018-11-09' when providing 'snapshotTime'."); - } - if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.snapshotTime) { - throw RangeError("Must provide 'blobName' when providing 'snapshotTime'."); - } - if (blobSASSignatureValues.versionId && version < "2019-10-10") { - throw RangeError("'version' must be >= '2019-10-10' when providing 'versionId'."); - } - if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.versionId) { - throw RangeError("Must provide 'blobName' when providing 'versionId'."); - } - if (blobSASSignatureValues.permissions && - blobSASSignatureValues.permissions.setImmutabilityPolicy && - version < "2020-08-04") { - throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); - } - if (blobSASSignatureValues.permissions && - blobSASSignatureValues.permissions.deleteVersion && - version < "2019-10-10") { - throw RangeError("'version' must be >= '2019-10-10' when providing 'x' permission."); - } - if (blobSASSignatureValues.permissions && - blobSASSignatureValues.permissions.permanentDelete && - version < "2019-10-10") { - throw RangeError("'version' must be >= '2019-10-10' when providing 'y' permission."); - } - if (blobSASSignatureValues.permissions && - blobSASSignatureValues.permissions.tag && - version < "2019-12-12") { - throw RangeError("'version' must be >= '2019-12-12' when providing 't' permission."); - } - if (version < "2020-02-10" && - blobSASSignatureValues.permissions && - (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)) { - throw RangeError("'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission."); - } - if (version < "2021-04-10" && - blobSASSignatureValues.permissions && - blobSASSignatureValues.permissions.filterByTags) { - throw RangeError("'version' must be >= '2021-04-10' when providing the 'f' permission."); - } - if (version < "2020-02-10" && - (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)) { - throw RangeError("'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'."); - } - if (blobSASSignatureValues.encryptionScope && version < "2020-12-06") { - throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + async readInternal() { + this.avroPaused = false; + let avroNext; + do { + avroNext = await this.avroIter.next(); + if (avroNext.done) { + break; + } + const obj = avroNext.value; + const schema = obj.$schema; + if (typeof schema !== "string") { + throw Error("Missing schema in avro record."); + } + switch (schema) { + case "com.microsoft.azure.storage.queryBlobContents.resultData": + { + const data = obj.data; + if (data instanceof Uint8Array === false) { + throw Error("Invalid data in avro result record."); + } + if (!this.push(Buffer.from(data))) { + this.avroPaused = true; + } + } + break; + case "com.microsoft.azure.storage.queryBlobContents.progress": + { + const bytesScanned = obj.bytesScanned; + if (typeof bytesScanned !== "number") { + throw Error("Invalid bytesScanned in avro progress record."); + } + if (this.onProgress) { + this.onProgress({ loadedBytes: bytesScanned }); + } + } + break; + case "com.microsoft.azure.storage.queryBlobContents.end": + if (this.onProgress) { + const totalBytes = obj.totalBytes; + if (typeof 
totalBytes !== "number") { + throw Error("Invalid totalBytes in avro end record."); + } + this.onProgress({ loadedBytes: totalBytes }); + } + this.push(null); + break; + case "com.microsoft.azure.storage.queryBlobContents.error": + if (this.onError) { + const fatal = obj.fatal; + if (typeof fatal !== "boolean") { + throw Error("Invalid fatal in avro error record."); + } + const name = obj.name; + if (typeof name !== "string") { + throw Error("Invalid name in avro error record."); + } + const description = obj.description; + if (typeof description !== "string") { + throw Error("Invalid description in avro error record."); + } + const position = obj.position; + if (typeof position !== "number") { + throw Error("Invalid position in avro error record."); + } + this.onError({ + position, + name, + isFatal: fatal, + description, + }); + } + break; + default: + throw Error(`Unknown schema ${schema} in avro progress record.`); + } + } while (!avroNext.done && !this.avroPaused); } - blobSASSignatureValues.version = version; - return blobSASSignatureValues; } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}. + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * BlobQueryResponse implements BlobDownloadResponseModel interface, and in Node.js runtime it will + * parse avor data returned by blob query. */ -class BlobLeaseClient { +class BlobQueryResponse { /** - * Creates an instance of BlobLeaseClient. - * @param client - The client to make the lease operation requests. - * @param leaseId - Initial proposed lease id. + * Indicates that the service supports + * requests for partial file content. + * + * @readonly */ - constructor(client, leaseId) { - const clientContext = new StorageClientContext(client.url, client.pipeline.toServiceClientOptions()); - this._url = client.url; - if (client.name === undefined) { - this._isContainer = true; - this._containerOrBlobOperation = new Container(clientContext); - } - else { - this._isContainer = false; - this._containerOrBlobOperation = new Blob$1(clientContext); - } - if (!leaseId) { - leaseId = coreHttp.generateUuid(); - } - this._leaseId = leaseId; + get acceptRanges() { + return this.originalResponse.acceptRanges; } /** - * Gets the lease Id. + * Returns if it was previously specified + * for the file. * * @readonly */ - get leaseId() { - return this._leaseId; + get cacheControl() { + return this.originalResponse.cacheControl; } /** - * Gets the url. + * Returns the value that was specified + * for the 'x-ms-content-disposition' header and specifies how to process the + * response. * * @readonly */ - get url() { - return this._url; + get contentDisposition() { + return this.originalResponse.contentDisposition; } /** - * Establishes and manages a lock on a container for delete operations, or on a blob - * for write and delete operations. - * The lock duration can be 15 to 60 seconds, or can be infinite. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container - * and - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * Returns the value that was specified + * for the Content-Encoding request header. * - * @param duration - Must be between 15 to 60 seconds, or infinite (-1) - * @param options - option to configure lease management operations. - * @returns Response data for acquire lease operation. 
+ * @readonly */ - async acquireLease(duration, options = {}) { - var _a, _b, _c, _d, _e, _f; - const { span, updatedOptions } = createSpan("BlobLeaseClient-acquireLease", options); - if (this._isContainer && - ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || - (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || - ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); - } - try { - return await this._containerOrBlobOperation.acquireLease(Object.assign({ abortSignal: options.abortSignal, duration, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }), proposedLeaseId: this._leaseId }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + get contentEncoding() { + return this.originalResponse.contentEncoding; } /** - * To change the ID of the lease. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container - * and - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * Returns the value that was specified + * for the Content-Language request header. * - * @param proposedLeaseId - the proposed new lease Id. - * @param options - option to configure lease management operations. - * @returns Response data for change lease operation. + * @readonly */ - async changeLease(proposedLeaseId, options = {}) { - var _a, _b, _c, _d, _e, _f; - const { span, updatedOptions } = createSpan("BlobLeaseClient-changeLease", options); - if (this._isContainer && - ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || - (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || - ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); - } - try { - const response = await this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? 
void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - this._leaseId = proposedLeaseId; - return response; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + get contentLanguage() { + return this.originalResponse.contentLanguage; } /** - * To free the lease if it is no longer needed so that another client may - * immediately acquire a lease against the container or the blob. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container - * and - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * The current sequence number for a + * page blob. This header is not returned for block blobs or append blobs. * - * @param options - option to configure lease management operations. - * @returns Response data for release lease operation. + * @readonly */ - async releaseLease(options = {}) { - var _a, _b, _c, _d, _e, _f; - const { span, updatedOptions } = createSpan("BlobLeaseClient-releaseLease", options); - if (this._isContainer && - ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || - (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || - ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); - } - try { - return await this._containerOrBlobOperation.releaseLease(this._leaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + get blobSequenceNumber() { + return this.originalResponse.blobSequenceNumber; } /** - * To renew the lease. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container - * and - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * The blob's type. Possible values include: + * 'BlockBlob', 'PageBlob', 'AppendBlob'. * - * @param options - Optional option to configure lease management operations. - * @returns Response data for renew lease operation. + * @readonly */ - async renewLease(options = {}) { - var _a, _b, _c, _d, _e, _f; - const { span, updatedOptions } = createSpan("BlobLeaseClient-renewLease", options); - if (this._isContainer && - ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || - (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || - ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. 
Values other than undefined or their default values are not acceptable."); - } - try { - return await this._containerOrBlobOperation.renewLease(this._leaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + get blobType() { + return this.originalResponse.blobType; } /** - * To end the lease but ensure that another client cannot acquire a new lease - * until the current lease period has expired. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container - * and - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * The number of bytes present in the + * response body. * - * @param breakPeriod - Break period - * @param options - Optional options to configure lease management operations. - * @returns Response data for break lease operation. - */ - async breakLease(breakPeriod, options = {}) { - var _a, _b, _c, _d, _e, _f; - const { span, updatedOptions } = createSpan("BlobLeaseClient-breakLease", options); - if (this._isContainer && - ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || - (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || - ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); - } - try { - const operationOptions = Object.assign({ abortSignal: options.abortSignal, breakPeriod, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)); - return await this._containerOrBlobOperation.breakLease(operationOptions); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } -} - -// Copyright (c) Microsoft Corporation. -/** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * - * A Node.js ReadableStream will internally retry when internal ReadableStream unexpected ends. - */ -class RetriableReadableStream extends stream.Readable { - /** - * Creates an instance of RetriableReadableStream. 
- * - * @param source - The current ReadableStream returned from getter - * @param getter - A method calling downloading request returning - * a new ReadableStream from specified offset - * @param offset - Offset position in original data source to read - * @param count - How much data in original data source to read - * @param options - - */ - constructor(source, getter, offset, count, options = {}) { - super({ highWaterMark: options.highWaterMark }); - this.retries = 0; - this.sourceDataHandler = (data) => { - if (this.options.doInjectErrorOnce) { - this.options.doInjectErrorOnce = undefined; - this.source.pause(); - this.source.removeAllListeners("data"); - this.source.emit("end"); - return; - } - // console.log( - // `Offset: ${this.offset}, Received ${data.length} from internal stream` - // ); - this.offset += data.length; - if (this.onProgress) { - this.onProgress({ loadedBytes: this.offset - this.start }); - } - if (!this.push(data)) { - this.source.pause(); - } - }; - this.sourceErrorOrEndHandler = (err) => { - if (err && err.name === "AbortError") { - this.destroy(err); - return; - } - // console.log( - // `Source stream emits end or error, offset: ${ - // this.offset - // }, dest end : ${this.end}` - // ); - this.removeSourceEventHandlers(); - if (this.offset - 1 === this.end) { - this.push(null); - } - else if (this.offset <= this.end) { - // console.log( - // `retries: ${this.retries}, max retries: ${this.maxRetries}` - // ); - if (this.retries < this.maxRetryRequests) { - this.retries += 1; - this.getter(this.offset) - .then((newSource) => { - this.source = newSource; - this.setSourceEventHandlers(); - return; - }) - .catch((error) => { - this.destroy(error); - }); - } - else { - this.destroy(new Error(`Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: ${this.offset - 1}, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${this.maxRetryRequests}`)); - } - } - else { - this.destroy(new Error(`Data corruption failure: Received more data than original request, data needed offset is ${this.end}, received offset: ${this.offset - 1}`)); - } - }; - this.getter = getter; - this.source = source; - this.start = offset; - this.offset = offset; - this.end = offset + count - 1; - this.maxRetryRequests = - options.maxRetryRequests && options.maxRetryRequests >= 0 ? options.maxRetryRequests : 0; - this.onProgress = options.onProgress; - this.options = options; - this.setSourceEventHandlers(); - } - _read() { - this.source.resume(); - } - setSourceEventHandlers() { - this.source.on("data", this.sourceDataHandler); - this.source.on("end", this.sourceErrorOrEndHandler); - this.source.on("error", this.sourceErrorOrEndHandler); - } - removeSourceEventHandlers() { - this.source.removeListener("data", this.sourceDataHandler); - this.source.removeListener("end", this.sourceErrorOrEndHandler); - this.source.removeListener("error", this.sourceErrorOrEndHandler); - } - _destroy(error, callback) { - // remove listener from source and release source - this.removeSourceEventHandlers(); - this.source.destroy(); - callback(error === null ? undefined : error); - } -} - -// Copyright (c) Microsoft Corporation. -/** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * - * BlobDownloadResponse implements BlobDownloadResponseParsed interface, and in Node.js runtime it will - * automatically retry when internal read stream unexpected ends. (This kind of unexpected ends cannot - * trigger retries defined in pipeline retry policy.) 
- * - * The {@link readableStreamBody} stream will retry underlayer, you can just use it as a normal Node.js - * Readable stream. - */ -class BlobDownloadResponse { - /** - * Creates an instance of BlobDownloadResponse. - * - * @param originalResponse - - * @param getter - - * @param offset - - * @param count - - * @param options - - */ - constructor(originalResponse, getter, offset, count, options = {}) { - this.originalResponse = originalResponse; - this.blobDownloadStream = new RetriableReadableStream(this.originalResponse.readableStreamBody, getter, offset, count, options); - } - /** - * Indicates that the service supports - * requests for partial file content. - * - * @readonly - */ - get acceptRanges() { - return this.originalResponse.acceptRanges; - } - /** - * Returns if it was previously specified - * for the file. - * - * @readonly - */ - get cacheControl() { - return this.originalResponse.cacheControl; - } - /** - * Returns the value that was specified - * for the 'x-ms-content-disposition' header and specifies how to process the - * response. - * - * @readonly - */ - get contentDisposition() { - return this.originalResponse.contentDisposition; - } - /** - * Returns the value that was specified - * for the Content-Encoding request header. - * - * @readonly - */ - get contentEncoding() { - return this.originalResponse.contentEncoding; - } - /** - * Returns the value that was specified - * for the Content-Language request header. - * - * @readonly - */ - get contentLanguage() { - return this.originalResponse.contentLanguage; - } - /** - * The current sequence number for a - * page blob. This header is not returned for block blobs or append blobs. - * - * @readonly - */ - get blobSequenceNumber() { - return this.originalResponse.blobSequenceNumber; - } - /** - * The blob's type. Possible values include: - * 'BlockBlob', 'PageBlob', 'AppendBlob'. - * - * @readonly - */ - get blobType() { - return this.originalResponse.blobType; - } - /** - * The number of bytes present in the - * response body. - * - * @readonly + * @readonly */ get contentLength() { return this.originalResponse.contentLength; @@ -28841,7 +24181,7 @@ class BlobDownloadResponse { * @readonly */ get copyCompletedOn() { - return this.originalResponse.copyCompletedOn; + return undefined; } /** * String identifier for the last attempted Copy @@ -28948,14 +24288,6 @@ class BlobDownloadResponse { get etag() { return this.originalResponse.etag; } - /** - * The number of tags associated with the blob - * - * @readonly - */ - get tagCount() { - return this.originalResponse.tagCount; - } /** * The error code. * @@ -28998,23 +24330,6 @@ class BlobDownloadResponse { get lastModified() { return this.originalResponse.lastModified; } - /** - * Returns the UTC date and time generated by the service that indicates the time at which the blob was - * last read or written to. - * - * @readonly - */ - get lastAccessed() { - return this.originalResponse.lastAccessed; - } - /** - * Returns the date and time the blob was created. - * - * @readonly - */ - get createdOn() { - return this.originalResponse.createdOn; - } /** * A name-value pair * to associate with a file storage object. @@ -29043,7 +24358,7 @@ class BlobDownloadResponse { return this.originalResponse.clientRequestId; } /** - * Indicates the version of the Blob service used + * Indicates the version of the File service used * to execute the request. 
* * @readonly @@ -29051,22 +24366,6 @@ get version() { return this.originalResponse.version; } - /** - * Indicates the versionId of the downloaded blob version. - * - * @readonly - */ - get versionId() { - return this.originalResponse.versionId; - } - /** - * Indicates whether version of this blob is a current version. - * - * @readonly - */ - get isCurrentVersion() { - return this.originalResponse.isCurrentVersion; - } /** * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned * when the blob was encrypted with a customer-provided key. @@ -29086,3056 +24385,4085 @@ class BlobDownloadResponse { return this.originalResponse.contentCrc64; } /** - * Object Replication Policy Id of the destination blob. + * The response body as a browser Blob. + * Always undefined in node.js. * * @readonly */ - get objectReplicationDestinationPolicyId() { - return this.originalResponse.objectReplicationDestinationPolicyId; + get blobBody() { + return undefined; } /** - * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. + * The response body as a node.js Readable stream. + * Always undefined in the browser. + * + * It will parse Avro data returned by blob query. * * @readonly */ - get objectReplicationSourceProperties() { - return this.originalResponse.objectReplicationSourceProperties; + get readableStreamBody() { + return coreUtil.isNode ? this.blobDownloadStream : undefined; } /** - * If this blob has been sealed. - * - * @readonly + * The HTTP response. */ - get isSealed() { - return this.originalResponse.isSealed; + get _response() { + return this.originalResponse._response; } /** - * UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. + * Creates an instance of BlobQueryResponse. * - * @readonly + * @param originalResponse - + * @param options - */ - get immutabilityPolicyExpiresOn() { - return this.originalResponse.immutabilityPolicyExpiresOn; + constructor(originalResponse, options = {}) { + this.originalResponse = originalResponse; + this.blobDownloadStream = new BlobQuickQueryStream(this.originalResponse.readableStreamBody, options); } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Represents the access tier on a blob. + * For detailed information about block blob level tiering see {@link https://docs.microsoft.com/azure/storage/blobs/storage-blob-storage-tiers|Hot, cool and archive storage tiers.} + */ +exports.BlockBlobTier = void 0; +(function (BlockBlobTier) { /** - * Indicates immutability policy mode. - * - * @readonly + * Optimized for storing data that is accessed frequently. */ - get immutabilityPolicyMode() { - return this.originalResponse.immutabilityPolicyMode; - } + BlockBlobTier["Hot"] = "Hot"; /** - * Indicates if a legal hold is present on the blob. - * - * @readonly + * Optimized for storing data that is infrequently accessed and stored for at least 30 days. */ - get legalHold() { - return this.originalResponse.legalHold; - } + BlockBlobTier["Cool"] = "Cool"; /** - * The response body as a browser Blob. - * Always undefined in node.js. - * - * @readonly + * Optimized for storing data that is rarely accessed. */ - get contentAsBlob() { - return this.originalResponse.blobBody; - } + BlockBlobTier["Cold"] = "Cold"; /** - * The response body as a node.js Readable stream. - * Always undefined in the browser.
- * - * It will automatically retry when internal read stream unexpected ends. - * - * @readonly + * Optimized for storing data that is rarely accessed and stored for at least 180 days + * with flexible latency requirements (on the order of hours). */ - get readableStreamBody() { - return coreHttp.isNode ? this.blobDownloadStream : undefined; - } + BlockBlobTier["Archive"] = "Archive"; +})(exports.BlockBlobTier || (exports.BlockBlobTier = {})); +/** + * Specifies the page blob tier to set the blob to. This is only applicable to page blobs on premium storage accounts. + * Please see {@link https://docs.microsoft.com/azure/storage/storage-premium-storage#scalability-and-performance-targets|here} + * for detailed information on the corresponding IOPS and throughput per PageBlobTier. + */ +exports.PremiumPageBlobTier = void 0; +(function (PremiumPageBlobTier) { /** - * The HTTP response. + * P4 Tier. */ - get _response() { - return this.originalResponse._response; + PremiumPageBlobTier["P4"] = "P4"; + /** + * P6 Tier. + */ + PremiumPageBlobTier["P6"] = "P6"; + /** + * P10 Tier. + */ + PremiumPageBlobTier["P10"] = "P10"; + /** + * P15 Tier. + */ + PremiumPageBlobTier["P15"] = "P15"; + /** + * P20 Tier. + */ + PremiumPageBlobTier["P20"] = "P20"; + /** + * P30 Tier. + */ + PremiumPageBlobTier["P30"] = "P30"; + /** + * P40 Tier. + */ + PremiumPageBlobTier["P40"] = "P40"; + /** + * P50 Tier. + */ + PremiumPageBlobTier["P50"] = "P50"; + /** + * P60 Tier. + */ + PremiumPageBlobTier["P60"] = "P60"; + /** + * P70 Tier. + */ + PremiumPageBlobTier["P70"] = "P70"; + /** + * P80 Tier. + */ + PremiumPageBlobTier["P80"] = "P80"; +})(exports.PremiumPageBlobTier || (exports.PremiumPageBlobTier = {})); +function toAccessTier(tier) { + if (tier === undefined) { + return undefined; + } + return tier; // No longer checks whether the string is a valid AccessTier; validation is left to the underlying service. +} +function ensureCpkIfSpecified(cpk, isHttps) { + if (cpk && !isHttps) { + throw new RangeError("Customer-provided encryption key must be used over HTTPS."); + } + if (cpk && !cpk.encryptionAlgorithm) { + cpk.encryptionAlgorithm = EncryptionAlgorithmAES25; } } +/** + * Defines the known cloud audiences for Storage. + */ +exports.StorageBlobAudience = void 0; +(function (StorageBlobAudience) { + /** + * The OAuth scope to use to retrieve an AAD token for Azure Storage. + */ + StorageBlobAudience["StorageOAuthScopes"] = "https://storage.azure.com/.default"; + /** + * The OAuth scope to use to retrieve an AAD token for Azure Disk. + */ + StorageBlobAudience["DiskComputeOAuthScopes"] = "https://disk.compute.azure.com/.default"; +})(exports.StorageBlobAudience || (exports.StorageBlobAudience = {})); +/** + * Gets the OAuth audience for a storage account, for the Blob service. + */ +function getBlobServiceAccountAudience(storageAccountName) { + return `https://${storageAccountName}.blob.core.windows.net/.default`; +} // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. /** + * Function that converts PageRange and ClearRange to a common Range object. + * PageRange and ClearRange use start and end, while Range uses offset and count; + * this function normalizes them to Range.
+ * @param response - Model PageBlob Range response + */ +function rangeResponseFromModel(response) { + const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({ + offset: x.start, + count: x.end - x.start, + })); + const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({ + offset: x.start, + count: x.end - x.start, + })); + return Object.assign(Object.assign({}, response), { pageRange, + clearRange, _response: Object.assign(Object.assign({}, response._response), { parsedBody: { + pageRange, + clearRange, + } }) }); +} // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -class AvroParser { - /** - * Reads a fixed number of bytes from the stream. - * - * @param stream - - * @param length - - * @param options - - */ - static async readFixedBytes(stream, length, options = {}) { - const bytes = await stream.read(length, { abortSignal: options.abortSignal }); - if (bytes.length !== length) { - throw new Error("Hit stream end."); +/** + * This is the poller returned by {@link BlobClient.beginCopyFromURL}. + * This can not be instantiated directly outside of this package. + * + * @hidden + */ +class BlobBeginCopyFromUrlPoller extends coreLro.Poller { + constructor(options) { + const { blobClient, copySource, intervalInMs = 15000, onProgress, resumeFrom, startCopyFromURLOptions, } = options; + let state; + if (resumeFrom) { + state = JSON.parse(resumeFrom).state; } - return bytes; + const operation = makeBlobBeginCopyFromURLPollOperation(Object.assign(Object.assign({}, state), { blobClient, + copySource, + startCopyFromURLOptions })); + super(operation); + if (typeof onProgress === "function") { + this.onProgress(onProgress); + } + this.intervalInMs = intervalInMs; } - /** - * Reads a single byte from the stream. - * - * @param stream - - * @param options - - */ - static async readByte(stream, options = {}) { - const buf = await AvroParser.readFixedBytes(stream, 1, options); - return buf[0]; + delay() { + return coreUtil.delay(this.intervalInMs); } - // int and long are stored in variable-length zig-zag coding. - // variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt - // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types - static async readZigZagLong(stream, options = {}) { - let zigZagEncoded = 0; - let significanceInBit = 0; - let byte, haveMoreByte, significanceInFloat; - do { - byte = await AvroParser.readByte(stream, options); - haveMoreByte = byte & 0x80; - zigZagEncoded |= (byte & 0x7f) << significanceInBit; - significanceInBit += 7; - } while (haveMoreByte && significanceInBit < 28); // bitwise operation only works for 32-bit integers - if (haveMoreByte) { - // Switch to float arithmetic - // eslint-disable-next-line no-self-assign - zigZagEncoded = zigZagEncoded; - significanceInFloat = 268435456; // 2 ** 28. - do { - byte = await AvroParser.readByte(stream, options); - zigZagEncoded += (byte & 0x7f) * significanceInFloat; - significanceInFloat *= 128; // 2 ** 7 - } while (byte & 0x80); - const res = (zigZagEncoded % 2 ? 
-(zigZagEncoded + 1) : zigZagEncoded) / 2; - if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) { - throw new Error("Integer overflow."); - } - return res; - } - return (zigZagEncoded >> 1) ^ -(zigZagEncoded & 1); - } - static async readLong(stream, options = {}) { - return AvroParser.readZigZagLong(stream, options); - } - static async readInt(stream, options = {}) { - return AvroParser.readZigZagLong(stream, options); +} +/** + * Note: Intentionally using function expression over arrow function expression + * so that the function can be invoked with a different context. + * This affects what `this` refers to. + * @hidden + */ +const cancel = async function cancel(options = {}) { + const state = this.state; + const { copyId } = state; + if (state.isCompleted) { + return makeBlobBeginCopyFromURLPollOperation(state); } - static async readNull() { - return null; + if (!copyId) { + state.isCancelled = true; + return makeBlobBeginCopyFromURLPollOperation(state); } - static async readBoolean(stream, options = {}) { - const b = await AvroParser.readByte(stream, options); - if (b === 1) { - return true; + // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call + await state.blobClient.abortCopyFromURL(copyId, { + abortSignal: options.abortSignal, + }); + state.isCancelled = true; + return makeBlobBeginCopyFromURLPollOperation(state); +}; +/** + * Note: Intentionally using function expression over arrow function expression + * so that the function can be invoked with a different context. + * This affects what `this` refers to. + * @hidden + */ +const update = async function update(options = {}) { + const state = this.state; + const { blobClient, copySource, startCopyFromURLOptions } = state; + if (!state.isStarted) { + state.isStarted = true; + const result = await blobClient.startCopyFromURL(copySource, startCopyFromURLOptions); + // copyId is needed to abort + state.copyId = result.copyId; + if (result.copyStatus === "success") { + state.result = result; + state.isCompleted = true; } - else if (b === 0) { - return false; + } + else if (!state.isCompleted) { + try { + const result = await state.blobClient.getProperties({ abortSignal: options.abortSignal }); + const { copyStatus, copyProgress } = result; + const prevCopyProgress = state.copyProgress; + if (copyProgress) { + state.copyProgress = copyProgress; + } + if (copyStatus === "pending" && + copyProgress !== prevCopyProgress && + typeof options.fireProgress === "function") { + // trigger in setTimeout, or swallow error? + options.fireProgress(state); + } + else if (copyStatus === "success") { + state.result = result; + state.isCompleted = true; + } + else if (copyStatus === "failed") { + state.error = new Error(`Blob copy failed with reason: "${result.copyStatusDescription || "unknown"}"`); + state.isCompleted = true; + } } - else { - throw new Error("Byte was not a boolean."); + catch (err) { + state.error = err; + state.isCompleted = true; } } - static async readFloat(stream, options = {}) { - const u8arr = await AvroParser.readFixedBytes(stream, 4, options); - const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); - return view.getFloat32(0, true); // littleEndian = true + return makeBlobBeginCopyFromURLPollOperation(state); +}; +/** + * Note: Intentionally using function expression over arrow function expression + * so that the function can be invoked with a different context. + * This affects what `this` refers to. 
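+ *
+ * For orientation, a caller-side sketch of driving this poll operation through the
+ * public API (editor's illustration; `blobClient` and `sourceUrl` are hypothetical):
+ *
+ * ```js
+ * const poller = await blobClient.beginCopyFromURL(sourceUrl);
+ * const result = await poller.pollUntilDone();
+ * console.log(result.copyStatus); // "success" once the copy completes
+ * ```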
+ * @hidden + */ +const toString = function toString() { + return JSON.stringify({ state: this.state }, (key, value) => { + // remove blobClient from serialized state since a client can't be hydrated from this info. + if (key === "blobClient") { + return undefined; + } + return value; + }); +}; +/** + * Creates a poll operation given the provided state. + * @hidden + */ +function makeBlobBeginCopyFromURLPollOperation(state) { + return { + state: Object.assign({}, state), + cancel, + toString, + update, + }; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Generate a range string. For example: + * + * "bytes=255-" or "bytes=0-511" + * + * @param iRange - + */ +function rangeToString(iRange) { + if (iRange.offset < 0) { + throw new RangeError(`Range.offset cannot be smaller than 0.`); } - static async readDouble(stream, options = {}) { - const u8arr = await AvroParser.readFixedBytes(stream, 8, options); - const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); - return view.getFloat64(0, true); // littleEndian = true + if (iRange.count && iRange.count <= 0) { + throw new RangeError(`Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.`); } - static async readBytes(stream, options = {}) { - const size = await AvroParser.readLong(stream, options); - if (size < 0) { - throw new Error("Bytes size was negative."); + return iRange.count + ? `bytes=${iRange.offset}-${iRange.offset + iRange.count - 1}` + : `bytes=${iRange.offset}-`; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +// In browser, during webpack or browserify bundling, this module will be replaced by 'events' +// https://github.com/Gozala/events +/** + * States for Batch. + */ +var BatchStates; +(function (BatchStates) { + BatchStates[BatchStates["Good"] = 0] = "Good"; + BatchStates[BatchStates["Error"] = 1] = "Error"; +})(BatchStates || (BatchStates = {})); +/** + * Batch provides basic parallel execution with concurrency limits. + * Will stop execute left operations when one of the executed operation throws an error. + * But Batch cannot cancel ongoing operations, you need to cancel them by yourself. + */ +class Batch { + /** + * Creates an instance of Batch. + * @param concurrency - + */ + constructor(concurrency = 5) { + /** + * Number of active operations under execution. + */ + this.actives = 0; + /** + * Number of completed operations under execution. + */ + this.completed = 0; + /** + * Offset of next operation to be executed. + */ + this.offset = 0; + /** + * Operation array to be executed. + */ + this.operations = []; + /** + * States of Batch. When an error happens, state will turn into error. + * Batch will stop execute left operations. + */ + this.state = BatchStates.Good; + if (concurrency < 1) { + throw new RangeError("concurrency must be larger than 0"); } - return stream.read(size, { abortSignal: options.abortSignal }); + this.concurrency = concurrency; + this.emitter = new events.EventEmitter(); } - static async readString(stream, options = {}) { - const u8arr = await AvroParser.readBytes(stream, options); - const utf8decoder = new TextDecoder(); - return utf8decoder.decode(u8arr); + /** + * Add a operation into queue. 
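+ *
+ * Usage sketch for this internal helper (editor's illustration; `tasks` and
+ * `doWork` are hypothetical):
+ *
+ * ```js
+ * const batch = new Batch(2); // at most 2 operations in flight
+ * for (const task of tasks) {
+ *   batch.addOperation(() => doWork(task));
+ * }
+ * await batch.do(); // resolves when all finish, rejects on the first error
+ * ```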
+ * + * @param operation - + */ + addOperation(operation) { + this.operations.push(async () => { + try { + this.actives++; + await operation(); + this.actives--; + this.completed++; + this.parallelExecute(); + } + catch (error) { + this.emitter.emit("error", error); + } + }); } - static async readMapPair(stream, readItemMethod, options = {}) { - const key = await AvroParser.readString(stream, options); - // FUTURE: this won't work with readFixed (currently not supported) which needs a length as the parameter. - const value = await readItemMethod(stream, options); - return { key, value }; + /** + * Start executing the operations in the queue. + * + */ + async do() { + if (this.operations.length === 0) { + return Promise.resolve(); + } + this.parallelExecute(); + return new Promise((resolve, reject) => { + this.emitter.on("finish", resolve); + this.emitter.on("error", (error) => { + this.state = BatchStates.Error; + reject(error); + }); + }); } - static async readMap(stream, readItemMethod, options = {}) { - const readPairMethod = (s, opts = {}) => { - return AvroParser.readMapPair(s, readItemMethod, opts); - }; - const pairs = await AvroParser.readArray(stream, readPairMethod, options); - const dict = {}; - for (const pair of pairs) { - dict[pair.key] = pair.value; + /** + * Get the next operation to be executed. Returns null when reaching the end. + * + */ + nextOperation() { + if (this.offset < this.operations.length) { + return this.operations[this.offset++]; } - return dict; + return null; } - static async readArray(stream, readItemMethod, options = {}) { - const items = []; - for (let count = await AvroParser.readLong(stream, options); count !== 0; count = await AvroParser.readLong(stream, options)) { - if (count < 0) { - // Ignore block sizes - await AvroParser.readLong(stream, options); - count = -count; + /** + * Start executing operations. One of the most important differences between + * this method and do() is that this method is synchronous while do() is + * wrapped as an async method. + * + */ + parallelExecute() { + if (this.state === BatchStates.Error) { + return; + } + if (this.completed >= this.operations.length) { + this.emitter.emit("finish"); + return; + } + while (this.actives < this.concurrency) { + const operation = this.nextOperation(); + if (operation) { + operation(); } - while (count--) { - const item = await readItemMethod(stream, options); - items.push(item); + else { + return; } } - return items; } } -var AvroComplex; -(function (AvroComplex) { - AvroComplex["RECORD"] = "record"; - AvroComplex["ENUM"] = "enum"; - AvroComplex["ARRAY"] = "array"; - AvroComplex["MAP"] = "map"; - AvroComplex["UNION"] = "union"; - AvroComplex["FIXED"] = "fixed"; -})(AvroComplex || (AvroComplex = {})); -var AvroPrimitive; -(function (AvroPrimitive) { - AvroPrimitive["NULL"] = "null"; - AvroPrimitive["BOOLEAN"] = "boolean"; - AvroPrimitive["INT"] = "int"; - AvroPrimitive["LONG"] = "long"; - AvroPrimitive["FLOAT"] = "float"; - AvroPrimitive["DOUBLE"] = "double"; - AvroPrimitive["BYTES"] = "bytes"; - AvroPrimitive["STRING"] = "string"; -})(AvroPrimitive || (AvroPrimitive = {})); -class AvroType { + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * This class generates a readable stream from the data in an array of buffers. + */ +class BuffersStream extends stream.Readable { /** - * Determines the AvroType from the Avro Schema. + * Creates an instance of BuffersStream that will emit the data + * contained in the array of buffers.
+ * + * @param buffers - Array of buffers containing the data + * @param byteLength - The total length of data contained in the buffers */ - static fromSchema(schema) { - if (typeof schema === "string") { - return AvroType.fromStringSchema(schema); - } - else if (Array.isArray(schema)) { - return AvroType.fromArraySchema(schema); - } - else { - return AvroType.fromObjectSchema(schema); + constructor(buffers, byteLength, options) { + super(options); + this.buffers = buffers; + this.byteLength = byteLength; + this.byteOffsetInCurrentBuffer = 0; + this.bufferIndex = 0; + this.pushedBytesLength = 0; + // check byteLength is no larger than buffers[] total length + let buffersLength = 0; + for (const buf of this.buffers) { + buffersLength += buf.byteLength; } - } - static fromStringSchema(schema) { - switch (schema) { - case AvroPrimitive.NULL: - case AvroPrimitive.BOOLEAN: - case AvroPrimitive.INT: - case AvroPrimitive.LONG: - case AvroPrimitive.FLOAT: - case AvroPrimitive.DOUBLE: - case AvroPrimitive.BYTES: - case AvroPrimitive.STRING: - return new AvroPrimitiveType(schema); - default: - throw new Error(`Unexpected Avro type ${schema}`); + if (buffersLength < this.byteLength) { + throw new Error("Data size shouldn't be larger than the total length of buffers."); } } - static fromArraySchema(schema) { - return new AvroUnionType(schema.map(AvroType.fromSchema)); - } - static fromObjectSchema(schema) { - const type = schema.type; - // Primitives can be defined as strings or objects - try { - return AvroType.fromStringSchema(type); + /** + * Internal _read() that will be called when the stream wants to pull more data in. + * + * @param size - Optional. The size of data to be read + */ + _read(size) { + if (this.pushedBytesLength >= this.byteLength) { + this.push(null); } - catch (err) { - // eslint-disable-line no-empty + if (!size) { + size = this.readableHighWaterMark; } - switch (type) { - case AvroComplex.RECORD: - if (schema.aliases) { - throw new Error(`aliases currently is not supported, schema: ${schema}`); - } - if (!schema.name) { - throw new Error(`Required attribute 'name' doesn't exist on schema: ${schema}`); - } - // eslint-disable-next-line no-case-declarations - const fields = {}; - if (!schema.fields) { - throw new Error(`Required attribute 'fields' doesn't exist on schema: ${schema}`); - } - for (const field of schema.fields) { - fields[field.name] = AvroType.fromSchema(field.type); - } - return new AvroRecordType(fields, schema.name); - case AvroComplex.ENUM: - if (schema.aliases) { - throw new Error(`aliases currently is not supported, schema: ${schema}`); - } - if (!schema.symbols) { - throw new Error(`Required attribute 'symbols' doesn't exist on schema: ${schema}`); + const outBuffers = []; + let i = 0; + while (i < size && this.pushedBytesLength < this.byteLength) { + // The last buffer may be longer than the data it contains. 
+ const remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength; + const remainingCapacityInThisBuffer = this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer; + const remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers); + if (remaining > size - i) { + // chunkSize = size - i + const end = this.byteOffsetInCurrentBuffer + size - i; + outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); + this.pushedBytesLength += size - i; + this.byteOffsetInCurrentBuffer = end; + i = size; + break; + } + else { + // chunkSize = remaining + const end = this.byteOffsetInCurrentBuffer + remaining; + outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); + if (remaining === remainingCapacityInThisBuffer) { + // this.buffers[this.bufferIndex] used up, shift to next one + this.byteOffsetInCurrentBuffer = 0; + this.bufferIndex++; } - return new AvroEnumType(schema.symbols); - case AvroComplex.MAP: - if (!schema.values) { - throw new Error(`Required attribute 'values' doesn't exist on schema: ${schema}`); + else { + this.byteOffsetInCurrentBuffer = end; } - return new AvroMapType(AvroType.fromSchema(schema.values)); - case AvroComplex.ARRAY: // Unused today - case AvroComplex.FIXED: // Unused today - default: - throw new Error(`Unexpected Avro type ${type} in ${schema}`); + this.pushedBytesLength += remaining; + i += remaining; + } } - } -} -class AvroPrimitiveType extends AvroType { - constructor(primitive) { - super(); - this._primitive = primitive; - } - read(stream, options = {}) { - switch (this._primitive) { - case AvroPrimitive.NULL: - return AvroParser.readNull(); - case AvroPrimitive.BOOLEAN: - return AvroParser.readBoolean(stream, options); - case AvroPrimitive.INT: - return AvroParser.readInt(stream, options); - case AvroPrimitive.LONG: - return AvroParser.readLong(stream, options); - case AvroPrimitive.FLOAT: - return AvroParser.readFloat(stream, options); - case AvroPrimitive.DOUBLE: - return AvroParser.readDouble(stream, options); - case AvroPrimitive.BYTES: - return AvroParser.readBytes(stream, options); - case AvroPrimitive.STRING: - return AvroParser.readString(stream, options); - default: - throw new Error("Unknown Avro Primitive"); + if (outBuffers.length > 1) { + this.push(Buffer.concat(outBuffers)); + } + else if (outBuffers.length === 1) { + this.push(outBuffers[0]); } } } -class AvroEnumType extends AvroType { - constructor(symbols) { - super(); - this._symbols = symbols; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const maxBufferLength = buffer.constants.MAX_LENGTH; +/** + * This class provides a buffer container which conceptually has no hard size limit. + * It accepts a capacity, an array of input buffers and the total length of input data. + * It will allocate an internal "buffer" of the capacity and fill the data in the input buffers + * into the internal "buffer" serially with respect to the total length. + * Then by calling PooledBuffer.getReadableStream(), you can get a readable stream + * assembled from all the data in the internal "buffer". + */ +class PooledBuffer { + /** + * The size of the data contained in the pooled buffers. + */ + get size() { + return this._size; } - async read(stream, options = {}) { - const value = await AvroParser.readInt(stream, options); - return this._symbols[value]; + constructor(capacity, buffers, totalLength) { + /** + * Internal buffers used to keep the data. 
+ * Each buffer has a length of the maxBufferLength except last one. + */ + this.buffers = []; + this.capacity = capacity; + this._size = 0; + // allocate + const bufferNum = Math.ceil(capacity / maxBufferLength); + for (let i = 0; i < bufferNum; i++) { + let len = i === bufferNum - 1 ? capacity % maxBufferLength : maxBufferLength; + if (len === 0) { + len = maxBufferLength; + } + this.buffers.push(Buffer.allocUnsafe(len)); + } + if (buffers) { + this.fill(buffers, totalLength); + } } -} -class AvroUnionType extends AvroType { - constructor(types) { - super(); - this._types = types; - } - async read(stream, options = {}) { - // eslint-disable-line @typescript-eslint/ban-types - const typeIndex = await AvroParser.readInt(stream, options); - return this._types[typeIndex].read(stream, options); - } -} -class AvroMapType extends AvroType { - constructor(itemType) { - super(); - this._itemType = itemType; - } - read(stream, options = {}) { - const readItemMethod = (s, opts) => { - return this._itemType.read(s, opts); - }; - return AvroParser.readMap(stream, readItemMethod, options); - } -} -class AvroRecordType extends AvroType { - constructor(fields, name) { - super(); - this._fields = fields; - this._name = name; - } - async read(stream, options = {}) { - const record = {}; - record["$schema"] = this._name; - for (const key in this._fields) { - if (Object.prototype.hasOwnProperty.call(this._fields, key)) { - record[key] = await this._fields[key].read(stream, options); + /** + * Fill the internal buffers with data in the input buffers serially + * with respect to the total length and the total capacity of the internal buffers. + * Data copied will be shift out of the input buffers. + * + * @param buffers - Input buffers containing the data to be filled in the pooled buffer + * @param totalLength - Total length of the data to be filled in. + * + */ + fill(buffers, totalLength) { + this._size = Math.min(this.capacity, totalLength); + let i = 0, j = 0, targetOffset = 0, sourceOffset = 0, totalCopiedNum = 0; + while (totalCopiedNum < this._size) { + const source = buffers[i]; + const target = this.buffers[j]; + const copiedNum = source.copy(target, targetOffset, sourceOffset); + totalCopiedNum += copiedNum; + sourceOffset += copiedNum; + targetOffset += copiedNum; + if (sourceOffset === source.length) { + i++; + sourceOffset = 0; + } + if (targetOffset === target.length) { + j++; + targetOffset = 0; } } - return record; + // clear copied from source buffers + buffers.splice(0, i); + if (buffers.length > 0) { + buffers[0] = buffers[0].slice(sourceOffset); + } } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -function arraysEqual(a, b) { - if (a === b) - return true; - // eslint-disable-next-line eqeqeq - if (a == null || b == null) - return false; - if (a.length !== b.length) - return false; - for (let i = 0; i < a.length; ++i) { - if (a[i] !== b[i]) - return false; + /** + * Get the readable stream assembled from all the data in the internal buffers. + * + */ + getReadableStream() { + return new BuffersStream(this.buffers, this.size); } - return true; } // Copyright (c) Microsoft Corporation. 
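+// Editor's note: an illustrative sketch (not part of the upstream source) of how the
+// BuffersStream/PooledBuffer pair above fit together. PooledBuffer copies the input
+// chunks into its pre-allocated internal buffers (fill() splices consumed chunks out
+// of the input array), then exposes the pooled data as a single readable stream:
+//
+//   const chunks = [Buffer.from("hello "), Buffer.from("world")];
+//   const pooled = new PooledBuffer(11, chunks, 11); // capacity, buffers, totalLength
+//   pooled.getReadableStream().pipe(process.stdout); // prints "hello world"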
-class AvroReader { - constructor(dataStream, headerStream, currentBlockOffset, indexWithinCurrentBlock) { - this._dataStream = dataStream; - this._headerStream = headerStream || dataStream; - this._initialized = false; - this._blockOffset = currentBlockOffset || 0; - this._objectIndex = indexWithinCurrentBlock || 0; - this._initialBlockOffset = currentBlockOffset || 0; - } - get blockOffset() { - return this._blockOffset; - } - get objectIndex() { - return this._objectIndex; - } - async initialize(options = {}) { - const header = await AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, { - abortSignal: options.abortSignal, - }); - if (!arraysEqual(header, AVRO_INIT_BYTES)) { - throw new Error("Stream is not an Avro file."); - } - // File metadata is written as if defined by the following map schema: - // { "type": "map", "values": "bytes"} - this._metadata = await AvroParser.readMap(this._headerStream, AvroParser.readString, { - abortSignal: options.abortSignal, - }); - // Validate codec - const codec = this._metadata[AVRO_CODEC_KEY]; - if (!(codec === undefined || codec === null || codec === "null")) { - throw new Error("Codecs are not supported"); +// Licensed under the MIT license. +/** + * This class accepts a Node.js Readable stream as input, and keeps reading data + * from the stream into the internal buffer structure, until it reaches maxBuffers. + * Every available buffer will try to trigger outgoingHandler. + * + * The internal buffer structure includes an incoming buffer array, and a outgoing + * buffer array. The incoming buffer array includes the "empty" buffers can be filled + * with new incoming data. The outgoing array includes the filled buffers to be + * handled by outgoingHandler. Every above buffer size is defined by parameter bufferSize. + * + * NUM_OF_ALL_BUFFERS = BUFFERS_IN_INCOMING + BUFFERS_IN_OUTGOING + BUFFERS_UNDER_HANDLING + * + * NUM_OF_ALL_BUFFERS lesser than or equal to maxBuffers + * + * PERFORMANCE IMPROVEMENT TIPS: + * 1. Input stream highWaterMark is better to set a same value with bufferSize + * parameter, which will avoid Buffer.concat() operations. + * 2. concurrency should set a smaller value than maxBuffers, which is helpful to + * reduce the possibility when a outgoing handler waits for the stream data. + * in this situation, outgoing handlers are blocked. + * Outgoing queue shouldn't be empty. + */ +class BufferScheduler { + /** + * Creates an instance of BufferScheduler. + * + * @param readable - A Node.js Readable stream + * @param bufferSize - Buffer size of every maintained buffer + * @param maxBuffers - How many buffers can be allocated + * @param outgoingHandler - An async function scheduled to be + * triggered when a buffer fully filled + * with stream data + * @param concurrency - Concurrency of executing outgoingHandlers (>0) + * @param encoding - [Optional] Encoding of Readable stream when it's a string stream + */ + constructor(readable, bufferSize, maxBuffers, outgoingHandler, concurrency, encoding) { + /** + * An internal event emitter. + */ + this.emitter = new events.EventEmitter(); + /** + * An internal offset marker to track data offset in bytes of next outgoingHandler. + */ + this.offset = 0; + /** + * An internal marker to track whether stream is end. + */ + this.isStreamEnd = false; + /** + * An internal marker to track whether stream or outgoingHandler returns error. + */ + this.isError = false; + /** + * How many handlers are executing. 
+ */ + this.executingOutgoingHandlers = 0; + /** + * How many buffers have been allocated. + */ + this.numBuffers = 0; + /** + * Because this class doesn't know how much data every time stream pops, which + * is defined by highWaterMarker of the stream. So BufferScheduler will cache + * data received from the stream, when data in unresolvedDataArray exceeds the + * blockSize defined, it will try to concat a blockSize of buffer, fill into available + * buffers from incoming and push to outgoing array. + */ + this.unresolvedDataArray = []; + /** + * How much data consisted in unresolvedDataArray. + */ + this.unresolvedLength = 0; + /** + * The array includes all the available buffers can be used to fill data from stream. + */ + this.incoming = []; + /** + * The array (queue) includes all the buffers filled from stream data. + */ + this.outgoing = []; + if (bufferSize <= 0) { + throw new RangeError(`bufferSize must be larger than 0, current is ${bufferSize}`); } - // The 16-byte, randomly-generated sync marker for this file. - this._syncMarker = await AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, { - abortSignal: options.abortSignal, - }); - // Parse the schema - const schema = JSON.parse(this._metadata[AVRO_SCHEMA_KEY]); - this._itemType = AvroType.fromSchema(schema); - if (this._blockOffset === 0) { - this._blockOffset = this._initialBlockOffset + this._dataStream.position; + if (maxBuffers <= 0) { + throw new RangeError(`maxBuffers must be larger than 0, current is ${maxBuffers}`); } - this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, { - abortSignal: options.abortSignal, - }); - // skip block length - await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }); - this._initialized = true; - if (this._objectIndex && this._objectIndex > 0) { - for (let i = 0; i < this._objectIndex; i++) { - await this._itemType.read(this._dataStream, { abortSignal: options.abortSignal }); - this._itemsRemainingInBlock--; - } + if (concurrency <= 0) { + throw new RangeError(`concurrency must be larger than 0, current is ${concurrency}`); } + this.bufferSize = bufferSize; + this.maxBuffers = maxBuffers; + this.readable = readable; + this.outgoingHandler = outgoingHandler; + this.concurrency = concurrency; + this.encoding = encoding; } - hasNext() { - return !this._initialized || this._itemsRemainingInBlock > 0; - } - parseObjects(options = {}) { - return tslib.__asyncGenerator(this, arguments, function* parseObjects_1() { - if (!this._initialized) { - yield tslib.__await(this.initialize(options)); - } - while (this.hasNext()) { - const result = yield tslib.__await(this._itemType.read(this._dataStream, { - abortSignal: options.abortSignal, - })); - this._itemsRemainingInBlock--; - this._objectIndex++; - if (this._itemsRemainingInBlock === 0) { - const marker = yield tslib.__await(AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, { - abortSignal: options.abortSignal, - })); - this._blockOffset = this._initialBlockOffset + this._dataStream.position; - this._objectIndex = 0; - if (!arraysEqual(this._syncMarker, marker)) { - throw new Error("Stream is not a valid Avro file."); - } - try { - this._itemsRemainingInBlock = yield tslib.__await(AvroParser.readLong(this._dataStream, { - abortSignal: options.abortSignal, - })); + /** + * Start the scheduler, will return error when stream of any of the outgoingHandlers + * returns error. 
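+ *
+ * Sketch of how this internal scheduler is typically driven (editor's illustration;
+ * `readable` and the handler body are hypothetical):
+ *
+ * ```js
+ * const scheduler = new BufferScheduler(
+ *   readable,        // source Node.js Readable stream
+ *   4 * 1024 * 1024, // bufferSize: 4 MiB per pooled buffer
+ *   20,              // maxBuffers
+ *   async (getStream, length, offset) => {
+ *     // consume one filled buffer, e.g. upload getStream() somewhere
+ *   },
+ *   5                // concurrency of outgoing handlers
+ * );
+ * await scheduler.do();
+ * ```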
+ * + */ + async do() { + return new Promise((resolve, reject) => { + this.readable.on("data", (data) => { + data = typeof data === "string" ? Buffer.from(data, this.encoding) : data; + this.appendUnresolvedData(data); + if (!this.resolveData()) { + this.readable.pause(); + } + }); + this.readable.on("error", (err) => { + this.emitter.emit("error", err); + }); + this.readable.on("end", () => { + this.isStreamEnd = true; + this.emitter.emit("checkEnd"); + }); + this.emitter.on("error", (err) => { + this.isError = true; + this.readable.pause(); + reject(err); + }); + this.emitter.on("checkEnd", () => { + if (this.outgoing.length > 0) { + this.triggerOutgoingHandlers(); + return; + } + if (this.isStreamEnd && this.executingOutgoingHandlers === 0) { + if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) { + const buffer = this.shiftBufferFromUnresolvedDataArray(); + this.outgoingHandler(() => buffer.getReadableStream(), buffer.size, this.offset) + .then(resolve) + .catch(reject); } - catch (err) { - // We hit the end of the stream. - this._itemsRemainingInBlock = 0; + else if (this.unresolvedLength >= this.bufferSize) { + return; } - if (this._itemsRemainingInBlock > 0) { - // Ignore block size - yield tslib.__await(AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal })); + else { + resolve(); } } - yield yield tslib.__await(result); - } + }); }); } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -class AvroReadable { -} - -// Copyright (c) Microsoft Corporation. -const ABORT_ERROR = new abortController.AbortError("Reading from the avro stream was aborted."); -class AvroReadableFromStream extends AvroReadable { - constructor(readable) { - super(); - this._readable = readable; - this._position = 0; - } - toUint8Array(data) { - if (typeof data === "string") { - return Buffer.from(data); - } - return data; - } - get position() { - return this._position; + /** + * Insert a new data into unresolved array. + * + * @param data - + */ + appendUnresolvedData(data) { + this.unresolvedDataArray.push(data); + this.unresolvedLength += data.length; } - async read(size, options = {}) { - var _a; - if ((_a = options.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { - throw ABORT_ERROR; + /** + * Try to shift a buffer with size in blockSize. The buffer returned may be less + * than blockSize when data in unresolvedDataArray is less than bufferSize. + * + */ + shiftBufferFromUnresolvedDataArray(buffer) { + if (!buffer) { + buffer = new PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength); } - if (size < 0) { - throw new Error(`size parameter should be positive: ${size}`); + else { + buffer.fill(this.unresolvedDataArray, this.unresolvedLength); } - if (size === 0) { - return new Uint8Array(); + this.unresolvedLength -= buffer.size; + return buffer; + } + /** + * Resolve data in unresolvedDataArray. For every buffer with size in blockSize + * shifted, it will try to get (or allocate a buffer) from incoming, and fill it, + * then push it into outgoing to be handled by outgoing handler. + * + * Return false when available buffers in incoming are not enough, else true. + * + * @returns Return false when buffers in incoming are not enough, else true. 
+ */ + resolveData() { + while (this.unresolvedLength >= this.bufferSize) { + let buffer; + if (this.incoming.length > 0) { + buffer = this.incoming.shift(); + this.shiftBufferFromUnresolvedDataArray(buffer); + } + else { + if (this.numBuffers < this.maxBuffers) { + buffer = this.shiftBufferFromUnresolvedDataArray(); + this.numBuffers++; + } + else { + // No available buffer, wait for buffer returned + return false; + } + } + this.outgoing.push(buffer); + this.triggerOutgoingHandlers(); } - if (!this._readable.readable) { - throw new Error("Stream no longer readable."); + return true; + } + /** + * Try to trigger a outgoing handler for every buffer in outgoing. Stop when + * concurrency reaches. + */ + async triggerOutgoingHandlers() { + let buffer; + do { + if (this.executingOutgoingHandlers >= this.concurrency) { + return; + } + buffer = this.outgoing.shift(); + if (buffer) { + this.triggerOutgoingHandler(buffer); + } + } while (buffer); + } + /** + * Trigger a outgoing handler for a buffer shifted from outgoing. + * + * @param buffer - + */ + async triggerOutgoingHandler(buffer) { + const bufferLength = buffer.size; + this.executingOutgoingHandlers++; + this.offset += bufferLength; + try { + await this.outgoingHandler(() => buffer.getReadableStream(), bufferLength, this.offset - bufferLength); } - // See if there is already enough data. - const chunk = this._readable.read(size); - if (chunk) { - this._position += chunk.length; - // chunk.length maybe less than desired size if the stream ends. - return this.toUint8Array(chunk); + catch (err) { + this.emitter.emit("error", err); + return; } - else { - // register callback to wait for enough data to read - return new Promise((resolve, reject) => { - /* eslint-disable @typescript-eslint/no-use-before-define */ - const cleanUp = () => { - this._readable.removeListener("readable", readableCallback); - this._readable.removeListener("error", rejectCallback); - this._readable.removeListener("end", rejectCallback); - this._readable.removeListener("close", rejectCallback); - if (options.abortSignal) { - options.abortSignal.removeEventListener("abort", abortHandler); - } - }; - const readableCallback = () => { - const callbackChunk = this._readable.read(size); - if (callbackChunk) { - this._position += callbackChunk.length; - cleanUp(); - // callbackChunk.length maybe less than desired size if the stream ends. - resolve(this.toUint8Array(callbackChunk)); - } - }; - const rejectCallback = () => { - cleanUp(); - reject(); - }; - const abortHandler = () => { - cleanUp(); - reject(ABORT_ERROR); - }; - this._readable.on("readable", readableCallback); - this._readable.once("error", rejectCallback); - this._readable.once("end", rejectCallback); - this._readable.once("close", rejectCallback); - if (options.abortSignal) { - options.abortSignal.addEventListener("abort", abortHandler); - } - /* eslint-enable @typescript-eslint/no-use-before-define */ - }); + this.executingOutgoingHandlers--; + this.reuseBuffer(buffer); + this.emitter.emit("checkEnd"); + } + /** + * Return buffer used by outgoing handler into incoming. + * + * @param buffer - + */ + reuseBuffer(buffer) { + this.incoming.push(buffer); + if (!this.isError && this.resolveData() && !this.isStreamEnd) { + this.readable.resume(); } } } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * ONLY AVAILABLE IN NODE.JS RUNTIME. + * Reads a readable stream into buffer. Fill the buffer from offset to end. 
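+ *
+ * Editor's illustration (the `source` stream is hypothetical), filling the first
+ * 512 bytes of a buffer:
+ *
+ * ```js
+ * const buf = Buffer.alloc(1024);
+ * await streamToBuffer(source, buf, 0, 512); // fills buf[0..512), rejects on timeout
+ * ```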
* - * A Node.js BlobQuickQueryStream will internally parse avro data stream for blob query. + * @param stream - A Node.js Readable stream + * @param buffer - Buffer to be filled, length must greater than or equal to offset + * @param offset - From which position in the buffer to be filled, inclusive + * @param end - To which position in the buffer to be filled, exclusive + * @param encoding - Encoding of the Readable stream */ -class BlobQuickQueryStream extends stream.Readable { - /** - * Creates an instance of BlobQuickQueryStream. - * - * @param source - The current ReadableStream returned from getter - * @param options - - */ - constructor(source, options = {}) { - super(); - this.avroPaused = true; - this.source = source; - this.onProgress = options.onProgress; - this.onError = options.onError; - this.avroReader = new AvroReader(new AvroReadableFromStream(this.source)); - this.avroIter = this.avroReader.parseObjects({ abortSignal: options.abortSignal }); - } - _read() { - if (this.avroPaused) { - this.readInternal().catch((err) => { - this.emit("error", err); - }); - } - } - async readInternal() { - this.avroPaused = false; - let avroNext; - do { - avroNext = await this.avroIter.next(); - if (avroNext.done) { - break; +async function streamToBuffer(stream, buffer, offset, end, encoding) { + let pos = 0; // Position in stream + const count = end - offset; // Total amount of data needed in stream + return new Promise((resolve, reject) => { + const timeout = setTimeout(() => reject(new Error(`The operation cannot be completed in timeout.`)), REQUEST_TIMEOUT); + stream.on("readable", () => { + if (pos >= count) { + clearTimeout(timeout); + resolve(); + return; } - const obj = avroNext.value; - const schema = obj.$schema; - if (typeof schema !== "string") { - throw Error("Missing schema in avro record."); + let chunk = stream.read(); + if (!chunk) { + return; } - switch (schema) { - case "com.microsoft.azure.storage.queryBlobContents.resultData": - { - const data = obj.data; - if (data instanceof Uint8Array === false) { - throw Error("Invalid data in avro result record."); - } - if (!this.push(Buffer.from(data))) { - this.avroPaused = true; - } - } - break; - case "com.microsoft.azure.storage.queryBlobContents.progress": - { - const bytesScanned = obj.bytesScanned; - if (typeof bytesScanned !== "number") { - throw Error("Invalid bytesScanned in avro progress record."); - } - if (this.onProgress) { - this.onProgress({ loadedBytes: bytesScanned }); - } - } - break; - case "com.microsoft.azure.storage.queryBlobContents.end": - if (this.onProgress) { - const totalBytes = obj.totalBytes; - if (typeof totalBytes !== "number") { - throw Error("Invalid totalBytes in avro end record."); - } - this.onProgress({ loadedBytes: totalBytes }); - } - this.push(null); - break; - case "com.microsoft.azure.storage.queryBlobContents.error": - if (this.onError) { - const fatal = obj.fatal; - if (typeof fatal !== "boolean") { - throw Error("Invalid fatal in avro error record."); - } - const name = obj.name; - if (typeof name !== "string") { - throw Error("Invalid name in avro error record."); - } - const description = obj.description; - if (typeof description !== "string") { - throw Error("Invalid description in avro error record."); - } - const position = obj.position; - if (typeof position !== "number") { - throw Error("Invalid position in avro error record."); - } - this.onError({ - position, - name, - isFatal: fatal, - description, - }); - } - break; - default: - throw Error(`Unknown schema ${schema} in 
avro progress record.`); + if (typeof chunk === "string") { + chunk = Buffer.from(chunk, encoding); } - } while (!avroNext.done && !this.avroPaused); - } + // How much data needed in this chunk + const chunkLength = pos + chunk.length > count ? count - pos : chunk.length; + buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength); + pos += chunkLength; + }); + stream.on("end", () => { + clearTimeout(timeout); + if (pos < count) { + reject(new Error(`Stream drains before getting enough data needed. Data read: ${pos}, data need: ${count}`)); + } + resolve(); + }); + stream.on("error", (msg) => { + clearTimeout(timeout); + reject(msg); + }); + }); +} +/** + * Reads a readable stream into buffer entirely. + * + * @param stream - A Node.js Readable stream + * @param buffer - Buffer to be filled, length must greater than or equal to offset + * @param encoding - Encoding of the Readable stream + * @returns with the count of bytes read. + * @throws `RangeError` If buffer size is not big enough. + */ +async function streamToBuffer2(stream, buffer, encoding) { + let pos = 0; // Position in stream + const bufferSize = buffer.length; + return new Promise((resolve, reject) => { + stream.on("readable", () => { + let chunk = stream.read(); + if (!chunk) { + return; + } + if (typeof chunk === "string") { + chunk = Buffer.from(chunk, encoding); + } + if (pos + chunk.length > bufferSize) { + reject(new Error(`Stream exceeds buffer size. Buffer size: ${bufferSize}`)); + return; + } + buffer.fill(chunk, pos, pos + chunk.length); + pos += chunk.length; + }); + stream.on("end", () => { + resolve(pos); + }); + stream.on("error", reject); + }); } - -// Copyright (c) Microsoft Corporation. /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * - * BlobQueryResponse implements BlobDownloadResponseModel interface, and in Node.js runtime it will - * parse avor data returned by blob query. + * Writes the content of a readstream to a local file. Returns a Promise which is completed after the file handle is closed. + * + * @param rs - The read stream. + * @param file - Destination file path. */ -class BlobQueryResponse { +async function readStreamToLocalFile(rs, file) { + return new Promise((resolve, reject) => { + const ws = fs__namespace.createWriteStream(file); + rs.on("error", (err) => { + reject(err); + }); + ws.on("error", (err) => { + reject(err); + }); + ws.on("close", resolve); + rs.pipe(ws); + }); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Promisified version of fs.stat(). + */ +const fsStat = util__namespace.promisify(fs__namespace.stat); +const fsCreateReadStream = fs__namespace.createReadStream; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob, + * append blob, or page blob. + */ +class BlobClient extends StorageClient { /** - * Creates an instance of BlobQueryResponse. - * - * @param originalResponse - - * @param options - + * The name of the blob. */ - constructor(originalResponse, options = {}) { - this.originalResponse = originalResponse; - this.blobDownloadStream = new BlobQuickQueryStream(this.originalResponse.readableStreamBody, options); + get name() { + return this._name; } /** - * Indicates that the service supports - * requests for partial file content. - * - * @readonly + * The name of the storage container the blob is associated with. 
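+ *
+ * Editor's note: for a blob URL such as
+ * `https://myaccount.blob.core.windows.net/my-container/dir/file.txt`,
+ * `containerName` is "my-container" and `name` is "dir/file.txt"
+ * (the blob name keeps any virtual directory prefix).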
*/ - get acceptRanges() { - return this.originalResponse.acceptRanges; + get containerName() { + return this._containerName; } - /** - * Returns if it was previously specified - * for the file. - * - * @readonly - */ - get cacheControl() { - return this.originalResponse.cacheControl; + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + options = options || {}; + let pipeline; + let url; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. + url = urlOrConnectionString; + if (blobNameOrOptions && typeof blobNameOrOptions !== "string") { + options = blobNameOrOptions; + } + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreUtil.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" 
+ + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + ({ blobName: this._name, containerName: this._containerName } = + this.getBlobAndContainerNamesFromUrl()); + this.blobContext = this.storageClientContext.blob; + this._snapshot = getURLParameter(this.url, URLConstants.Parameters.SNAPSHOT); + this._versionId = getURLParameter(this.url, URLConstants.Parameters.VERSIONID); } /** - * Returns the value that was specified - * for the 'x-ms-content-disposition' header and specifies how to process the - * response. + * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. * - * @readonly + * @param snapshot - The snapshot timestamp. + * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp */ - get contentDisposition() { - return this.originalResponse.contentDisposition; + withSnapshot(snapshot) { + return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); } /** - * Returns the value that was specified - * for the Content-Encoding request header. + * Creates a new BlobClient object pointing to a version of this blob. + * Provide "" will remove the versionId and return a Client to the base blob. * - * @readonly + * @param versionId - The versionId. + * @returns A new BlobClient object pointing to the version of this blob. */ - get contentEncoding() { - return this.originalResponse.contentEncoding; + withVersion(versionId) { + return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.VERSIONID, versionId.length === 0 ? undefined : versionId), this.pipeline); } /** - * Returns the value that was specified - * for the Content-Language request header. + * Creates a AppendBlobClient object. * - * @readonly */ - get contentLanguage() { - return this.originalResponse.contentLanguage; + getAppendBlobClient() { + return new AppendBlobClient(this.url, this.pipeline); } /** - * The current sequence number for a - * page blob. This header is not returned for block blobs or append blobs. + * Creates a BlockBlobClient object. * - * @readonly */ - get blobSequenceNumber() { - return this.originalResponse.blobSequenceNumber; + getBlockBlobClient() { + return new BlockBlobClient(this.url, this.pipeline); } /** - * The blob's type. Possible values include: - * 'BlockBlob', 'PageBlob', 'AppendBlob'. + * Creates a PageBlobClient object. * - * @readonly */ - get blobType() { - return this.originalResponse.blobType; + getPageBlobClient() { + return new PageBlobClient(this.url, this.pipeline); } /** - * The number of bytes present in the - * response body. + * Reads or downloads a blob from the system, including its metadata and properties. + * You can also call Get Blob to read a snapshot. * - * @readonly - */ - get contentLength() { - return this.originalResponse.contentLength; - } - /** - * If the file has an MD5 hash and the - * request is to read the full file, this response header is returned so that - * the client can check for message content integrity. 
If the request is to - * read a specified range and the 'x-ms-range-get-content-md5' is set to - * true, then the request returns an MD5 hash for the range, as long as the - * range size is less than or equal to 4 MB. If neither of these sets of - * conditions is true, then no value is returned for the 'Content-MD5' - * header. + * * In Node.js, data returns in a Readable stream readableStreamBody + * * In browsers, data returns in a promise blobBody * - * @readonly + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob + * + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined + * @param options - Optional options to Blob Download operation. + * + * + * Example usage (Node.js): + * + * ```js + * // Download and convert a blob to a string + * const downloadBlockBlobResponse = await blobClient.download(); + * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody); + * console.log("Downloaded blob content:", downloaded.toString()); + * + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` + * + * Example usage (browser): + * + * ```js + * // Download and convert a blob to a string + * const downloadBlockBlobResponse = await blobClient.download(); + * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody); + * console.log( + * "Downloaded blob content", + * downloaded + * ); + * + * async function blobToString(blob: Blob): Promise { + * const fileReader = new FileReader(); + * return new Promise((resolve, reject) => { + * fileReader.onloadend = (ev: any) => { + * resolve(ev.target!.result); + * }; + * fileReader.onerror = reject; + * fileReader.readAsText(blob); + * }); + * } + * ``` */ - get contentMD5() { - return this.originalResponse.contentMD5; + async download(offset = 0, count, options = {}) { + options.conditions = options.conditions || {}; + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlobClient-download", options, async (updatedOptions) => { + var _a; + const res = assertResponse(await this.blobContext.download({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + requestOptions: { + onDownloadProgress: coreUtil.isNode ? undefined : options.onProgress, // for Node.js, progress is reported by RetriableReadableStream + }, + range: offset === 0 && !count ? 
undefined : rangeToString({ offset, count }), + rangeGetContentMD5: options.rangeGetContentMD5, + rangeGetContentCRC64: options.rangeGetContentCrc64, + snapshot: options.snapshot, + cpkInfo: options.customerProvidedKey, + tracingOptions: updatedOptions.tracingOptions, + })); + const wrappedRes = Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); + // Return browser response immediately + if (!coreUtil.isNode) { + return wrappedRes; + } + // We support retrying when download stream unexpected ends in Node.js runtime + // Following code shouldn't be bundled into browser build, however some + // bundlers may try to bundle following code and "FileReadResponse.ts". + // In this case, "FileDownloadResponse.browser.ts" will be used as a shim of "FileDownloadResponse.ts" + // The config is in package.json "browser" field + if (options.maxRetryRequests === undefined || options.maxRetryRequests < 0) { + // TODO: Default value or make it a required parameter? + options.maxRetryRequests = DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS; + } + if (res.contentLength === undefined) { + throw new RangeError(`File download response doesn't contain valid content length header`); + } + if (!res.etag) { + throw new RangeError(`File download response doesn't contain valid etag header`); + } + return new BlobDownloadResponse(wrappedRes, async (start) => { + var _a; + const updatedDownloadOptions = { + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ifMatch: options.conditions.ifMatch || res.etag, + ifModifiedSince: options.conditions.ifModifiedSince, + ifNoneMatch: options.conditions.ifNoneMatch, + ifUnmodifiedSince: options.conditions.ifUnmodifiedSince, + ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions, + }, + range: rangeToString({ + count: offset + res.contentLength - start, + offset: start, + }), + rangeGetContentMD5: options.rangeGetContentMD5, + rangeGetContentCRC64: options.rangeGetContentCrc64, + snapshot: options.snapshot, + cpkInfo: options.customerProvidedKey, + }; + // Debug purpose only + // console.log( + // `Read from internal stream, range: ${ + // updatedOptions.range + // }, options: ${JSON.stringify(updatedOptions)}` + // ); + return (await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal }, updatedDownloadOptions))).readableStreamBody; + }, offset, res.contentLength, { + maxRetryRequests: options.maxRetryRequests, + onProgress: options.onProgress, + }); + }); } /** - * Indicates the range of bytes returned if - * the client requested a subset of the file by setting the Range request - * header. + * Returns true if the Azure blob resource represented by this client exists; false otherwise. * - * @readonly + * NOTE: use this function with care since an existing blob might be deleted by other clients or + * applications. Vice versa new blobs might be added by other clients or applications after this + * function completes. + * + * @param options - options to Exists operation. 
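+ *
+ * Editor's illustration (hypothetical `blobClient`):
+ *
+ * ```js
+ * if (await blobClient.exists()) {
+ *   console.log("blob is present");
+ * }
+ * ```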
*/ - get contentRange() { - return this.originalResponse.contentRange; + async exists(options = {}) { + return tracingClient.withSpan("BlobClient-exists", options, async (updatedOptions) => { + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + await this.getProperties({ + abortSignal: options.abortSignal, + customerProvidedKey: options.customerProvidedKey, + conditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + }); + return true; + } + catch (e) { + if (e.statusCode === 404) { + // Expected exception when checking blob existence + return false; + } + else if (e.statusCode === 409 && + (e.details.errorCode === BlobUsesCustomerSpecifiedEncryptionMsg || + e.details.errorCode === BlobDoesNotUseCustomerSpecifiedEncryption)) { + // Expected exception when checking blob existence + return true; + } + throw e; + } + }); } /** - * The content type specified for the file. - * The default content type is 'application/octet-stream' + * Returns all user-defined metadata, standard HTTP properties, and system properties + * for the blob. It does not return the content of the blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties * - * @readonly + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. This differs from the metadata keys returned by + * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which + * will retain their original casing. + * + * @param options - Optional options to Get Properties operation. */ - get contentType() { - return this.originalResponse.contentType; + async getProperties(options = {}) { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlobClient-getProperties", options, async (updatedOptions) => { + var _a; + const res = assertResponse(await this.blobContext.getProperties({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + cpkInfo: options.customerProvidedKey, + tracingOptions: updatedOptions.tracingOptions, + })); + return Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); + }); } /** - * Conclusion time of the last attempted - * Copy File operation where this file was the destination file. This value - * can specify the time of a completed, aborted, or failed copy attempt. + * Marks the specified blob or snapshot for deletion. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob * - * @readonly + * @param options - Optional options to Blob Delete operation. 
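+ *
+ * Editor's illustration (hypothetical `blobClient`; passing `deleteSnapshots: "include"`
+ * removes the blob together with its snapshots):
+ *
+ * ```js
+ * await blobClient.delete({ deleteSnapshots: "include" });
+ * ```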
*/ - get copyCompletedOn() { - return undefined; + async delete(options = {}) { + options.conditions = options.conditions || {}; + return tracingClient.withSpan("BlobClient-delete", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.blobContext.delete({ + abortSignal: options.abortSignal, + deleteSnapshots: options.deleteSnapshots, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** - * String identifier for the last attempted Copy - * File operation where this file was the destination file. + * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob * - * @readonly + * @param options - Optional options to Blob Delete operation. */ - get copyId() { - return this.originalResponse.copyId; + async deleteIfExists(options = {}) { + return tracingClient.withSpan("BlobClient-deleteIfExists", options, async (updatedOptions) => { + var _a, _b; + try { + const res = assertResponse(await this.delete(updatedOptions)); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobNotFound") { + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + throw e; + } + }); } /** - * Contains the number of bytes copied and - * the total bytes in the source in the last attempted Copy File operation - * where this file was the destination file. Can show between 0 and - * Content-Length bytes copied. + * Restores the contents and metadata of soft deleted blob and any associated + * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29 + * or later. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/undelete-blob * - * @readonly + * @param options - Optional options to Blob Undelete operation. */ - get copyProgress() { - return this.originalResponse.copyProgress; + async undelete(options = {}) { + return tracingClient.withSpan("BlobClient-undelete", options, async (updatedOptions) => { + return assertResponse(await this.blobContext.undelete({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** - * URL up to 2KB in length that specifies the - * source file used in the last attempted Copy File operation where this file - * was the destination file. + * Sets system properties on the blob. * - * @readonly + * If no value provided, or no value provided for the specified blob HTTP headers, + * these blob HTTP headers without a value will be cleared. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties + * + * @param blobHTTPHeaders - If no value provided, or no value provided for + * the specified blob HTTP headers, these blob HTTP + * headers without a value will be cleared. + * A common header to set is `blobContentType` + * enabling the browser to provide functionality + * based on file type. 
+ * @param options - Optional options to Blob Set HTTP Headers operation. */ - get copySource() { - return this.originalResponse.copySource; + async setHTTPHeaders(blobHTTPHeaders, options = {}) { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlobClient-setHTTPHeaders", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.blobContext.setHttpHeaders({ + abortSignal: options.abortSignal, + blobHttpHeaders: blobHTTPHeaders, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + // cpkInfo: options.customerProvidedKey, // CPK is not included in Swagger, should change this back when this issue is fixed in Swagger. + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** - * State of the copy operation - * identified by 'x-ms-copy-id'. Possible values include: 'pending', - * 'success', 'aborted', 'failed' + * Sets user-defined metadata for the specified blob as one or more name-value pairs. * - * @readonly + * If no option is provided, or no metadata is defined in the parameter, the blob + * metadata will be removed. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata + * + * @param metadata - Replace existing metadata with this value. + * If no value is provided, the existing metadata will be removed. + * @param options - Optional options to Set Metadata operation. */ - get copyStatus() { - return this.originalResponse.copyStatus; + async setMetadata(metadata, options = {}) { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlobClient-setMetadata", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.blobContext.setMetadata({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + metadata, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** - * Only appears when - * x-ms-copy-status is failed or pending. Describes cause of fatal or - * non-fatal copy operation failure. + * Sets tags on the underlying blob. + * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters. + * Valid tag key and value characters include lower and upper case letters, digits (0-9), + * space (' '), plus ('+'), minus ('-'), period ('.'), forward slash ('/'), colon (':'), equals ('='), and underscore ('_'). * - * @readonly + * @param tags - + * @param options - */ - get copyStatusDescription() { - return this.originalResponse.copyStatusDescription; + async setTags(tags, options = {}) { + return tracingClient.withSpan("BlobClient-setTags", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.blobContext.setTags({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + tags: toBlobTags(tags), + })); + }); } /** - * When a blob is leased, - * specifies whether the lease is of infinite or fixed duration. Possible - * values include: 'infinite', 'fixed'. + * Gets the tags associated with the underlying blob. * - * @readonly + * @param options - */ - get leaseDuration() { - return this.originalResponse.leaseDuration; + async getTags(options = {}) { + return tracingClient.withSpan("BlobClient-getTags", options, async (updatedOptions) => { + var _a; + const response = assertResponse(await this.blobContext.getTags({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, tags: toTags({ blobTagSet: response.blobTagSet }) || {} }); + return wrappedResponse; + }); } /** - * Lease state of the blob. Possible - * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. + * Get a {@link BlobLeaseClient} that manages leases on the blob. * - * @readonly + * @param proposeLeaseId - Initial proposed lease Id. + * @returns A new BlobLeaseClient object for managing leases on the blob. */ - get leaseState() { - return this.originalResponse.leaseState; + getBlobLeaseClient(proposeLeaseId) { + return new BlobLeaseClient(this, proposeLeaseId); } /** - * The current lease status of the - * blob. Possible values include: 'locked', 'unlocked'. + * Creates a read-only snapshot of a blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/snapshot-blob * - * @readonly + * @param options - Optional options to the Blob Create Snapshot operation. */ - get leaseStatus() { - return this.originalResponse.leaseStatus; + async createSnapshot(options = {}) { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlobClient-createSnapshot", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.blobContext.createSnapshot({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** - * A UTC date/time value generated by the service that - * indicates the time at which the response was initiated. + * Asynchronously copies a blob to a destination within the storage account. + * This method returns a long running operation poller that allows you to wait + * indefinitely until the copy is completed. + * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller. + * Note that the onProgress callback will not be invoked if the operation completes in the first + * request, and attempting to cancel a completed copy will result in an error being thrown. * - * @readonly + * In version 2012-02-12 and later, the source for a Copy Blob operation can be + * a committed blob in any Azure storage account. 
+ * Beginning with version 2015-02-21, the source for a Copy Blob operation can be + * an Azure file in any Azure storage account. + * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob + * operation to copy from another storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob + * + * Example using automatic polling: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using manual polling: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * while (!copyPoller.isDone()) { + * await copyPoller.poll(); + * } + * const result = copyPoller.getResult(); + * ``` + * + * Example using progress updates: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url', { + * onProgress(state) { + * console.log(`Progress: ${state.copyProgress}`); + * } + * }); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using a changing polling interval (default 15 seconds): + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url', { + * intervalInMs: 1000 // poll blob every 1 second for copy progress + * }); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using copy cancellation: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * // cancel operation after starting it. + * try { + * await copyPoller.cancelOperation(); + * // calls to get the result now throw PollerCancelledError + * await copyPoller.getResult(); + * } catch (err) { + * if (err.name === 'PollerCancelledError') { + * console.log('The copy was cancelled.'); + * } + * } + * ``` + * + * @param copySource - url to the source Azure Blob/File. + * @param options - Optional options to the Blob Start Copy From URL operation. */ - get date() { - return this.originalResponse.date; + async beginCopyFromURL(copySource, options = {}) { + const client = { + abortCopyFromURL: (...args) => this.abortCopyFromURL(...args), + getProperties: (...args) => this.getProperties(...args), + startCopyFromURL: (...args) => this.startCopyFromURL(...args), + }; + const poller = new BlobBeginCopyFromUrlPoller({ + blobClient: client, + copySource, + intervalInMs: options.intervalInMs, + onProgress: options.onProgress, + resumeFrom: options.resumeFrom, + startCopyFromURLOptions: options, + }); + // Trigger the startCopyFromURL call by calling poll. + // Any errors from this method should be surfaced to the user. + await poller.poll(); + return poller; } /** - * The number of committed blocks - * present in the blob. This header is returned only for append blobs. + * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero + * length and full metadata. Version 2012-02-12 and newer. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob * - * @readonly + * @param copyId - Id of the Copy From URL operation. + * @param options - Optional options to the Blob Abort Copy From URL operation.
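+ *
+ * Example usage (a sketch; `sourceUrl` is an assumed source blob URL, and the copy id is taken from the startCopyFromURL response):
+ *
+ * ```js
+ * const copyResponse = await blobClient.startCopyFromURL(sourceUrl);
+ * if (copyResponse.copyStatus === "pending") {
+ *   await blobClient.abortCopyFromURL(copyResponse.copyId);
+ * }
+ * ```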
*/ - get blobCommittedBlockCount() { - return this.originalResponse.blobCommittedBlockCount; + async abortCopyFromURL(copyId, options = {}) { + return tracingClient.withSpan("BlobClient-abortCopyFromURL", options, async (updatedOptions) => { + return assertResponse(await this.blobContext.abortCopyFromURL(copyId, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** - * The ETag contains a value that you can use to - * perform operations conditionally, in quotes. + * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not + * return a response until the copy is complete. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url * - * @readonly + * @param copySource - The source URL to copy from; a Shared Access Signature (SAS) may be needed for authentication + * @param options - */ - get etag() { - return this.originalResponse.etag; + async syncCopyFromURL(copySource, options = {}) { + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + return tracingClient.withSpan("BlobClient-syncCopyFromURL", options, async (updatedOptions) => { + var _a, _b, _c, _d, _e, _f, _g; + return assertResponse(await this.blobContext.copyFromURL(copySource, { + abortSignal: options.abortSignal, + metadata: options.metadata, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + sourceModifiedAccessConditions: { + sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifMatch, + sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince, + sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch, + sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.ifUnmodifiedSince, + }, + sourceContentMD5: options.sourceContentMD5, + copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), + tier: toAccessTier(options.tier), + blobTagsString: toBlobTagsString(options.tags), + immutabilityPolicyExpiry: (_f = options.immutabilityPolicy) === null || _f === void 0 ? void 0 : _f.expiriesOn, + immutabilityPolicyMode: (_g = options.immutabilityPolicy) === null || _g === void 0 ? void 0 : _g.policyMode, + legalHold: options.legalHold, + encryptionScope: options.encryptionScope, + copySourceTags: options.copySourceTags, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** - * The error code. + * Sets the tier on a blob. The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant + * storage only). A premium page blob's tier determines the allowed size, IOPS, + * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive + * storage type. This operation does not update the blob's ETag. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier * - * @readonly + * @param tier - The tier to be set on the blob. Valid values are Hot, Cool, or Archive. + * @param options - Optional options to the Blob Set Tier operation.
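+ *
+ * Example usage (a sketch; `blobClient` is assumed to reference a block blob in a blob storage account):
+ *
+ * ```js
+ * // Move rarely accessed data to the Archive tier.
+ * await blobClient.setAccessTier("Archive");
+ * ```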
*/ - get errorCode() { - return this.originalResponse.errorCode; + async setAccessTier(tier, options = {}) { + return tracingClient.withSpan("BlobClient-setAccessTier", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.blobContext.setTier(toAccessTier(tier), { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + rehydratePriority: options.rehydratePriority, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + async downloadToBuffer(param1, param2, param3, param4 = {}) { + var _a; + let buffer; + let offset = 0; + let count = 0; + let options = param4; + if (param1 instanceof Buffer) { + buffer = param1; + offset = param2 || 0; + count = typeof param3 === "number" ? param3 : 0; + } + else { + offset = typeof param1 === "number" ? param1 : 0; + count = typeof param2 === "number" ? param2 : 0; + options = param3 || {}; + } + let blockSize = (_a = options.blockSize) !== null && _a !== void 0 ? _a : 0; + if (blockSize < 0) { + throw new RangeError("blockSize option must be >= 0"); + } + if (blockSize === 0) { + blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + } + if (offset < 0) { + throw new RangeError("offset option must be >= 0"); + } + if (count && count <= 0) { + throw new RangeError("count option must be greater than 0"); + } + if (!options.conditions) { + options.conditions = {}; + } + return tracingClient.withSpan("BlobClient-downloadToBuffer", options, async (updatedOptions) => { + // Customer doesn't specify length, get it + if (!count) { + const response = await this.getProperties(Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions })); + count = response.contentLength - offset; + if (count < 0) { + throw new RangeError(`offset ${offset} shouldn't be larger than blob size ${response.contentLength}`); + } + } + // Allocate the buffer of size = count if the buffer is not provided + if (!buffer) { + try { + buffer = Buffer.alloc(count); + } + catch (error) { + throw new Error(`Unable to allocate the buffer of size: ${count}(in bytes). 
Please try passing your own buffer to the "downloadToBuffer" method or try using other methods like "download" or "downloadToFile".\t ${error.message}`); + } + } + if (buffer.length < count) { + throw new RangeError(`The buffer's size should be equal to or larger than the request count of bytes: ${count}`); + } + let transferProgress = 0; + const batch = new Batch(options.concurrency); + for (let off = offset; off < offset + count; off = off + blockSize) { + batch.addOperation(async () => { + // Exclusive chunk end position + let chunkEnd = offset + count; + if (off + blockSize < chunkEnd) { + chunkEnd = off + blockSize; + } + const response = await this.download(off, chunkEnd - off, { + abortSignal: options.abortSignal, + conditions: options.conditions, + maxRetryRequests: options.maxRetryRequestsPerBlock, + customerProvidedKey: options.customerProvidedKey, + tracingOptions: updatedOptions.tracingOptions, + }); + const stream = response.readableStreamBody; + await streamToBuffer(stream, buffer, off - offset, chunkEnd - offset); + // Update progress after a block is downloaded, in case of a block retry. + // Could provide finer grained progress updating inside HTTP requests, + // only if the convenience layer download retry is enabled. + transferProgress += chunkEnd - off; + if (options.onProgress) { + options.onProgress({ loadedBytes: transferProgress }); + } + }); + } + await batch.do(); + return buffer; + }); } /** - * The value of this header is set to - * true if the file data and application metadata are completely encrypted - * using the specified algorithm. Otherwise, the value is set to false (when - * the file is unencrypted, or if only parts of the file/application metadata - * are encrypted). + * ONLY AVAILABLE IN NODE.JS RUNTIME. * - * @readonly + * Downloads an Azure Blob to a local file. + * Fails if the given file path already exists. + * Offset and count are optional, pass 0 and undefined respectively to download the entire blob. + * + * @param filePath - + * @param offset - From which position of the block blob to download. + * @param count - How much data to be downloaded. Will download to the end when passing undefined. + * @param options - Options to the Blob Download operation. + * @returns The response data for blob download operation, + * but with readableStreamBody set to undefined since its + * content is already read and written into a local file + * at the specified path. */ - get isServerEncrypted() { - return this.originalResponse.isServerEncrypted; + async downloadToFile(filePath, offset = 0, count, options = {}) { + return tracingClient.withSpan("BlobClient-downloadToFile", options, async (updatedOptions) => { + const response = await this.download(offset, count, Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions })); + if (response.readableStreamBody) { + await readStreamToLocalFile(response.readableStreamBody, filePath); + } + // The stream is no longer accessible so setting it to undefined.
+ response.blobDownloadStream = undefined; + return response; + }); + } + getBlobAndContainerNamesFromUrl() { + let containerName; + let blobName; + try { + // URL may look like the following + // "https://myaccount.blob.core.windows.net/mycontainer/blob?sasString"; + // "https://myaccount.blob.core.windows.net/mycontainer/blob"; + // "https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt?sasString"; + // "https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt"; + // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername/blob` + // http://localhost:10001/devstoreaccount1/containername/blob + const parsedUrl = new URL(this.url); + if (parsedUrl.host.split(".")[1] === "blob") { + // "https://myaccount.blob.core.windows.net/containername/blob". + // .getPath() -> /containername/blob + const pathComponents = parsedUrl.pathname.match("/([^/]*)(/(.*))?"); + containerName = pathComponents[1]; + blobName = pathComponents[3]; + } + else if (isIpEndpointStyle(parsedUrl)) { + // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername/blob + // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername/blob + // .getPath() -> /devstoreaccount1/containername/blob + const pathComponents = parsedUrl.pathname.match("/([^/]*)/([^/]*)(/(.*))?"); + containerName = pathComponents[2]; + blobName = pathComponents[4]; + } + else { + // "https://customdomain.com/containername/blob". + // .getPath() -> /containername/blob + const pathComponents = parsedUrl.pathname.match("/([^/]*)(/(.*))?"); + containerName = pathComponents[1]; + blobName = pathComponents[3]; + } + // decode the encoded blobName, containerName - to get all the special characters that might be present in them + containerName = decodeURIComponent(containerName); + blobName = decodeURIComponent(blobName); + // Azure Storage Server will replace "\" with "/" in the blob names + // doing the same in the SDK side so that the user doesn't have to replace "\" instances in the blobName + blobName = blobName.replace(/\\/g, "/"); + if (!containerName) { + throw new Error("Provided containerName is invalid."); + } + return { blobName, containerName }; + } + catch (error) { + throw new Error("Unable to extract blobName and containerName with provided information."); + } } /** - * If the blob has a MD5 hash, and if - * request contains range header (Range or x-ms-range), this response header - * is returned with the value of the whole blob's MD5 value. This value may - * or may not be equal to the value returned in Content-MD5 header, with the - * latter calculated from the requested range. + * Asynchronously copies a blob to a destination within the storage account. + * In version 2012-02-12 and later, the source for a Copy Blob operation can be + * a committed blob in any Azure storage account. + * Beginning with version 2015-02-21, the source for a Copy Blob operation can be + * an Azure file in any Azure storage account. + * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob + * operation to copy from another storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob * - * @readonly + * @param copySource - url to the source Azure Blob/File. + * @param options - Optional options to the Blob Start Copy From URL operation. 
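+ *
+ * Example usage (a sketch; `sourceUrl` is an assumed URL to a public or SAS-authenticated source blob):
+ *
+ * ```js
+ * const response = await blobClient.startCopyFromURL(sourceUrl);
+ * console.log(`Copy started with id: ${response.copyId}`);
+ * ```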
*/ - get blobContentMD5() { - return this.originalResponse.blobContentMD5; + async startCopyFromURL(copySource, options = {}) { + return tracingClient.withSpan("BlobClient-startCopyFromURL", options, async (updatedOptions) => { + var _a, _b, _c; + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + return assertResponse(await this.blobContext.startCopyFromURL(copySource, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + sourceIfTags: options.sourceConditions.tagConditions, + }, + immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, + immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, + legalHold: options.legalHold, + rehydratePriority: options.rehydratePriority, + tier: toAccessTier(options.tier), + blobTagsString: toBlobTagsString(options.tags), + sealBlob: options.sealBlob, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** - * Returns the date and time the file was last - * modified. Any operation that modifies the file or its properties updates - * the last modified time. + * Only available for BlobClient constructed with a shared key credential. * - * @readonly + * Generates a Blob Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ - get lastModified() { - return this.originalResponse.lastModified; + generateSasUrl(options) { + return new Promise((resolve) => { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + } + const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), this.credential).toString(); + resolve(appendToURLQuery(this.url, sas)); + }); } /** - * A name-value pair - * to associate with a file storage object. + * Delete the immutability policy on the blob. * - * @readonly + * @param options - Optional options to delete immutability policy on the blob. */ - get metadata() { - return this.originalResponse.metadata; + async deleteImmutabilityPolicy(options = {}) { + return tracingClient.withSpan("BlobClient-deleteImmutabilityPolicy", options, async (updatedOptions) => { + return assertResponse(await this.blobContext.deleteImmutabilityPolicy({ + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** - * This header uniquely identifies the request - * that was made and can be used for troubleshooting the request.
+ * Set immutability policy on the blob. * - * @readonly + * @param options - Optional options to set immutability policy on the blob. */ - get requestId() { - return this.originalResponse.requestId; + async setImmutabilityPolicy(immutabilityPolicy, options = {}) { + return tracingClient.withSpan("BlobClient-setImmutabilityPolicy", options, async (updatedOptions) => { + return assertResponse(await this.blobContext.setImmutabilityPolicy({ + immutabilityPolicyExpiry: immutabilityPolicy.expiriesOn, + immutabilityPolicyMode: immutabilityPolicy.policyMode, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** - * If a client request id header is sent in the request, this header will be present in the - * response with the same value. + * Set legal hold on the blob. * - * @readonly + * @param options - Optional options to set legal hold on the blob. */ - get clientRequestId() { - return this.originalResponse.clientRequestId; + async setLegalHold(legalHoldEnabled, options = {}) { + return tracingClient.withSpan("BlobClient-setLegalHold", options, async (updatedOptions) => { + return assertResponse(await this.blobContext.setLegalHold(legalHoldEnabled, { + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } +} +/** + * AppendBlobClient defines a set of operations applicable to append blobs. + */ +class AppendBlobClient extends BlobClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. + // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + // The second parameter is undefined. Use anonymous credential.
+ pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreUtil.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + this.appendBlobContext = this.storageClientContext.appendBlob; } /** - * Indicates the version of the File service used - * to execute the request. + * Creates a new AppendBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. * - * @readonly + * @param snapshot - The snapshot timestamp. + * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp. */ - get version() { - return this.originalResponse.version; + withSnapshot(snapshot) { + return new AppendBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); } /** - * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned - * when the blob was encrypted with a customer-provided key. + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob * - * @readonly + * @param options - Options to the Append Block Create operation. 
+ * + * + * Example usage: + * + * ```js + * const appendBlobClient = containerClient.getAppendBlobClient(""); + * await appendBlobClient.create(); + * ``` */ - get encryptionKeySha256() { - return this.originalResponse.encryptionKeySha256; + async create(options = {}) { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("AppendBlobClient-create", options, async (updatedOptions) => { + var _a, _b, _c; + return assertResponse(await this.appendBlobContext.create(0, { + abortSignal: options.abortSignal, + blobHttpHeaders: options.blobHTTPHeaders, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, + immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, + legalHold: options.legalHold, + blobTagsString: toBlobTagsString(options.tags), + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** - * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to - * true, then the request returns a crc64 for the range, as long as the range size is less than - * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is - * specified in the same request, it will fail with 400(Bad Request) + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * If the blob with the same name already exists, the content of the existing blob will remain unchanged. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param options - */ - get contentCrc64() { - return this.originalResponse.contentCrc64; + async createIfNotExists(options = {}) { + const conditions = { ifNoneMatch: ETagAny }; + return tracingClient.withSpan("AppendBlobClient-createIfNotExists", options, async (updatedOptions) => { + var _a, _b; + try { + const res = assertResponse(await this.create(Object.assign(Object.assign({}, updatedOptions), { conditions }))); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + throw e; + } + }); } /** - * The response body as a browser Blob. - * Always undefined in node.js. + * Seals the append blob, making it read only. * - * @readonly + * @param options - */ - get blobBody() { - return undefined; + async seal(options = {}) { + options.conditions = options.conditions || {}; + return tracingClient.withSpan("AppendBlobClient-seal", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.appendBlobContext.seal({ + abortSignal: options.abortSignal, + appendPositionAccessConditions: options.conditions, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** - * The response body as a node.js Readable stream. - * Always undefined in the browser. + * Commits a new block of data to the end of the existing append blob. + * @see https://docs.microsoft.com/rest/api/storageservices/append-block * - * It will parse avor data returned by blob query. + * @param body - Data to be appended. + * @param contentLength - Length of the body in bytes. + * @param options - Options to the Append Block operation. * - * @readonly + * + * Example usage: + * + * ```js + * const content = "Hello World!"; + * + * // Create a new append blob and append data to the blob. + * const newAppendBlobClient = containerClient.getAppendBlobClient(""); + * await newAppendBlobClient.create(); + * await newAppendBlobClient.appendBlock(content, content.length); + * + * // Append data to an existing append blob. + * const existingAppendBlobClient = containerClient.getAppendBlobClient(""); + * await existingAppendBlobClient.appendBlock(content, content.length); + * ``` */ - get readableStreamBody() { - return coreHttp.isNode ? this.blobDownloadStream : undefined; + async appendBlock(body, contentLength, options = {}) { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("AppendBlobClient-appendBlock", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.appendBlobContext.appendBlock(contentLength, body, { + abortSignal: options.abortSignal, + appendPositionAccessConditions: options.conditions, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + requestOptions: { + onUploadProgress: options.onProgress, + }, + transactionalContentMD5: options.transactionalContentMD5, + transactionalContentCrc64: options.transactionalContentCrc64, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** - * The HTTP response. + * The Append Block operation commits a new block of data to the end of an existing append blob + * where the contents are read from a source url. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/append-block-from-url + * + * @param sourceURL - + * The url to the blob that will be the source of the copy. A source blob in the same storage account can + * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob + * must either be public or must be authenticated via a shared access signature. If the source blob is + * public, no authentication is required to perform the operation. 
+ * @param sourceOffset - Offset in source to be appended + * @param count - Number of bytes to be appended as a block + * @param options - */ - get _response() { - return this.originalResponse._response; + async appendBlockFromURL(sourceURL, sourceOffset, count, options = {}) { + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("AppendBlobClient-appendBlockFromURL", options, async (updatedOptions) => { + var _a, _b, _c, _d, _e; + return assertResponse(await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, { + abortSignal: options.abortSignal, + sourceRange: rangeToString({ offset: sourceOffset, count }), + sourceContentMD5: options.sourceContentMD5, + sourceContentCrc64: options.sourceContentCrc64, + leaseAccessConditions: options.conditions, + appendPositionAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + sourceModifiedAccessConditions: { + sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifMatch, + sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince, + sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch, + sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.ifUnmodifiedSince, + }, + copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } } - -// Copyright (c) Microsoft Corporation. /** - * Represents the access tier on a blob. - * For detailed information about block blob level tiering see {@link https://docs.microsoft.com/azure/storage/blobs/storage-blob-storage-tiers|Hot, cool and archive storage tiers.} + * BlockBlobClient defines a set of operations applicable to block blobs. */ -exports.BlockBlobTier = void 0; -(function (BlockBlobTier) { - /** - * Optimized for storing data that is accessed frequently. - */ - BlockBlobTier["Hot"] = "Hot"; - /** - * Optimized for storing data that is infrequently accessed and stored for at least 30 days. - */ - BlockBlobTier["Cool"] = "Cool"; - /** - * Optimized for storing data that is rarely accessed. - */ - BlockBlobTier["Cold"] = "Cold"; +class BlockBlobClient extends BlobClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. 
+ // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. + url = urlOrConnectionString; + if (blobNameOrOptions && typeof blobNameOrOptions !== "string") { + options = blobNameOrOptions; + } + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreUtil.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + this.blockBlobContext = this.storageClientContext.blockBlob; + this._blobContext = this.storageClientContext.blob; + } /** - * Optimized for storing data that is rarely accessed and stored for at least 180 days - * with flexible latency requirements (on the order of hours). + * Creates a new BlockBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a URL to the base blob. + * + * @param snapshot - The snapshot timestamp. 
+ * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp. */ - BlockBlobTier["Archive"] = "Archive"; -})(exports.BlockBlobTier || (exports.BlockBlobTier = {})); -/** - * Specifies the page blob tier to set the blob to. This is only applicable to page blobs on premium storage accounts. - * Please see {@link https://docs.microsoft.com/azure/storage/storage-premium-storage#scalability-and-performance-targets|here} - * for detailed information on the corresponding IOPS and throughput per PageBlobTier. - */ -exports.PremiumPageBlobTier = void 0; -(function (PremiumPageBlobTier) { + withSnapshot(snapshot) { + return new BlockBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + } /** - * P4 Tier. + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Quick query for a JSON or CSV formatted blob. + * + * Example usage (Node.js): + * + * ```js + * // Query and convert a blob to a string + * const queryBlockBlobResponse = await blockBlobClient.query("select * from BlobStorage"); + * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString(); + * console.log("Query blob content:", downloaded); + * + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` + * + * @param query - + * @param options - */ - PremiumPageBlobTier["P4"] = "P4"; + async query(query, options = {}) { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + if (!coreUtil.isNode) { + throw new Error("This operation currently is only supported in Node.js."); + } + return tracingClient.withSpan("BlockBlobClient-query", options, async (updatedOptions) => { + var _a; + const response = assertResponse(await this._blobContext.query({ + abortSignal: options.abortSignal, + queryRequest: { + queryType: "SQL", + expression: query, + inputSerialization: toQuerySerialization(options.inputTextConfiguration), + outputSerialization: toQuerySerialization(options.outputTextConfiguration), + }, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + cpkInfo: options.customerProvidedKey, + tracingOptions: updatedOptions.tracingOptions, + })); + return new BlobQueryResponse(response, { + abortSignal: options.abortSignal, + onProgress: options.onProgress, + onError: options.onError, + }); + }); + } /** - * P6 Tier. + * Creates a new block blob, or updates the content of an existing block blob. + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob's contents, + * use {@link stageBlock} and {@link commitBlockList}. + * + * This is a non-parallel uploading method; please use {@link uploadFile}, + * {@link uploadStream} or {@link uploadBrowserData} for better performance + * with concurrent uploading.
+ * + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non-Base64/Hex-encoded characters. + * @param options - Options to the Block Blob Upload operation. + * @returns Response data for the Block Blob Upload operation. + * + * Example usage: + * + * ```js + * const content = "Hello world!"; + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * ``` */ - PremiumPageBlobTier["P6"] = "P6"; + async upload(body, contentLength, options = {}) { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlockBlobClient-upload", options, async (updatedOptions) => { + var _a, _b, _c; + return assertResponse(await this.blockBlobContext.upload(contentLength, body, { + abortSignal: options.abortSignal, + blobHttpHeaders: options.blobHTTPHeaders, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + requestOptions: { + onUploadProgress: options.onProgress, + }, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, + immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, + legalHold: options.legalHold, + tier: toAccessTier(options.tier), + blobTagsString: toBlobTagsString(options.tags), + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } /** - * P10 Tier. + * Creates a new Block Blob where the contents of the blob are read from a given URL. + * This API is supported beginning with the 2020-04-08 version. Partial updates + * are not supported with Put Blob from URL; the content of an existing blob is overwritten with + * the content of the new blob. To perform partial updates to a block blob’s contents using a + * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}. + * + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param options - Optional parameters.
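+ *
+ * Example usage (a sketch; `sourceUrl` is an assumed URL to a public or SAS-authenticated source blob):
+ *
+ * ```js
+ * // Overwrites the destination block blob with the contents of the source URL.
+ * await blockBlobClient.syncUploadFromURL(sourceUrl);
+ * ```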
*/ - PremiumPageBlobTier["P10"] = "P10"; + async syncUploadFromURL(sourceURL, options = {}) { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlockBlobClient-syncUploadFromURL", options, async (updatedOptions) => { + var _a, _b, _c, _d, _e, _f; + return assertResponse(await this.blockBlobContext.putBlobFromUrl(0, sourceURL, Object.assign(Object.assign({}, options), { blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifMatch, + sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince, + sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch, + sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.ifUnmodifiedSince, + sourceIfTags: (_f = options.sourceConditions) === null || _f === void 0 ? void 0 : _f.tagConditions, + }, cpkInfo: options.customerProvidedKey, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), copySourceTags: options.copySourceTags, tracingOptions: updatedOptions.tracingOptions }))); + }); + } /** - * P15 Tier. + * Uploads the specified block to the block blob's "staging area" to be later + * committed by a call to commitBlockList. + * @see https://docs.microsoft.com/rest/api/storageservices/put-block + * + * @param blockId - A 64-byte value that is base64-encoded + * @param body - Data to upload to the staging area. + * @param contentLength - Number of bytes to upload. + * @param options - Options to the Block Blob Stage Block operation. + * @returns Response data for the Block Blob Stage Block operation. */ - PremiumPageBlobTier["P15"] = "P15"; + async stageBlock(blockId, body, contentLength, options = {}) { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlockBlobClient-stageBlock", options, async (updatedOptions) => { + return assertResponse(await this.blockBlobContext.stageBlock(blockId, contentLength, body, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + requestOptions: { + onUploadProgress: options.onProgress, + }, + transactionalContentMD5: options.transactionalContentMD5, + transactionalContentCrc64: options.transactionalContentCrc64, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } /** - * P20 Tier. + * The Stage Block From URL operation creates a new block to be committed as part + * of a blob where the contents are read from a URL. + * This API is available starting in version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-block-from-url + * + * @param blockId - A 64-byte value that is base64-encoded + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. 
The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined + * @param options - Options to the Block Blob Stage Block From URL operation. + * @returns Response data for the Block Blob Stage Block From URL operation. */ - PremiumPageBlobTier["P20"] = "P20"; + async stageBlockFromURL(blockId, sourceURL, offset = 0, count, options = {}) { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlockBlobClient-stageBlockFromURL", options, async (updatedOptions) => { + return assertResponse(await this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + sourceContentMD5: options.sourceContentMD5, + sourceContentCrc64: options.sourceContentCrc64, + sourceRange: offset === 0 && !count ? undefined : rangeToString({ offset, count }), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } /** - * P30 Tier. + * Writes a blob by specifying the list of block IDs that make up the blob. + * In order to be written as part of a blob, a block must have been successfully written + * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to + * update a blob by uploading only those blocks that have changed, then committing the new and existing + * blocks together. Any blocks not specified in the block list are permanently deleted. + * @see https://docs.microsoft.com/rest/api/storageservices/put-block-list + * + * @param blocks - Array of 64-byte values that are base64-encoded + * @param options - Options to the Block Blob Commit Block List operation. + * @returns Response data for the Block Blob Commit Block List operation. */ - PremiumPageBlobTier["P30"] = "P30"; + async commitBlockList(blocks, options = {}) { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlockBlobClient-commitBlockList", options, async (updatedOptions) => { + var _a, _b, _c; + return assertResponse(await this.blockBlobContext.commitBlockList({ latest: blocks }, { + abortSignal: options.abortSignal, + blobHttpHeaders: options.blobHTTPHeaders, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, + immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ?
void 0 : _c.policyMode, + legalHold: options.legalHold, + tier: toAccessTier(options.tier), + blobTagsString: toBlobTagsString(options.tags), + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } /** - * P40 Tier. + * Returns the list of blocks that have been uploaded as part of a block blob + * using the specified block list filter. + * @see https://docs.microsoft.com/rest/api/storageservices/get-block-list + * + * @param listType - Specifies whether to return the list of committed blocks, + * the list of uncommitted blocks, or both lists together. + * @param options - Options to the Block Blob Get Block List operation. + * @returns Response data for the Block Blob Get Block List operation. */ - PremiumPageBlobTier["P40"] = "P40"; + async getBlockList(listType, options = {}) { + return tracingClient.withSpan("BlockBlobClient-getBlockList", options, async (updatedOptions) => { + var _a; + const res = assertResponse(await this.blockBlobContext.getBlockList(listType, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); + if (!res.committedBlocks) { + res.committedBlocks = []; + } + if (!res.uncommittedBlocks) { + res.uncommittedBlocks = []; + } + return res; + }); + } + // High level functions /** - * P50 Tier. - */ - PremiumPageBlobTier["P50"] = "P50"; - /** - * P60 Tier. - */ - PremiumPageBlobTier["P60"] = "P60"; - /** - * P70 Tier. - */ - PremiumPageBlobTier["P70"] = "P70"; - /** - * P80 Tier. + * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob. + * + * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} + * to commit the block list. + * + * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView + * @param options - */ - PremiumPageBlobTier["P80"] = "P80"; -})(exports.PremiumPageBlobTier || (exports.PremiumPageBlobTier = {})); -function toAccessTier(tier) { - if (tier === undefined) { - return undefined; - } - return tier; // No more check if string is a valid AccessTier, and left this to underlay logic to decide(service).
-} -function ensureCpkIfSpecified(cpk, isHttps) { - if (cpk && !isHttps) { - throw new RangeError("Customer-provided encryption key must be used over HTTPS."); - } - if (cpk && !cpk.encryptionAlgorithm) { - cpk.encryptionAlgorithm = EncryptionAlgorithmAES25; + async uploadData(data, options = {}) { + return tracingClient.withSpan("BlockBlobClient-uploadData", options, async (updatedOptions) => { + if (coreUtil.isNode) { + let buffer; + if (data instanceof Buffer) { + buffer = data; + } + else if (data instanceof ArrayBuffer) { + buffer = Buffer.from(data); + } + else { + data = data; + buffer = Buffer.from(data.buffer, data.byteOffset, data.byteLength); + } + return this.uploadSeekableInternal((offset, size) => buffer.slice(offset, offset + size), buffer.byteLength, updatedOptions); + } + else { + const browserBlob = new Blob([data]); + return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); + } + }); } -} -/** - * Defines the known cloud audiences for Storage. - */ -exports.StorageBlobAudience = void 0; -(function (StorageBlobAudience) { /** - * The OAuth scope to use to retrieve an AAD token for Azure Storage. + * ONLY AVAILABLE IN BROWSERS. + * + * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to a block blob. + * + * When the buffer length is less than or equal to 256MB, this method will use 1 upload call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call + * {@link commitBlockList} to commit the block list. + * + * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + * @deprecated Use {@link uploadData} instead. + * + * @param browserData - Blob, File, ArrayBuffer or ArrayBufferView + * @param options - Options to upload browser data. + * @returns Response data for the Blob Upload operation. */ - StorageBlobAudience["StorageOAuthScopes"] = "https://storage.azure.com/.default"; + async uploadBrowserData(browserData, options = {}) { + return tracingClient.withSpan("BlockBlobClient-uploadBrowserData", options, async (updatedOptions) => { + const browserBlob = new Blob([browserData]); + return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); + }); + } /** - * The OAuth scope to use to retrieve an AAD token for Azure Disk. + * + * Uploads data to a block blob. Requires a bodyFactory as the data source, + * which needs to return a {@link HttpRequestBody} object with the offset and size provided. + * + * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} + * to commit the block list. + * + * @param bodyFactory - + * @param size - size of the data to upload. + * @param options - Options to Upload to Block Blob operation. + * @returns Response data for the Blob Upload operation.
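+ * + * A hedged sketch of calling this internal helper directly (not part of the original docs; `uploadData` builds its bodyFactory the same way from an in-memory Buffer, which is assumed here): + * + * ```js + * const data = Buffer.from("Hello world!"); + * const response = await blockBlobClient.uploadSeekableInternal((offset, size) => data.slice(offset, offset + size), data.byteLength); + * ```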
*/ - StorageBlobAudience["DiskComputeOAuthScopes"] = "https://disk.compute.azure.com/.default"; -})(exports.StorageBlobAudience || (exports.StorageBlobAudience = {})); -function getBlobServiceAccountAudience(storageAccountName) { - return `https://${storageAccountName}.blob.core.windows.net/.default`; -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Function that converts PageRange and ClearRange to a common Range object. - * PageRange and ClearRange have start and end while Range offset and count - * this function normalizes to Range. - * @param response - Model PageBlob Range response - */ -function rangeResponseFromModel(response) { - const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({ - offset: x.start, - count: x.end - x.start, - })); - const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({ - offset: x.start, - count: x.end - x.start, - })); - return Object.assign(Object.assign({}, response), { pageRange, - clearRange, _response: Object.assign(Object.assign({}, response._response), { parsedBody: { - pageRange, - clearRange, - } }) }); -} - -// Copyright (c) Microsoft Corporation. -/** - * This is the poller returned by {@link BlobClient.beginCopyFromURL}. - * This can not be instantiated directly outside of this package. - * - * @hidden - */ -class BlobBeginCopyFromUrlPoller extends coreLro.Poller { - constructor(options) { - const { blobClient, copySource, intervalInMs = 15000, onProgress, resumeFrom, startCopyFromURLOptions, } = options; - let state; - if (resumeFrom) { - state = JSON.parse(resumeFrom).state; - } - const operation = makeBlobBeginCopyFromURLPollOperation(Object.assign(Object.assign({}, state), { blobClient, - copySource, - startCopyFromURLOptions })); - super(operation); - if (typeof onProgress === "function") { - this.onProgress(onProgress); + async uploadSeekableInternal(bodyFactory, size, options = {}) { + var _a, _b; + let blockSize = (_a = options.blockSize) !== null && _a !== void 0 ? _a : 0; + if (blockSize < 0 || blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) { + throw new RangeError(`blockSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`); } - this.intervalInMs = intervalInMs; - } - delay() { - return coreHttp.delay(this.intervalInMs); - } -} -/** - * Note: Intentionally using function expression over arrow function expression - * so that the function can be invoked with a different context. - * This affects what `this` refers to. - * @hidden - */ -const cancel = async function cancel(options = {}) { - const state = this.state; - const { copyId } = state; - if (state.isCompleted) { - return makeBlobBeginCopyFromURLPollOperation(state); - } - if (!copyId) { - state.isCancelled = true; - return makeBlobBeginCopyFromURLPollOperation(state); - } - // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call - await state.blobClient.abortCopyFromURL(copyId, { - abortSignal: options.abortSignal, - }); - state.isCancelled = true; - return makeBlobBeginCopyFromURLPollOperation(state); -}; -/** - * Note: Intentionally using function expression over arrow function expression - * so that the function can be invoked with a different context. - * This affects what `this` refers to. 
- * @hidden - */ -const update = async function update(options = {}) { - const state = this.state; - const { blobClient, copySource, startCopyFromURLOptions } = state; - if (!state.isStarted) { - state.isStarted = true; - const result = await blobClient.startCopyFromURL(copySource, startCopyFromURLOptions); - // copyId is needed to abort - state.copyId = result.copyId; - if (result.copyStatus === "success") { - state.result = result; - state.isCompleted = true; + const maxSingleShotSize = (_b = options.maxSingleShotSize) !== null && _b !== void 0 ? _b : BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES; + if (maxSingleShotSize < 0 || maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) { + throw new RangeError(`maxSingleShotSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`); } - } - else if (!state.isCompleted) { - try { - const result = await state.blobClient.getProperties({ abortSignal: options.abortSignal }); - const { copyStatus, copyProgress } = result; - const prevCopyProgress = state.copyProgress; - if (copyProgress) { - state.copyProgress = copyProgress; - } - if (copyStatus === "pending" && - copyProgress !== prevCopyProgress && - typeof options.fireProgress === "function") { - // trigger in setTimeout, or swallow error? - options.fireProgress(state); - } - else if (copyStatus === "success") { - state.result = result; - state.isCompleted = true; + if (blockSize === 0) { + if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) { + throw new RangeError(`${size} is too large to upload to a block blob.`); } - else if (copyStatus === "failed") { - state.error = new Error(`Blob copy failed with reason: "${result.copyStatusDescription || "unknown"}"`); - state.isCompleted = true; + if (size > maxSingleShotSize) { + blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS); + if (blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) { + blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + } } } - catch (err) { - state.error = err; - state.isCompleted = true; - } - } - return makeBlobBeginCopyFromURLPollOperation(state); -}; -/** - * Note: Intentionally using function expression over arrow function expression - * so that the function can be invoked with a different context. - * This affects what `this` refers to. - * @hidden - */ -const toString = function toString() { - return JSON.stringify({ state: this.state }, (key, value) => { - // remove blobClient from serialized state since a client can't be hydrated from this info. - if (key === "blobClient") { - return undefined; + if (!options.blobHTTPHeaders) { + options.blobHTTPHeaders = {}; } - return value; - }); -}; -/** - * Creates a poll operation given the provided state. - * @hidden - */ -function makeBlobBeginCopyFromURLPollOperation(state) { - return { - state: Object.assign({}, state), - cancel, - toString, - update, - }; -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Generate a range string. For example: - * - * "bytes=255-" or "bytes=0-511" - * - * @param iRange - - */ -function rangeToString(iRange) { - if (iRange.offset < 0) { - throw new RangeError(`Range.offset cannot be smaller than 0.`); - } - if (iRange.count && iRange.count <= 0) { - throw new RangeError(`Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.`); - } - return iRange.count - ? `bytes=${iRange.offset}-${iRange.offset + iRange.count - 1}` - : `bytes=${iRange.offset}-`; -} - -// Copyright (c) Microsoft Corporation. -/** - * States for Batch.
- */ -var BatchStates; -(function (BatchStates) { - BatchStates[BatchStates["Good"] = 0] = "Good"; - BatchStates[BatchStates["Error"] = 1] = "Error"; -})(BatchStates || (BatchStates = {})); -/** - * Batch provides basic parallel execution with concurrency limits. - * Will stop execute left operations when one of the executed operation throws an error. - * But Batch cannot cancel ongoing operations, you need to cancel them by yourself. - */ -class Batch { - /** - * Creates an instance of Batch. - * @param concurrency - - */ - constructor(concurrency = 5) { - /** - * Number of active operations under execution. - */ - this.actives = 0; - /** - * Number of completed operations under execution. - */ - this.completed = 0; - /** - * Offset of next operation to be executed. - */ - this.offset = 0; - /** - * Operation array to be executed. - */ - this.operations = []; - /** - * States of Batch. When an error happens, state will turn into error. - * Batch will stop execute left operations. - */ - this.state = BatchStates.Good; - if (concurrency < 1) { - throw new RangeError("concurrency must be larger than 0"); + if (!options.conditions) { + options.conditions = {}; } - this.concurrency = concurrency; - this.emitter = new events.EventEmitter(); - } - /** - * Add a operation into queue. - * - * @param operation - - */ - addOperation(operation) { - this.operations.push(async () => { - try { - this.actives++; - await operation(); - this.actives--; - this.completed++; - this.parallelExecute(); + return tracingClient.withSpan("BlockBlobClient-uploadSeekableInternal", options, async (updatedOptions) => { + if (size <= maxSingleShotSize) { + return assertResponse(await this.upload(bodyFactory(0, size), size, updatedOptions)); } - catch (error) { - this.emitter.emit("error", error); + const numBlocks = Math.floor((size - 1) / blockSize) + 1; + if (numBlocks > BLOCK_BLOB_MAX_BLOCKS) { + throw new RangeError(`The buffer's size is too big or the BlockSize is too small;` + + `the number of blocks must be <= ${BLOCK_BLOB_MAX_BLOCKS}`); + } + const blockList = []; + const blockIDPrefix = coreUtil.randomUUID(); + let transferProgress = 0; + const batch = new Batch(options.concurrency); + for (let i = 0; i < numBlocks; i++) { + batch.addOperation(async () => { + const blockID = generateBlockID(blockIDPrefix, i); + const start = blockSize * i; + const end = i === numBlocks - 1 ? size : start + blockSize; + const contentLength = end - start; + blockList.push(blockID); + await this.stageBlock(blockID, bodyFactory(start, contentLength), contentLength, { + abortSignal: options.abortSignal, + conditions: options.conditions, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + }); + // Update progress only after the block is successfully uploaded to the server, in case the block is retried + // TODO: Hook with convenience layer progress event in finer level + transferProgress += contentLength; + if (options.onProgress) { + options.onProgress({ + loadedBytes: transferProgress, + }); + } + }); } + await batch.do(); + return this.commitBlockList(blockList, updatedOptions); }); } /** - * Start execute operations in the queue. + * ONLY AVAILABLE IN NODE.JS RUNTIME. * + * Uploads a local file in blocks to a block blob. + * + * When the file size is less than or equal to 256MB, this method will use 1 upload call to finish the upload. + * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList + * to commit the block list.
+ * + * @param filePath - Full path of local file + * @param options - Options to Upload to Block Blob operation. + * @returns Response data for the Blob Upload operation. */ - async do() { - if (this.operations.length === 0) { - return Promise.resolve(); - } - this.parallelExecute(); - return new Promise((resolve, reject) => { - this.emitter.on("finish", resolve); - this.emitter.on("error", (error) => { - this.state = BatchStates.Error; - reject(error); - }); + async uploadFile(filePath, options = {}) { + return tracingClient.withSpan("BlockBlobClient-uploadFile", options, async (updatedOptions) => { + const size = (await fsStat(filePath)).size; + return this.uploadSeekableInternal((offset, count) => { + return () => fsCreateReadStream(filePath, { + autoClose: true, + end: count ? offset + count - 1 : Infinity, + start: offset, + }); + }, size, Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions })); }); } /** - * Get next operation to be executed. Return null when reaching ends. + * ONLY AVAILABLE IN NODE.JS RUNTIME. * - */ - nextOperation() { - if (this.offset < this.operations.length) { - return this.operations[this.offset++]; - } - return null; - } - /** - * Start execute operations. One one the most important difference between - * this method with do() is that do() wraps as an sync method. + * Uploads a Node.js Readable stream into a block blob. * + * PERFORMANCE IMPROVEMENT TIPS: + * * Set the input stream's highWaterMark to the same value as the bufferSize + * parameter; this avoids Buffer.concat() operations. + * + * @param stream - Node.js Readable stream + * @param bufferSize - Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB + * @param maxConcurrency - Max concurrency indicates the max number of buffers that can be allocated; + * it correlates positively with the max upload concurrency. Default value is 5 + * @param options - Options to Upload Stream to Block Blob operation. + * @returns Response data for the Blob Upload operation.
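+ * + * Example usage (a hedged sketch, not part of the original docs; `readable` is an assumed Node.js Readable stream, e.g. from fs.createReadStream, uploaded here in 4MB blocks with up to 20 buffers): + * + * ```js + * const response = await blockBlobClient.uploadStream(readable, 4 * 1024 * 1024, 20, { + * onProgress: (ev) => console.log(`loaded ${ev.loadedBytes} bytes`), + * }); + * ```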
*/ - parallelExecute() { - if (this.state === BatchStates.Error) { - return; - } - if (this.completed >= this.operations.length) { - this.emitter.emit("finish"); - return; + async uploadStream(stream, bufferSize = DEFAULT_BLOCK_BUFFER_SIZE_BYTES, maxConcurrency = 5, options = {}) { + if (!options.blobHTTPHeaders) { + options.blobHTTPHeaders = {}; } - while (this.actives < this.concurrency) { - const operation = this.nextOperation(); - if (operation) { - operation(); - } - else { - return; - } + if (!options.conditions) { + options.conditions = {}; } + return tracingClient.withSpan("BlockBlobClient-uploadStream", options, async (updatedOptions) => { + let blockNum = 0; + const blockIDPrefix = coreUtil.randomUUID(); + let transferProgress = 0; + const blockList = []; + const scheduler = new BufferScheduler(stream, bufferSize, maxConcurrency, async (body, length) => { + const blockID = generateBlockID(blockIDPrefix, blockNum); + blockList.push(blockID); + blockNum++; + await this.stageBlock(blockID, body, length, { + conditions: options.conditions, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + }); + // Update progress only after the block is successfully uploaded to the server, in case the block is retried + transferProgress += length; + if (options.onProgress) { + options.onProgress({ loadedBytes: transferProgress }); + } + }, + // concurrency should be set to a smaller value than maxConcurrency, which helps + // reduce the chance that an outgoing handler has to wait for stream data; in + // that situation, outgoing handlers are blocked. + // The outgoing queue shouldn't be empty. + Math.ceil((maxConcurrency / 4) * 3)); + await scheduler.do(); + return assertResponse(await this.commitBlockList(blockList, Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions }))); + }); } } - -// Copyright (c) Microsoft Corporation. /** - * This class generates a readable stream from the data in an array of buffers. + * PageBlobClient defines a set of operations applicable to page blobs. */ -class BuffersStream extends stream.Readable { - /** - * Creates an instance of BuffersStream that will emit the data - * contained in the array of buffers. - * - * @param buffers - Array of buffers containing the data - * @param byteLength - The total length of data contained in the buffers - */ - constructor(buffers, byteLength, options) { - super(options); - this.buffers = buffers; - this.byteLength = byteLength; - this.byteOffsetInCurrentBuffer = 0; - this.bufferIndex = 0; - this.pushedBytesLength = 0; - // check byteLength is no larger than buffers[] total length - let buffersLength = 0; - for (const buf of this.buffers) { - buffersLength += buf.byteLength; - } - if (buffersLength < this.byteLength) { - throw new Error("Data size shouldn't be larger than the total length of buffers."); +class PageBlobClient extends BlobClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead.
+ // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; } - } - /** - * Internal _read() that will be called when the stream wants to pull more data in. - * - * @param size - Optional. The size of data to be read - */ - _read(size) { - if (this.pushedBytesLength >= this.byteLength) { - this.push(null); + else if ((coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); } - if (!size) { - size = this.readableHighWaterMark; + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. + url = urlOrConnectionString; + pipeline = newPipeline(new AnonymousCredential(), options); } - const outBuffers = []; - let i = 0; - while (i < size && this.pushedBytesLength < this.byteLength) { - // The last buffer may be longer than the data it contains. - const remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength; - const remainingCapacityInThisBuffer = this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer; - const remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers); - if (remaining > size - i) { - // chunkSize = size - i - const end = this.byteOffsetInCurrentBuffer + size - i; - outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); - this.pushedBytesLength += size - i; - this.byteOffsetInCurrentBuffer = end; - i = size; - break; - } - else { - // chunkSize = remaining - const end = this.byteOffsetInCurrentBuffer + remaining; - outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); - if (remaining === remainingCapacityInThisBuffer) { - // this.buffers[this.bufferIndex] used up, shift to next one - this.byteOffsetInCurrentBuffer = 0; - this.bufferIndex++; + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreUtil.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = 
coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); } else { - this.byteOffsetInCurrentBuffer = end; + throw new Error("Account connection string is only supported in Node.js environment"); } - this.pushedBytesLength += remaining; - i += remaining; } - } - if (outBuffers.length > 1) { - this.push(Buffer.concat(outBuffers)); - } - else if (outBuffers.length === 1) { - this.push(outBuffers[0]); - } - } -} - -// Copyright (c) Microsoft Corporation. -/** - * maxBufferLength is max size of each buffer in the pooled buffers. - */ -// Can't use import as Typescript doesn't recognize "buffer". -const maxBufferLength = (__nccwpck_require__(14300).constants.MAX_LENGTH); -/** - * This class provides a buffer container which conceptually has no hard size limit. - * It accepts a capacity, an array of input buffers and the total length of input data. - * It will allocate an internal "buffer" of the capacity and fill the data in the input buffers - * into the internal "buffer" serially with respect to the total length. - * Then by calling PooledBuffer.getReadableStream(), you can get a readable stream - * assembled from all the data in the internal "buffer". - */ -class PooledBuffer { - constructor(capacity, buffers, totalLength) { - /** - * Internal buffers used to keep the data. - * Each buffer has a length of the maxBufferLength except last one. - */ - this.buffers = []; - this.capacity = capacity; - this._size = 0; - // allocate - const bufferNum = Math.ceil(capacity / maxBufferLength); - for (let i = 0; i < bufferNum; i++) { - let len = i === bufferNum - 1 ? capacity % maxBufferLength : maxBufferLength; - if (len === 0) { - len = maxBufferLength; + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } - this.buffers.push(Buffer.allocUnsafe(len)); } - if (buffers) { - this.fill(buffers, totalLength); + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } + super(url, pipeline); + this.pageBlobContext = this.storageClientContext.pageBlob; } /** - * The size of the data contained in the pooled buffers. + * Creates a new PageBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Providing "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp. */ - get size() { - return this._size; + withSnapshot(snapshot) { + return new PageBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); } /** - * Fill the internal buffers with data in the input buffers serially - * with respect to the total length and the total capacity of the internal buffers. - * Data copied will be shift out of the input buffers. + * Creates a page blob of the specified length. Call uploadPages to upload data + * to a page blob.
+ * @see https://docs.microsoft.com/rest/api/storageservices/put-blob * - * @param buffers - Input buffers containing the data to be filled in the pooled buffer - * @param totalLength - Total length of the data to be filled in. + * @param size - size of the page blob. + * @param options - Options to the Page Blob Create operation. + * @returns Response data for the Page Blob Create operation. + */ + async create(size, options = {}) { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("PageBlobClient-create", options, async (updatedOptions) => { + var _a, _b, _c; + return assertResponse(await this.pageBlobContext.create(0, size, { + abortSignal: options.abortSignal, + blobHttpHeaders: options.blobHTTPHeaders, + blobSequenceNumber: options.blobSequenceNumber, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, + immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, + legalHold: options.legalHold, + tier: toAccessTier(options.tier), + blobTagsString: toBlobTagsString(options.tags), + tracingOptions: updatedOptions.tracingOptions, + })); + }); + } + /** + * Creates a page blob of the specified length. Call uploadPages to upload data + * to a page blob. If the blob with the same name already exists, the content + * of the existing blob will remain unchanged. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob * + * @param size - size of the page blob. + * @param options - */ - fill(buffers, totalLength) { - this._size = Math.min(this.capacity, totalLength); - let i = 0, j = 0, targetOffset = 0, sourceOffset = 0, totalCopiedNum = 0; - while (totalCopiedNum < this._size) { - const source = buffers[i]; - const target = this.buffers[j]; - const copiedNum = source.copy(target, targetOffset, sourceOffset); - totalCopiedNum += copiedNum; - sourceOffset += copiedNum; - targetOffset += copiedNum; - if (sourceOffset === source.length) { - i++; - sourceOffset = 0; + async createIfNotExists(size, options = {}) { + return tracingClient.withSpan("PageBlobClient-createIfNotExists", options, async (updatedOptions) => { + var _a, _b; + try { + const conditions = { ifNoneMatch: ETagAny }; + const res = assertResponse(await this.create(size, Object.assign(Object.assign({}, options), { conditions, tracingOptions: updatedOptions.tracingOptions }))); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } - if (targetOffset === target.length) { - j++; - targetOffset = 0; + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + throw e; } - } - // clear copied from source buffers - buffers.splice(0, i); - if (buffers.length > 0) { - buffers[0] = buffers[0].slice(sourceOffset); - } + }); } /** - * Get the readable stream assembled from all the data in the internal buffers.
+ * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512. + * @see https://docs.microsoft.com/rest/api/storageservices/put-page * + * @param body - Data to upload + * @param offset - Offset of destination page blob + * @param count - Content length of the body, also number of bytes to be uploaded + * @param options - Options to the Page Blob Upload Pages operation. + * @returns Response data for the Page Blob Upload Pages operation. */ - getReadableStream() { - return new BuffersStream(this.buffers, this.size); + async uploadPages(body, offset, count, options = {}) { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("PageBlobClient-uploadPages", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.uploadPages(count, body, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + requestOptions: { + onUploadProgress: options.onProgress, + }, + range: rangeToString({ offset, count }), + sequenceNumberAccessConditions: options.conditions, + transactionalContentMD5: options.transactionalContentMD5, + transactionalContentCrc64: options.transactionalContentCrc64, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } -} - -// Copyright (c) Microsoft Corporation. -/** - * This class accepts a Node.js Readable stream as input, and keeps reading data - * from the stream into the internal buffer structure, until it reaches maxBuffers. - * Every available buffer will try to trigger outgoingHandler. - * - * The internal buffer structure includes an incoming buffer array, and a outgoing - * buffer array. The incoming buffer array includes the "empty" buffers can be filled - * with new incoming data. The outgoing array includes the filled buffers to be - * handled by outgoingHandler. Every above buffer size is defined by parameter bufferSize. - * - * NUM_OF_ALL_BUFFERS = BUFFERS_IN_INCOMING + BUFFERS_IN_OUTGOING + BUFFERS_UNDER_HANDLING - * - * NUM_OF_ALL_BUFFERS lesser than or equal to maxBuffers - * - * PERFORMANCE IMPROVEMENT TIPS: - * 1. Input stream highWaterMark is better to set a same value with bufferSize - * parameter, which will avoid Buffer.concat() operations. - * 2. concurrency should set a smaller value than maxBuffers, which is helpful to - * reduce the possibility when a outgoing handler waits for the stream data. - * in this situation, outgoing handlers are blocked. - * Outgoing queue shouldn't be empty. - */ -class BufferScheduler { /** - * Creates an instance of BufferScheduler. + * The Upload Pages operation writes a range of pages to a page blob where the + * contents are read from a URL. 
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-page-from-url * - * @param readable - A Node.js Readable stream - * @param bufferSize - Buffer size of every maintained buffer - * @param maxBuffers - How many buffers can be allocated - * @param outgoingHandler - An async function scheduled to be - * triggered when a buffer fully filled - * with stream data - * @param concurrency - Concurrency of executing outgoingHandlers (>0) - * @param encoding - [Optional] Encoding of Readable stream when it's a string stream + * @param sourceURL - Specify a URL to the copy source; a Shared Access Signature (SAS) may be needed for authentication + * @param sourceOffset - The source offset to copy from. Pass 0 to copy from the beginning of source page blob + * @param destOffset - Offset of destination page blob + * @param count - Number of bytes to be uploaded from source page blob + * @param options - */ - constructor(readable, bufferSize, maxBuffers, outgoingHandler, concurrency, encoding) { - /** - * An internal event emitter. - */ - this.emitter = new events.EventEmitter(); - /** - * An internal offset marker to track data offset in bytes of next outgoingHandler. - */ - this.offset = 0; - /** - * An internal marker to track whether stream is end. - */ - this.isStreamEnd = false; - /** - * An internal marker to track whether stream or outgoingHandler returns error. - */ - this.isError = false; - /** - * How many handlers are executing. - */ - this.executingOutgoingHandlers = 0; - /** - * How many buffers have been allocated. - */ - this.numBuffers = 0; - /** - * Because this class doesn't know how much data every time stream pops, which - * is defined by highWaterMarker of the stream. So BufferScheduler will cache - * data received from the stream, when data in unresolvedDataArray exceeds the - * blockSize defined, it will try to concat a blockSize of buffer, fill into available - * buffers from incoming and push to outgoing array. - */ - this.unresolvedDataArray = []; - /** - * How much data consisted in unresolvedDataArray. - */ - this.unresolvedLength = 0; - /** - * The array includes all the available buffers can be used to fill data from stream. - */ - this.incoming = []; - /** - * The array (queue) includes all the buffers filled from stream data.
- */ - this.outgoing = []; - if (bufferSize <= 0) { - throw new RangeError(`bufferSize must be larger than 0, current is ${bufferSize}`); - } - if (maxBuffers <= 0) { - throw new RangeError(`maxBuffers must be larger than 0, current is ${maxBuffers}`); - } - if (concurrency <= 0) { - throw new RangeError(`concurrency must be larger than 0, current is ${concurrency}`); - } - this.bufferSize = bufferSize; - this.maxBuffers = maxBuffers; - this.readable = readable; - this.outgoingHandler = outgoingHandler; - this.concurrency = concurrency; - this.encoding = encoding; + async uploadPagesFromURL(sourceURL, sourceOffset, destOffset, count, options = {}) { + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("PageBlobClient-uploadPagesFromURL", options, async (updatedOptions) => { + var _a, _b, _c, _d, _e; + return assertResponse(await this.pageBlobContext.uploadPagesFromURL(sourceURL, rangeToString({ offset: sourceOffset, count }), 0, rangeToString({ offset: destOffset, count }), { + abortSignal: options.abortSignal, + sourceContentMD5: options.sourceContentMD5, + sourceContentCrc64: options.sourceContentCrc64, + leaseAccessConditions: options.conditions, + sequenceNumberAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + sourceModifiedAccessConditions: { + sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifMatch, + sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince, + sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch, + sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.ifUnmodifiedSince, + }, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** - * Start the scheduler, will return error when stream of any of the outgoingHandlers - * returns error. + * Frees the specified pages from the page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-page * + * @param offset - Starting byte position of the pages to clear. + * @param count - Number of bytes to clear. + * @param options - Options to the Page Blob Clear Pages operation. + * @returns Response data for the Page Blob Clear Pages operation. */ - async do() { - return new Promise((resolve, reject) => { - this.readable.on("data", (data) => { - data = typeof data === "string" ? 
Buffer.from(data, this.encoding) : data; - this.appendUnresolvedData(data); - if (!this.resolveData()) { - this.readable.pause(); - } - }); - this.readable.on("error", (err) => { - this.emitter.emit("error", err); - }); - this.readable.on("end", () => { - this.isStreamEnd = true; - this.emitter.emit("checkEnd"); - }); - this.emitter.on("error", (err) => { - this.isError = true; - this.readable.pause(); - reject(err); - }); - this.emitter.on("checkEnd", () => { - if (this.outgoing.length > 0) { - this.triggerOutgoingHandlers(); - return; - } - if (this.isStreamEnd && this.executingOutgoingHandlers === 0) { - if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) { - const buffer = this.shiftBufferFromUnresolvedDataArray(); - this.outgoingHandler(() => buffer.getReadableStream(), buffer.size, this.offset) - .then(resolve) - .catch(reject); - } - else if (this.unresolvedLength >= this.bufferSize) { - return; - } - else { - resolve(); - } - } - }); + async clearPages(offset = 0, count, options = {}) { + options.conditions = options.conditions || {}; + return tracingClient.withSpan("PageBlobClient-clearPages", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.clearPages(0, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + range: rangeToString({ offset, count }), + sequenceNumberAccessConditions: options.conditions, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); }); } /** - * Insert a new data into unresolved array. + * Returns the list of valid page ranges for a page blob or snapshot of a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges * - * @param data - + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns Response data for the Page Blob Get Ranges operation. */ - appendUnresolvedData(data) { - this.unresolvedDataArray.push(data); - this.unresolvedLength += data.length; + async getPageRanges(offset = 0, count, options = {}) { + options.conditions = options.conditions || {}; + return tracingClient.withSpan("PageBlobClient-getPageRanges", options, async (updatedOptions) => { + var _a; + const response = assertResponse(await this.pageBlobContext.getPageRanges({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + range: rangeToString({ offset, count }), + tracingOptions: updatedOptions.tracingOptions, + })); + return rangeResponseFromModel(response); + }); } /** - * Try to shift a buffer with size in blockSize. The buffer returned may be less - * than blockSize when data in unresolvedDataArray is less than bufferSize. + * getPageRangesSegment returns a single segment of page ranges starting from the + * specified Marker. Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call getPageRangesSegment again + * (passing the previously-returned Marker) to get the next segment.
+ * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to PageBlob Get Page Ranges Segment operation. */ - shiftBufferFromUnresolvedDataArray(buffer) { - if (!buffer) { - buffer = new PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength); - } - else { - buffer.fill(this.unresolvedDataArray, this.unresolvedLength); - } - this.unresolvedLength -= buffer.size; - return buffer; + async listPageRangesSegment(offset = 0, count, marker, options = {}) { + return tracingClient.withSpan("PageBlobClient-getPageRangesSegment", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.getPageRanges({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + range: rangeToString({ offset, count }), + marker: marker, + maxPageSize: options.maxPageSize, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** - * Resolve data in unresolvedDataArray. For every buffer with size in blockSize - * shifted, it will try to get (or allocate a buffer) from incoming, and fill it, - * then push it into outgoing to be handled by outgoing handler. - * - * Return false when available buffers in incoming are not enough, else true. + * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel} * - * @returns Return false when buffers in incoming are not enough, else true. + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param marker - A string value that identifies the portion of + * the page ranges to be returned with the next operation. The + * operation returns the ContinuationToken value within the response body if the + * previous operation did not return all page ranges remaining within the current page. + * The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of + * items. The marker value is opaque to the client. + * @param options - Options to List Page Ranges operation.
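+ * + * A hedged sketch (not part of the original docs) of the marker-driven loop this helper implements on top of the listPageRangesSegment method above: + * + * ```js + * let marker; + * do { + * const segment = await pageBlobClient.listPageRangesSegment(0, undefined, marker); + * // consume segment.pageRange and segment.clearRange here + * marker = segment.continuationToken; + * } while (marker); + * ```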
*/ - resolveData() { - while (this.unresolvedLength >= this.bufferSize) { - let buffer; - if (this.incoming.length > 0) { - buffer = this.incoming.shift(); - this.shiftBufferFromUnresolvedDataArray(buffer); - } - else { - if (this.numBuffers < this.maxBuffers) { - buffer = this.shiftBufferFromUnresolvedDataArray(); - this.numBuffers++; - } - else { - // No available buffer, wait for buffer returned - return false; - } + listPageRangeItemSegments() { + return tslib.__asyncGenerator(this, arguments, function* listPageRangeItemSegments_1(offset = 0, count, marker, options = {}) { + let getPageRangeItemSegmentsResponse; + if (!!marker || marker === undefined) { + do { + getPageRangeItemSegmentsResponse = yield tslib.__await(this.listPageRangesSegment(offset, count, marker, options)); + marker = getPageRangeItemSegmentsResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(getPageRangeItemSegmentsResponse)); + } while (marker); } - this.outgoing.push(buffer); - this.triggerOutgoingHandlers(); - } - return true; + }); } /** - * Try to trigger a outgoing handler for every buffer in outgoing. Stop when - * concurrency reaches. + * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to List Page Ranges operation. */ - async triggerOutgoingHandlers() { - let buffer; - do { - if (this.executingOutgoingHandlers >= this.concurrency) { - return; + listPageRangeItems() { + return tslib.__asyncGenerator(this, arguments, function* listPageRangeItems_1(offset = 0, count, options = {}) { + var _a, e_1, _b, _c; + let marker; + try { + for (var _d = true, _e = tslib.__asyncValues(this.listPageRangeItemSegments(offset, count, marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const getPageRangesSegment = _c; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); + } } - buffer = this.outgoing.shift(); - if (buffer) { - this.triggerOutgoingHandler(buffer); + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); + } + finally { if (e_1) throw e_1.error; } } - } while (buffer); + }); } /** - * Trigger a outgoing handler for a buffer shifted from outgoing. + * Returns an async iterable iterator to list of page ranges for a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges * - * @param buffer - - */ - async triggerOutgoingHandler(buffer) { - const bufferLength = buffer.size; - this.executingOutgoingHandlers++; - this.offset += bufferLength; - try { - await this.outgoingHandler(() => buffer.getReadableStream(), bufferLength, this.offset - bufferLength); - } - catch (err) { - this.emitter.emit("error", err); - return; - } - this.executingOutgoingHandlers--; - this.reuseBuffer(buffer); - this.emitter.emit("checkEnd"); - } - /** - * Return buffer used by outgoing handler into incoming. + * .byPage() returns an async iterable iterator to list of page ranges for a page blob. * - * @param buffer - - */ - reuseBuffer(buffer) { - this.incoming.push(buffer); - if (!this.isError && this.resolveData() && !this.isStreamEnd) { - this.readable.resume(); - } - } -} - -// Copyright (c) Microsoft Corporation. -/** - * Reads a readable stream into buffer. Fill the buffer from offset to end. 
- * - * @param stream - A Node.js Readable stream - * @param buffer - Buffer to be filled, length must greater than or equal to offset - * @param offset - From which position in the buffer to be filled, inclusive - * @param end - To which position in the buffer to be filled, exclusive - * @param encoding - Encoding of the Readable stream - */ -async function streamToBuffer(stream, buffer, offset, end, encoding) { - let pos = 0; // Position in stream - const count = end - offset; // Total amount of data needed in stream - return new Promise((resolve, reject) => { - const timeout = setTimeout(() => reject(new Error(`The operation cannot be completed in timeout.`)), REQUEST_TIMEOUT); - stream.on("readable", () => { - if (pos >= count) { - clearTimeout(timeout); - resolve(); - return; - } - let chunk = stream.read(); - if (!chunk) { - return; - } - if (typeof chunk === "string") { - chunk = Buffer.from(chunk, encoding); - } - // How much data needed in this chunk - const chunkLength = pos + chunk.length > count ? count - pos : chunk.length; - buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength); - pos += chunkLength; - }); - stream.on("end", () => { - clearTimeout(timeout); - if (pos < count) { - reject(new Error(`Stream drains before getting enough data needed. Data read: ${pos}, data need: ${count}`)); - } - resolve(); - }); - stream.on("error", (msg) => { - clearTimeout(timeout); - reject(msg); - }); - }); -} -/** - * Reads a readable stream into buffer entirely. - * - * @param stream - A Node.js Readable stream - * @param buffer - Buffer to be filled, length must greater than or equal to offset - * @param encoding - Encoding of the Readable stream - * @returns with the count of bytes read. - * @throws `RangeError` If buffer size is not big enough. - */ -async function streamToBuffer2(stream, buffer, encoding) { - let pos = 0; // Position in stream - const bufferSize = buffer.length; - return new Promise((resolve, reject) => { - stream.on("readable", () => { - let chunk = stream.read(); - if (!chunk) { - return; - } - if (typeof chunk === "string") { - chunk = Buffer.from(chunk, encoding); - } - if (pos + chunk.length > bufferSize) { - reject(new Error(`Stream exceeds buffer size. Buffer size: ${bufferSize}`)); - return; - } - buffer.fill(chunk, pos, pos + chunk.length); - pos += chunk.length; - }); - stream.on("end", () => { - resolve(pos); - }); - stream.on("error", reject); - }); -} -/** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * - * Writes the content of a readstream to a local file. Returns a Promise which is completed after the file handle is closed. - * - * @param rs - The read stream. - * @param file - Destination file path. - */ -async function readStreamToLocalFile(rs, file) { - return new Promise((resolve, reject) => { - const ws = fs__namespace.createWriteStream(file); - rs.on("error", (err) => { - reject(err); - }); - ws.on("error", (err) => { - reject(err); - }); - ws.on("close", resolve); - rs.pipe(ws); - }); -} -/** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * - * Promisified version of fs.stat(). - */ -const fsStat = util__namespace.promisify(fs__namespace.stat); -const fsCreateReadStream = fs__namespace.createReadStream; - -/** - * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob, - * append blob, or page blob. - */ -class BlobClient extends StorageClient { - constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, - // Legacy, no fix for eslint error without breaking. 
Disable it for this interface. - /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ - options) { - options = options || {}; - let pipeline; - let url; - if (isPipelineLike(credentialOrPipelineOrContainerName)) { - // (url: string, pipeline: Pipeline) - url = urlOrConnectionString; - pipeline = credentialOrPipelineOrContainerName; - } - else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || - credentialOrPipelineOrContainerName instanceof AnonymousCredential || - coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { - // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) - url = urlOrConnectionString; - options = blobNameOrOptions; - pipeline = newPipeline(credentialOrPipelineOrContainerName, options); - } - else if (!credentialOrPipelineOrContainerName && - typeof credentialOrPipelineOrContainerName !== "string") { - // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) - // The second parameter is undefined. Use anonymous credential. - url = urlOrConnectionString; - if (blobNameOrOptions && typeof blobNameOrOptions !== "string") { - options = blobNameOrOptions; - } - pipeline = newPipeline(new AnonymousCredential(), options); - } - else if (credentialOrPipelineOrContainerName && - typeof credentialOrPipelineOrContainerName === "string" && - blobNameOrOptions && - typeof blobNameOrOptions === "string") { - // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) - const containerName = credentialOrPipelineOrContainerName; - const blobName = blobNameOrOptions; - const extractedCreds = extractConnectionStringParts(urlOrConnectionString); - if (extractedCreds.kind === "AccountConnString") { - if (coreHttp.isNode) { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); - if (!options.proxyOptions) { - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); - } - pipeline = newPipeline(sharedKeyCredential, options); - } - else { - throw new Error("Account connection string is only supported in Node.js environment"); - } - } - else if (extractedCreds.kind === "SASConnString") { - url = - appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + - "?" + - extractedCreds.accountSas; - pipeline = newPipeline(new AnonymousCredential(), options); - } - else { - throw new Error("Connection string must be either an Account connection string or a SAS connection string"); - } - } - else { - throw new Error("Expecting non-empty strings for containerName and blobName parameters"); - } - super(url, pipeline); - ({ blobName: this._name, containerName: this._containerName } = - this.getBlobAndContainerNamesFromUrl()); - this.blobContext = new Blob$1(this.storageClientContext); - this._snapshot = getURLParameter(this.url, URLConstants.Parameters.SNAPSHOT); - this._versionId = getURLParameter(this.url, URLConstants.Parameters.VERSIONID); - } - /** - * The name of the blob. - */ - get name() { - return this._name; - } - /** - * The name of the storage container the blob is associated with. 
-     */
-    get containerName() {
-        return this._containerName;
-    }
-    /**
-     * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp.
-     * Provide "" will remove the snapshot and return a Client to the base blob.
+     * Example using `for await` syntax:
      *
-     * @param snapshot - The snapshot timestamp.
-     * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp
+     * ```js
+     * // Get the pageBlobClient before you run these snippets,
+     * // Can be obtained from `blobServiceClient.getContainerClient("<your-container-name>").getPageBlobClient("<your-blob-name>");`
+     * let i = 1;
+     * for await (const pageRange of pageBlobClient.listPageRanges()) {
+     *   console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+     * }
+     * ```
+     *
+     * Example using `iter.next()`:
+     *
+     * ```js
+     * let i = 1;
+     * let iter = pageBlobClient.listPageRanges();
+     * let pageRangeItem = await iter.next();
+     * while (!pageRangeItem.done) {
+     *   console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`);
+     *   pageRangeItem = await iter.next();
+     * }
+     * ```
+     *
+     * Example using `byPage()`:
+     *
+     * ```js
+     * // passing optional maxPageSize in the page settings
+     * let i = 1;
+     * for await (const response of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) {
+     *   for (const pageRange of response) {
+     *     console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+     *   }
+     * }
+     * ```
+     *
+     * Example using paging with a marker:
+     *
+     * ```js
+     * let i = 1;
+     * let iterator = pageBlobClient.listPageRanges().byPage({ maxPageSize: 2 });
+     * let response = (await iterator.next()).value;
+     *
+     * // Prints 2 page ranges
+     * for (const pageRange of response) {
+     *   console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+     * }
+     *
+     * // Gets next marker
+     * let marker = response.continuationToken;
+     *
+     * // Passing next marker as continuationToken
+     *
+     * iterator = pageBlobClient.listPageRanges().byPage({ continuationToken: marker, maxPageSize: 10 });
+     * response = (await iterator.next()).value;
+     *
+     * // Prints 10 page ranges
+     * for (const pageRange of response) {
+     *   console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+     * }
+     * ```
+     * @param offset - Starting byte position of the page ranges.
+     * @param count - Number of bytes to get.
+     * @param options - Options to the Page Blob Get Ranges operation.
+     * @returns An asyncIterableIterator that supports paging.
      */
-    withSnapshot(snapshot) {
-        return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline);
+    listPageRanges(offset = 0, count, options = {}) {
+        options.conditions = options.conditions || {};
+        // AsyncIterableIterator to iterate over blobs
+        const iter = this.listPageRangeItems(offset, count, options);
+        return {
+            /**
+             * The next method, part of the iteration protocol
+             */
+            next() {
+                return iter.next();
+            },
+            /**
+             * The connection to the async iterator, part of the iteration protocol
+             */
+            [Symbol.asyncIterator]() {
+                return this;
+            },
+            /**
+             * Return an AsyncIterableIterator that works a page at a time
+             */
+            byPage: (settings = {}) => {
+                return this.listPageRangeItemSegments(offset, count, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options));
+            },
+        };
     }
     /**
-     * Creates a new BlobClient object pointing to a version of this blob.
-     * Provide "" will remove the versionId and return a Client to the base blob.
+     * Gets the collection of page ranges that differ between a specified snapshot and this page blob.
+     * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
      *
-     * @param versionId - The versionId.
-     * @returns A new BlobClient object pointing to the version of this blob.
+     * @param offset - Starting byte position of the page blob
+     * @param count - Number of bytes to get ranges diff.
+     * @param prevSnapshot - Timestamp of snapshot to retrieve the difference.
+     * @param options - Options to the Page Blob Get Page Ranges Diff operation.
+     * @returns Response data for the Page Blob Get Page Range Diff operation.
      */
-    withVersion(versionId) {
-        return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.VERSIONID, versionId.length === 0 ? undefined : versionId), this.pipeline);
+    async getPageRangesDiff(offset, count, prevSnapshot, options = {}) {
+        options.conditions = options.conditions || {};
+        return tracingClient.withSpan("PageBlobClient-getPageRangesDiff", options, async (updatedOptions) => {
+            var _a;
+            const result = assertResponse(await this.pageBlobContext.getPageRangesDiff({
+                abortSignal: options.abortSignal,
+                leaseAccessConditions: options.conditions,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                prevsnapshot: prevSnapshot,
+                range: rangeToString({ offset, count }),
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+            return rangeResponseFromModel(result);
+        });
     }
     /**
-     * Creates a AppendBlobClient object.
+     * listPageRangesDiffSegment returns a single segment of page ranges starting from the
+     * specified Marker for the difference between the previous snapshot and the target page blob.
+     * Use an empty Marker to start enumeration from the beginning.
+     * After getting a segment, process it, and then call listPageRangesDiffSegment again
+     * (passing the previously-returned Marker) to get the next segment.
+     * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
      *
+     * @param offset - Starting byte position of the page ranges.
+     * @param count - Number of bytes to get.
+     * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference.
+     * @param marker - A string value that identifies the portion of the get to be returned with the next get operation.
+     * @param options - Options to the Page Blob Get Page Ranges Diff operation.
      */
-    getAppendBlobClient() {
-        return new AppendBlobClient(this.url, this.pipeline);
+    async listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options = {}) {
+        return tracingClient.withSpan("PageBlobClient-getPageRangesDiffSegment", options, async (updatedOptions) => {
+            var _a;
+            return assertResponse(await this.pageBlobContext.getPageRangesDiff({
+                abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal,
+                leaseAccessConditions: options === null || options === void 0 ? void 0 : options.conditions,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.conditions), { ifTags: (_a = options === null || options === void 0 ? void 0 : options.conditions) === null || _a === void 0 ?
void 0 : _a.tagConditions }), + prevsnapshot: prevSnapshotOrUrl, + range: rangeToString({ + offset: offset, + count: count, + }), + marker: marker, + maxPageSize: options === null || options === void 0 ? void 0 : options.maxPageSize, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** - * Creates a BlockBlobClient object. + * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesDiffResponseModel} * + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param marker - A string value that identifies the portion of + * the get of page ranges to be returned with the next getting operation. The + * operation returns the ContinuationToken value within the response body if the + * getting operation did not return all page ranges remaining within the current page. + * The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of get + * items. The marker value is opaque to the client. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. */ - getBlockBlobClient() { - return new BlockBlobClient(this.url, this.pipeline); + listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options) { + return tslib.__asyncGenerator(this, arguments, function* listPageRangeDiffItemSegments_1() { + let getPageRangeItemSegmentsResponse; + if (!!marker || marker === undefined) { + do { + getPageRangeItemSegmentsResponse = yield tslib.__await(this.listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options)); + marker = getPageRangeItemSegmentsResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(getPageRangeItemSegmentsResponse)); + } while (marker); + } + }); } /** - * Creates a PageBlobClient object. + * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. */ - getPageBlobClient() { - return new PageBlobClient(this.url, this.pipeline); + listPageRangeDiffItems(offset, count, prevSnapshotOrUrl, options) { + return tslib.__asyncGenerator(this, arguments, function* listPageRangeDiffItems_1() { + var _a, e_2, _b, _c; + let marker; + try { + for (var _d = true, _e = tslib.__asyncValues(this.listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const getPageRangesSegment = _c; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); + } + } + catch (e_2_1) { e_2 = { error: e_2_1 }; } + finally { + try { + if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); + } + finally { if (e_2) throw e_2.error; } + } + }); } /** - * Reads or downloads a blob from the system, including its metadata and properties. - * You can also call Get Blob to read a snapshot. 
-     *
-     * * In Node.js, data returns in a Readable stream readableStreamBody
-     * * In browsers, data returns in a promise blobBody
+     * Returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob.
+     * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
      *
-     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob
+     * .byPage() returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob.
      *
-     * @param offset - From which position of the blob to download, greater than or equal to 0
-     * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined
-     * @param options - Optional options to Blob Download operation.
+     * Example using `for await` syntax:
      *
+     * ```js
+     * // Get the pageBlobClient before you run these snippets,
+     * // Can be obtained from `blobServiceClient.getContainerClient("<your-container-name>").getPageBlobClient("<your-blob-name>");`
+     * let i = 1;
+     * for await (const pageRange of pageBlobClient.listPageRangesDiff()) {
+     *   console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+     * }
+     * ```
      *
-     * Example usage (Node.js):
+     * Example using `iter.next()`:
      *
      * ```js
-     * // Download and convert a blob to a string
-     * const downloadBlockBlobResponse = await blobClient.download();
-     * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody);
-     * console.log("Downloaded blob content:", downloaded.toString());
+     * let i = 1;
+     * let iter = pageBlobClient.listPageRangesDiff();
+     * let pageRangeItem = await iter.next();
+     * while (!pageRangeItem.done) {
+     *   console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`);
+     *   pageRangeItem = await iter.next();
+     * }
+     * ```
      *
-     * async function streamToBuffer(readableStream) {
-     *   return new Promise((resolve, reject) => {
-     *     const chunks = [];
-     *     readableStream.on("data", (data) => {
-     *       chunks.push(data instanceof Buffer ?
data : Buffer.from(data));
-     *   });
-     *   readableStream.on("end", () => {
-     *     resolve(Buffer.concat(chunks));
-     *   });
-     *   readableStream.on("error", reject);
-     * });
+     * Example using `byPage()`:
+     *
+     * ```js
+     * // passing optional maxPageSize in the page settings
+     * let i = 1;
+     * for await (const response of pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 20 })) {
+     *   for (const pageRange of response) {
+     *     console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+     *   }
      * }
      * ```
      *
-     * Example usage (browser):
+     * Example using paging with a marker:
      *
      * ```js
-     * // Download and convert a blob to a string
-     * const downloadBlockBlobResponse = await blobClient.download();
-     * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody);
-     * console.log(
-     *   "Downloaded blob content",
-     *   downloaded
-     * );
+     * let i = 1;
+     * let iterator = pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 2 });
+     * let response = (await iterator.next()).value;
      *
-     * async function blobToString(blob: Blob): Promise<string> {
-     *   const fileReader = new FileReader();
-     *   return new Promise((resolve, reject) => {
-     *     fileReader.onloadend = (ev: any) => {
-     *       resolve(ev.target!.result);
-     *     };
-     *     fileReader.onerror = reject;
-     *     fileReader.readAsText(blob);
-     *   });
+     * // Prints 2 page ranges
+     * for (const pageRange of response) {
+     *   console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+     * }
+     *
+     * // Gets next marker
+     * let marker = response.continuationToken;
+     *
+     * // Passing next marker as continuationToken
+     *
+     * iterator = pageBlobClient.listPageRangesDiff().byPage({ continuationToken: marker, maxPageSize: 10 });
+     * response = (await iterator.next()).value;
+     *
+     * // Prints 10 page ranges
+     * for (const pageRange of response) {
+     *   console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+     * }
      * }
      * ```
+     * @param offset - Starting byte position of the page ranges.
+     * @param count - Number of bytes to get.
+     * @param prevSnapshot - Timestamp of snapshot to retrieve the difference.
+     * @param options - Options to the Page Blob Get Ranges operation.
+     * @returns An asyncIterableIterator that supports paging.
      */
-    async download(offset = 0, count, options = {}) {
-        var _a;
-        options.conditions = options.conditions || {};
+    listPageRangesDiff(offset, count, prevSnapshot, options = {}) {
        options.conditions = options.conditions || {};
-        ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
-        const { span, updatedOptions } = createSpan("BlobClient-download", options);
-        try {
-            const res = await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: {
-                    onDownloadProgress: coreHttp.isNode ? undefined : options.onProgress, // for Node.js, progress is reported by RetriableReadableStream
-                }, range: offset === 0 && !count ?
undefined : rangeToString({ offset, count }), rangeGetContentMD5: options.rangeGetContentMD5, rangeGetContentCRC64: options.rangeGetContentCrc64, snapshot: options.snapshot, cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedRes = Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); - // Return browser response immediately - if (!coreHttp.isNode) { - return wrappedRes; - } - // We support retrying when download stream unexpected ends in Node.js runtime - // Following code shouldn't be bundled into browser build, however some - // bundlers may try to bundle following code and "FileReadResponse.ts". - // In this case, "FileDownloadResponse.browser.ts" will be used as a shim of "FileDownloadResponse.ts" - // The config is in package.json "browser" field - if (options.maxRetryRequests === undefined || options.maxRetryRequests < 0) { - // TODO: Default value or make it a required parameter? - options.maxRetryRequests = DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS; - } - if (res.contentLength === undefined) { - throw new RangeError(`File download response doesn't contain valid content length header`); - } - if (!res.etag) { - throw new RangeError(`File download response doesn't contain valid etag header`); - } - return new BlobDownloadResponse(wrappedRes, async (start) => { - var _a; - const updatedDownloadOptions = { - leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ifMatch: options.conditions.ifMatch || res.etag, - ifModifiedSince: options.conditions.ifModifiedSince, - ifNoneMatch: options.conditions.ifNoneMatch, - ifUnmodifiedSince: options.conditions.ifUnmodifiedSince, - ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions, - }, - range: rangeToString({ - count: offset + res.contentLength - start, - offset: start, - }), - rangeGetContentMD5: options.rangeGetContentMD5, - rangeGetContentCRC64: options.rangeGetContentCrc64, - snapshot: options.snapshot, - cpkInfo: options.customerProvidedKey, - }; - // Debug purpose only - // console.log( - // `Read from internal stream, range: ${ - // updatedOptions.range - // }, options: ${JSON.stringify(updatedOptions)}` - // ); - return (await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal }, updatedDownloadOptions))).readableStreamBody; - }, offset, res.contentLength, { - maxRetryRequests: options.maxRetryRequests, - onProgress: options.onProgress, - }); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + // AsyncIterableIterator to iterate over blobs + const iter = this.listPageRangeDiffItems(offset, count, prevSnapshot, Object.assign({}, options)); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listPageRangeDiffItemSegments(offset, count, prevSnapshot, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options)); + }, + }; } /** - * Returns true if the Azure blob resource represented by this client exists; false otherwise. - * - * NOTE: use this function with care since an existing blob might be deleted by other clients or - * applications. Vice versa new blobs might be added by other clients or applications after this - * function completes. + * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges * - * @param options - options to Exists operation. + * @param offset - Starting byte position of the page blob + * @param count - Number of bytes to get ranges diff. + * @param prevSnapshotUrl - URL of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + * @returns Response data for the Page Blob Get Page Range Diff operation. */ - async exists(options = {}) { - const { span, updatedOptions } = createSpan("BlobClient-exists", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - await this.getProperties({ + async getPageRangesDiffForManagedDisks(offset, count, prevSnapshotUrl, options = {}) { + options.conditions = options.conditions || {}; + return tracingClient.withSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options, async (updatedOptions) => { + var _a; + const response = assertResponse(await this.pageBlobContext.getPageRangesDiff({ abortSignal: options.abortSignal, - customerProvidedKey: options.customerProvidedKey, - conditions: options.conditions, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), + prevSnapshotUrl, + range: rangeToString({ offset, count }), tracingOptions: updatedOptions.tracingOptions, - }); - return true; - } - catch (e) { - if (e.statusCode === 404) { - // Expected exception when checking blob existence - return false; - } - else if (e.statusCode === 409 && - (e.details.errorCode === BlobUsesCustomerSpecifiedEncryptionMsg || - e.details.errorCode === BlobDoesNotUseCustomerSpecifiedEncryption)) { - // Expected exception when checking blob existence - return true; - } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + })); + return rangeResponseFromModel(response); + }); } /** - * Returns all user-defined metadata, standard HTTP properties, and system properties - * for the blob. It does not return the content of the blob. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties - * - * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if - * they originally contained uppercase characters. This differs from the metadata keys returned by - * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which - * will retain their original casing. + * Resizes the page blob to the specified size (which must be a multiple of 512). + * @see https://docs.microsoft.com/rest/api/storageservices/set-blob-properties * - * @param options - Optional options to Get Properties operation. + * @param size - Target size + * @param options - Options to the Page Blob Resize operation. + * @returns Response data for the Page Blob Resize operation. */ - async getProperties(options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-getProperties", options); - try { - options.conditions = options.conditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - const res = await this.blobContext.getProperties(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); - return Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + async resize(size, options = {}) { + options.conditions = options.conditions || {}; + return tracingClient.withSpan("PageBlobClient-resize", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.resize(size, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** - * Marks the specified blob or snapshot for deletion. The blob is later deleted - * during garbage collection. 
Note that in order to delete a blob, you must delete
-     * all of its snapshots. You can delete both at the same time with the Delete
-     * Blob operation.
-     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob
+     * Sets a page blob's sequence number.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties
      *
-     * @param options - Optional options to Blob Delete operation.
+     * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number.
+     * @param sequenceNumber - Required if sequenceNumberAction is max or update
+     * @param options - Options to the Page Blob Update Sequence Number operation.
+     * @returns Response data for the Page Blob Update Sequence Number operation.
      */
-    async delete(options = {}) {
-        var _a;
-        const { span, updatedOptions } = createSpan("BlobClient-delete", options);
+    async updateSequenceNumber(sequenceNumberAction, sequenceNumber, options = {}) {
        options.conditions = options.conditions || {};
-        try {
-            return await this.blobContext.delete(Object.assign({ abortSignal: options.abortSignal, deleteSnapshots: options.deleteSnapshots, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)));
-        }
-        catch (e) {
-            span.setStatus({
-                code: coreTracing.SpanStatusCode.ERROR,
-                message: e.message,
-            });
-            throw e;
-        }
-        finally {
-            span.end();
-        }
+        return tracingClient.withSpan("PageBlobClient-updateSequenceNumber", options, async (updatedOptions) => {
+            var _a;
+            return assertResponse(await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, {
+                abortSignal: options.abortSignal,
+                blobSequenceNumber: sequenceNumber,
+                leaseAccessConditions: options.conditions,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
     }
     /**
-     * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted
-     * during garbage collection. Note that in order to delete a blob, you must delete
-     * all of its snapshots. You can delete both at the same time with the Delete
-     * Blob operation.
-     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob
+     * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob.
+     * The snapshot is copied such that only the differential changes between the previously
+     * copied snapshot are transferred to the destination.
+     * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual.
+     * @see https://docs.microsoft.com/rest/api/storageservices/incremental-copy-blob
+     * @see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/incremental-snapshots
      *
-     * @param options - Optional options to Blob Delete operation.
+     * @param copySource - Specifies the name of the source page blob snapshot. For example,
+     * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=<DateTime>
+     * @param options - Options to the Page Blob Copy Incremental operation.
+     * @returns Response data for the Page Blob Copy Incremental operation.
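+     *
+     * A minimal usage sketch (not from the original source; `pageBlobClient` and the snapshot
+     * URL below are illustrative assumptions):
+     *
+     * ```js
+     * const copySource =
+     *   "https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=2024-01-01T00:00:00.0000000Z";
+     * // Each call transfers only the differential changes since the previously copied snapshot.
+     * const response = await pageBlobClient.startCopyIncremental(copySource);
+     * console.log(response.copyStatus);
+     * ```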
     */
-    async deleteIfExists(options = {}) {
-        var _a, _b;
-        const { span, updatedOptions } = createSpan("BlobClient-deleteIfExists", options);
-        try {
-            const res = await this.delete(updatedOptions);
-            return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response });
-        }
-        catch (e) {
-            if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobNotFound") {
-                span.setStatus({
-                    code: coreTracing.SpanStatusCode.ERROR,
-                    message: "Expected exception when deleting a blob or snapshot only if it exists.",
-                });
-                return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response });
-            }
-            span.setStatus({
-                code: coreTracing.SpanStatusCode.ERROR,
-                message: e.message,
-            });
-            throw e;
+    async startCopyIncremental(copySource, options = {}) {
+        return tracingClient.withSpan("PageBlobClient-startCopyIncremental", options, async (updatedOptions) => {
+            var _a;
+            return assertResponse(await this.pageBlobContext.copyIncremental(copySource, {
+                abortSignal: options.abortSignal,
+                modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
+    }
+}
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+async function getBodyAsText(batchResponse) {
+    let buffer = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES);
+    const responseLength = await streamToBuffer2(batchResponse.readableStreamBody, buffer);
+    // Slice the buffer to trim the empty ending.
+    buffer = buffer.slice(0, responseLength);
+    return buffer.toString();
+}
+function utf8ByteLength(str) {
+    return Buffer.byteLength(str);
+}
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+const HTTP_HEADER_DELIMITER = ": ";
+const SPACE_DELIMITER = " ";
+const NOT_FOUND = -1;
+/**
+ * Util class for parsing batch response.
+ */
+class BatchResponseParser {
+    constructor(batchResponse, subRequests) {
+        if (!batchResponse || !batchResponse.contentType) {
+            // In a special case (reported), the server may return an invalid content-type which cannot be parsed.
+            throw new RangeError("batchResponse is malformed or doesn't contain valid content-type.");
        }
-        finally {
-            span.end();
+        if (!subRequests || subRequests.size === 0) {
+            // This should be prevented during coding.
+            throw new RangeError("Invalid state: subRequests is not provided or size is 0.");
        }
+        this.batchResponse = batchResponse;
+        this.subRequests = subRequests;
+        this.responseBatchBoundary = this.batchResponse.contentType.split("=")[1];
+        this.perResponsePrefix = `--${this.responseBatchBoundary}${HTTP_LINE_ENDING}`;
+        this.batchResponseEnding = `--${this.responseBatchBoundary}--`;
    }
-    /**
-     * Restores the contents and metadata of soft deleted blob and any associated
-     * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29
-     * or later.
-     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/undelete-blob
-     *
-     * @param options - Optional options to Blob Undelete operation.
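+    // Orientation only (a sketch inferred from the parsing logic below, not an excerpt of the
+    // service documentation): each sub response in the batch body is framed roughly as
+    //
+    //   --<batch boundary>
+    //   Content-Type: application/http
+    //   Content-ID: <index of the corresponding sub request>
+    //
+    //   HTTP/1.1 <status> <status message>
+    //   <header-name>: <header-value>
+    //   ...
+    //
+    //   <optional body>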
-     */
-    async undelete(options = {}) {
-        const { span, updatedOptions } = createSpan("BlobClient-undelete", options);
-        try {
-            return await this.blobContext.undelete(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions)));
+    // For example of response, please refer to https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#response
+    async parseBatchResponse() {
+        // When logic reaches here, the batch request should have already succeeded with 202, so we can further parse
+        // the sub requests' responses.
+        if (this.batchResponse._response.status !== HTTPURLConnection.HTTP_ACCEPTED) {
+            throw new Error(`Invalid state: batch request failed with status: '${this.batchResponse._response.status}'.`);
        }
-        catch (e) {
-            span.setStatus({
-                code: coreTracing.SpanStatusCode.ERROR,
-                message: e.message,
-            });
-            throw e;
+        const responseBodyAsText = await getBodyAsText(this.batchResponse);
+        const subResponses = responseBodyAsText
+            .split(this.batchResponseEnding)[0] // string after ending is useless
+            .split(this.perResponsePrefix)
+            .slice(1); // string before first response boundary is useless
+        const subResponseCount = subResponses.length;
+        // Defensive coding in case of potential error parsing.
+        // Note: subResponseCount == 1 is a special case where the sub request is invalid.
+        // We try to prevent such cases through early validation, e.g. validate sub request count >= 1.
+        // In the unexpected case of an invalid sub request, we still allow the sub response to be parsed and returned to the user.
+        if (subResponseCount !== this.subRequests.size && subResponseCount !== 1) {
+            throw new Error("Invalid state: sub responses' count is not equal to sub requests' count.");
        }
-        finally {
-            span.end();
+        const deserializedSubResponses = new Array(subResponseCount);
+        let subResponsesSucceededCount = 0;
+        let subResponsesFailedCount = 0;
+        // Parse sub responses.
+        for (let index = 0; index < subResponseCount; index++) {
+            const subResponse = subResponses[index];
+            const deserializedSubResponse = {};
+            deserializedSubResponse.headers = coreHttpCompat.toHttpHeadersLike(coreRestPipeline.createHttpHeaders());
+            const responseLines = subResponse.split(`${HTTP_LINE_ENDING}`);
+            let subRespHeaderStartFound = false;
+            let subRespHeaderEndFound = false;
+            let subRespFailed = false;
+            let contentId = NOT_FOUND;
+            for (const responseLine of responseLines) {
+                if (!subRespHeaderStartFound) {
+                    // Convention line to indicate content ID
+                    if (responseLine.startsWith(HeaderConstants.CONTENT_ID)) {
+                        contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]);
+                    }
+                    // Http version line with status code indicates the start of sub request's response.
+                    // Example: HTTP/1.1 202 Accepted
+                    if (responseLine.startsWith(HTTP_VERSION_1_1)) {
+                        subRespHeaderStartFound = true;
+                        const tokens = responseLine.split(SPACE_DELIMITER);
+                        deserializedSubResponse.status = parseInt(tokens[1]);
+                        deserializedSubResponse.statusMessage = tokens.slice(2).join(SPACE_DELIMITER);
+                    }
+                    continue; // Skip convention headers not specifically for sub request i.e. Content-Type: application/http and Content-ID: *
+                }
+                if (responseLine.trim() === "") {
+                    // Sub response's header start line already found, and the first empty line indicates header end line found.
+                    if (!subRespHeaderEndFound) {
+                        subRespHeaderEndFound = true;
+                    }
+                    continue; // Skip empty line
+                }
+                // Note: when code reaches here, it indicates subRespHeaderStartFound == true
+                if (!subRespHeaderEndFound) {
+                    if (responseLine.indexOf(HTTP_HEADER_DELIMITER) === -1) {
+                        // Defensive coding to prevent missing valuable lines.
+                        throw new Error(`Invalid state: find non-empty line '${responseLine}' without HTTP header delimiter '${HTTP_HEADER_DELIMITER}'.`);
+                    }
+                    // Parse headers of sub response.
+                    const tokens = responseLine.split(HTTP_HEADER_DELIMITER);
+                    deserializedSubResponse.headers.set(tokens[0], tokens[1]);
+                    if (tokens[0] === HeaderConstants.X_MS_ERROR_CODE) {
+                        deserializedSubResponse.errorCode = tokens[1];
+                        subRespFailed = true;
+                    }
+                }
+                else {
+                    // Assemble body of sub response.
+                    if (!deserializedSubResponse.bodyAsText) {
+                        deserializedSubResponse.bodyAsText = "";
+                    }
+                    deserializedSubResponse.bodyAsText += responseLine;
+                }
+            } // Inner for end
+            // The response will contain the Content-ID header for each corresponding subrequest response to use for tracking.
+            // The Content-IDs are set to a valid index in the subrequests we sent. In the status code 202 path, we could expect it
+            // to be 1-1 mapping from the [0, subRequests.size) to the Content-IDs returned. If not, we simply don't return that
+            // unexpected subResponse in the parsed response and we can always look it up in the raw response for debugging purposes.
+            if (contentId !== NOT_FOUND &&
+                Number.isInteger(contentId) &&
+                contentId >= 0 &&
+                contentId < this.subRequests.size &&
+                deserializedSubResponses[contentId] === undefined) {
+                deserializedSubResponse._request = this.subRequests.get(contentId);
+                deserializedSubResponses[contentId] = deserializedSubResponse;
+            }
+            else {
+                logger.error(`subResponses[${index}] is dropped as the Content-ID is not found or invalid, Content-ID: ${contentId}`);
+            }
+            if (subRespFailed) {
+                subResponsesFailedCount++;
+            }
+            else {
+                subResponsesSucceededCount++;
+            }
        }
+        return {
+            subResponses: deserializedSubResponses,
+            subResponsesSucceededCount: subResponsesSucceededCount,
+            subResponsesFailedCount: subResponsesFailedCount,
+        };
    }
+}
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+var MutexLockStatus;
+(function (MutexLockStatus) {
+    MutexLockStatus[MutexLockStatus["LOCKED"] = 0] = "LOCKED";
+    MutexLockStatus[MutexLockStatus["UNLOCKED"] = 1] = "UNLOCKED";
+})(MutexLockStatus || (MutexLockStatus = {}));
+/**
+ * An async mutex lock.
+ */
+class Mutex {
    /**
-     * Sets system properties on the blob.
-     *
-     * If no value provided, or no value provided for the specified blob HTTP headers,
-     * these blob HTTP headers without a value will be cleared.
-     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties
+     * Lock for a specific key. If the lock has been acquired by another caller, this call
+     * waits until the lock is released.
      *
-     * @param blobHTTPHeaders - If no value provided, or no value provided for
-     * the specified blob HTTP headers, these blob HTTP
-     * headers without a value will be cleared.
-     * A common header to set is `blobContentType`
-     * enabling the browser to provide functionality
-     * based on file type.
-     * @param options - Optional options to Blob Set HTTP Headers operation.
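+     *
+     * A minimal usage sketch (the key name is illustrative; `BlobBatch.addSubRequestInternal`
+     * below uses this same lock/unlock pattern):
+     *
+     * ```js
+     * await Mutex.lock("batch");
+     * try {
+     *   // critical section: only one caller per key runs here at a time
+     * } finally {
+     *   await Mutex.unlock("batch");
+     * }
+     * ```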
+ * @param key - lock key */ - async setHTTPHeaders(blobHTTPHeaders, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-setHTTPHeaders", options); - options.conditions = options.conditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blobContext.setHttpHeaders(Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + static async lock(key) { + return new Promise((resolve) => { + if (this.keys[key] === undefined || this.keys[key] === MutexLockStatus.UNLOCKED) { + this.keys[key] = MutexLockStatus.LOCKED; + resolve(); + } + else { + this.onUnlockEvent(key, () => { + this.keys[key] = MutexLockStatus.LOCKED; + resolve(); + }); + } + }); } /** - * Sets user-defined metadata for the specified blob as one or more name-value pairs. - * - * If no option provided, or no metadata defined in the parameter, the blob - * metadata will be removed. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata + * Unlock a key. * - * @param metadata - Replace existing metadata with this value. - * If no value provided the existing metadata will be removed. - * @param options - Optional options to Set Metadata operation. + * @param key - */ - async setMetadata(metadata, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-setMetadata", options); - options.conditions = options.conditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blobContext.setMetadata(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; + static async unlock(key) { + return new Promise((resolve) => { + if (this.keys[key] === MutexLockStatus.LOCKED) { + this.emitUnlockEvent(key); + } + delete this.keys[key]; + resolve(); + }); + } + static onUnlockEvent(key, handler) { + if (this.listeners[key] === undefined) { + this.listeners[key] = [handler]; } - finally { - span.end(); + else { + this.listeners[key].push(handler); } } - /** - * Sets tags on the underlying blob. - * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters. - * Valid tag key and value characters include lower and upper case letters, digits (0-9), - * space (' '), plus ('+'), minus ('-'), period ('.'), foward slash ('/'), colon (':'), equals ('='), and underscore ('_'). 
- * - * @param tags - - * @param options - - */ - async setTags(tags, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-setTags", options); - try { - return await this.blobContext.setTags(Object.assign(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)), { tags: toBlobTags(tags) })); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, + static emitUnlockEvent(key) { + if (this.listeners[key] !== undefined && this.listeners[key].length > 0) { + const handler = this.listeners[key].shift(); + setImmediate(() => { + handler.call(this); }); - throw e; - } - finally { - span.end(); } } +} +Mutex.keys = {}; +Mutex.listeners = {}; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A BlobBatch represents an aggregated set of operations on blobs. + * Currently, only `delete` and `setAccessTier` are supported. + */ +class BlobBatch { + constructor() { + this.batch = "batch"; + this.batchRequest = new InnerBatchRequest(); + } /** - * Gets the tags associated with the underlying blob. - * - * @param options - + * Get the value of Content-Type for a batch request. + * The value must be multipart/mixed with a batch boundary. + * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252 */ - async getTags(options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-getTags", options); - try { - const response = await this.blobContext.getTags(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, tags: toTags({ blobTagSet: response.blobTagSet }) || {} }); - return wrappedResponse; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + getMultiPartContentType() { + return this.batchRequest.getMultipartContentType(); } /** - * Get a {@link BlobLeaseClient} that manages leases on the blob. - * - * @param proposeLeaseId - Initial proposed lease Id. - * @returns A new BlobLeaseClient object for managing leases on the blob. + * Get assembled HTTP request body for sub requests. */ - getBlobLeaseClient(proposeLeaseId) { - return new BlobLeaseClient(this, proposeLeaseId); + getHttpRequestBody() { + return this.batchRequest.getHttpRequestBody(); } /** - * Creates a read-only snapshot of a blob. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/snapshot-blob - * - * @param options - Optional options to the Blob Create Snapshot operation. + * Get sub requests that are added into the batch request. 
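+     *
+     * Example (a hedged sketch; `credential` and the blob URL are assumed, not part of this module):
+     *
+     * ```js
+     * const batchRequest = new BlobBatch();
+     * await batchRequest.deleteBlob("https://myaccount.blob.core.windows.net/mycontainer/myblob", credential);
+     * // Keys are the 0-based Content-IDs assigned in the order sub requests were added.
+     * const subRequests = batchRequest.getSubRequests();
+     * console.log(subRequests.size); // 1
+     * ```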
*/ - async createSnapshot(options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-createSnapshot", options); - options.conditions = options.conditions || {}; + getSubRequests() { + return this.batchRequest.getSubRequests(); + } + async addSubRequestInternal(subRequest, assembleSubRequestFunc) { + await Mutex.lock(this.batch); try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blobContext.createSnapshot(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; + this.batchRequest.preAddSubRequest(subRequest); + await assembleSubRequestFunc(); + this.batchRequest.postAddSubRequest(subRequest); } finally { - span.end(); + await Mutex.unlock(this.batch); } } - /** - * Asynchronously copies a blob to a destination within the storage account. - * This method returns a long running operation poller that allows you to wait - * indefinitely until the copy is completed. - * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller. - * Note that the onProgress callback will not be invoked if the operation completes in the first - * request, and attempting to cancel a completed copy will result in an error being thrown. - * - * In version 2012-02-12 and later, the source for a Copy Blob operation can be - * a committed blob in any Azure storage account. - * Beginning with version 2015-02-21, the source for a Copy Blob operation can be - * an Azure file in any Azure storage account. - * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob - * operation to copy from another storage account. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob - * - * Example using automatic polling: - * - * ```js - * const copyPoller = await blobClient.beginCopyFromURL('url'); - * const result = await copyPoller.pollUntilDone(); - * ``` - * - * Example using manual polling: - * - * ```js - * const copyPoller = await blobClient.beginCopyFromURL('url'); - * while (!poller.isDone()) { - * await poller.poll(); - * } - * const result = copyPoller.getResult(); - * ``` - * - * Example using progress updates: - * - * ```js - * const copyPoller = await blobClient.beginCopyFromURL('url', { - * onProgress(state) { - * console.log(`Progress: ${state.copyProgress}`); - * } - * }); - * const result = await copyPoller.pollUntilDone(); - * ``` - * - * Example using a changing polling interval (default 15 seconds): - * - * ```js - * const copyPoller = await blobClient.beginCopyFromURL('url', { - * intervalInMs: 1000 // poll blob every 1 second for copy progress - * }); - * const result = await copyPoller.pollUntilDone(); - * ``` - * - * Example using copy cancellation: - * - * ```js - * const copyPoller = await blobClient.beginCopyFromURL('url'); - * // cancel operation after starting it. 
- * try { - * await copyPoller.cancelOperation(); - * // calls to get the result now throw PollerCancelledError - * await copyPoller.getResult(); - * } catch (err) { - * if (err.name === 'PollerCancelledError') { - * console.log('The copy was cancelled.'); - * } - * } - * ``` - * - * @param copySource - url to the source Azure Blob/File. - * @param options - Optional options to the Blob Start Copy From URL operation. - */ - async beginCopyFromURL(copySource, options = {}) { - const client = { - abortCopyFromURL: (...args) => this.abortCopyFromURL(...args), - getProperties: (...args) => this.getProperties(...args), - startCopyFromURL: (...args) => this.startCopyFromURL(...args), - }; - const poller = new BlobBeginCopyFromUrlPoller({ - blobClient: client, - copySource, - intervalInMs: options.intervalInMs, - onProgress: options.onProgress, - resumeFrom: options.resumeFrom, - startCopyFromURLOptions: options, - }); - // Trigger the startCopyFromURL call by calling poll. - // Any errors from this method should be surfaced to the user. - await poller.poll(); - return poller; + setBatchType(batchType) { + if (!this.batchType) { + this.batchType = batchType; + } + if (this.batchType !== batchType) { + throw new RangeError(`BlobBatch only supports one operation type per batch and it already is being used for ${this.batchType} operations.`); + } } - /** - * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero - * length and full metadata. Version 2012-02-12 and newer. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob - * - * @param copyId - Id of the Copy From URL operation. - * @param options - Optional options to the Blob Abort Copy From URL operation. - */ - async abortCopyFromURL(copyId, options = {}) { - const { span, updatedOptions } = createSpan("BlobClient-abortCopyFromURL", options); - try { - return await this.blobContext.abortCopyFromURL(copyId, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + async deleteBlob(urlOrBlobClient, credentialOrOptions, options) { + let url; + let credential; + if (typeof urlOrBlobClient === "string" && + ((coreUtil.isNode && credentialOrOptions instanceof StorageSharedKeyCredential) || + credentialOrOptions instanceof AnonymousCredential || + coreAuth.isTokenCredential(credentialOrOptions))) { + // First overload + url = urlOrBlobClient; + credential = credentialOrOptions; } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; + else if (urlOrBlobClient instanceof BlobClient) { + // Second overload + url = urlOrBlobClient.url; + credential = urlOrBlobClient.credential; + options = credentialOrOptions; } - finally { - span.end(); + else { + throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided."); } - } - /** - * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not - * return a response until the copy is complete. 
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url - * - * @param copySource - The source URL to copy from, Shared Access Signature(SAS) maybe needed for authentication - * @param options - - */ - async syncCopyFromURL(copySource, options = {}) { - var _a, _b, _c; - const { span, updatedOptions } = createSpan("BlobClient-syncCopyFromURL", options); - options.conditions = options.conditions || {}; - options.sourceConditions = options.sourceConditions || {}; - try { - return await this.blobContext.copyFromURL(copySource, Object.assign({ abortSignal: options.abortSignal, metadata: options.metadata, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions.ifMatch, - sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, - }, sourceContentMD5: options.sourceContentMD5, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, encryptionScope: options.encryptionScope, copySourceTags: options.copySourceTags }, convertTracingToRequestOptionsBase(updatedOptions))); + if (!options) { + options = {}; } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, + return tracingClient.withSpan("BatchDeleteRequest-addSubRequest", options, async (updatedOptions) => { + this.setBatchType("delete"); + await this.addSubRequestInternal({ + url: url, + credential: credential, + }, async () => { + await new BlobClient(url, this.batchRequest.createPipeline(credential)).delete(updatedOptions); }); - throw e; + }); + } + async setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options) { + let url; + let credential; + let tier; + if (typeof urlOrBlobClient === "string" && + ((coreUtil.isNode && credentialOrTier instanceof StorageSharedKeyCredential) || + credentialOrTier instanceof AnonymousCredential || + coreAuth.isTokenCredential(credentialOrTier))) { + // First overload + url = urlOrBlobClient; + credential = credentialOrTier; + tier = tierOrOptions; } - finally { - span.end(); + else if (urlOrBlobClient instanceof BlobClient) { + // Second overload + url = urlOrBlobClient.url; + credential = urlOrBlobClient.credential; + tier = credentialOrTier; + options = tierOrOptions; + } + else { + throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided."); } + if (!options) { + options = {}; + } + return tracingClient.withSpan("BatchSetTierRequest-addSubRequest", options, async (updatedOptions) => { + this.setBatchType("setAccessTier"); + await this.addSubRequestInternal({ + url: url, + credential: credential, + }, async () => { + await new BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(tier, updatedOptions); + }); + }); + } +} +/** + * Inner batch request class which is responsible for assembling and serializing sub requests. 
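+ *
+ * For orientation (a sketch inferred from the constructor and `appendSubRequestToBody` below,
+ * not an excerpt of the service documentation), each serialized sub request looks roughly like:
+ *
+ *   --batch_{batchid}
+ *   Content-Type: application/http
+ *   Content-Transfer-Encoding: binary
+ *   Content-ID: {0-based operation index}
+ *
+ *   {METHOD} {path and query} HTTP/1.1
+ *   {header-name}: {header-value}
+ *   ...
+ *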
+ * See https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#request-body for how requests are assembled. + */ +class InnerBatchRequest { + constructor() { + this.operationCount = 0; + this.body = ""; + const tempGuid = coreUtil.randomUUID(); + // batch_{batchid} + this.boundary = `batch_${tempGuid}`; + // --batch_{batchid} + // Content-Type: application/http + // Content-Transfer-Encoding: binary + this.subRequestPrefix = `--${this.boundary}${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TYPE}: application/http${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TRANSFER_ENCODING}: binary`; + // multipart/mixed; boundary=batch_{batchid} + this.multipartContentType = `multipart/mixed; boundary=${this.boundary}`; + // --batch_{batchid}-- + this.batchRequestEnding = `--${this.boundary}--`; + this.subRequests = new Map(); } /** - * Sets the tier on a blob. The operation is allowed on a page blob in a premium - * storage account and on a block blob in a blob storage account (locally redundant - * storage only). A premium page blob's tier determines the allowed size, IOPS, - * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive - * storage type. This operation does not update the blob's ETag. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier - * - * @param tier - The tier to be set on the blob. Valid values are Hot, Cool, or Archive. - * @param options - Optional options to the Blob Set Tier operation. + * Create pipeline to assemble sub requests. The idea here is to use existing + * credential and serialization/deserialization components, with additional policies to + * filter unnecessary headers, assemble sub requests into request's body + * and intercept request from going to wire. + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. */ - async setAccessTier(tier, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-setAccessTier", options); - try { - return await this.blobContext.setTier(toAccessTier(tier), Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), rehydratePriority: options.rehydratePriority }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); + createPipeline(credential) { + const corePipeline = coreRestPipeline.createEmptyPipeline(); + corePipeline.addPolicy(coreClient.serializationPolicy({ + stringifyXML: coreXml.stringifyXML, + serializerOptions: { + xml: { + xmlCharKey: "#", + }, + }, + }), { phase: "Serialize" }); + // Use batch header filter policy to exclude unnecessary headers + corePipeline.addPolicy(batchHeaderFilterPolicy()); + // Use batch assemble policy to assemble request and intercept request from going to wire + corePipeline.addPolicy(batchRequestAssemblePolicy(this), { afterPhase: "Sign" }); + if (coreAuth.isTokenCredential(credential)) { + corePipeline.addPolicy(coreRestPipeline.bearerTokenAuthenticationPolicy({ + credential, + scopes: StorageOAuthScopes, + challengeCallbacks: { authorizeRequestOnChallenge: coreClient.authorizeRequestOnTenantChallenge }, + }), { phase: "Sign" }); + } + else if (credential instanceof StorageSharedKeyCredential) { + corePipeline.addPolicy(storageSharedKeyCredentialPolicy({ + accountName: credential.accountName, + accountKey: credential.accountKey, + }), { phase: "Sign" }); + } + const pipeline = new Pipeline([]); + // attach the v2 pipeline to this one + pipeline._credential = credential; + pipeline._corePipeline = corePipeline; + return pipeline; + } + appendSubRequestToBody(request) { + // Start to assemble sub request + this.body += [ + this.subRequestPrefix, // sub request constant prefix + `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, // sub request's content ID + "", // empty line after sub request's content ID + `${request.method.toString()} ${getURLPathAndQuery(request.url)} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}`, // sub request start line with method + ].join(HTTP_LINE_ENDING); + for (const [name, value] of request.headers) { + this.body += `${name}: ${value}${HTTP_LINE_ENDING}`; } + this.body += HTTP_LINE_ENDING; // sub request's headers need be ending with an empty line + // No body to assemble for current batch request support + // End to assemble sub request } - async downloadToBuffer(param1, param2, param3, param4 = {}) { - let buffer; - let offset = 0; - let count = 0; - let options = param4; - if (param1 instanceof Buffer) { - buffer = param1; - offset = param2 || 0; - count = typeof param3 === "number" ? param3 : 0; + preAddSubRequest(subRequest) { + if (this.operationCount >= BATCH_MAX_REQUEST) { + throw new RangeError(`Cannot exceed ${BATCH_MAX_REQUEST} sub requests in a single batch`); } - else { - offset = typeof param1 === "number" ? param1 : 0; - count = typeof param2 === "number" ? 
param2 : 0; - options = param3 || {}; + // Fast fail if url for sub request is invalid + const path = getURLPath(subRequest.url); + if (!path || path === "") { + throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); } - const { span, updatedOptions } = createSpan("BlobClient-downloadToBuffer", options); - try { - if (!options.blockSize) { - options.blockSize = 0; - } - if (options.blockSize < 0) { - throw new RangeError("blockSize option must be >= 0"); - } - if (options.blockSize === 0) { - options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; - } - if (offset < 0) { - throw new RangeError("offset option must be >= 0"); - } - if (count && count <= 0) { - throw new RangeError("count option must be greater than 0"); - } - if (!options.conditions) { - options.conditions = {}; - } - // Customer doesn't specify length, get it - if (!count) { - const response = await this.getProperties(Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); - count = response.contentLength - offset; - if (count < 0) { - throw new RangeError(`offset ${offset} shouldn't be larger than blob size ${response.contentLength}`); - } - } - // Allocate the buffer of size = count if the buffer is not provided - if (!buffer) { - try { - buffer = Buffer.alloc(count); - } - catch (error) { - throw new Error(`Unable to allocate the buffer of size: ${count}(in bytes). Please try passing your own buffer to the "downloadToBuffer" method or try using other methods like "download" or "downloadToFile".\t ${error.message}`); + } + postAddSubRequest(subRequest) { + this.subRequests.set(this.operationCount, subRequest); + this.operationCount++; + } + // Return the http request body with assembling the ending line to the sub request body. 
+ getHttpRequestBody() { + return `${this.body}${this.batchRequestEnding}${HTTP_LINE_ENDING}`; + } + getMultipartContentType() { + return this.multipartContentType; + } + getSubRequests() { + return this.subRequests; + } +} +function batchRequestAssemblePolicy(batchRequest) { + return { + name: "batchRequestAssemblePolicy", + async sendRequest(request) { + batchRequest.appendSubRequestToBody(request); + return { + request, + status: 200, + headers: coreRestPipeline.createHttpHeaders(), + }; + }, + }; +} +function batchHeaderFilterPolicy() { + return { + name: "batchHeaderFilterPolicy", + async sendRequest(request, next) { + let xMsHeaderName = ""; + for (const [name] of request.headers) { + if (iEqual(name, HeaderConstants.X_MS_VERSION)) { + xMsHeaderName = name; } } - if (buffer.length < count) { - throw new RangeError(`The buffer's size should be equal to or larger than the request count of bytes: ${count}`); - } - let transferProgress = 0; - const batch = new Batch(options.concurrency); - for (let off = offset; off < offset + count; off = off + options.blockSize) { - batch.addOperation(async () => { - // Exclusive chunk end position - let chunkEnd = offset + count; - if (off + options.blockSize < chunkEnd) { - chunkEnd = off + options.blockSize; - } - const response = await this.download(off, chunkEnd - off, { - abortSignal: options.abortSignal, - conditions: options.conditions, - maxRetryRequests: options.maxRetryRequestsPerBlock, - customerProvidedKey: options.customerProvidedKey, - tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)), - }); - const stream = response.readableStreamBody; - await streamToBuffer(stream, buffer, off - offset, chunkEnd - offset); - // Update progress after block is downloaded, in case of block trying - // Could provide finer grained progress updating inside HTTP requests, - // only if convenience layer download try is enabled - transferProgress += chunkEnd - off; - if (options.onProgress) { - options.onProgress({ loadedBytes: transferProgress }); - } - }); + if (xMsHeaderName !== "") { + request.headers.delete(xMsHeaderName); // The subrequests should not have the x-ms-version header. } - await batch.do(); - return buffer; + return next(request); + }, + }; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A BlobBatchClient allows you to make batched requests to the Azure Storage Blob service. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + */ +class BlobBatchClient { + constructor(url, credentialOrPipeline, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + let pipeline; + if (isPipelineLike(credentialOrPipeline)) { + pipeline = credentialOrPipeline; } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; + else if (!credentialOrPipeline) { + // no credential provided + pipeline = newPipeline(new AnonymousCredential(), options); } - finally { - span.end(); + else { + pipeline = newPipeline(credentialOrPipeline, options); + } + const storageClientContext = new StorageContextClient(url, getCoreClientOptions(pipeline)); + const path = getURLPath(url); + if (path && path !== "/") { + // Container scoped. 
+ this.serviceOrContainerContext = storageClientContext.container; + } + else { + this.serviceOrContainerContext = storageClientContext.service; } } /** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * - * Downloads an Azure Blob to a local file. - * Fails if the the given file path already exits. - * Offset and count are optional, pass 0 and undefined respectively to download the entire blob. - * - * @param filePath - - * @param offset - From which position of the block blob to download. - * @param count - How much data to be downloaded. Will download to the end when passing undefined. - * @param options - Options to Blob download options. - * @returns The response data for blob download operation, - * but with readableStreamBody set to undefined since its - * content is already read and written into a local file - * at the specified path. + * Creates a {@link BlobBatch}. + * A BlobBatch represents an aggregated set of operations on blobs. */ - async downloadToFile(filePath, offset = 0, count, options = {}) { - const { span, updatedOptions } = createSpan("BlobClient-downloadToFile", options); - try { - const response = await this.download(offset, count, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); - if (response.readableStreamBody) { - await readStreamToLocalFile(response.readableStreamBody, filePath); + createBatch() { + return new BlobBatch(); + } + async deleteBlobs(urlsOrBlobClients, credentialOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + const batch = new BlobBatch(); + for (const urlOrBlobClient of urlsOrBlobClients) { + if (typeof urlOrBlobClient === "string") { + await batch.deleteBlob(urlOrBlobClient, credentialOrOptions, options); + } + else { + await batch.deleteBlob(urlOrBlobClient, credentialOrOptions); } - // The stream is no longer accessible so setting it to undefined. - response.blobDownloadStream = undefined; - return response; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); } + return this.submitBatch(batch); } - getBlobAndContainerNamesFromUrl() { - let containerName; - let blobName; - try { - // URL may look like the following - // "https://myaccount.blob.core.windows.net/mycontainer/blob?sasString"; - // "https://myaccount.blob.core.windows.net/mycontainer/blob"; - // "https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt?sasString"; - // "https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt"; - // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername/blob` - // http://localhost:10001/devstoreaccount1/containername/blob - const parsedUrl = coreHttp.URLBuilder.parse(this.url); - if (parsedUrl.getHost().split(".")[1] === "blob") { - // "https://myaccount.blob.core.windows.net/containername/blob". - // .getPath() -> /containername/blob - const pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); - containerName = pathComponents[1]; - blobName = pathComponents[3]; - } - else if (isIpEndpointStyle(parsedUrl)) { - // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername/blob - // Single word domain without a [dot] in the endpoint... 
Example - http://localhost:10001/devstoreaccount1/containername/blob - // .getPath() -> /devstoreaccount1/containername/blob - const pathComponents = parsedUrl.getPath().match("/([^/]*)/([^/]*)(/(.*))?"); - containerName = pathComponents[2]; - blobName = pathComponents[4]; + async setBlobsAccessTier(urlsOrBlobClients, credentialOrTier, tierOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + const batch = new BlobBatch(); + for (const urlOrBlobClient of urlsOrBlobClients) { + if (typeof urlOrBlobClient === "string") { + await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options); } else { - // "https://customdomain.com/containername/blob". - // .getPath() -> /containername/blob - const pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); - containerName = pathComponents[1]; - blobName = pathComponents[3]; - } - // decode the encoded blobName, containerName - to get all the special characters that might be present in them - containerName = decodeURIComponent(containerName); - blobName = decodeURIComponent(blobName); - // Azure Storage Server will replace "\" with "/" in the blob names - // doing the same in the SDK side so that the user doesn't have to replace "\" instances in the blobName - blobName = blobName.replace(/\\/g, "/"); - if (!containerName) { - throw new Error("Provided containerName is invalid."); + await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions); } - return { blobName, containerName }; - } - catch (error) { - throw new Error("Unable to extract blobName and containerName with provided information."); } + return this.submitBatch(batch); } /** - * Asynchronously copies a blob to a destination within the storage account. - * In version 2012-02-12 and later, the source for a Copy Blob operation can be - * a committed blob in any Azure storage account. - * Beginning with version 2015-02-21, the source for a Copy Blob operation can be - * an Azure file in any Azure storage account. - * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob - * operation to copy from another storage account. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob + * Submit batch request which consists of multiple subrequests. * - * @param copySource - url to the source Azure Blob/File. - * @param options - Optional options to the Blob Start Copy From URL operation. - */ - async startCopyFromURL(copySource, options = {}) { - var _a, _b, _c; - const { span, updatedOptions } = createSpan("BlobClient-startCopyFromURL", options); - options.conditions = options.conditions || {}; - options.sourceConditions = options.sourceConditions || {}; - try { - return await this.blobContext.startCopyFromURL(copySource, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions.ifMatch, - sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, - sourceIfTags: options.sourceConditions.tagConditions, - }, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, rehydratePriority: options.rehydratePriority, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), sealBlob: options.sealBlob }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * Only available for BlobClient constructed with a shared key credential. + * Get `blobBatchClient` and other details before running the snippets. + * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient` * - * Generates a Blob Service Shared Access Signature (SAS) URI based on the client properties - * and parameters passed in. The SAS is signed by the shared key credential of the client. + * Example usage: * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * ```js + * let batchRequest = new BlobBatch(); + * await batchRequest.deleteBlob(urlInString0, credential0); + * await batchRequest.deleteBlob(urlInString1, credential1, { + * deleteSnapshots: "include" + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` * - * @param options - Optional parameters. - * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. - */ - generateSasUrl(options) { - return new Promise((resolve) => { - if (!(this.credential instanceof StorageSharedKeyCredential)) { - throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); - } - const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), this.credential).toString(); - resolve(appendToURLQuery(this.url, sas)); - }); - } - /** - * Delete the immutablility policy on the blob. + * Example using a lease: * - * @param options - Optional options to delete immutability policy on the blob. - */ - async deleteImmutabilityPolicy(options) { - const { span, updatedOptions } = createSpan("BlobClient-deleteImmutabilityPolicy", options); - try { - return await this.blobContext.deleteImmutabilityPolicy(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * Set immutablility policy on the blob. 
+ * ```js + * let batchRequest = new BlobBatch(); + * await batchRequest.setBlobAccessTier(blockBlobClient0, "Cool"); + * await batchRequest.setBlobAccessTier(blockBlobClient1, "Cool", { + * conditions: { leaseId: leaseId } + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` * - * @param options - Optional options to set immutability policy on the blob. - */ - async setImmutabilityPolicy(immutabilityPolicy, options) { - const { span, updatedOptions } = createSpan("BlobClient-setImmutabilityPolicy", options); - try { - return await this.blobContext.setImmutabilityPolicy(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, immutabilityPolicyExpiry: immutabilityPolicy.expiriesOn, immutabilityPolicyMode: immutabilityPolicy.policyMode, modifiedAccessConditions: options === null || options === void 0 ? void 0 : options.modifiedAccessCondition }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * Set legal hold on the blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch * - * @param options - Optional options to set legal hold on the blob. + * @param batchRequest - A set of Delete or SetTier operations. + * @param options - */ - async setLegalHold(legalHoldEnabled, options) { - const { span, updatedOptions } = createSpan("BlobClient-setLegalHold", options); - try { - return await this.blobContext.setLegalHold(legalHoldEnabled, Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); + async submitBatch(batchRequest, options = {}) { + if (!batchRequest || batchRequest.getSubRequests().size === 0) { + throw new RangeError("Batch request should contain one or more sub requests."); } + return tracingClient.withSpan("BlobBatchClient-submitBatch", options, async (updatedOptions) => { + const batchRequestBody = batchRequest.getHttpRequestBody(); + // ServiceSubmitBatchResponseModel and ContainerSubmitBatchResponse are compatible for now. + const rawBatchResponse = assertResponse(await this.serviceOrContainerContext.submitBatch(utf8ByteLength(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, Object.assign({}, updatedOptions))); + // Parse the sub responses result, if logic reaches here(i.e. the batch request succeeded with status code 202). + const batchResponseParser = new BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests()); + const responseSummary = await batchResponseParser.parseBatchResponse(); + const res = { + _response: rawBatchResponse._response, + contentType: rawBatchResponse.contentType, + errorCode: rawBatchResponse.errorCode, + requestId: rawBatchResponse.requestId, + clientRequestId: rawBatchResponse.clientRequestId, + version: rawBatchResponse.version, + subResponses: responseSummary.subResponses, + subResponsesSucceededCount: responseSummary.subResponsesSucceededCount, + subResponsesFailedCount: responseSummary.subResponsesFailedCount, + }; + return res; + }); } } + /** - * AppendBlobClient defines a set of operations applicable to append blobs. 
+ * A ContainerClient represents a URL to the Azure Storage container allowing you to manipulate its blobs. */ -class AppendBlobClient extends BlobClient { - constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, +class ContainerClient extends StorageClient { + /** + * The name of the container. + */ + get containerName() { + return this._containerName; + } + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, // Legacy, no fix for eslint error without breaking. Disable it for this interface. /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ options) { - // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. - // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); let pipeline; let url; options = options || {}; @@ -32144,35 +28472,31 @@ class AppendBlobClient extends BlobClient { url = urlOrConnectionString; pipeline = credentialOrPipelineOrContainerName; } - else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + else if ((coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || credentialOrPipelineOrContainerName instanceof AnonymousCredential || - coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { - // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) url = urlOrConnectionString; + coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) url = urlOrConnectionString; - options = blobNameOrOptions; pipeline = newPipeline(credentialOrPipelineOrContainerName, options); } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) - url = urlOrConnectionString; // The second parameter is undefined. Use anonymous credential. 
+ url = urlOrConnectionString; pipeline = newPipeline(new AnonymousCredential(), options); } else if (credentialOrPipelineOrContainerName && - typeof credentialOrPipelineOrContainerName === "string" && - blobNameOrOptions && - typeof blobNameOrOptions === "string") { + typeof credentialOrPipelineOrContainerName === "string") { // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) const containerName = credentialOrPipelineOrContainerName; - const blobName = blobNameOrOptions; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - if (coreHttp.isNode) { + if (coreUtil.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + url = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)); if (!options.proxyOptions) { - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); } pipeline = newPipeline(sharedKeyCredential, options); } @@ -32182,7 +28506,7 @@ class AppendBlobClient extends BlobClient { } else if (extractedCreds.kind === "SASConnString") { url = - appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)) + "?" + extractedCreds.accountSas; pipeline = newPipeline(new AnonymousCredential(), options); @@ -32192,1428 +28516,911 @@ class AppendBlobClient extends BlobClient { } } else { - throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + throw new Error("Expecting non-empty strings for containerName parameter"); } super(url, pipeline); - this.appendBlobContext = new AppendBlob(this.storageClientContext); - } - /** - * Creates a new AppendBlobClient object identical to the source but with the - * specified snapshot timestamp. - * Provide "" will remove the snapshot and return a Client to the base blob. - * - * @param snapshot - The snapshot timestamp. - * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp. - */ - withSnapshot(snapshot) { - return new AppendBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + this._containerName = this.getContainerNameFromUrl(); + this.containerContext = this.storageClientContext.container; } /** - * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. - * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * Creates a new container under the specified account. If the container with + * the same name already exists, the operation fails. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata * - * @param options - Options to the Append Block Create operation. + * @param options - Options to Container Create operation. 
* * * Example usage: * * ```js - * const appendBlobClient = containerClient.getAppendBlobClient(""); - * await appendBlobClient.create(); + * const containerClient = blobServiceClient.getContainerClient(""); + * const createContainerResponse = await containerClient.create(); + * console.log("Container was created successfully", createContainerResponse.requestId); * ``` */ async create(options = {}) { - var _a, _b, _c; - const { span, updatedOptions } = createSpan("AppendBlobClient-create", options); - options.conditions = options.conditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.appendBlobContext.create(0, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("ContainerClient-create", options, async (updatedOptions) => { + return assertResponse(await this.containerContext.create(updatedOptions)); + }); } /** - * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. - * If the blob with the same name already exists, the content of the existing blob will remain unchanged. - * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * Creates a new container under the specified account. If the container with + * the same name already exists, it is not changed. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata * * @param options - */ async createIfNotExists(options = {}) { - var _a, _b; - const { span, updatedOptions } = createSpan("AppendBlobClient-createIfNotExists", options); - const conditions = { ifNoneMatch: ETagAny }; - try { - const res = await this.create(Object.assign(Object.assign({}, updatedOptions), { conditions })); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); - } - catch (e) { - if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when creating a blob only if it does not already exist.", - }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? 
void 0 : _b.parsedHeaders), { _response: e.response }); + return tracingClient.withSpan("ContainerClient-createIfNotExists", options, async (updatedOptions) => { + var _a, _b; + try { + const res = await this.create(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerAlreadyExists") { + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + else { + throw e; + } + } + }); } /** - * Seals the append blob, making it read only. + * Returns true if the Azure container resource represented by this client exists; false otherwise. + * + * NOTE: use this function with care since an existing container might be deleted by other clients or + * applications. Vice versa new containers with the same name might be added by other clients or + * applications after this function completes. * * @param options - */ - async seal(options = {}) { - var _a; - const { span, updatedOptions } = createSpan("AppendBlobClient-seal", options); - options.conditions = options.conditions || {}; - try { - return await this.appendBlobContext.seal(Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + async exists(options = {}) { + return tracingClient.withSpan("ContainerClient-exists", options, async (updatedOptions) => { + try { + await this.getProperties({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + }); + return true; + } + catch (e) { + if (e.statusCode === 404) { + return false; + } + throw e; + } + }); } /** - * Commits a new block of data to the end of the existing append blob. - * @see https://docs.microsoft.com/rest/api/storageservices/append-block + * Creates a {@link BlobClient} * - * @param body - Data to be appended. - * @param contentLength - Length of the body in bytes. - * @param options - Options to the Append Block operation. + * @param blobName - A blob name + * @returns A new BlobClient object for the given blob name. + */ + getBlobClient(blobName) { + return new BlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); + } + /** + * Creates an {@link AppendBlobClient} + * + * @param blobName - An append blob name + */ + getAppendBlobClient(blobName) { + return new AppendBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); + } + /** + * Creates a {@link BlockBlobClient} + * + * @param blobName - A block blob name * * * Example usage: * * ```js - * const content = "Hello World!"; - * - * // Create a new append blob and append data to the blob. 
- * const newAppendBlobClient = containerClient.getAppendBlobClient(""); - * await newAppendBlobClient.create(); - * await newAppendBlobClient.appendBlock(content, content.length); + * const content = "Hello world!"; * - * // Append data to an existing append blob. - * const existingAppendBlobClient = containerClient.getAppendBlobClient(""); - * await existingAppendBlobClient.appendBlock(content, content.length); + * const blockBlobClient = containerClient.getBlockBlobClient(""); + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); * ``` */ - async appendBlock(body, contentLength, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("AppendBlobClient-appendBlock", options); - options.conditions = options.conditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.appendBlobContext.appendBlock(contentLength, body, Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { - onUploadProgress: options.onProgress, - }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + getBlockBlobClient(blobName) { + return new BlockBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); } /** - * The Append Block operation commits a new block of data to the end of an existing append blob - * where the contents are read from a source url. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/append-block-from-url + * Creates a {@link PageBlobClient} * - * @param sourceURL - - * The url to the blob that will be the source of the copy. A source blob in the same storage account can - * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob - * must either be public or must be authenticated via a shared access signature. If the source blob is - * public, no authentication is required to perform the operation. 
- * @param sourceOffset - Offset in source to be appended - * @param count - Number of bytes to be appended as a block - * @param options - + * @param blobName - A page blob name */ - async appendBlockFromURL(sourceURL, sourceOffset, count, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("AppendBlobClient-appendBlockFromURL", options); - options.conditions = options.conditions || {}; - options.sourceConditions = options.sourceConditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, Object.assign({ abortSignal: options.abortSignal, sourceRange: rangeToString({ offset: sourceOffset, count }), sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, appendPositionAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions.ifMatch, - sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, - }, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + getPageBlobClient(blobName) { + return new PageBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); } -} -/** - * BlockBlobClient defines a set of operations applicable to block blobs. - */ -class BlockBlobClient extends BlobClient { - constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, - // Legacy, no fix for eslint error without breaking. Disable it for this interface. - /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ - options) { - // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. - // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); - let pipeline; - let url; - options = options || {}; - if (isPipelineLike(credentialOrPipelineOrContainerName)) { - // (url: string, pipeline: Pipeline) - url = urlOrConnectionString; - pipeline = credentialOrPipelineOrContainerName; - } - else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || - credentialOrPipelineOrContainerName instanceof AnonymousCredential || - coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { - // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) - url = urlOrConnectionString; - options = blobNameOrOptions; - pipeline = newPipeline(credentialOrPipelineOrContainerName, options); - } - else if (!credentialOrPipelineOrContainerName && - typeof credentialOrPipelineOrContainerName !== "string") { - // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) - // The second parameter is undefined. Use anonymous credential. 
- url = urlOrConnectionString; - if (blobNameOrOptions && typeof blobNameOrOptions !== "string") { - options = blobNameOrOptions; - } - pipeline = newPipeline(new AnonymousCredential(), options); - } - else if (credentialOrPipelineOrContainerName && - typeof credentialOrPipelineOrContainerName === "string" && - blobNameOrOptions && - typeof blobNameOrOptions === "string") { - // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) - const containerName = credentialOrPipelineOrContainerName; - const blobName = blobNameOrOptions; - const extractedCreds = extractConnectionStringParts(urlOrConnectionString); - if (extractedCreds.kind === "AccountConnString") { - if (coreHttp.isNode) { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); - if (!options.proxyOptions) { - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); - } - pipeline = newPipeline(sharedKeyCredential, options); - } - else { - throw new Error("Account connection string is only supported in Node.js environment"); - } - } - else if (extractedCreds.kind === "SASConnString") { - url = - appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + - "?" + - extractedCreds.accountSas; - pipeline = newPipeline(new AnonymousCredential(), options); - } - else { - throw new Error("Connection string must be either an Account connection string or a SAS connection string"); - } - } - else { - throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + /** + * Returns all user-defined metadata and system properties for the specified + * container. The data returned does not include the container's list of blobs. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-properties + * + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. This differs from the metadata keys returned by + * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which + * will retain their original casing. + * + * @param options - Options to Container Get Properties operation. + */ + async getProperties(options = {}) { + if (!options.conditions) { + options.conditions = {}; } - super(url, pipeline); - this.blockBlobContext = new BlockBlob(this.storageClientContext); - this._blobContext = new Blob$1(this.storageClientContext); + return tracingClient.withSpan("ContainerClient-getProperties", options, async (updatedOptions) => { + return assertResponse(await this.containerContext.getProperties(Object.assign(Object.assign({ abortSignal: options.abortSignal }, options.conditions), { tracingOptions: updatedOptions.tracingOptions }))); + }); } /** - * Creates a new BlockBlobClient object identical to the source but with the - * specified snapshot timestamp. - * Provide "" will remove the snapshot and return a URL to the base blob. + * Marks the specified container for deletion. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container * - * @param snapshot - The snapshot timestamp. 
- * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp. + * @param options - Options to Container Delete operation. */ - withSnapshot(snapshot) { - return new BlockBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + async delete(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + return tracingClient.withSpan("ContainerClient-delete", options, async (updatedOptions) => { + return assertResponse(await this.containerContext.delete({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * - * Quick query for a JSON or CSV formatted blob. - * - * Example usage (Node.js): - * - * ```js - * // Query and convert a blob to a string - * const queryBlockBlobResponse = await blockBlobClient.query("select * from BlobStorage"); - * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString(); - * console.log("Query blob content:", downloaded); - * - * async function streamToBuffer(readableStream) { - * return new Promise((resolve, reject) => { - * const chunks = []; - * readableStream.on("data", (data) => { - * chunks.push(data instanceof Buffer ? data : Buffer.from(data)); - * }); - * readableStream.on("end", () => { - * resolve(Buffer.concat(chunks)); - * }); - * readableStream.on("error", reject); - * }); - * } - * ``` + * Marks the specified container for deletion if it exists. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container * - * @param query - - * @param options - + * @param options - Options to Container Delete operation. */ - async query(query, options = {}) { - var _a; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - const { span, updatedOptions } = createSpan("BlockBlobClient-query", options); - try { - if (!coreHttp.isNode) { - throw new Error("This operation currently is only supported in Node.js."); + async deleteIfExists(options = {}) { + return tracingClient.withSpan("ContainerClient-deleteIfExists", options, async (updatedOptions) => { + var _a, _b; + try { + const res = await this.delete(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - const response = await this._blobContext.query(Object.assign({ abortSignal: options.abortSignal, queryRequest: { - queryType: "SQL", - expression: query, - inputSerialization: toQuerySerialization(options.inputTextConfiguration), - outputSerialization: toQuerySerialization(options.outputTextConfiguration), - }, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); - return new BlobQueryResponse(response, { - abortSignal: options.abortSignal, - onProgress: options.onProgress, - onError: options.onError, - }); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerNotFound") { + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + throw e; + } + }); } /** - * Creates a new block blob, or updates the content of an existing block blob. - * Updating an existing block blob overwrites any existing metadata on the blob. - * Partial updates are not supported; the content of the existing blob is - * overwritten with the new content. To perform a partial update of a block blob's, - * use {@link stageBlock} and {@link commitBlockList}. - * - * This is a non-parallel uploading method, please use {@link uploadFile}, - * {@link uploadStream} or {@link uploadBrowserData} for better performance - * with concurrency uploading. - * - * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * Sets one or more user-defined name-value pairs for the specified container. * - * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function - * which returns a new Readable stream whose offset is from data source beginning. - * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a - * string including non non-Base64/Hex-encoded characters. - * @param options - Options to the Block Blob Upload operation. - * @returns Response data for the Block Blob Upload operation. + * If no option provided, or no metadata defined in the parameter, the container + * metadata will be removed. * - * Example usage: + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-metadata * - * ```js - * const content = "Hello world!"; - * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); - * ``` + * @param metadata - Replace existing metadata with this value. + * If no value provided the existing metadata will be removed. + * @param options - Options to Container Set Metadata operation. */ - async upload(body, contentLength, options = {}) { - var _a, _b, _c; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("BlockBlobClient-upload", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.upload(contentLength, body, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { - onUploadProgress: options.onProgress, - }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? 
void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; + async setMetadata(metadata, options = {}) { + if (!options.conditions) { + options.conditions = {}; } - finally { - span.end(); + if (options.conditions.ifUnmodifiedSince) { + throw new RangeError("the IfUnmodifiedSince must have their default values because they are ignored by the blob service"); } + return tracingClient.withSpan("ContainerClient-setMetadata", options, async (updatedOptions) => { + return assertResponse(await this.containerContext.setMetadata({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + metadata, + modifiedAccessConditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** - * Creates a new Block Blob where the contents of the blob are read from a given URL. - * This API is supported beginning with the 2020-04-08 version. Partial updates - * are not supported with Put Blob from URL; the content of an existing blob is overwritten with - * the content of the new blob. To perform partial updates to a block blob’s contents using a - * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}. + * Gets the permissions for the specified container. The permissions indicate + * whether container data may be accessed publicly. * - * @param sourceURL - Specifies the URL of the blob. The value - * may be a URL of up to 2 KB in length that specifies a blob. - * The value should be URL-encoded as it would appear - * in a request URI. The source blob must either be public - * or must be authenticated via a shared access signature. - * If the source blob is public, no authentication is required - * to perform the operation. Here are some examples of source object URLs: - * - https://myaccount.blob.core.windows.net/mycontainer/myblob - * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= - * @param options - Optional parameters. + * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings. + * For example, new Date("2018-12-31T03:44:23.8827891Z").toISOString() will get "2018-12-31T03:44:23.882Z". + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-acl + * + * @param options - Options to Container Get Access Policy operation. */ - async syncUploadFromURL(sourceURL, options = {}) { - var _a, _b, _c, _d, _e; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("BlockBlobClient-syncUploadFromURL", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.putBlobFromUrl(0, sourceURL, Object.assign(Object.assign(Object.assign({}, options), { blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: options.conditions.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: (_a = options.sourceConditions) === null || _a === void 0 ? void 0 : _a.ifMatch, - sourceIfModifiedSince: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifModifiedSince, - sourceIfNoneMatch: (_c = options.sourceConditions) === null || _c === void 0 ? 
void 0 : _c.ifNoneMatch, - sourceIfUnmodifiedSince: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifUnmodifiedSince, - sourceIfTags: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.tagConditions, - }, cpkInfo: options.customerProvidedKey, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), copySourceTags: options.copySourceTags }), convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); + async getAccessPolicy(options = {}) { + if (!options.conditions) { + options.conditions = {}; } + return tracingClient.withSpan("ContainerClient-getAccessPolicy", options, async (updatedOptions) => { + const response = assertResponse(await this.containerContext.getAccessPolicy({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + })); + const res = { + _response: response._response, + blobPublicAccess: response.blobPublicAccess, + date: response.date, + etag: response.etag, + errorCode: response.errorCode, + lastModified: response.lastModified, + requestId: response.requestId, + clientRequestId: response.clientRequestId, + signedIdentifiers: [], + version: response.version, + }; + for (const identifier of response) { + let accessPolicy = undefined; + if (identifier.accessPolicy) { + accessPolicy = { + permissions: identifier.accessPolicy.permissions, + }; + if (identifier.accessPolicy.expiresOn) { + accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn); + } + if (identifier.accessPolicy.startsOn) { + accessPolicy.startsOn = new Date(identifier.accessPolicy.startsOn); + } + } + res.signedIdentifiers.push({ + accessPolicy, + id: identifier.id, + }); + } + return res; + }); } /** - * Uploads the specified block to the block blob's "staging area" to be later - * committed by a call to commitBlockList. - * @see https://docs.microsoft.com/rest/api/storageservices/put-block + * Sets the permissions for the specified container. The permissions indicate + * whether blobs in a container may be accessed publicly. * - * @param blockId - A 64-byte value that is base64-encoded - * @param body - Data to upload to the staging area. - * @param contentLength - Number of bytes to upload. - * @param options - Options to the Block Blob Stage Block operation. - * @returns Response data for the Block Blob Stage Block operation. + * When you set permissions for a container, the existing permissions are replaced. + * If no access or containerAcl provided, the existing container ACL will be + * removed. + * + * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect. + * During this interval, a shared access signature that is associated with the stored access policy will + * fail with status code 403 (Forbidden), until the access policy becomes active. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl + * + * @param access - The level of public access to data in the container. + * @param containerAcl - Array of elements each having a unique Id and details of the access policy. + * @param options - Options to Container Set Access Policy operation. 
     */
-    async stageBlock(blockId, body, contentLength, options = {}) {
-        const { span, updatedOptions } = createSpan("BlockBlobClient-stageBlock", options);
-        try {
-            ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
-            return await this.blockBlobContext.stageBlock(blockId, contentLength, body, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, requestOptions: {
-                onUploadProgress: options.onProgress,
-            }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions)));
-        }
-        catch (e) {
-            span.setStatus({
-                code: coreTracing.SpanStatusCode.ERROR,
-                message: e.message,
-            });
-            throw e;
-        }
-        finally {
-            span.end();
-        }
+    async setAccessPolicy(access, containerAcl, options = {}) {
+        options.conditions = options.conditions || {};
+        return tracingClient.withSpan("ContainerClient-setAccessPolicy", options, async (updatedOptions) => {
+            const acl = [];
+            for (const identifier of containerAcl || []) {
+                acl.push({
+                    accessPolicy: {
+                        expiresOn: identifier.accessPolicy.expiresOn
+                            ? truncatedISO8061Date(identifier.accessPolicy.expiresOn)
+                            : "",
+                        permissions: identifier.accessPolicy.permissions,
+                        startsOn: identifier.accessPolicy.startsOn
+                            ? truncatedISO8061Date(identifier.accessPolicy.startsOn)
+                            : "",
+                    },
+                    id: identifier.id,
+                });
+            }
+            return assertResponse(await this.containerContext.setAccessPolicy({
+                abortSignal: options.abortSignal,
+                access,
+                containerAcl: acl,
+                leaseAccessConditions: options.conditions,
+                modifiedAccessConditions: options.conditions,
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+        });
     }
     /**
-     * The Stage Block From URL operation creates a new block to be committed as part
-     * of a blob where the contents are read from a URL.
-     * This API is available starting in version 2018-03-28.
-     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-block-from-url
+     * Get a {@link BlobLeaseClient} that manages leases on the container.
      *
-     * @param blockId - A 64-byte value that is base64-encoded
-     * @param sourceURL - Specifies the URL of the blob. The value
-     *                    may be a URL of up to 2 KB in length that specifies a blob.
-     *                    The value should be URL-encoded as it would appear
-     *                    in a request URI. The source blob must either be public
-     *                    or must be authenticated via a shared access signature.
-     *                    If the source blob is public, no authentication is required
-     *                    to perform the operation. Here are some examples of source object URLs:
-     *                    - https://myaccount.blob.core.windows.net/mycontainer/myblob
-     *                    - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=
-     * @param offset - From which position of the blob to download, greater than or equal to 0
-     * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined
-     * @param options - Options to the Block Blob Stage Block From URL operation.
-     * @returns Response data for the Block Blob Stage Block From URL operation.
+     * @param proposeLeaseId - Initial proposed lease Id.
+     * @returns A new BlobLeaseClient object for managing leases on the container.
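+     *
+     * For illustration, a minimal usage sketch (the 30-second duration is an example;
+     * the service accepts durations of 15-60 seconds, or -1 for an infinite lease):
+     *
+     * ```js
+     * const leaseClient = containerClient.getBlobLeaseClient();
+     * await leaseClient.acquireLease(30); // hold a lease on the container
+     * // ... do work that requires exclusive access ...
+     * await leaseClient.releaseLease();
+     * ```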
     */
-    async stageBlockFromURL(blockId, sourceURL, offset = 0, count, options = {}) {
-        const { span, updatedOptions } = createSpan("BlockBlobClient-stageBlockFromURL", options);
-        try {
-            ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
-            return await this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, sourceRange: offset === 0 && !count ? undefined : rangeToString({ offset, count }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization) }, convertTracingToRequestOptionsBase(updatedOptions)));
-        }
-        catch (e) {
-            span.setStatus({
-                code: coreTracing.SpanStatusCode.ERROR,
-                message: e.message,
-            });
-            throw e;
-        }
-        finally {
-            span.end();
-        }
+    getBlobLeaseClient(proposeLeaseId) {
+        return new BlobLeaseClient(this, proposeLeaseId);
     }
     /**
-     * Writes a blob by specifying the list of block IDs that make up the blob.
-     * In order to be written as part of a blob, a block must have been successfully written
-     * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to
-     * update a blob by uploading only those blocks that have changed, then committing the new and existing
-     * blocks together. Any blocks not specified in the block list and permanently deleted.
-     * @see https://docs.microsoft.com/rest/api/storageservices/put-block-list
+     * Creates a new block blob, or updates the content of an existing block blob.
      *
-     * @param blocks - Array of 64-byte value that is base64-encoded
-     * @param options - Options to the Block Blob Commit Block List operation.
-     * @returns Response data for the Block Blob Commit Block List operation.
+     * Updating an existing block blob overwrites any existing metadata on the blob.
+     * Partial updates are not supported; the content of the existing blob is
+     * overwritten with the new content. To perform a partial update of a block blob's
+     * contents, use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}.
+     *
+     * This is a non-parallel uploading method; please use {@link BlockBlobClient.uploadFile},
+     * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better
+     * performance with concurrent uploading.
+     *
+     * @see https://docs.microsoft.com/rest/api/storageservices/put-blob
+     *
+     * @param blobName - Name of the block blob to create or update.
+     * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function
+     *               which returns a new Readable stream whose offset is from data source beginning.
+     * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a
+     *                        string, including non-Base64/Hex-encoded characters.
+     * @param options - Options to configure the Block Blob Upload operation.
+     * @returns Block Blob upload response data and the corresponding BlockBlobClient instance.
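+     *
+     * For illustration, a minimal usage sketch (the blob name and content are example values):
+     *
+     * ```js
+     * const content = "Hello world!";
+     * // Pass the byte length, not the character count, so non-ASCII content is sized correctly.
+     * const { blockBlobClient } = await containerClient.uploadBlockBlob(
+     *   "greeting.txt",
+     *   content,
+     *   Buffer.byteLength(content)
+     * );
+     * ```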
     */
-    async commitBlockList(blocks, options = {}) {
-        var _a, _b, _c;
-        options.conditions = options.conditions || {};
-        const { span, updatedOptions } = createSpan("BlockBlobClient-commitBlockList", options);
-        try {
-            ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
-            return await this.blockBlobContext.commitBlockList({ latest: blocks }, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions)));
-        }
-        catch (e) {
-            span.setStatus({
-                code: coreTracing.SpanStatusCode.ERROR,
-                message: e.message,
-            });
-            throw e;
-        }
-        finally {
-            span.end();
-        }
+    async uploadBlockBlob(blobName, body, contentLength, options = {}) {
+        return tracingClient.withSpan("ContainerClient-uploadBlockBlob", options, async (updatedOptions) => {
+            const blockBlobClient = this.getBlockBlobClient(blobName);
+            const response = await blockBlobClient.upload(body, contentLength, updatedOptions);
+            return {
+                blockBlobClient,
+                response,
+            };
+        });
     }
     /**
-     * Returns the list of blocks that have been uploaded as part of a block blob
-     * using the specified block list filter.
-     * @see https://docs.microsoft.com/rest/api/storageservices/get-block-list
+     * Marks the specified blob or snapshot for deletion. The blob is later deleted
+     * during garbage collection. Note that in order to delete a blob, you must delete
+     * all of its snapshots. You can delete both at the same time with the Delete
+     * Blob operation.
+     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob
      *
-     * @param listType - Specifies whether to return the list of committed blocks,
-     *                   the list of uncommitted blocks, or both lists together.
-     * @param options - Options to the Block Blob Get Block List operation.
-     * @returns Response data for the Block Blob Get Block List operation.
+     * @param blobName -
+     * @param options - Options to Blob Delete operation.
+     * @returns Block blob deletion response data.
      */
-    async getBlockList(listType, options = {}) {
-        var _a;
-        const { span, updatedOptions } = createSpan("BlockBlobClient-getBlockList", options);
-        try {
-            const res = await this.blockBlobContext.getBlockList(listType, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)));
-            if (!res.committedBlocks) {
-                res.committedBlocks = [];
-            }
-            if (!res.uncommittedBlocks) {
-                res.uncommittedBlocks = [];
+    async deleteBlob(blobName, options = {}) {
+        return tracingClient.withSpan("ContainerClient-deleteBlob", options, async (updatedOptions) => {
+            let blobClient = this.getBlobClient(blobName);
+            if (options.versionId) {
+                blobClient = blobClient.withVersion(options.versionId);
             }
-            return res;
-        }
-        catch (e) {
-            span.setStatus({
-                code: coreTracing.SpanStatusCode.ERROR,
-                message: e.message,
-            });
-            throw e;
-        }
-        finally {
-            span.end();
-        }
+            return blobClient.delete(updatedOptions);
+        });
     }
-    // High level functions
     /**
-     * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob.
+     * listBlobFlatSegment returns a single segment of blobs starting from the
+     * specified Marker. Use an empty Marker to start enumeration from the beginning.
+     * After getting a segment, process it, and then call listBlobFlatSegment again
+     * (passing the previously-returned Marker) to get the next segment.
+     * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs
      *
-     * When data length is no more than the specifiled {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is
-     * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload.
-     * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList}
-     * to commit the block list.
+     * @param marker - A string value that identifies the portion of the list to be returned with the next list operation.
+     * @param options - Options to Container List Blob Flat Segment operation.
+     */
+    async listBlobFlatSegment(marker, options = {}) {
+        return tracingClient.withSpan("ContainerClient-listBlobFlatSegment", options, async (updatedOptions) => {
+            const response = assertResponse(await this.containerContext.listBlobFlatSegment(Object.assign(Object.assign({ marker }, options), { tracingOptions: updatedOptions.tracingOptions })));
+            const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInternal) => {
+                    const blobItem = Object.assign(Object.assign({}, blobItemInternal), { name: BlobNameToString(blobItemInternal.name), tags: toTags(blobItemInternal.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInternal.objectReplicationMetadata) });
+                    return blobItem;
+                }) }) });
+            return wrappedResponse;
+        });
+    }
+    /**
+     * listBlobHierarchySegment returns a single segment of blobs starting from
+     * the specified Marker. Use an empty Marker to start enumeration from the
+     * beginning. After getting a segment, process it, and then call listBlobHierarchySegment
+     * again (passing the previously-returned Marker) to get the next segment.
+     * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs
      *
-     * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is
-     * `blobContentType`, enabling the browser to provide
-     * functionality based on file type.
+     * @param delimiter - The character or string used to define the virtual hierarchy
+     * @param marker - A string value that identifies the portion of the list to be returned with the next list operation.
+     * @param options - Options to Container List Blob Hierarchy Segment operation.
+     */
+    async listBlobHierarchySegment(delimiter, marker, options = {}) {
+        return tracingClient.withSpan("ContainerClient-listBlobHierarchySegment", options, async (updatedOptions) => {
+            var _a;
+            const response = assertResponse(await this.containerContext.listBlobHierarchySegment(delimiter, Object.assign(Object.assign({ marker }, options), { tracingOptions: updatedOptions.tracingOptions })));
+            const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInternal) => {
+                    const blobItem = Object.assign(Object.assign({}, blobItemInternal), { name: BlobNameToString(blobItemInternal.name), tags: toTags(blobItemInternal.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInternal.objectReplicationMetadata) });
+                    return blobItem;
+                }), blobPrefixes: (_a = response.segment.blobPrefixes) === null || _a === void 0 ? void 0 : _a.map((blobPrefixInternal) => {
+                    const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) });
+                    return blobPrefix;
+                }) }) });
+            return wrappedResponse;
+        });
+    }
+    /**
+     * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse
      *
+     * @param marker - A string value that identifies the portion of
+     *                 the list of blobs to be returned with the next listing operation. The
+     *                 operation returns the ContinuationToken value within the response body if the
+     *                 listing operation did not return all blobs remaining to be listed
+     *                 with the current page. The ContinuationToken value can be used as the value for
+     *                 the marker parameter in a subsequent call to request the next page of list
+     *                 items. The marker value is opaque to the client.
+     * @param options - Options to list blobs operation.
      */
-    async uploadData(data, options = {}) {
-        const { span, updatedOptions } = createSpan("BlockBlobClient-uploadData", options);
-        try {
-            if (coreHttp.isNode) {
-                let buffer;
-                if (data instanceof Buffer) {
-                    buffer = data;
-                }
-                else if (data instanceof ArrayBuffer) {
-                    buffer = Buffer.from(data);
-                }
-                else {
-                    data = data;
-                    buffer = Buffer.from(data.buffer, data.byteOffset, data.byteLength);
+    listSegments(marker_1) {
+        return tslib.__asyncGenerator(this, arguments, function* listSegments_1(marker, options = {}) {
+            let listBlobsFlatSegmentResponse;
+            if (!!marker || marker === undefined) {
+                do {
+                    listBlobsFlatSegmentResponse = yield tslib.__await(this.listBlobFlatSegment(marker, options));
+                    marker = listBlobsFlatSegmentResponse.continuationToken;
+                    yield yield tslib.__await(yield tslib.__await(listBlobsFlatSegmentResponse));
+                } while (marker);
+            }
+        });
+    }
+    /**
+     * Returns an AsyncIterableIterator of {@link BlobItem} objects
+     *
+     * @param options - Options to list blobs operation.
+     */
+    listItems() {
+        return tslib.__asyncGenerator(this, arguments, function* listItems_1(options = {}) {
+            var _a, e_1, _b, _c;
+            let marker;
+            try {
+                for (var _d = true, _e = tslib.__asyncValues(this.listSegments(marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) {
+                    _c = _f.value;
+                    _d = false;
+                    const listBlobsFlatSegmentResponse = _c;
+                    yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(listBlobsFlatSegmentResponse.segment.blobItems)));
                 }
-                return this.uploadSeekableInternal((offset, size) => buffer.slice(offset, offset + size), buffer.byteLength, updatedOptions);
             }
-            else {
-                const browserBlob = new Blob([data]);
-                return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions);
+            catch (e_1_1) { e_1 = { error: e_1_1 }; }
+            finally {
+                try {
+                    if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e));
+                }
+                finally { if (e_1) throw e_1.error; }
             }
-        }
-        catch (e) {
-            span.setStatus({
-                code: coreTracing.SpanStatusCode.ERROR,
-                message: e.message,
-            });
-            throw e;
-        }
-        finally {
-            span.end();
-        }
+        });
     }
     /**
-     * ONLY AVAILABLE IN BROWSERS.
+     * Returns an async iterable iterator to list all the blobs
+     * under the specified container.
      *
-     * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to block blob.
+     * .byPage() returns an async iterable iterator to list the blobs in pages.
      *
-     * When buffer length lesser than or equal to 256MB, this method will use 1 upload call to finish the upload.
-     * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call
-     * {@link commitBlockList} to commit the block list.
+     * Example using `for await` syntax:
      *
-     * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is
-     * `blobContentType`, enabling the browser to provide
-     * functionality based on file type.
+     * ```js
+     * // Get the containerClient before you run these snippets,
+     * // Can be obtained from `blobServiceClient.getContainerClient("");`
+     * let i = 1;
+     * for await (const blob of containerClient.listBlobsFlat()) {
+     *   console.log(`Blob ${i++}: ${blob.name}`);
+     * }
+     * ```
      *
-     * @deprecated Use {@link uploadData} instead.
+     * Example using `iter.next()`:
      *
-     * @param browserData - Blob, File, ArrayBuffer or ArrayBufferView
-     * @param options - Options to upload browser data.
-     * @returns Response data for the Blob Upload operation.
-     */
-    async uploadBrowserData(browserData, options = {}) {
-        const { span, updatedOptions } = createSpan("BlockBlobClient-uploadBrowserData", options);
-        try {
-            const browserBlob = new Blob([browserData]);
-            return await this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions);
-        }
-        catch (e) {
-            span.setStatus({
-                code: coreTracing.SpanStatusCode.ERROR,
-                message: e.message,
-            });
-            throw e;
-        }
-        finally {
-            span.end();
-        }
-    }
-    /**
+     * ```js
+     * let i = 1;
+     * let iter = containerClient.listBlobsFlat();
+     * let blobItem = await iter.next();
+     * while (!blobItem.done) {
+     *   console.log(`Blob ${i++}: ${blobItem.value.name}`);
+     *   blobItem = await iter.next();
+     * }
+     * ```
      *
-     * Uploads data to block blob. Requires a bodyFactory as the data source,
-     * which need to return a {@link HttpRequestBody} object with the offset and size provided.
+     * Example using `byPage()`:
      *
-     * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is
-     * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload.
-     * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList}
-     * to commit the block list.
+     * ```js
+     * // passing optional maxPageSize in the page settings
+     * let i = 1;
+     * for await (const response of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) {
+     *   for (const blob of response.segment.blobItems) {
+     *     console.log(`Blob ${i++}: ${blob.name}`);
+     *   }
+     * }
+     * ```
      *
-     * @param bodyFactory -
-     * @param size - size of the data to upload.
-     * @param options - Options to Upload to Block Blob operation.
-     * @returns Response data for the Blob Upload operation.
+     * Example using paging with a marker:
+     *
+     * ```js
+     * let i = 1;
+     * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 });
+     * let response = (await iterator.next()).value;
+     *
+     * // Prints 2 blob names
+     * for (const blob of response.segment.blobItems) {
+     *   console.log(`Blob ${i++}: ${blob.name}`);
+     * }
+     *
+     * // Gets next marker
+     * let marker = response.continuationToken;
+     *
+     * // Passing next marker as continuationToken
+     *
+     * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 });
+     * response = (await iterator.next()).value;
+     *
+     * // Prints 10 blob names
+     * for (const blob of response.segment.blobItems) {
+     *   console.log(`Blob ${i++}: ${blob.name}`);
+     * }
+     * ```
+     *
+     * @param options - Options to list blobs.
+     * @returns An asyncIterableIterator that supports paging.
      */
-    async uploadSeekableInternal(bodyFactory, size, options = {}) {
-        if (!options.blockSize) {
-            options.blockSize = 0;
+    listBlobsFlat(options = {}) {
+        const include = [];
+        if (options.includeCopy) {
+            include.push("copy");
         }
-        if (options.blockSize < 0 || options.blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) {
-            throw new RangeError(`blockSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`);
+        if (options.includeDeleted) {
+            include.push("deleted");
         }
-        if (options.maxSingleShotSize !== 0 && !options.maxSingleShotSize) {
-            options.maxSingleShotSize = BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES;
+        if (options.includeMetadata) {
+            include.push("metadata");
         }
-        if (options.maxSingleShotSize < 0 ||
-            options.maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) {
-            throw new RangeError(`maxSingleShotSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`);
+        if (options.includeSnapshots) {
+            include.push("snapshots");
         }
-        if (options.blockSize === 0) {
-            if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) {
-                throw new RangeError(`${size} is too larger to upload to a block blob.`);
-            }
-            if (size > options.maxSingleShotSize) {
-                options.blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS);
-                if (options.blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) {
-                    options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES;
-                }
-            }
+        if (options.includeVersions) {
+            include.push("versions");
         }
-        if (!options.blobHTTPHeaders) {
-            options.blobHTTPHeaders = {};
+        if (options.includeUncommitedBlobs) {
+            include.push("uncommittedblobs");
         }
-        if (!options.conditions) {
-            options.conditions = {};
+        if (options.includeTags) {
+            include.push("tags");
         }
-        const { span, updatedOptions } = createSpan("BlockBlobClient-uploadSeekableInternal", options);
-        try {
-            if (size <= options.maxSingleShotSize) {
-                return await this.upload(bodyFactory(0, size), size, updatedOptions);
-            }
-            const numBlocks = Math.floor((size - 1) / options.blockSize) + 1;
-            if (numBlocks > BLOCK_BLOB_MAX_BLOCKS) {
-                throw new RangeError(`The buffer's size is too big or the BlockSize is too small;` +
-                    `the number of blocks must be <= ${BLOCK_BLOB_MAX_BLOCKS}`);
-            }
-            const blockList = [];
-            const blockIDPrefix = coreHttp.generateUuid();
-            let transferProgress = 0;
-            const batch = new Batch(options.concurrency);
-            for (let i = 0; i < numBlocks; i++) {
-                batch.addOperation(async () => {
-                    const blockID = generateBlockID(blockIDPrefix, i);
-                    const start = options.blockSize * i;
-                    const end = i === numBlocks - 1 ? size : start + options.blockSize;
-                    const contentLength = end - start;
-                    blockList.push(blockID);
-                    await this.stageBlock(blockID, bodyFactory(start, contentLength), contentLength, {
-                        abortSignal: options.abortSignal,
-                        conditions: options.conditions,
-                        encryptionScope: options.encryptionScope,
-                        tracingOptions: updatedOptions.tracingOptions,
-                    });
-                    // Update progress after block is successfully uploaded to server, in case of block trying
-                    // TODO: Hook with convenience layer progress event in finer level
-                    transferProgress += contentLength;
-                    if (options.onProgress) {
-                        options.onProgress({
-                            loadedBytes: transferProgress,
-                        });
-                    }
-                });
-            }
-            await batch.do();
-            return this.commitBlockList(blockList, updatedOptions);
+        if (options.includeDeletedWithVersions) {
+            include.push("deletedwithversions");
         }
-        catch (e) {
-            span.setStatus({
-                code: coreTracing.SpanStatusCode.ERROR,
-                message: e.message,
-            });
-            throw e;
+        if (options.includeImmutabilityPolicy) {
+            include.push("immutabilitypolicy");
         }
-        finally {
-            span.end();
+        if (options.includeLegalHold) {
+            include.push("legalhold");
         }
-    }
-    /**
-     * ONLY AVAILABLE IN NODE.JS RUNTIME.
-     *
-     * Uploads a local file in blocks to a block blob.
-     *
-     * When file size lesser than or equal to 256MB, this method will use 1 upload call to finish the upload.
-     * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList
-     * to commit the block list.
-     *
-     * @param filePath - Full path of local file
-     * @param options - Options to Upload to Block Blob operation.
-     * @returns Response data for the Blob Upload operation.
-     */
-    async uploadFile(filePath, options = {}) {
-        const { span, updatedOptions } = createSpan("BlockBlobClient-uploadFile", options);
-        try {
-            const size = (await fsStat(filePath)).size;
-            return await this.uploadSeekableInternal((offset, count) => {
-                return () => fsCreateReadStream(filePath, {
-                    autoClose: true,
-                    end: count ? offset + count - 1 : Infinity,
-                    start: offset,
-                });
-            }, size, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) }));
-        }
-        catch (e) {
-            span.setStatus({
-                code: coreTracing.SpanStatusCode.ERROR,
-                message: e.message,
-            });
-            throw e;
-        }
-        finally {
-            span.end();
-        }
-    }
-    /**
-     * ONLY AVAILABLE IN NODE.JS RUNTIME.
-     *
-     * Uploads a Node.js Readable stream into block blob.
-     *
-     * PERFORMANCE IMPROVEMENT TIPS:
-     * * Input stream highWaterMark is better to set a same value with bufferSize
-     *    parameter, which will avoid Buffer.concat() operations.
-     *
-     * @param stream - Node.js Readable stream
-     * @param bufferSize - Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB
-     * @param maxConcurrency - Max concurrency indicates the max number of buffers that can be allocated,
-     *                         positive correlation with max uploading concurrency. Default value is 5
-     * @param options - Options to Upload Stream to Block Blob operation.
-     * @returns Response data for the Blob Upload operation.
-     */
-    async uploadStream(stream, bufferSize = DEFAULT_BLOCK_BUFFER_SIZE_BYTES, maxConcurrency = 5, options = {}) {
-        if (!options.blobHTTPHeaders) {
-            options.blobHTTPHeaders = {};
-        }
-        if (!options.conditions) {
-            options.conditions = {};
-        }
-        const { span, updatedOptions } = createSpan("BlockBlobClient-uploadStream", options);
-        try {
-            let blockNum = 0;
-            const blockIDPrefix = coreHttp.generateUuid();
-            let transferProgress = 0;
-            const blockList = [];
-            const scheduler = new BufferScheduler(stream, bufferSize, maxConcurrency, async (body, length) => {
-                const blockID = generateBlockID(blockIDPrefix, blockNum);
-                blockList.push(blockID);
-                blockNum++;
-                await this.stageBlock(blockID, body, length, {
-                    conditions: options.conditions,
-                    encryptionScope: options.encryptionScope,
-                    tracingOptions: updatedOptions.tracingOptions,
-                });
-                // Update progress after block is successfully uploaded to server, in case of block trying
-                transferProgress += length;
-                if (options.onProgress) {
-                    options.onProgress({ loadedBytes: transferProgress });
-                }
-            },
-            // concurrency should set a smaller value than maxConcurrency, which is helpful to
-            // reduce the possibility when a outgoing handler waits for stream data, in
-            // this situation, outgoing handlers are blocked.
-            // Outgoing queue shouldn't be empty.
-            Math.ceil((maxConcurrency / 4) * 3));
-            await scheduler.do();
-            return await this.commitBlockList(blockList, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) }));
-        }
-        catch (e) {
-            span.setStatus({
-                code: coreTracing.SpanStatusCode.ERROR,
-                message: e.message,
-            });
-            throw e;
-        }
-        finally {
-            span.end();
-        }
-    }
-}
-/**
- * PageBlobClient defines a set of operations applicable to page blobs.
- */
-class PageBlobClient extends BlobClient {
-    constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions,
-    // Legacy, no fix for eslint error without breaking. Disable it for this interface.
-    /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/
-    options) {
-        // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead.
-        // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options);
-        let pipeline;
-        let url;
-        options = options || {};
-        if (isPipelineLike(credentialOrPipelineOrContainerName)) {
-            // (url: string, pipeline: Pipeline)
-            url = urlOrConnectionString;
-            pipeline = credentialOrPipelineOrContainerName;
-        }
-        else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||
-            credentialOrPipelineOrContainerName instanceof AnonymousCredential ||
-            coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) {
-            // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)
-            url = urlOrConnectionString;
-            options = blobNameOrOptions;
-            pipeline = newPipeline(credentialOrPipelineOrContainerName, options);
-        }
-        else if (!credentialOrPipelineOrContainerName &&
-            typeof credentialOrPipelineOrContainerName !== "string") {
-            // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)
-            // The second parameter is undefined. Use anonymous credential.
-            url = urlOrConnectionString;
-            pipeline = newPipeline(new AnonymousCredential(), options);
-        }
-        else if (credentialOrPipelineOrContainerName &&
-            typeof credentialOrPipelineOrContainerName === "string" &&
-            blobNameOrOptions &&
-            typeof blobNameOrOptions === "string") {
-            // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)
-            const containerName = credentialOrPipelineOrContainerName;
-            const blobName = blobNameOrOptions;
-            const extractedCreds = extractConnectionStringParts(urlOrConnectionString);
-            if (extractedCreds.kind === "AccountConnString") {
-                if (coreHttp.isNode) {
-                    const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey);
-                    url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName));
-                    if (!options.proxyOptions) {
-                        options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri);
-                    }
-                    pipeline = newPipeline(sharedKeyCredential, options);
-                }
-                else {
-                    throw new Error("Account connection string is only supported in Node.js environment");
-                }
-            }
-            else if (extractedCreds.kind === "SASConnString") {
-                url =
-                    appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) +
-                        "?" +
-                        extractedCreds.accountSas;
-                pipeline = newPipeline(new AnonymousCredential(), options);
-            }
-            else {
-                throw new Error("Connection string must be either an Account connection string or a SAS connection string");
-            }
-        }
-        else {
-            throw new Error("Expecting non-empty strings for containerName and blobName parameters");
-        }
-        super(url, pipeline);
-        this.pageBlobContext = new PageBlob(this.storageClientContext);
-    }
-    /**
-     * Creates a new PageBlobClient object identical to the source but with the
-     * specified snapshot timestamp.
-     * Provide "" will remove the snapshot and return a Client to the base blob.
-     *
-     * @param snapshot - The snapshot timestamp.
-     * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp.
-     */
-    withSnapshot(snapshot) {
-        return new PageBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline);
-    }
-    /**
-     * Creates a page blob of the specified length. Call uploadPages to upload data
-     * data to a page blob.
-     * @see https://docs.microsoft.com/rest/api/storageservices/put-blob
-     *
-     * @param size - size of the page blob.
-     * @param options - Options to the Page Blob Create operation.
-     * @returns Response data for the Page Blob Create operation.
-     */
-    async create(size, options = {}) {
-        var _a, _b, _c;
-        options.conditions = options.conditions || {};
-        const { span, updatedOptions } = createSpan("PageBlobClient-create", options);
-        try {
-            ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
-            return await this.pageBlobContext.create(0, size, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, blobSequenceNumber: options.blobSequenceNumber, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions)));
-        }
-        catch (e) {
-            span.setStatus({
-                code: coreTracing.SpanStatusCode.ERROR,
-                message: e.message,
-            });
-            throw e;
-        }
-        finally {
-            span.end();
-        }
-    }
-    /**
-     * Creates a page blob of the specified length. Call uploadPages to upload data
-     * data to a page blob. If the blob with the same name already exists, the content
-     * of the existing blob will remain unchanged.
-     * @see https://docs.microsoft.com/rest/api/storageservices/put-blob
-     *
-     * @param size - size of the page blob.
-     * @param options -
-     */
-    async createIfNotExists(size, options = {}) {
-        var _a, _b;
-        const { span, updatedOptions } = createSpan("PageBlobClient-createIfNotExists", options);
-        try {
-            const conditions = { ifNoneMatch: ETagAny };
-            const res = await this.create(size, Object.assign(Object.assign({}, options), { conditions, tracingOptions: updatedOptions.tracingOptions }));
-            return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response });
-        }
-        catch (e) {
-            if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") {
-                span.setStatus({
-                    code: coreTracing.SpanStatusCode.ERROR,
-                    message: "Expected exception when creating a blob only if it does not already exist.",
-                });
-                return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response });
-            }
-            span.setStatus({
-                code: coreTracing.SpanStatusCode.ERROR,
-                message: e.message,
-            });
-            throw e;
-        }
-        finally {
-            span.end();
-        }
-    }
-    /**
-     * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512.
-     * @see https://docs.microsoft.com/rest/api/storageservices/put-page
-     *
-     * @param body - Data to upload
-     * @param offset - Offset of destination page blob
-     * @param count - Content length of the body, also number of bytes to be uploaded
-     * @param options - Options to the Page Blob Upload Pages operation.
-     * @returns Response data for the Page Blob Upload Pages operation.
-     */
-    async uploadPages(body, offset, count, options = {}) {
-        var _a;
-        options.conditions = options.conditions || {};
-        const { span, updatedOptions } = createSpan("PageBlobClient-uploadPages", options);
-        try {
-            ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
-            return await this.pageBlobContext.uploadPages(count, body, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: {
-                onUploadProgress: options.onProgress,
-            }, range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions)));
-        }
-        catch (e) {
-            span.setStatus({
-                code: coreTracing.SpanStatusCode.ERROR,
-                message: e.message,
-            });
-            throw e;
-        }
-        finally {
-            span.end();
-        }
-    }
-    /**
-     * The Upload Pages operation writes a range of pages to a page blob where the
-     * contents are read from a URL.
-     * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-page-from-url
-     *
-     * @param sourceURL - Specify a URL to the copy source, Shared Access Signature(SAS) maybe needed for authentication
-     * @param sourceOffset - The source offset to copy from. Pass 0 to copy from the beginning of source page blob
-     * @param destOffset - Offset of destination page blob
-     * @param count - Number of bytes to be uploaded from source page blob
-     * @param options -
-     */
-    async uploadPagesFromURL(sourceURL, sourceOffset, destOffset, count, options = {}) {
-        var _a;
-        options.conditions = options.conditions || {};
-        options.sourceConditions = options.sourceConditions || {};
-        const { span, updatedOptions } = createSpan("PageBlobClient-uploadPagesFromURL", options);
-        try {
-            ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
-            return await this.pageBlobContext.uploadPagesFromURL(sourceURL, rangeToString({ offset: sourceOffset, count }), 0, rangeToString({ offset: destOffset, count }), Object.assign({ abortSignal: options.abortSignal, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, sequenceNumberAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: {
-                sourceIfMatch: options.sourceConditions.ifMatch,
-                sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,
-                sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,
-                sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince,
-            }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization) }, convertTracingToRequestOptionsBase(updatedOptions)));
-        }
-        catch (e) {
-            span.setStatus({
-                code: coreTracing.SpanStatusCode.ERROR,
-                message: e.message,
-            });
-            throw e;
-        }
-        finally {
-            span.end();
-        }
-    }
-    /**
-     * Frees the specified pages from the page blob.
-     * @see https://docs.microsoft.com/rest/api/storageservices/put-page
-     *
-     * @param offset - Starting byte position of the pages to clear.
-     * @param count - Number of bytes to clear.
-     * @param options - Options to the Page Blob Clear Pages operation.
-     * @returns Response data for the Page Blob Clear Pages operation.
-     */
-    async clearPages(offset = 0, count, options = {}) {
-        var _a;
-        options.conditions = options.conditions || {};
-        const { span, updatedOptions } = createSpan("PageBlobClient-clearPages", options);
-        try {
-            return await this.pageBlobContext.clearPages(0, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions)));
-        }
-        catch (e) {
-            span.setStatus({
-                code: coreTracing.SpanStatusCode.ERROR,
-                message: e.message,
-            });
-            throw e;
-        }
-        finally {
-            span.end();
-        }
-    }
-    /**
-     * Returns the list of valid page ranges for a page blob or snapshot of a page blob.
-     * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
-     *
-     * @param offset - Starting byte position of the page ranges.
-     * @param count - Number of bytes to get.
-     * @param options - Options to the Page Blob Get Ranges operation.
-     * @returns Response data for the Page Blob Get Ranges operation.
-     */
-    async getPageRanges(offset = 0, count, options = {}) {
-        var _a;
-        options.conditions = options.conditions || {};
-        const { span, updatedOptions } = createSpan("PageBlobClient-getPageRanges", options);
-        try {
-            return await this.pageBlobContext
-                .getPageRanges(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions)))
-                .then(rangeResponseFromModel);
-        }
-        catch (e) {
-            span.setStatus({
-                code: coreTracing.SpanStatusCode.ERROR,
-                message: e.message,
-            });
-            throw e;
-        }
-        finally {
-            span.end();
-        }
-    }
-    /**
-     * getPageRangesSegment returns a single segment of page ranges starting from the
-     * specified Marker. Use an empty Marker to start enumeration from the beginning.
-     * After getting a segment, process it, and then call getPageRangesSegment again
-     * (passing the the previously-returned Marker) to get the next segment.
-     * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
-     *
-     * @param offset - Starting byte position of the page ranges.
-     * @param count - Number of bytes to get.
-     * @param marker - A string value that identifies the portion of the list to be returned with the next list operation.
-     * @param options - Options to PageBlob Get Page Ranges Segment operation.
-     */
-    async listPageRangesSegment(offset = 0, count, marker, options = {}) {
-        var _a;
-        const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesSegment", options);
-        try {
-            return await this.pageBlobContext.getPageRanges(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset, count }), marker: marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions)));
-        }
-        catch (e) {
-            span.setStatus({
-                code: coreTracing.SpanStatusCode.ERROR,
-                message: e.message,
-            });
-            throw e;
-        }
-        finally {
-            span.end();
+        if (options.prefix === "") {
+            options.prefix = undefined;
         }
+        const updatedOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? { include: include } : {}));
+        // AsyncIterableIterator to iterate over blobs
+        const iter = this.listItems(updatedOptions);
+        return {
+            /**
+             * The next method, part of the iteration protocol
+             */
+            next() {
+                return iter.next();
+            },
+            /**
+             * The connection to the async iterator, part of the iteration protocol
+             */
+            [Symbol.asyncIterator]() {
+                return this;
+            },
+            /**
+             * Return an AsyncIterableIterator that works a page at a time
+             */
+            byPage: (settings = {}) => {
+                return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions));
+            },
+        };
     }
     /**
-     * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel}
+     * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse
      *
-     * @param offset - Starting byte position of the page ranges.
-     * @param count - Number of bytes to get.
+     * @param delimiter - The character or string used to define the virtual hierarchy
      * @param marker - A string value that identifies the portion of
-     *                 the get of page ranges to be returned with the next getting operation. The
+     *                 the list of blobs to be returned with the next listing operation. The
      *                 operation returns the ContinuationToken value within the response body if the
-     *                 getting operation did not return all page ranges remaining within the current page.
-     *                 The ContinuationToken value can be used as the value for
-     *                 the marker parameter in a subsequent call to request the next page of get
+     *                 listing operation did not return all blobs remaining to be listed
+     *                 with the current page. The ContinuationToken value can be used as the value for
+     *                 the marker parameter in a subsequent call to request the next page of list
      *                 items. The marker value is opaque to the client.
-     * @param options - Options to List Page Ranges operation.
+     * @param options - Options to list blobs operation.
     */
-    listPageRangeItemSegments(offset = 0, count, marker, options = {}) {
-        return tslib.__asyncGenerator(this, arguments, function* listPageRangeItemSegments_1() {
-            let getPageRangeItemSegmentsResponse;
+    listHierarchySegments(delimiter_1, marker_1) {
+        return tslib.__asyncGenerator(this, arguments, function* listHierarchySegments_1(delimiter, marker, options = {}) {
+            let listBlobsHierarchySegmentResponse;
             if (!!marker || marker === undefined) {
                 do {
-                    getPageRangeItemSegmentsResponse = yield tslib.__await(this.listPageRangesSegment(offset, count, marker, options));
-                    marker = getPageRangeItemSegmentsResponse.continuationToken;
-                    yield yield tslib.__await(yield tslib.__await(getPageRangeItemSegmentsResponse));
+                    listBlobsHierarchySegmentResponse = yield tslib.__await(this.listBlobHierarchySegment(delimiter, marker, options));
+                    marker = listBlobsHierarchySegmentResponse.continuationToken;
+                    yield yield tslib.__await(yield tslib.__await(listBlobsHierarchySegmentResponse));
                 } while (marker);
             }
         });
     }
     /**
-     * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects
+     * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects.
      *
-     * @param offset - Starting byte position of the page ranges.
-     * @param count - Number of bytes to get.
-     * @param options - Options to List Page Ranges operation.
+     * @param delimiter - The character or string used to define the virtual hierarchy
+     * @param options - Options to list blobs operation.
      */
-    listPageRangeItems(offset = 0, count, options = {}) {
-        return tslib.__asyncGenerator(this, arguments, function* listPageRangeItems_1() {
-            var e_1, _a;
+    listItemsByHierarchy(delimiter_1) {
+        return tslib.__asyncGenerator(this, arguments, function* listItemsByHierarchy_1(delimiter, options = {}) {
+            var _a, e_2, _b, _c;
             let marker;
             try {
-                for (var _b = tslib.__asyncValues(this.listPageRangeItemSegments(offset, count, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) {
-                    const getPageRangesSegment = _c.value;
-                    yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment))));
+                for (var _d = true, _e = tslib.__asyncValues(this.listHierarchySegments(delimiter, marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) {
+                    _c = _f.value;
+                    _d = false;
+                    const listBlobsHierarchySegmentResponse = _c;
+                    const segment = listBlobsHierarchySegmentResponse.segment;
+                    if (segment.blobPrefixes) {
+                        for (const prefix of segment.blobPrefixes) {
+                            yield yield tslib.__await(Object.assign({ kind: "prefix" }, prefix));
+                        }
+                    }
+                    for (const blob of segment.blobItems) {
+                        yield yield tslib.__await(Object.assign({ kind: "blob" }, blob));
+                    }
                 }
             }
-            catch (e_1_1) { e_1 = { error: e_1_1 }; }
+            catch (e_2_1) { e_2 = { error: e_2_1 }; }
            finally {
                 try {
-                    if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b));
+                    if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e));
                 }
-                finally { if (e_1) throw e_1.error; }
+                finally { if (e_2) throw e_2.error; }
             }
         });
     }
     /**
-     * Returns an async iterable iterator to list of page ranges for a page blob.
-     * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
+     * Returns an async iterable iterator to list all the blobs by hierarchy
+     * under the specified container.
      *
-     * .byPage() returns an async iterable iterator to list of page ranges for a page blob.
+     * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages.
      *
      * Example using `for await` syntax:
      *
      * ```js
-     * // Get the pageBlobClient before you run these snippets,
-     * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");`
-     * let i = 1;
-     * for await (const pageRange of pageBlobClient.listPageRanges()) {
-     *   console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+     * for await (const item of containerClient.listBlobsByHierarchy("/")) {
+     *   if (item.kind === "prefix") {
+     *     console.log(`\tBlobPrefix: ${item.name}`);
+     *   } else {
+     *     console.log(`\tBlobItem: name - ${item.name}`);
+     *   }
      * }
      * ```
      *
      * Example using `iter.next()`:
      *
      * ```js
-     * let i = 1;
-     * let iter = pageBlobClient.listPageRanges();
-     * let pageRangeItem = await iter.next();
-     * while (!pageRangeItem.done) {
-     *   console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`);
-     *   pageRangeItem = await iter.next();
+     * let iter = containerClient.listBlobsByHierarchy("/", { prefix: "prefix1/" });
+     * let entity = await iter.next();
+     * while (!entity.done) {
+     *   let item = entity.value;
+     *   if (item.kind === "prefix") {
+     *     console.log(`\tBlobPrefix: ${item.name}`);
+     *   } else {
+     *     console.log(`\tBlobItem: name - ${item.name}`);
+     *   }
+     *   entity = await iter.next();
      * }
      * ```
      *
      * Example using `byPage()`:
      *
      * ```js
-     * // passing optional maxPageSize in the page settings
-     * let i = 1;
-     * for await (const response of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) {
-     *   for (const pageRange of response) {
-     *     console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+     * console.log("Listing blobs by hierarchy by page");
+     * for await (const response of containerClient.listBlobsByHierarchy("/").byPage()) {
+     *   const segment = response.segment;
+     *   if (segment.blobPrefixes) {
+     *     for (const prefix of segment.blobPrefixes) {
+     *       console.log(`\tBlobPrefix: ${prefix.name}`);
+     *     }
+     *   }
+     *   for (const blob of response.segment.blobItems) {
+     *     console.log(`\tBlobItem: name - ${blob.name}`);
      *   }
      * }
      * ```
      *
-     * Example using paging with a marker:
+     * Example using paging with a max page size:
      *
      * ```js
-     * let i = 1;
-     * let iterator = pageBlobClient.listPageRanges().byPage({ maxPageSize: 2 });
-     * let response = (await iterator.next()).value;
-     *
-     * // Prints 2 page ranges
-     * for (const pageRange of response) {
-     *   console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
-     * }
-     *
-     * // Gets next marker
-     * let marker = response.continuationToken;
+     * console.log("Listing blobs by hierarchy by page, specifying a prefix and a max page size");
      *
-     * // Passing next marker as continuationToken
+     * let i = 1;
+     * for await (const response of containerClient
+     *   .listBlobsByHierarchy("/", { prefix: "prefix2/sub1/" })
+     *   .byPage({ maxPageSize: 2 })) {
+     *   console.log(`Page ${i++}`);
+     *   const segment = response.segment;
      *
-     * iterator = pageBlobClient.listPageRanges().byPage({ continuationToken: marker, maxPageSize: 10 });
-     * response = (await iterator.next()).value;
+     *   if (segment.blobPrefixes) {
+     *     for (const prefix of segment.blobPrefixes) {
+     *       console.log(`\tBlobPrefix: ${prefix.name}`);
+     *     }
+     *   }
      *
-     * // Prints 10 page ranges
-     * for (const blob of response) {
-     *   console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+     *   for (const blob of response.segment.blobItems) {
+     *     console.log(`\tBlobItem: name - ${blob.name}`);
+     *   }
      * }
      * ```
-     * @param offset - Starting byte position of the page ranges.
-     * @param count - Number of bytes to get.
-     * @param options - Options to the Page Blob Get Ranges operation.
-     * @returns An asyncIterableIterator that supports paging.
-     */
-    listPageRanges(offset = 0, count, options = {}) {
-        options.conditions = options.conditions || {};
-        // AsyncIterableIterator to iterate over blobs
-        const iter = this.listPageRangeItems(offset, count, options);
-        return {
-            /**
-             * The next method, part of the iteration protocol
-             */
-            next() {
-                return iter.next();
-            },
-            /**
-             * The connection to the async iterator, part of the iteration protocol
-             */
-            [Symbol.asyncIterator]() {
-                return this;
-            },
-            /**
-             * Return an AsyncIterableIterator that works a page at a time
-             */
-            byPage: (settings = {}) => {
-                return this.listPageRangeItemSegments(offset, count, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options));
-            },
-        };
-    }
-    /**
-     * Gets the collection of page ranges that differ between a specified snapshot and this page blob.
-     * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
      *
-     * @param offset - Starting byte position of the page blob
-     * @param count - Number of bytes to get ranges diff.
-     * @param prevSnapshot - Timestamp of snapshot to retrieve the difference.
-     * @param options - Options to the Page Blob Get Page Ranges Diff operation.
-     * @returns Response data for the Page Blob Get Page Range Diff operation.
+     * @param delimiter - The character or string used to define the virtual hierarchy
+     * @param options - Options to list blobs operation.
      */
-    async getPageRangesDiff(offset, count, prevSnapshot, options = {}) {
-        var _a;
-        options.conditions = options.conditions || {};
-        const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesDiff", options);
-        try {
-            return await this.pageBlobContext
-                .getPageRangesDiff(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevsnapshot: prevSnapshot, range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions)))
-                .then(rangeResponseFromModel);
+    listBlobsByHierarchy(delimiter, options = {}) {
+        if (delimiter === "") {
+            throw new RangeError("delimiter should contain one or more characters");
        }
-        catch (e) {
-            span.setStatus({
-                code: coreTracing.SpanStatusCode.ERROR,
-                message: e.message,
-            });
-            throw e;
+        const include = [];
+        if (options.includeCopy) {
+            include.push("copy");
         }
-        finally {
-            span.end();
+        if (options.includeDeleted) {
+            include.push("deleted");
         }
-    }
-    /**
-     * getPageRangesDiffSegment returns a single segment of page ranges starting from the
-     * specified Marker for difference between previous snapshot and the target page blob.
-     * Use an empty Marker to start enumeration from the beginning.
-     * After getting a segment, process it, and then call getPageRangesDiffSegment again
-     * (passing the the previously-returned Marker) to get the next segment.
-     * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
-     *
-     * @param offset - Starting byte position of the page ranges.
-     * @param count - Number of bytes to get.
-     * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference.
-     * @param marker - A string value that identifies the portion of the get to be returned with the next get operation.
-     * @param options - Options to the Page Blob Get Page Ranges Diff operation.
-     */
-    async listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options) {
-        var _a;
-        const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesDiffSegment", options);
-        try {
-            return await this.pageBlobContext.getPageRangesDiff(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, leaseAccessConditions: options === null || options === void 0 ? void 0 : options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.conditions), { ifTags: (_a = options === null || options === void 0 ? void 0 : options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevsnapshot: prevSnapshotOrUrl, range: rangeToString({
-                offset: offset,
-                count: count,
-            }), marker: marker, maxPageSize: options === null || options === void 0 ? void 0 : options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions)));
+        if (options.includeMetadata) {
+            include.push("metadata");
         }
-        catch (e) {
-            span.setStatus({
-                code: coreTracing.SpanStatusCode.ERROR,
-                message: e.message,
-            });
-            throw e;
+        if (options.includeSnapshots) {
+            include.push("snapshots");
         }
-        finally {
-            span.end();
+        if (options.includeVersions) {
+            include.push("versions");
+        }
+        if (options.includeUncommitedBlobs) {
+            include.push("uncommittedblobs");
+        }
+        if (options.includeTags) {
+            include.push("tags");
+        }
+        if (options.includeDeletedWithVersions) {
+            include.push("deletedwithversions");
+        }
+        if (options.includeImmutabilityPolicy) {
+            include.push("immutabilitypolicy");
+        }
+        if (options.includeLegalHold) {
+            include.push("legalhold");
+        }
+        if (options.prefix === "") {
+            options.prefix = undefined;
         }
+        const updatedOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? { include: include } : {}));
+        // AsyncIterableIterator to iterate over blob prefixes and blobs
+        const iter = this.listItemsByHierarchy(delimiter, updatedOptions);
+        return {
+            /**
+             * The next method, part of the iteration protocol
+             */
+            async next() {
+                return iter.next();
+            },
+            /**
+             * The connection to the async iterator, part of the iteration protocol
+             */
+            [Symbol.asyncIterator]() {
+                return this;
+            },
+            /**
+             * Return an AsyncIterableIterator that works a page at a time
+             */
+            byPage: (settings = {}) => {
+                return this.listHierarchySegments(delimiter, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions));
+            },
+        };
     }
     /**
-     * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesDiffResponseModel}
+     * The Filter Blobs operation enables callers to list blobs in the container whose tags
+     * match a given search expression.
      *
+     * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.
+     *                                 The given expression must evaluate to true for a blob to be returned in the results.
+     *                                 The [OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;
+     *                                 however, only a subset of the OData filter syntax is supported in the Blob service.
+     * @param marker - A string value that identifies the portion of
+     *                 the list of blobs to be returned with the next listing operation. The
+     *                 operation returns the continuationToken value within the response body if the
+     *                 listing operation did not return all blobs remaining to be listed
+     *                 with the current page. The continuationToken value can be used as the value for
+     *                 the marker parameter in a subsequent call to request the next page of list
+     *                 items. The marker value is opaque to the client.
+     * @param options - Options to find blobs by tags.
+     */
+    async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) {
+        return tracingClient.withSpan("ContainerClient-findBlobsByTagsSegment", options, async (updatedOptions) => {
+            const response = assertResponse(await this.containerContext.filterBlobs({
+                abortSignal: options.abortSignal,
+                where: tagFilterSqlExpression,
+                marker,
+                maxPageSize: options.maxPageSize,
+                tracingOptions: updatedOptions.tracingOptions,
+            }));
+            const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => {
+                    var _a;
+                    let tagValue = "";
+                    if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) {
+                        tagValue = blob.tags.blobTagSet[0].value;
+                    }
+                    return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue });
+                }) });
+            return wrappedResponse;
+        });
+    }
+    /**
+     * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse.
      *
-     * @param offset - Starting byte position of the page ranges.
-     * @param count - Number of bytes to get.
-     * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference.
+     * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.
+     *                                 The given expression must evaluate to true for a blob to be returned in the results.
+     *                                 The [OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;
+     *                                 however, only a subset of the OData filter syntax is supported in the Blob service.
      * @param marker - A string value that identifies the portion of
-     *                 the get of page ranges to be returned with the next getting operation. The
-     *                 operation returns the ContinuationToken value within the response body if the
-     *                 getting operation did not return all page ranges remaining within the current page.
-     *                 The ContinuationToken value can be used as the value for
-     *                 the marker parameter in a subsequent call to request the next page of get
+     *                 the list of blobs to be returned with the next listing operation. The
+     *                 operation returns the continuationToken value within the response body if the
+     *                 listing operation did not return all blobs remaining to be listed
+     *                 with the current page. The continuationToken value can be used as the value for
+     *                 the marker parameter in a subsequent call to request the next page of list
      *                 items. The marker value is opaque to the client.
-     * @param options - Options to the Page Blob Get Page Ranges Diff operation.
+     * @param options - Options to find blobs by tags.
*/ - listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options) { - return tslib.__asyncGenerator(this, arguments, function* listPageRangeDiffItemSegments_1() { - let getPageRangeItemSegmentsResponse; + findBlobsByTagsSegments(tagFilterSqlExpression_1, marker_1) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1(tagFilterSqlExpression, marker, options = {}) { + let response; if (!!marker || marker === undefined) { do { - getPageRangeItemSegmentsResponse = yield tslib.__await(this.listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options)); - marker = getPageRangeItemSegmentsResponse.continuationToken; - yield yield tslib.__await(yield tslib.__await(getPageRangeItemSegmentsResponse)); + response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options)); + response.blobs = response.blobs || []; + marker = response.continuationToken; + yield yield tslib.__await(response); } while (marker); } }); } /** - * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects + * Returns an AsyncIterableIterator for blobs. * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. - * @param options - Options to the Page Blob Get Page Ranges Diff operation. + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. */ - listPageRangeDiffItems(offset, count, prevSnapshotOrUrl, options) { - return tslib.__asyncGenerator(this, arguments, function* listPageRangeDiffItems_1() { - var e_2, _a; + findBlobsByTagsItems(tagFilterSqlExpression_1) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1(tagFilterSqlExpression, options = {}) { + var _a, e_3, _b, _c; let marker; try { - for (var _b = tslib.__asyncValues(this.listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { - const getPageRangesSegment = _c.value; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); + for (var _d = true, _e = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const segment = _c; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); } } - catch (e_2_1) { e_2 = { error: e_2_1 }; } + catch (e_3_1) { e_3 = { error: e_3_1 }; } finally { try { - if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); } - finally { if (e_2) throw e_2.error; } + finally { if (e_3) throw e_3.error; } } }); } /** - * Returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. 
- * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified container. * - * .byPage() returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. + * .byPage() returns an async iterable iterator to list the blobs in pages. * * Example using `for await` syntax: * * ```js - * // Get the pageBlobClient before you run these snippets, - * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");` * let i = 1; - * for await (const pageRange of pageBlobClient.listPageRangesDiff()) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * for await (const blob of containerClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${blob.name}`); * } * ``` * @@ -33621,11 +29428,11 @@ class PageBlobClient extends BlobClient { * * ```js * let i = 1; - * let iter = pageBlobClient.listPageRangesDiff(); - * let pageRangeItem = await iter.next(); - * while (!pageRangeItem.done) { - * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`); - * pageRangeItem = await iter.next(); + * const iter = containerClient.findBlobsByTags("tagkey='tagvalue'"); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); * } * ``` * @@ -33634,9 +29441,11 @@ class PageBlobClient extends BlobClient { * ```js * // passing optional maxPageSize in the page settings * let i = 1; - * for await (const response of pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 20 })) { - * for (const pageRange of response) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * for await (const response of containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } * } * } * ``` @@ -33645,37 +29454,42 @@ class PageBlobClient extends BlobClient { * * ```js * let i = 1; - * let iterator = pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 2 }); + * let iterator = containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); * let response = (await iterator.next()).value; * - * // Prints 2 page ranges - * for (const pageRange of response) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } * } * * // Gets next marker * let marker = response.continuationToken; - * * // Passing next marker as continuationToken - * - * iterator = pageBlobClient.listPageRangesDiff().byPage({ continuationToken: marker, maxPageSize: 10 }); + * iterator = containerClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); * response = (await iterator.next()).value; * - * // Prints 10 page ranges - * for (const blob of response) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } * } * ``` - * @param offset - Starting byte position of the page ranges. 
- * @param count - Number of bytes to get. - * @param prevSnapshot - Timestamp of snapshot to retrieve the difference. - * @param options - Options to the Page Blob Get Ranges operation. - * @returns An asyncIterableIterator that supports paging. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. */ - listPageRangesDiff(offset, count, prevSnapshot, options = {}) { - options.conditions = options.conditions || {}; + findBlobsByTags(tagFilterSqlExpression, options = {}) { // AsyncIterableIterator to iterate over blobs - const iter = this.listPageRangeDiffItems(offset, count, prevSnapshot, Object.assign({}, options)); + const listSegmentOptions = Object.assign({}, options); + const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); return { /** * The next method, part of the iteration protocol @@ -33693,1410 +29507,892 @@ class PageBlobClient extends BlobClient { * Return an AsyncIterableIterator that works a page at a time */ byPage: (settings = {}) => { - return this.listPageRangeDiffItemSegments(offset, count, prevSnapshot, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options)); + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); }, }; } - /** - * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks. - * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges - * - * @param offset - Starting byte position of the page blob - * @param count - Number of bytes to get ranges diff. - * @param prevSnapshotUrl - URL of snapshot to retrieve the difference. - * @param options - Options to the Page Blob Get Page Ranges Diff operation. - * @returns Response data for the Page Blob Get Page Range Diff operation. - */ - async getPageRangesDiffForManagedDisks(offset, count, prevSnapshotUrl, options = {}) { - var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options); + getContainerNameFromUrl() { + let containerName; try { - return await this.pageBlobContext - .getPageRangesDiff(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), prevSnapshotUrl, range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))) - .then(rangeResponseFromModel); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; + // URL may look like the following + // "https://myaccount.blob.core.windows.net/mycontainer?sasString"; + // "https://myaccount.blob.core.windows.net/mycontainer"; + // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername` + // http://localhost:10001/devstoreaccount1/containername + const parsedUrl = new URL(this.url); + if (parsedUrl.hostname.split(".")[1] === "blob") { + // "https://myaccount.blob.core.windows.net/containername". + // "https://customdomain.com/containername". + // .getPath() -> /containername + containerName = parsedUrl.pathname.split("/")[1]; + } + else if (isIpEndpointStyle(parsedUrl)) { + // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername + // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername + // .getPath() -> /devstoreaccount1/containername + containerName = parsedUrl.pathname.split("/")[2]; + } + else { + // "https://customdomain.com/containername". + // .getPath() -> /containername + containerName = parsedUrl.pathname.split("/")[1]; + } + // decode the encoded containerName - to get all the special characters that might be present in it + containerName = decodeURIComponent(containerName); + if (!containerName) { + throw new Error("Provided containerName is invalid."); + } + return containerName; } - finally { - span.end(); + catch (error) { + throw new Error("Unable to extract containerName with provided information."); } } /** - * Resizes the page blob to the specified size (which must be a multiple of 512). - * @see https://docs.microsoft.com/rest/api/storageservices/set-blob-properties + * Only available for ContainerClient constructed with a shared key credential. * - * @param size - Target size - * @param options - Options to the Page Blob Resize operation. - * @returns Response data for the Page Blob Resize operation. - */ - async resize(size, options = {}) { - var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-resize", options); - try { - return await this.pageBlobContext.resize(size, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * Sets a page blob's sequence number. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties + * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. * - * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number. 
- * @param sequenceNumber - Required if sequenceNumberAction is max or update - * @param options - Options to the Page Blob Update Sequence Number operation. - * @returns Response data for the Page Blob Update Sequence Number operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ - async updateSequenceNumber(sequenceNumberAction, sequenceNumber, options = {}) { - var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-updateSequenceNumber", options); - try { - return await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, Object.assign({ abortSignal: options.abortSignal, blobSequenceNumber: sequenceNumber, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + generateSasUrl(options) { + return new Promise((resolve) => { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + } + const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName }, options), this.credential).toString(); + resolve(appendToURLQuery(this.url, sas)); + }); } /** - * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob. - * The snapshot is copied such that only the differential changes between the previously - * copied snapshot are transferred to the destination. - * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual. - * @see https://docs.microsoft.com/rest/api/storageservices/incremental-copy-blob - * @see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/incremental-snapshots + * Creates a BlobBatchClient object to conduct batch operations. * - * @param copySource - Specifies the name of the source page blob snapshot. For example, - * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= - * @param options - Options to the Page Blob Copy Incremental operation. - * @returns Response data for the Page Blob Copy Incremental operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @returns A new BlobBatchClient object for this container. */ - async startCopyIncremental(copySource, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("PageBlobClient-startCopyIncremental", options); - try { - return await this.pageBlobContext.copyIncremental(copySource, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + getBlobBatchClient() { + return new BlobBatchClient(this.url, this.pipeline); + } } // Copyright (c) Microsoft Corporation. -async function getBodyAsText(batchResponse) { - let buffer = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES); - const responseLength = await streamToBuffer2(batchResponse.readableStreamBody, buffer); - // Slice the buffer to trim the empty ending. - buffer = buffer.slice(0, responseLength); - return buffer.toString(); -} -function utf8ByteLength(str) { - return Buffer.byteLength(str); -} - -// Copyright (c) Microsoft Corporation. -const HTTP_HEADER_DELIMITER = ": "; -const SPACE_DELIMITER = " "; -const NOT_FOUND = -1; +// Licensed under the MIT license. /** - * Util class for parsing batch response. + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the permissions granted by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant permissions for that operation. Once all the + * values are set, this should be serialized with toString and set as the permissions field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. */ -class BatchResponseParser { - constructor(batchResponse, subRequests) { - if (!batchResponse || !batchResponse.contentType) { - // In special case(reported), server may return invalid content-type which could not be parsed. - throw new RangeError("batchResponse is malformed or doesn't contain valid content-type."); - } - if (!subRequests || subRequests.size === 0) { - // This should be prevent during coding. - throw new RangeError("Invalid state: subRequests is not provided or size is 0."); - } - this.batchResponse = batchResponse; - this.subRequests = subRequests; - this.responseBatchBoundary = this.batchResponse.contentType.split("=")[1]; - this.perResponsePrefix = `--${this.responseBatchBoundary}${HTTP_LINE_ENDING}`; - this.batchResponseEnding = `--${this.responseBatchBoundary}--`; +class AccountSASPermissions { + constructor() { + /** + * Permission to read resources and list queues and tables granted. + */ + this.read = false; + /** + * Permission to write resources granted. + */ + this.write = false; + /** + * Permission to delete blobs and files granted. + */ + this.delete = false; + /** + * Permission to delete versions granted. + */ + this.deleteVersion = false; + /** + * Permission to list blob containers, blobs, shares, directories, and files granted. + */ + this.list = false; + /** + * Permission to add messages, table entities, and append to blobs granted. + */ + this.add = false; + /** + * Permission to create blobs and files granted. + */ + this.create = false; + /** + * Permissions to update messages and table entities granted. + */ + this.update = false; + /** + * Permission to get and delete messages granted. + */ + this.process = false; + /** + * Specifies Tag access granted. + */ + this.tag = false; + /** + * Permission to filter blobs. + */ + this.filter = false; + /** + * Permission to set immutability policy. + */ + this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted.
+ */ + this.permanentDelete = false; } - // For example of response, please refer to https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#response - async parseBatchResponse() { - // When logic reach here, suppose batch request has already succeeded with 202, so we can further parse - // sub request's response. - if (this.batchResponse._response.status !== HTTPURLConnection.HTTP_ACCEPTED) { - throw new Error(`Invalid state: batch request failed with status: '${this.batchResponse._response.status}'.`); - } - const responseBodyAsText = await getBodyAsText(this.batchResponse); - const subResponses = responseBodyAsText - .split(this.batchResponseEnding)[0] // string after ending is useless - .split(this.perResponsePrefix) - .slice(1); // string before first response boundary is useless - const subResponseCount = subResponses.length; - // Defensive coding in case of potential error parsing. - // Note: subResponseCount == 1 is special case where sub request is invalid. - // We try to prevent such cases through early validation, e.g. validate sub request count >= 1. - // While in unexpected sub request invalid case, we allow sub response to be parsed and return to user. - if (subResponseCount !== this.subRequests.size && subResponseCount !== 1) { - throw new Error("Invalid state: sub responses' count is not equal to sub requests' count."); - } - const deserializedSubResponses = new Array(subResponseCount); - let subResponsesSucceededCount = 0; - let subResponsesFailedCount = 0; - // Parse sub subResponses. - for (let index = 0; index < subResponseCount; index++) { - const subResponse = subResponses[index]; - const deserializedSubResponse = {}; - deserializedSubResponse.headers = new coreHttp.HttpHeaders(); - const responseLines = subResponse.split(`${HTTP_LINE_ENDING}`); - let subRespHeaderStartFound = false; - let subRespHeaderEndFound = false; - let subRespFailed = false; - let contentId = NOT_FOUND; - for (const responseLine of responseLines) { - if (!subRespHeaderStartFound) { - // Convention line to indicate content ID - if (responseLine.startsWith(HeaderConstants.CONTENT_ID)) { - contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]); - } - // Http version line with status code indicates the start of sub request's response. - // Example: HTTP/1.1 202 Accepted - if (responseLine.startsWith(HTTP_VERSION_1_1)) { - subRespHeaderStartFound = true; - const tokens = responseLine.split(SPACE_DELIMITER); - deserializedSubResponse.status = parseInt(tokens[1]); - deserializedSubResponse.statusMessage = tokens.slice(2).join(SPACE_DELIMITER); - } - continue; // Skip convention headers not specifically for sub request i.e. Content-Type: application/http and Content-ID: * - } - if (responseLine.trim() === "") { - // Sub response's header start line already found, and the first empty line indicates header end line found. - if (!subRespHeaderEndFound) { - subRespHeaderEndFound = true; - } - continue; // Skip empty line - } - // Note: when code reach here, it indicates subRespHeaderStartFound == true - if (!subRespHeaderEndFound) { - if (responseLine.indexOf(HTTP_HEADER_DELIMITER) === -1) { - // Defensive coding to prevent from missing valuable lines. - throw new Error(`Invalid state: find non-empty line '${responseLine}' without HTTP header delimiter '${HTTP_HEADER_DELIMITER}'.`); - } - // Parse headers of sub response. 
- const tokens = responseLine.split(HTTP_HEADER_DELIMITER); - deserializedSubResponse.headers.set(tokens[0], tokens[1]); - if (tokens[0] === HeaderConstants.X_MS_ERROR_CODE) { - deserializedSubResponse.errorCode = tokens[1]; - subRespFailed = true; - } - } - else { - // Assemble body of sub response. - if (!deserializedSubResponse.bodyAsText) { - deserializedSubResponse.bodyAsText = ""; - } - deserializedSubResponse.bodyAsText += responseLine; - } - } // Inner for end - // The response will contain the Content-ID header for each corresponding subrequest response to use for tracking. - // The Content-IDs are set to a valid index in the subrequests we sent. In the status code 202 path, we could expect it - // to be 1-1 mapping from the [0, subRequests.size) to the Content-IDs returned. If not, we simply don't return that - // unexpected subResponse in the parsed reponse and we can always look it up in the raw response for debugging purpose. - if (contentId !== NOT_FOUND && - Number.isInteger(contentId) && - contentId >= 0 && - contentId < this.subRequests.size && - deserializedSubResponses[contentId] === undefined) { - deserializedSubResponse._request = this.subRequests.get(contentId); - deserializedSubResponses[contentId] = deserializedSubResponse; - } - else { - logger.error(`subResponses[${index}] is dropped as the Content-ID is not found or invalid, Content-ID: ${contentId}`); - } - if (subRespFailed) { - subResponsesFailedCount++; - } - else { - subResponsesSucceededCount++; - } - } - return { - subResponses: deserializedSubResponses, - subResponsesSucceededCount: subResponsesSucceededCount, - subResponsesFailedCount: subResponsesFailedCount, - }; - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -var MutexLockStatus; -(function (MutexLockStatus) { - MutexLockStatus[MutexLockStatus["LOCKED"] = 0] = "LOCKED"; - MutexLockStatus[MutexLockStatus["UNLOCKED"] = 1] = "UNLOCKED"; -})(MutexLockStatus || (MutexLockStatus = {})); -/** - * An async mutex lock. - */ -class Mutex { /** - * Lock for a specific key. If the lock has been acquired by another customer, then - * will wait until getting the lock. + * Parse initializes the AccountSASPermissions fields from a string. 
* - * @param key - lock key + * @param permissions - */ - static async lock(key) { - return new Promise((resolve) => { - if (this.keys[key] === undefined || this.keys[key] === MutexLockStatus.UNLOCKED) { - this.keys[key] = MutexLockStatus.LOCKED; - resolve(); - } - else { - this.onUnlockEvent(key, () => { - this.keys[key] = MutexLockStatus.LOCKED; - resolve(); - }); + static parse(permissions) { + const accountSASPermissions = new AccountSASPermissions(); + for (const c of permissions) { + switch (c) { + case "r": + accountSASPermissions.read = true; + break; + case "w": + accountSASPermissions.write = true; + break; + case "d": + accountSASPermissions.delete = true; + break; + case "x": + accountSASPermissions.deleteVersion = true; + break; + case "l": + accountSASPermissions.list = true; + break; + case "a": + accountSASPermissions.add = true; + break; + case "c": + accountSASPermissions.create = true; + break; + case "u": + accountSASPermissions.update = true; + break; + case "p": + accountSASPermissions.process = true; + break; + case "t": + accountSASPermissions.tag = true; + break; + case "f": + accountSASPermissions.filter = true; + break; + case "i": + accountSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + accountSASPermissions.permanentDelete = true; + break; + default: + throw new RangeError(`Invalid permission character: ${c}`); } - }); + } + return accountSASPermissions; } /** - * Unlock a key. + * Creates a {@link AccountSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. * - * @param key - + * @param permissionLike - */ - static async unlock(key) { - return new Promise((resolve) => { - if (this.keys[key] === MutexLockStatus.LOCKED) { - this.emitUnlockEvent(key); - } - delete this.keys[key]; - resolve(); - }); - } - static onUnlockEvent(key, handler) { - if (this.listeners[key] === undefined) { - this.listeners[key] = [handler]; - } - else { - this.listeners[key].push(handler); + static from(permissionLike) { + const accountSASPermissions = new AccountSASPermissions(); + if (permissionLike.read) { + accountSASPermissions.read = true; } - } - static emitUnlockEvent(key) { - if (this.listeners[key] !== undefined && this.listeners[key].length > 0) { - const handler = this.listeners[key].shift(); - setImmediate(() => { - handler.call(this); - }); + if (permissionLike.write) { + accountSASPermissions.write = true; } - } -} -Mutex.keys = {}; -Mutex.listeners = {}; - -// Copyright (c) Microsoft Corporation. -/** - * A BlobBatch represents an aggregated set of operations on blobs. - * Currently, only `delete` and `setAccessTier` are supported. - */ -class BlobBatch { - constructor() { - this.batch = "batch"; - this.batchRequest = new InnerBatchRequest(); - } - /** - * Get the value of Content-Type for a batch request. - * The value must be multipart/mixed with a batch boundary. - * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252 - */ - getMultiPartContentType() { - return this.batchRequest.getMultipartContentType(); - } - /** - * Get assembled HTTP request body for sub requests. - */ - getHttpRequestBody() { - return this.batchRequest.getHttpRequestBody(); - } - /** - * Get sub requests that are added into the batch request. 
- */ - getSubRequests() { - return this.batchRequest.getSubRequests(); - } - async addSubRequestInternal(subRequest, assembleSubRequestFunc) { - await Mutex.lock(this.batch); - try { - this.batchRequest.preAddSubRequest(subRequest); - await assembleSubRequestFunc(); - this.batchRequest.postAddSubRequest(subRequest); + if (permissionLike.delete) { + accountSASPermissions.delete = true; } - finally { - await Mutex.unlock(this.batch); + if (permissionLike.deleteVersion) { + accountSASPermissions.deleteVersion = true; } - } - setBatchType(batchType) { - if (!this.batchType) { - this.batchType = batchType; + if (permissionLike.filter) { + accountSASPermissions.filter = true; } - if (this.batchType !== batchType) { - throw new RangeError(`BlobBatch only supports one operation type per batch and it already is being used for ${this.batchType} operations.`); + if (permissionLike.tag) { + accountSASPermissions.tag = true; } - } - async deleteBlob(urlOrBlobClient, credentialOrOptions, options) { - let url; - let credential; - if (typeof urlOrBlobClient === "string" && - ((coreHttp.isNode && credentialOrOptions instanceof StorageSharedKeyCredential) || - credentialOrOptions instanceof AnonymousCredential || - coreHttp.isTokenCredential(credentialOrOptions))) { - // First overload - url = urlOrBlobClient; - credential = credentialOrOptions; + if (permissionLike.list) { + accountSASPermissions.list = true; } - else if (urlOrBlobClient instanceof BlobClient) { - // Second overload - url = urlOrBlobClient.url; - credential = urlOrBlobClient.credential; - options = credentialOrOptions; + if (permissionLike.add) { + accountSASPermissions.add = true; } - else { - throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided."); + if (permissionLike.create) { + accountSASPermissions.create = true; } - if (!options) { - options = {}; + if (permissionLike.update) { + accountSASPermissions.update = true; } - const { span, updatedOptions } = createSpan("BatchDeleteRequest-addSubRequest", options); - try { - this.setBatchType("delete"); - await this.addSubRequestInternal({ - url: url, - credential: credential, - }, async () => { - await new BlobClient(url, this.batchRequest.createPipeline(credential)).delete(updatedOptions); - }); + if (permissionLike.process) { + accountSASPermissions.process = true; } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; + if (permissionLike.setImmutabilityPolicy) { + accountSASPermissions.setImmutabilityPolicy = true; } - finally { - span.end(); + if (permissionLike.permanentDelete) { + accountSASPermissions.permanentDelete = true; } + return accountSASPermissions; } - async setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options) { - let url; - let credential; - let tier; - if (typeof urlOrBlobClient === "string" && - ((coreHttp.isNode && credentialOrTier instanceof StorageSharedKeyCredential) || - credentialOrTier instanceof AnonymousCredential || - coreHttp.isTokenCredential(credentialOrTier))) { - // First overload - url = urlOrBlobClient; - credential = credentialOrTier; - tier = tierOrOptions; + /** + * Produces the SAS permissions string for an Azure Storage account. + * Call this method to set AccountSASSignatureValues Permissions field. + * + * Using this method will guarantee the resource types are in + * an order accepted by the service. 
+ * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + */ + toString() { + // The order of the characters should be as specified here to ensure correctness: + // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + // Use a string array instead of string concatenating += operator for performance + const permissions = []; + if (this.read) { + permissions.push("r"); } - else if (urlOrBlobClient instanceof BlobClient) { - // Second overload - url = urlOrBlobClient.url; - credential = urlOrBlobClient.credential; - tier = credentialOrTier; - options = tierOrOptions; + if (this.write) { + permissions.push("w"); } - else { - throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided."); + if (this.delete) { + permissions.push("d"); } - if (!options) { - options = {}; + if (this.deleteVersion) { + permissions.push("x"); } - const { span, updatedOptions } = createSpan("BatchSetTierRequest-addSubRequest", options); - try { - this.setBatchType("setAccessTier"); - await this.addSubRequestInternal({ - url: url, - credential: credential, - }, async () => { - await new BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(tier, updatedOptions); - }); + if (this.filter) { + permissions.push("f"); } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; + if (this.tag) { + permissions.push("t"); } - finally { - span.end(); + if (this.list) { + permissions.push("l"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); } + if (this.update) { + permissions.push("u"); + } + if (this.process) { + permissions.push("p"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + return permissions.join(""); } } + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * Inner batch request class which is responsible for assembling and serializing sub requests. - * See https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#request-body for how requests are assembled. + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the resources accessible by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant access to that resource type. Once all the + * values are set, this should be serialized with toString and set as the resources field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the resources string without this class, but + * the order of the resources is particular and this class guarantees correctness. 
*/ -class InnerBatchRequest { +class AccountSASResourceTypes { constructor() { - this.operationCount = 0; - this.body = ""; - const tempGuid = coreHttp.generateUuid(); - // batch_{batchid} - this.boundary = `batch_${tempGuid}`; - // --batch_{batchid} - // Content-Type: application/http - // Content-Transfer-Encoding: binary - this.subRequestPrefix = `--${this.boundary}${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TYPE}: application/http${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TRANSFER_ENCODING}: binary`; - // multipart/mixed; boundary=batch_{batchid} - this.multipartContentType = `multipart/mixed; boundary=${this.boundary}`; - // --batch_{batchid}-- - this.batchRequestEnding = `--${this.boundary}--`; - this.subRequests = new Map(); + /** + * Permission to access service level APIs granted. + */ + this.service = false; + /** + * Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted. + */ + this.container = false; + /** + * Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted. + */ + this.object = false; } /** - * Create pipeline to assemble sub requests. The idea here is to use existing - * credential and serialization/deserialization components, with additional policies to - * filter unnecessary headers, assemble sub requests into request's body - * and intercept request from going to wire. - * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * Creates an {@link AccountSASResourceTypes} from the specified resource types string. This method will throw an + * Error if it encounters a character that does not correspond to a valid resource type. + * + * @param resourceTypes - */ - createPipeline(credential) { - const isAnonymousCreds = credential instanceof AnonymousCredential; - const policyFactoryLength = 3 + (isAnonymousCreds ? 0 : 1); // [deserializationPolicy, BatchHeaderFilterPolicyFactory, (Optional)Credential, BatchRequestAssemblePolicyFactory] - const factories = new Array(policyFactoryLength); - factories[0] = coreHttp.deserializationPolicy(); // Default deserializationPolicy is provided by protocol layer - factories[1] = new BatchHeaderFilterPolicyFactory(); // Use batch header filter policy to exclude unnecessary headers - if (!isAnonymousCreds) { - factories[2] = coreHttp.isTokenCredential(credential) - ? 
attachCredential(coreHttp.bearerTokenAuthenticationPolicy(credential, StorageOAuthScopes), credential) - : credential; - } - factories[policyFactoryLength - 1] = new BatchRequestAssemblePolicyFactory(this); // Use batch assemble policy to assemble request and intercept request from going to wire - return new Pipeline(factories, {}); - } - appendSubRequestToBody(request) { - // Start to assemble sub request - this.body += [ - this.subRequestPrefix, - `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, - "", - `${request.method.toString()} ${getURLPathAndQuery(request.url)} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}`, // sub request start line with method - ].join(HTTP_LINE_ENDING); - for (const header of request.headers.headersArray()) { - this.body += `${header.name}: ${header.value}${HTTP_LINE_ENDING}`; + static parse(resourceTypes) { + const accountSASResourceTypes = new AccountSASResourceTypes(); + for (const c of resourceTypes) { + switch (c) { + case "s": + accountSASResourceTypes.service = true; + break; + case "c": + accountSASResourceTypes.container = true; + break; + case "o": + accountSASResourceTypes.object = true; + break; + default: + throw new RangeError(`Invalid resource type: ${c}`); + } } - this.body += HTTP_LINE_ENDING; // sub request's headers need be ending with an empty line - // No body to assemble for current batch request support - // End to assemble sub request + return accountSASResourceTypes; } - preAddSubRequest(subRequest) { - if (this.operationCount >= BATCH_MAX_REQUEST) { - throw new RangeError(`Cannot exceed ${BATCH_MAX_REQUEST} sub requests in a single batch`); - } - // Fast fail if url for sub request is invalid - const path = getURLPath(subRequest.url); - if (!path || path === "") { - throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); + /** + * Converts the given resource types to a string. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + */ + toString() { + const resourceTypes = []; + if (this.service) { + resourceTypes.push("s"); } - } - postAddSubRequest(subRequest) { - this.subRequests.set(this.operationCount, subRequest); - this.operationCount++; - } - // Return the http request body with assembling the ending line to the sub request body. - getHttpRequestBody() { - return `${this.body}${this.batchRequestEnding}${HTTP_LINE_ENDING}`; - } - getMultipartContentType() { - return this.multipartContentType; - } - getSubRequests() { - return this.subRequests; - } -} -class BatchRequestAssemblePolicy extends coreHttp.BaseRequestPolicy { - constructor(batchRequest, nextPolicy, options) { - super(nextPolicy, options); - this.dummyResponse = { - request: new coreHttp.WebResource(), - status: 200, - headers: new coreHttp.HttpHeaders(), - }; - this.batchRequest = batchRequest; - } - async sendRequest(request) { - await this.batchRequest.appendSubRequestToBody(request); - return this.dummyResponse; // Intercept request from going to wire - } -} -class BatchRequestAssemblePolicyFactory { - constructor(batchRequest) { - this.batchRequest = batchRequest; - } - create(nextPolicy, options) { - return new BatchRequestAssemblePolicy(this.batchRequest, nextPolicy, options); - } -} -class BatchHeaderFilterPolicy extends coreHttp.BaseRequestPolicy { - // The base class has a protected constructor. Adding a public one to enable constructing of this class. 
- /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ - constructor(nextPolicy, options) { - super(nextPolicy, options); - } - async sendRequest(request) { - let xMsHeaderName = ""; - for (const header of request.headers.headersArray()) { - if (iEqual(header.name, HeaderConstants.X_MS_VERSION)) { - xMsHeaderName = header.name; - } + if (this.container) { + resourceTypes.push("c"); } - if (xMsHeaderName !== "") { - request.headers.remove(xMsHeaderName); // The subrequests should not have the x-ms-version header. + if (this.object) { + resourceTypes.push("o"); } - return this._nextPolicy.sendRequest(request); - } -} -class BatchHeaderFilterPolicyFactory { - create(nextPolicy, options) { - return new BatchHeaderFilterPolicy(nextPolicy, options); + return resourceTypes.join(""); } } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * A BlobBatchClient allows you to make batched requests to the Azure Storage Blob service. + * ONLY AVAILABLE IN NODE.JS RUNTIME. * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * This is a helper class to construct a string representing the services accessible by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant access to that service. Once all the + * values are set, this should be serialized with toString and set as the services field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the services string without this class, but + * the order of the services is particular and this class guarantees correctness. */ -class BlobBatchClient { - constructor(url, credentialOrPipeline, - // Legacy, no fix for eslint error without breaking. Disable it for this interface. - /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ - options) { - let pipeline; - if (isPipelineLike(credentialOrPipeline)) { - pipeline = credentialOrPipeline; - } - else if (!credentialOrPipeline) { - // no credential provided - pipeline = newPipeline(new AnonymousCredential(), options); - } - else { - pipeline = newPipeline(credentialOrPipeline, options); - } - const storageClientContext = new StorageClientContext(url, pipeline.toServiceClientOptions()); - const path = getURLPath(url); - if (path && path !== "/") { - // Container scoped. - this.serviceOrContainerContext = new Container(storageClientContext); - } - else { - this.serviceOrContainerContext = new Service(storageClientContext); - } +class AccountSASServices { + constructor() { + /** + * Permission to access blob resources granted. + */ + this.blob = false; + /** + * Permission to access file resources granted. + */ + this.file = false; + /** + * Permission to access queue resources granted. + */ + this.queue = false; + /** + * Permission to access table resources granted. + */ + this.table = false; } /** - * Creates a {@link BlobBatch}. - * A BlobBatch represents an aggregated set of operations on blobs. + * Creates an {@link AccountSASServices} from the specified services string. This method will throw an + * Error if it encounters a character that does not correspond to a valid service. + * + * @param services - */ - createBatch() { - return new BlobBatch(); - } - async deleteBlobs(urlsOrBlobClients, credentialOrOptions, - // Legacy, no fix for eslint error without breaking. Disable it for this interface. 
- /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ - options) { - const batch = new BlobBatch(); - for (const urlOrBlobClient of urlsOrBlobClients) { - if (typeof urlOrBlobClient === "string") { - await batch.deleteBlob(urlOrBlobClient, credentialOrOptions, options); - } - else { - await batch.deleteBlob(urlOrBlobClient, credentialOrOptions); - } - } - return this.submitBatch(batch); - } - async setBlobsAccessTier(urlsOrBlobClients, credentialOrTier, tierOrOptions, - // Legacy, no fix for eslint error without breaking. Disable it for this interface. - /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ - options) { - const batch = new BlobBatch(); - for (const urlOrBlobClient of urlsOrBlobClients) { - if (typeof urlOrBlobClient === "string") { - await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options); - } - else { - await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions); + static parse(services) { + const accountSASServices = new AccountSASServices(); + for (const c of services) { + switch (c) { + case "b": + accountSASServices.blob = true; + break; + case "f": + accountSASServices.file = true; + break; + case "q": + accountSASServices.queue = true; + break; + case "t": + accountSASServices.table = true; + break; + default: + throw new RangeError(`Invalid service character: ${c}`); } } - return this.submitBatch(batch); + return accountSASServices; } /** - * Submit batch request which consists of multiple subrequests. - * - * Get `blobBatchClient` and other details before running the snippets. - * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient` - * - * Example usage: - * - * ```js - * let batchRequest = new BlobBatch(); - * await batchRequest.deleteBlob(urlInString0, credential0); - * await batchRequest.deleteBlob(urlInString1, credential1, { - * deleteSnapshots: "include" - * }); - * const batchResp = await blobBatchClient.submitBatch(batchRequest); - * console.log(batchResp.subResponsesSucceededCount); - * ``` - * - * Example using a lease: - * - * ```js - * let batchRequest = new BlobBatch(); - * await batchRequest.setBlobAccessTier(blockBlobClient0, "Cool"); - * await batchRequest.setBlobAccessTier(blockBlobClient1, "Cool", { - * conditions: { leaseId: leaseId } - * }); - * const batchResp = await blobBatchClient.submitBatch(batchRequest); - * console.log(batchResp.subResponsesSucceededCount); - * ``` - * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * Converts the given services to a string. * - * @param batchRequest - A set of Delete or SetTier operations. - * @param options - */ - async submitBatch(batchRequest, options = {}) { - if (!batchRequest || batchRequest.getSubRequests().size === 0) { - throw new RangeError("Batch request should contain one or more sub requests."); + toString() { + const services = []; + if (this.blob) { + services.push("b"); } - const { span, updatedOptions } = createSpan("BlobBatchClient-submitBatch", options); - try { - const batchRequestBody = batchRequest.getHttpRequestBody(); - // ServiceSubmitBatchResponseModel and ContainerSubmitBatchResponse are compatible for now. - const rawBatchResponse = await this.serviceOrContainerContext.submitBatch(utf8ByteLength(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, Object.assign(Object.assign({}, options), convertTracingToRequestOptionsBase(updatedOptions))); - // Parse the sub responses result, if logic reaches here(i.e. 
the batch request succeeded with status code 202). - const batchResponseParser = new BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests()); - const responseSummary = await batchResponseParser.parseBatchResponse(); - const res = { - _response: rawBatchResponse._response, - contentType: rawBatchResponse.contentType, - errorCode: rawBatchResponse.errorCode, - requestId: rawBatchResponse.requestId, - clientRequestId: rawBatchResponse.clientRequestId, - version: rawBatchResponse.version, - subResponses: responseSummary.subResponses, - subResponsesSucceededCount: responseSummary.subResponsesSucceededCount, - subResponsesFailedCount: responseSummary.subResponsesFailedCount, - }; - return res; + if (this.table) { + services.push("t"); } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; + if (this.queue) { + services.push("q"); } - finally { - span.end(); + if (this.file) { + services.push("f"); } + return services.join(""); } } +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** - * A ContainerClient represents a URL to the Azure Storage container allowing you to manipulate its blobs. + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Generates a {@link SASQueryParameters} object which contains all SAS query parameters needed to make an actual + * REST request. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + * @param accountSASSignatureValues - + * @param sharedKeyCredential - */ -class ContainerClient extends StorageClient { - constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, +function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyCredential) { + const version = accountSASSignatureValues.version + ? 
accountSASSignatureValues.version + : SERVICE_VERSION; + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.setImmutabilityPolicy && + version < "2020-08-04") { + throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.deleteVersion && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.permanentDelete && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when provided 'y' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.tag && + version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when provided 't' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.filter && + version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when provided 'f' permission."); + } + if (accountSASSignatureValues.encryptionScope && version < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + } + const parsedPermissions = AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString()); + const parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString(); + const parsedResourceTypes = AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString(); + let stringToSign; + if (version >= "2020-12-06") { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn + ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) + : "", + truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", + version, + accountSASSignatureValues.encryptionScope ? accountSASSignatureValues.encryptionScope : "", + "", // Account SAS requires an additional newline character + ].join("\n"); + } + else { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn + ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) + : "", + truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? 
accountSASSignatureValues.protocol : "", + version, + "", // Account SAS requires an additional newline character + ].join("\n"); + } + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(version, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, accountSASSignatureValues.encryptionScope); +} + +/** + * A BlobServiceClient represents a Client to the Azure Storage Blob service allowing you + * to manipulate blob containers. + */ +class BlobServiceClient extends StorageClient { + /** + * + * Creates an instance of BlobServiceClient from connection string. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param options - Optional. Options to configure the HTTP pipeline. + */ + static fromConnectionString(connectionString, // Legacy, no fix for eslint error without breaking. Disable it for this interface. /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ options) { - let pipeline; - let url; options = options || {}; - if (isPipelineLike(credentialOrPipelineOrContainerName)) { - // (url: string, pipeline: Pipeline) - url = urlOrConnectionString; - pipeline = credentialOrPipelineOrContainerName; - } - else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || - credentialOrPipelineOrContainerName instanceof AnonymousCredential || - coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { - // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) - url = urlOrConnectionString; - pipeline = newPipeline(credentialOrPipelineOrContainerName, options); - } - else if (!credentialOrPipelineOrContainerName && - typeof credentialOrPipelineOrContainerName !== "string") { - // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) - // The second parameter is undefined. Use anonymous credential. 
- url = urlOrConnectionString; - pipeline = newPipeline(new AnonymousCredential(), options); - } - else if (credentialOrPipelineOrContainerName && - typeof credentialOrPipelineOrContainerName === "string") { - // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) - const containerName = credentialOrPipelineOrContainerName; - const extractedCreds = extractConnectionStringParts(urlOrConnectionString); - if (extractedCreds.kind === "AccountConnString") { - if (coreHttp.isNode) { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - url = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)); - if (!options.proxyOptions) { - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); - } - pipeline = newPipeline(sharedKeyCredential, options); - } - else { - throw new Error("Account connection string is only supported in Node.js environment"); + const extractedCreds = extractConnectionStringParts(connectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreUtil.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + if (!options.proxyOptions) { + options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); } - } - else if (extractedCreds.kind === "SASConnString") { - url = - appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)) + - "?" + - extractedCreds.accountSas; - pipeline = newPipeline(new AnonymousCredential(), options); + const pipeline = newPipeline(sharedKeyCredential, options); + return new BlobServiceClient(extractedCreds.url, pipeline); } else { - throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + throw new Error("Account connection string is only supported in Node.js environment"); } } + else if (extractedCreds.kind === "SASConnString") { + const pipeline = newPipeline(new AnonymousCredential(), options); + return new BlobServiceClient(extractedCreds.url + "?" + extractedCreds.accountSas, pipeline); + } else { - throw new Error("Expecting non-empty strings for containerName parameter"); + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } - super(url, pipeline); - this._containerName = this.getContainerNameFromUrl(); - this.containerContext = new Container(this.storageClientContext); } - /** - * The name of the container. - */ - get containerName() { - return this._containerName; + constructor(url, credentialOrPipeline, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + let pipeline; + if (isPipelineLike(credentialOrPipeline)) { + pipeline = credentialOrPipeline; + } + else if ((coreUtil.isNode && credentialOrPipeline instanceof StorageSharedKeyCredential) || + credentialOrPipeline instanceof AnonymousCredential || + coreAuth.isTokenCredential(credentialOrPipeline)) { + pipeline = newPipeline(credentialOrPipeline, options); + } + else { + // The second parameter is undefined. Use anonymous credential + pipeline = newPipeline(new AnonymousCredential(), options); + } + super(url, pipeline); + this.serviceContext = this.storageClientContext.service; } /** - * Creates a new container under the specified account. 
If the container with - * the same name already exists, the operation fails. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container - * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata - * - * @param options - Options to Container Create operation. + * Creates a {@link ContainerClient} object * + * @param containerName - A container name + * @returns A new ContainerClient object for the given container name. * * Example usage: * * ```js * const containerClient = blobServiceClient.getContainerClient("<container name>"); - * const createContainerResponse = await containerClient.create(); - * console.log("Container was created successfully", createContainerResponse.requestId); * ``` */ - async create(options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-create", options); - try { - // Spread operator in destructuring assignments, - // this will filter out unwanted properties from the response object into result object - return await this.containerContext.create(Object.assign(Object.assign({}, options), convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + getContainerClient(containerName) { + return new ContainerClient(appendToURLPath(this.url, encodeURIComponent(containerName)), this.pipeline); } /** - * Creates a new container under the specified account. If the container with - * the same name already exists, it is not changed. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container - * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata + * Create a Blob container. @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container * - * @param options - + * @param containerName - Name of the container to create. + * @param options - Options to configure Container Create operation. + * @returns Container creation response and the corresponding container client. */ - async createIfNotExists(options = {}) { - var _a, _b; - const { span, updatedOptions } = createSpan("ContainerClient-createIfNotExists", options); - try { - const res = await this.create(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); - } - catch (e) { - if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerAlreadyExists") { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when creating a container only if it does not already exist.", - }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); - } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + async createContainer(containerName, options = {}) { + return tracingClient.withSpan("BlobServiceClient-createContainer", options, async (updatedOptions) => { + const containerClient = this.getContainerClient(containerName); + const containerCreateResponse = await containerClient.create(updatedOptions); + return { + containerClient, + containerCreateResponse, + }; + }); } /** - * Returns true if the Azure container resource represented by this client exists; false otherwise.
+ * Deletes a Blob container. * - * NOTE: use this function with care since an existing container might be deleted by other clients or - * applications. Vice versa new containers with the same name might be added by other clients or - * applications after this function completes. + * @param containerName - Name of the container to delete. + * @param options - Options to configure Container Delete operation. + * @returns Container deletion response. + */ + async deleteContainer(containerName, options = {}) { + return tracingClient.withSpan("BlobServiceClient-deleteContainer", options, async (updatedOptions) => { + const containerClient = this.getContainerClient(containerName); + return containerClient.delete(updatedOptions); + }); + } + /** + * Restore a previously deleted Blob container. + * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container. * - * @param options - + * @param deletedContainerName - Name of the previously deleted container. + * @param deletedContainerVersion - Version of the previously deleted container, used to uniquely identify the deleted container. + * @param options - Options to configure Container Restore operation. + * @returns Container undelete response. */ - async exists(options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-exists", options); - try { - await this.getProperties({ - abortSignal: options.abortSignal, + async undeleteContainer(deletedContainerName, deletedContainerVersion, options = {}) { + return tracingClient.withSpan("BlobServiceClient-undeleteContainer", options, async (updatedOptions) => { + const containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName); + // Hack to access a protected member. + const containerContext = containerClient["storageClientContext"].container; + const containerUndeleteResponse = assertResponse(await containerContext.restore({ + deletedContainerName, + deletedContainerVersion, tracingOptions: updatedOptions.tracingOptions, - }); - return true; - } - catch (e) { - if (e.statusCode === 404) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when checking container existence", - }); - return false; - } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + })); + return { containerClient, containerUndeleteResponse }; + }); } /** - * Creates a {@link BlobClient} + * Rename an existing Blob Container. * - * @param blobName - A blob name - * @returns A new BlobClient object for the given blob name. + * @param sourceContainerName - The name of the source container. + * @param destinationContainerName - The new name of the container. + * @param options - Options to configure Container Rename operation. */ - getBlobClient(blobName) { - return new BlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); + /* eslint-disable-next-line @typescript-eslint/ban-ts-comment */ + // @ts-ignore Need to hide this interface for now. Make it public and turn on the live tests for it when the service is ready. + async renameContainer(sourceContainerName, destinationContainerName, options = {}) { + return tracingClient.withSpan("BlobServiceClient-renameContainer", options, async (updatedOptions) => { + var _a; + const containerClient = this.getContainerClient(destinationContainerName); + // Hack to access a protected member.
+ const containerContext = containerClient["storageClientContext"].container; + const containerRenameResponse = assertResponse(await containerContext.rename(sourceContainerName, Object.assign(Object.assign({}, updatedOptions), { sourceLeaseId: (_a = options.sourceCondition) === null || _a === void 0 ? void 0 : _a.leaseId }))); + return { containerClient, containerRenameResponse }; + }); } /** - * Creates an {@link AppendBlobClient} + * Gets the properties of a storage account’s Blob service, including properties + * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties * - * @param blobName - An append blob name + * @param options - Options to the Service Get Properties operation. + * @returns Response data for the Service Get Properties operation. */ - getAppendBlobClient(blobName) { - return new AppendBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); + async getProperties(options = {}) { + return tracingClient.withSpan("BlobServiceClient-getProperties", options, async (updatedOptions) => { + return assertResponse(await this.serviceContext.getProperties({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** - * Creates a {@link BlockBlobClient} - * - * @param blobName - A block blob name - * - * - * Example usage: - * - * ```js - * const content = "Hello world!"; + * Sets properties for a storage account’s Blob service endpoint, including properties + * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties * - * const blockBlobClient = containerClient.getBlockBlobClient("<blob name>"); - * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); - * ``` + * @param properties - + * @param options - Options to the Service Set Properties operation. + * @returns Response data for the Service Set Properties operation. */ - getBlockBlobClient(blobName) { - return new BlockBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); + async setProperties(properties, options = {}) { + return tracingClient.withSpan("BlobServiceClient-setProperties", options, async (updatedOptions) => { + return assertResponse(await this.serviceContext.setProperties(properties, { + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** - * Creates a {@link PageBlobClient} + * Retrieves statistics related to replication for the Blob service. It is only + * available on the secondary location endpoint when read-access geo-redundant + * replication is enabled for the storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats * - * @param blobName - A page blob name + * @param options - Options to the Service Get Statistics operation. + * @returns Response data for the Service Get Statistics operation.
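+ *
+ * Minimal usage sketch (not from the original docs; assumes `blobServiceClient`
+ * was constructed against the account's secondary endpoint with RA-GRS enabled,
+ * and reads the `geoReplication` shape this method returns):
+ *
+ * ```js
+ * // Sketch: read geo-replication stats from the secondary endpoint.
+ * const stats = await blobServiceClient.getStatistics();
+ * console.log(`Geo-replication status: ${stats.geoReplication.status}`);
+ * console.log(`Last synced: ${stats.geoReplication.lastSyncOn}`);
+ * ```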
*/ - getPageBlobClient(blobName) { - return new PageBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); + async getStatistics(options = {}) { + return tracingClient.withSpan("BlobServiceClient-getStatistics", options, async (updatedOptions) => { + return assertResponse(await this.serviceContext.getStatistics({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** - * Returns all user-defined metadata and system properties for the specified - * container. The data returned does not include the container's list of blobs. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-properties - * - * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if - * they originally contained uppercase characters. This differs from the metadata keys returned by - * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which - * will retain their original casing. + * The Get Account Information operation returns the sku name and account kind + * for the specified account. + * The Get Account Information operation is available on service versions beginning + * with version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information * - * @param options - Options to Container Get Properties operation. + * @param options - Options to the Service Get Account Info operation. + * @returns Response data for the Service Get Account Info operation. */ - async getProperties(options = {}) { - if (!options.conditions) { - options.conditions = {}; - } - const { span, updatedOptions } = createSpan("ContainerClient-getProperties", options); - try { - return await this.containerContext.getProperties(Object.assign(Object.assign({ abortSignal: options.abortSignal }, options.conditions), convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * Marks the specified container for deletion. The container and any blobs - * contained within it are later deleted during garbage collection. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container - * - * @param options - Options to Container Delete operation. - */ - async delete(options = {}) { - if (!options.conditions) { - options.conditions = {}; - } - const { span, updatedOptions } = createSpan("ContainerClient-delete", options); - try { - return await this.containerContext.delete(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * Marks the specified container for deletion if it exists. The container and any blobs - * contained within it are later deleted during garbage collection. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container - * - * @param options - Options to Container Delete operation. 
- */ - async deleteIfExists(options = {}) { - var _a, _b; - const { span, updatedOptions } = createSpan("ContainerClient-deleteIfExists", options); - try { - const res = await this.delete(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); - } - catch (e) { - if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerNotFound") { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when deleting a container only if it exists.", - }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); - } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + async getAccountInfo(options = {}) { + return tracingClient.withSpan("BlobServiceClient-getAccountInfo", options, async (updatedOptions) => { + return assertResponse(await this.serviceContext.getAccountInfo({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** - * Sets one or more user-defined name-value pairs for the specified container. - * - * If no option provided, or no metadata defined in the parameter, the container - * metadata will be removed. - * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-metadata + * Returns a list of the containers under the specified account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2 * - * @param metadata - Replace existing metadata with this value. - * If no value provided the existing metadata will be removed. - * @param options - Options to Container Set Metadata operation. + * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to the Service List Container Segment operation. + * @returns Response data for the Service List Container Segment operation. 
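+ *
+ * Minimal paging sketch (assumes an existing `blobServiceClient`; the public
+ * `listContainers()` iterator below is the usual entry point, this only shows
+ * the raw marker/segment contract of this method):
+ *
+ * ```js
+ * // Sketch: walk every segment until the service stops returning a marker.
+ * let marker;
+ * do {
+ *   const segment = await blobServiceClient.listContainersSegment(marker);
+ *   for (const container of segment.containerItems || []) {
+ *     console.log(container.name);
+ *   }
+ *   marker = segment.continuationToken;
+ * } while (marker);
+ * ```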
*/ - async setMetadata(metadata, options = {}) { - if (!options.conditions) { - options.conditions = {}; - } - if (options.conditions.ifUnmodifiedSince) { - throw new RangeError("the IfUnmodifiedSince must have their default values because they are ignored by the blob service"); - } - const { span, updatedOptions } = createSpan("ContainerClient-setMetadata", options); - try { - return await this.containerContext.setMetadata(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + async listContainersSegment(marker, options = {}) { + return tracingClient.withSpan("BlobServiceClient-listContainersSegment", options, async (updatedOptions) => { + return assertResponse(await this.serviceContext.listContainersSegment(Object.assign(Object.assign({ abortSignal: options.abortSignal, marker }, options), { include: typeof options.include === "string" ? [options.include] : options.include, tracingOptions: updatedOptions.tracingOptions }))); + }); } /** - * Gets the permissions for the specified container. The permissions indicate - * whether container data may be accessed publicly. - * - * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings. - * For example, new Date("2018-12-31T03:44:23.8827891Z").toISOString() will get "2018-12-31T03:44:23.882Z". - * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-acl + * The Filter Blobs operation enables callers to list blobs across all containers whose tags + * match a given search expression. Filter blobs searches across all containers within a + * storage account but can be scoped within the expression to a single container. * - * @param options - Options to Container Get Access Policy operation. + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. 
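+ *
+ * Minimal single-segment sketch (assumes an existing `blobServiceClient`; the
+ * tag expression shown is an arbitrary example):
+ *
+ * ```js
+ * // Sketch: fetch one page of tag matches; pass segment.continuationToken
+ * // back as `marker` to request the next page.
+ * const segment = await blobServiceClient.findBlobsByTagsSegment(
+ *   "project='alpha'",
+ *   undefined,
+ *   { maxPageSize: 10 },
+ * );
+ * for (const blob of segment.blobs || []) {
+ *   console.log(`${blob.name} (tagValue: ${blob.tagValue})`);
+ * }
+ * ```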
*/ - async getAccessPolicy(options = {}) { - if (!options.conditions) { - options.conditions = {}; - } - const { span, updatedOptions } = createSpan("ContainerClient-getAccessPolicy", options); - try { - const response = await this.containerContext.getAccessPolicy(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); - const res = { - _response: response._response, - blobPublicAccess: response.blobPublicAccess, - date: response.date, - etag: response.etag, - errorCode: response.errorCode, - lastModified: response.lastModified, - requestId: response.requestId, - clientRequestId: response.clientRequestId, - signedIdentifiers: [], - version: response.version, - }; - for (const identifier of response) { - let accessPolicy = undefined; - if (identifier.accessPolicy) { - accessPolicy = { - permissions: identifier.accessPolicy.permissions, - }; - if (identifier.accessPolicy.expiresOn) { - accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn); - } - if (identifier.accessPolicy.startsOn) { - accessPolicy.startsOn = new Date(identifier.accessPolicy.startsOn); + async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { + return tracingClient.withSpan("BlobServiceClient-findBlobsByTagsSegment", options, async (updatedOptions) => { + const response = assertResponse(await this.serviceContext.filterBlobs({ + abortSignal: options.abortSignal, + where: tagFilterSqlExpression, + marker, + maxPageSize: options.maxPageSize, + tracingOptions: updatedOptions.tracingOptions, + })); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { + var _a; + let tagValue = ""; + if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { + tagValue = blob.tags.blobTagSet[0].value; } - } - res.signedIdentifiers.push({ - accessPolicy, - id: identifier.id, - }); - } - return res; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * Sets the permissions for the specified container. The permissions indicate - * whether blobs in a container may be accessed publicly. - * - * When you set permissions for a container, the existing permissions are replaced. - * If no access or containerAcl provided, the existing container ACL will be - * removed. - * - * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect. - * During this interval, a shared access signature that is associated with the stored access policy will - * fail with status code 403 (Forbidden), until the access policy becomes active. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl - * - * @param access - The level of public access to data in the container. - * @param containerAcl - Array of elements each having a unique Id and details of the access policy. - * @param options - Options to Container Set Access Policy operation. - */ - async setAccessPolicy(access, containerAcl, options = {}) { - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("ContainerClient-setAccessPolicy", options); - try { - const acl = []; - for (const identifier of containerAcl || []) { - acl.push({ - accessPolicy: { - expiresOn: identifier.accessPolicy.expiresOn - ? 
truncatedISO8061Date(identifier.accessPolicy.expiresOn) - : "", - permissions: identifier.accessPolicy.permissions, - startsOn: identifier.accessPolicy.startsOn - ? truncatedISO8061Date(identifier.accessPolicy.startsOn) - : "", - }, - id: identifier.id, - }); - } - return await this.containerContext.setAccessPolicy(Object.assign({ abortSignal: options.abortSignal, access, containerAcl: acl, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * Get a {@link BlobLeaseClient} that manages leases on the container. - * - * @param proposeLeaseId - Initial proposed lease Id. - * @returns A new BlobLeaseClient object for managing leases on the container. - */ - getBlobLeaseClient(proposeLeaseId) { - return new BlobLeaseClient(this, proposeLeaseId); - } - /** - * Creates a new block blob, or updates the content of an existing block blob. - * - * Updating an existing block blob overwrites any existing metadata on the blob. - * Partial updates are not supported; the content of the existing blob is - * overwritten with the new content. To perform a partial update of a block blob's content, - * use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}. - * - * This is a non-parallel uploading method, please use {@link BlockBlobClient.uploadFile}, - * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better - * performance with concurrent uploading. - * - * @see https://docs.microsoft.com/rest/api/storageservices/put-blob - * - * @param blobName - Name of the block blob to create or update. - * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function - * which returns a new Readable stream whose offset is from data source beginning. - * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a - * string including non-Base64/Hex-encoded characters. - * @param options - Options to configure the Block Blob Upload operation. - * @returns Block Blob upload response data and the corresponding BlockBlobClient instance. - */ - async uploadBlockBlob(blobName, body, contentLength, options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-uploadBlockBlob", options); - try { - const blockBlobClient = this.getBlockBlobClient(blobName); - const response = await blockBlobClient.upload(body, contentLength, updatedOptions); - return { - blockBlobClient, - response, - }; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * Marks the specified blob or snapshot for deletion. The blob is later deleted - * during garbage collection. Note that in order to delete a blob, you must delete - * all of its snapshots. You can delete both at the same time with the Delete - * Blob operation. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob - * - * @param blobName - - * @param options - Options to Blob Delete operation. - * @returns Block blob deletion response data.
- */ - async deleteBlob(blobName, options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-deleteBlob", options); - try { - let blobClient = this.getBlobClient(blobName); - if (options.versionId) { - blobClient = blobClient.withVersion(options.versionId); - } - return await blobClient.delete(updatedOptions); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * listBlobFlatSegment returns a single segment of blobs starting from the - * specified Marker. Use an empty Marker to start enumeration from the beginning. - * After getting a segment, process it, and then call listBlobsFlatSegment again - * (passing the previously-returned Marker) to get the next segment. - * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs - * - * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. - * @param options - Options to Container List Blob Flat Segment operation. - */ - async listBlobFlatSegment(marker, options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-listBlobFlatSegment", options); - try { - const response = await this.containerContext.listBlobFlatSegment(Object.assign(Object.assign({ marker }, options), convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { - const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); - return blobItem; - }) }) }); - return wrappedResponse; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * listBlobHierarchySegment returns a single segment of blobs starting from - * the specified Marker. Use an empty Marker to start enumeration from the - * beginning. After getting a segment, process it, and then call listBlobsHierarchicalSegment - * again (passing the previously-returned Marker) to get the next segment. - * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs - * - * @param delimiter - The character or string used to define the virtual hierarchy - * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. - * @param options - Options to Container List Blob Hierarchy Segment operation.
- */ - async listBlobHierarchySegment(delimiter, marker, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("ContainerClient-listBlobHierarchySegment", options); - try { - const response = await this.containerContext.listBlobHierarchySegment(delimiter, Object.assign(Object.assign({ marker }, options), convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { - const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); - return blobItem; - }), blobPrefixes: (_a = response.segment.blobPrefixes) === null || _a === void 0 ? void 0 : _a.map((blobPrefixInternal) => { - const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) }); - return blobPrefix; - }) }) }); + return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); + }) }); return wrappedResponse; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** - * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse + * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse. * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. * @param marker - A string value that identifies the portion of * the list of blobs to be returned with the next listing operation. The - * operation returns the ContinuationToken value within the response body if the + * operation returns the continuationToken value within the response body if the * listing operation did not return all blobs remaining to be listed - * with the current page. The ContinuationToken value can be used as the value for + * with the current page. The continuationToken value can be used as the value for * the marker parameter in a subsequent call to request the next page of list * items. The marker value is opaque to the client. - * @param options - Options to list blobs operation. + * @param options - Options to find blobs by tags. 
*/ - listSegments(marker, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* listSegments_1() { - let listBlobsFlatSegmentResponse; + findBlobsByTagsSegments(tagFilterSqlExpression_1, marker_1) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1(tagFilterSqlExpression, marker, options = {}) { + let response; if (!!marker || marker === undefined) { do { - listBlobsFlatSegmentResponse = yield tslib.__await(this.listBlobFlatSegment(marker, options)); - marker = listBlobsFlatSegmentResponse.continuationToken; - yield yield tslib.__await(yield tslib.__await(listBlobsFlatSegmentResponse)); + response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options)); + response.blobs = response.blobs || []; + marker = response.continuationToken; + yield yield tslib.__await(response); } while (marker); } }); } /** - * Returns an AsyncIterableIterator of {@link BlobItem} objects + * Returns an AsyncIterableIterator for blobs. * - * @param options - Options to list blobs operation. + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. */ - listItems(options = {}) { - return tslib.__asyncGenerator(this, arguments, function* listItems_1() { - var e_1, _a; + findBlobsByTagsItems(tagFilterSqlExpression_1) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1(tagFilterSqlExpression, options = {}) { + var _a, e_1, _b, _c; let marker; try { - for (var _b = tslib.__asyncValues(this.listSegments(marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { - const listBlobsFlatSegmentResponse = _c.value; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(listBlobsFlatSegmentResponse.segment.blobItems))); + for (var _d = true, _e = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const segment = _c; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); } } catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { - if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); } finally { if (e_1) throw e_1.error; } } }); } /** - * Returns an async iterable iterator to list all the blobs + * Returns an async iterable iterator to find all blobs with specified tag * under the specified account. * * .byPage() returns an async iterable iterator to list the blobs in pages. 
* + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/find-blobs-by-tags + * * Example using `for await` syntax: * * ```js - * // Get the containerClient before you run these snippets, - * // Can be obtained from `blobServiceClient.getContainerClient("<container name>");` * let i = 1; - * for await (const blob of containerClient.listBlobsFlat()) { - * console.log(`Blob ${i++}: ${blob.name}`); + * for await (const blob of blobServiceClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${blob.name}`); * } * ``` * @@ -35104,7 +30400,7 @@ class ContainerClient extends StorageClient { * * ```js * let i = 1; - * let iter = containerClient.listBlobsFlat(); + * const iter = blobServiceClient.findBlobsByTags("tagkey='tagvalue'"); * let blobItem = await iter.next(); * while (!blobItem.done) { * console.log(`Blob ${i++}: ${blobItem.value.name}`); @@ -35117,9 +30413,11 @@ class ContainerClient extends StorageClient { * ```js * // passing optional maxPageSize in the page settings * let i = 1; - * for await (const response of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) { - * for (const blob of response.segment.blobItems) { - * console.log(`Blob ${i++}: ${blob.name}`); + * for await (const response of blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } * } * } * ``` @@ -35128,69 +30426,42 @@ class ContainerClient extends StorageClient { * * ```js * let i = 1; - * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 }); + * let iterator = blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); * let response = (await iterator.next()).value; * * // Prints 2 blob names - * for (const blob of response.segment.blobItems) { - * console.log(`Blob ${i++}: ${blob.name}`); + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } * } * * // Gets next marker * let marker = response.continuationToken; - * * // Passing next marker as continuationToken - * - * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 }); + * iterator = blobServiceClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); * response = (await iterator.next()).value; * - * // Prints 10 blob names - * for (const blob of response.segment.blobItems) { - * console.log(`Blob ${i++}: ${blob.name}`); + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } * } * ``` * - * @param options - Options to list blobs. - * @returns An asyncIterableIterator that supports paging. + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The [OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags.
*/ - listBlobsFlat(options = {}) { - const include = []; - if (options.includeCopy) { - include.push("copy"); - } - if (options.includeDeleted) { - include.push("deleted"); - } - if (options.includeMetadata) { - include.push("metadata"); - } - if (options.includeSnapshots) { - include.push("snapshots"); - } - if (options.includeVersions) { - include.push("versions"); - } - if (options.includeUncommitedBlobs) { - include.push("uncommittedblobs"); - } - if (options.includeTags) { - include.push("tags"); - } - if (options.includeDeletedWithVersions) { - include.push("deletedwithversions"); - } - if (options.includeImmutabilityPolicy) { - include.push("immutabilitypolicy"); - } - if (options.includeLegalHold) { - include.push("legalhold"); - } - if (options.prefix === "") { - options.prefix = undefined; - } - const updatedOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? { include: include } : {})); + findBlobsByTags(tagFilterSqlExpression, options = {}) { // AsyncIterableIterator to iterate over blobs - const iter = this.listItems(updatedOptions); + const listSegmentOptions = Object.assign({}, options); + const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); return { /** * The next method, part of the iteration protocol @@ -35208,319 +30479,74 @@ class ContainerClient extends StorageClient { * Return an AsyncIterableIterator that works a page at a time */ byPage: (settings = {}) => { - return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); }, }; } /** - * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse + * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses * - * @param delimiter - The character or string used to define the virtual hierarchy * @param marker - A string value that identifies the portion of - * the list of blobs to be returned with the next listing operation. The - * operation returns the ContinuationToken value within the response body if the - * listing operation did not return all blobs remaining to be listed - * with the current page. The ContinuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list - * items. The marker value is opaque to the client. - * @param options - Options to list blobs operation. + * the list of containers to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list containers operation. 
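+ *
+ * Minimal consumption sketch (assumes an existing `blobServiceClient`; most
+ * callers should prefer `listContainers().byPage()` over this lower-level
+ * generator):
+ *
+ * ```js
+ * // Sketch: iterate raw container segments; each yielded value is one
+ * // list-containers segment response.
+ * for await (const segment of blobServiceClient.listSegments(undefined, { maxPageSize: 5 })) {
+ *   console.log(`segment with ${segment.containerItems.length} container(s)`);
+ * }
+ * ```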
*/ - listHierarchySegments(delimiter, marker, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* listHierarchySegments_1() { - let listBlobsHierarchySegmentResponse; + listSegments(marker_1) { + return tslib.__asyncGenerator(this, arguments, function* listSegments_1(marker, options = {}) { + let listContainersSegmentResponse; if (!!marker || marker === undefined) { do { - listBlobsHierarchySegmentResponse = yield tslib.__await(this.listBlobHierarchySegment(delimiter, marker, options)); - marker = listBlobsHierarchySegmentResponse.continuationToken; - yield yield tslib.__await(yield tslib.__await(listBlobsHierarchySegmentResponse)); + listContainersSegmentResponse = yield tslib.__await(this.listContainersSegment(marker, options)); + listContainersSegmentResponse.containerItems = + listContainersSegmentResponse.containerItems || []; + marker = listContainersSegmentResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(listContainersSegmentResponse)); } while (marker); } }); } /** - * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects. + * Returns an AsyncIterableIterator for Container Items * - * @param delimiter - The character or string used to define the virtual hierarchy - * @param options - Options to list blobs operation. + * @param options - Options to list containers operation. */ - listItemsByHierarchy(delimiter, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* listItemsByHierarchy_1() { - var e_2, _a; + listItems() { + return tslib.__asyncGenerator(this, arguments, function* listItems_1(options = {}) { + var _a, e_2, _b, _c; let marker; try { - for (var _b = tslib.__asyncValues(this.listHierarchySegments(delimiter, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { - const listBlobsHierarchySegmentResponse = _c.value; - const segment = listBlobsHierarchySegmentResponse.segment; - if (segment.blobPrefixes) { - for (const prefix of segment.blobPrefixes) { - yield yield tslib.__await(Object.assign({ kind: "prefix" }, prefix)); - } - } - for (const blob of segment.blobItems) { - yield yield tslib.__await(Object.assign({ kind: "blob" }, blob)); - } + for (var _d = true, _e = tslib.__asyncValues(this.listSegments(marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const segment = _c; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.containerItems))); } } catch (e_2_1) { e_2 = { error: e_2_1 }; } finally { try { - if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); } finally { if (e_2) throw e_2.error; } } }); } /** - * Returns an async iterable iterator to list all the blobs by hierarchy. + * Returns an async iterable iterator to list all the containers * under the specified account. * - * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages. 
- * - * Example using `for await` syntax: - * - * ```js - * for await (const item of containerClient.listBlobsByHierarchy("/")) { - * if (item.kind === "prefix") { - * console.log(`\tBlobPrefix: ${item.name}`); - * } else { - * console.log(`\tBlobItem: name - ${item.name}`); - * } - * } - * ``` - * - * Example using `iter.next()`: - * - * ```js - * let iter = containerClient.listBlobsByHierarchy("/", { prefix: "prefix1/" }); - * let entity = await iter.next(); - * while (!entity.done) { - * let item = entity.value; - * if (item.kind === "prefix") { - * console.log(`\tBlobPrefix: ${item.name}`); - * } else { - * console.log(`\tBlobItem: name - ${item.name}`); - * } - * entity = await iter.next(); - * } - * ``` - * - * Example using `byPage()`: - * - * ```js - * console.log("Listing blobs by hierarchy by page"); - * for await (const response of containerClient.listBlobsByHierarchy("/").byPage()) { - * const segment = response.segment; - * if (segment.blobPrefixes) { - * for (const prefix of segment.blobPrefixes) { - * console.log(`\tBlobPrefix: ${prefix.name}`); - * } - * } - * for (const blob of response.segment.blobItems) { - * console.log(`\tBlobItem: name - ${blob.name}`); - * } - * } - * ``` - * - * Example using paging with a max page size: - * - * ```js - * console.log("Listing blobs by hierarchy by page, specifying a prefix and a max page size"); - * - * let i = 1; - * for await (const response of containerClient - * .listBlobsByHierarchy("/", { prefix: "prefix2/sub1/" }) - * .byPage({ maxPageSize: 2 })) { - * console.log(`Page ${i++}`); - * const segment = response.segment; - * - * if (segment.blobPrefixes) { - * for (const prefix of segment.blobPrefixes) { - * console.log(`\tBlobPrefix: ${prefix.name}`); - * } - * } - * - * for (const blob of response.segment.blobItems) { - * console.log(`\tBlobItem: name - ${blob.name}`); - * } - * } - * ``` - * - * @param delimiter - The character or string used to define the virtual hierarchy - * @param options - Options to list blobs operation. - */ - listBlobsByHierarchy(delimiter, options = {}) { - if (delimiter === "") { - throw new RangeError("delimiter should contain one or more characters"); - } - const include = []; - if (options.includeCopy) { - include.push("copy"); - } - if (options.includeDeleted) { - include.push("deleted"); - } - if (options.includeMetadata) { - include.push("metadata"); - } - if (options.includeSnapshots) { - include.push("snapshots"); - } - if (options.includeVersions) { - include.push("versions"); - } - if (options.includeUncommitedBlobs) { - include.push("uncommittedblobs"); - } - if (options.includeTags) { - include.push("tags"); - } - if (options.includeDeletedWithVersions) { - include.push("deletedwithversions"); - } - if (options.includeImmutabilityPolicy) { - include.push("immutabilitypolicy"); - } - if (options.includeLegalHold) { - include.push("legalhold"); - } - if (options.prefix === "") { - options.prefix = undefined; - } - const updatedOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? 
{ include: include } : {})); - // AsyncIterableIterator to iterate over blob prefixes and blobs - const iter = this.listItemsByHierarchy(delimiter, updatedOptions); - return { - /** - * The next method, part of the iteration protocol - */ - async next() { - return iter.next(); - }, - /** - * The connection to the async iterator, part of the iteration protocol - */ - [Symbol.asyncIterator]() { - return this; - }, - /** - * Return an AsyncIterableIterator that works a page at a time - */ - byPage: (settings = {}) => { - return this.listHierarchySegments(delimiter, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); - }, - }; - } - /** - * The Filter Blobs operation enables callers to list blobs in the container whose tags - * match a given search expression. - * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. - * @param marker - A string value that identifies the portion of - * the list of blobs to be returned with the next listing operation. The - * operation returns the continuationToken value within the response body if the - * listing operation did not return all blobs remaining to be listed - * with the current page. The continuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list - * items. The marker value is opaque to the client. - * @param options - Options to find blobs by tags. - */ - async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-findBlobsByTagsSegment", options); - try { - const response = await this.containerContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { - var _a; - let tagValue = ""; - if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { - tagValue = blob.tags.blobTagSet[0].value; - } - return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); - }) }); - return wrappedResponse; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse. - * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. - * @param marker - A string value that identifies the portion of - * the list of blobs to be returned with the next listing operation. 
The - * operation returns the continuationToken value within the response body if the - * listing operation did not return all blobs remaining to be listed - * with the current page. The continuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list - * items. The marker value is opaque to the client. - * @param options - Options to find blobs by tags. - */ - findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1() { - let response; - if (!!marker || marker === undefined) { - do { - response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options)); - response.blobs = response.blobs || []; - marker = response.continuationToken; - yield yield tslib.__await(response); - } while (marker); - } - }); - } - /** - * Returns an AsyncIterableIterator for blobs. - * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. - * @param options - Options to findBlobsByTagsItems. - */ - findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1() { - var e_3, _a; - let marker; - try { - for (var _b = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { - const segment = _c.value; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); - } - } - catch (e_3_1) { e_3 = { error: e_3_1 }; } - finally { - try { - if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); - } - finally { if (e_3) throw e_3.error; } - } - }); - } - /** - * Returns an async iterable iterator to find all blobs with specified tag - * under the specified container. - * - * .byPage() returns an async iterable iterator to list the blobs in pages. + * .byPage() returns an async iterable iterator to list the containers in pages. 
* * Example using `for await` syntax: * * ```js * let i = 1; - * for await (const blob of containerClient.findBlobsByTags("tagkey='tagvalue'")) { - * console.log(`Blob ${i++}: ${blob.name}`); + * for await (const container of blobServiceClient.listContainers()) { + * console.log(`Container ${i++}: ${container.name}`); * } * ``` * @@ -35528,11 +30554,11 @@ class ContainerClient extends StorageClient { * * ```js * let i = 1; - * const iter = containerClient.findBlobsByTags("tagkey='tagvalue'"); - * let blobItem = await iter.next(); - * while (!blobItem.done) { - * console.log(`Blob ${i++}: ${blobItem.value.name}`); - * blobItem = await iter.next(); + * const iter = blobServiceClient.listContainers(); + * let containerItem = await iter.next(); + * while (!containerItem.done) { + * console.log(`Container ${i++}: ${containerItem.value.name}`); + * containerItem = await iter.next(); * } * ``` * @@ -35541,10 +30567,10 @@ class ContainerClient extends StorageClient { * ```js * // passing optional maxPageSize in the page settings * let i = 1; - * for await (const response of containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { - * if (response.blobs) { - * for (const blob of response.blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); + * for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) { + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); * } * } * } @@ -35554,42 +30580,52 @@ class ContainerClient extends StorageClient { * * ```js * let i = 1; - * let iterator = containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 }); * let response = (await iterator.next()).value; * - * // Prints 2 blob names - * if (response.blobs) { - * for (const blob of response.blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); + * // Prints 2 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); * } * } * * // Gets next marker * let marker = response.continuationToken; * // Passing next marker as continuationToken - * iterator = containerClient - * .findBlobsByTags("tagkey='tagvalue'") + * iterator = blobServiceClient + * .listContainers() * .byPage({ continuationToken: marker, maxPageSize: 10 }); * response = (await iterator.next()).value; * - * // Prints blob names - * if (response.blobs) { - * for (const blob of response.blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); + * // Prints 10 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); * } * } * ``` * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. - * @param options - Options to find blobs by tags. + * @param options - Options to list containers. + * @returns An asyncIterableIterator that supports paging. 
*/ - findBlobsByTags(tagFilterSqlExpression, options = {}) { - // AsyncIterableIterator to iterate over blobs - const listSegmentOptions = Object.assign({}, options); - const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); + listContainers(options = {}) { + if (options.prefix === "") { + options.prefix = undefined; + } + const include = []; + if (options.includeDeleted) { + include.push("deleted"); + } + if (options.includeMetadata) { + include.push("metadata"); + } + if (options.includeSystem) { + include.push("system"); + } + // AsyncIterableIterator to iterate over containers + const listSegmentOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? { include } : {})); + const iter = this.listItems(listSegmentOptions); return { /** * The next method, part of the iteration protocol @@ -35607,65 +30643,41 @@ class ContainerClient extends StorageClient { * Return an AsyncIterableIterator that works a page at a time */ byPage: (settings = {}) => { - return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); }, }; } - getContainerNameFromUrl() { - let containerName; - try { - // URL may look like the following - // "https://myaccount.blob.core.windows.net/mycontainer?sasString"; - // "https://myaccount.blob.core.windows.net/mycontainer"; - // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername` - // http://localhost:10001/devstoreaccount1/containername - const parsedUrl = coreHttp.URLBuilder.parse(this.url); - if (parsedUrl.getHost().split(".")[1] === "blob") { - // "https://myaccount.blob.core.windows.net/containername". - // "https://customdomain.com/containername". - // .getPath() -> /containername - containerName = parsedUrl.getPath().split("/")[1]; - } - else if (isIpEndpointStyle(parsedUrl)) { - // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername - // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername - // .getPath() -> /devstoreaccount1/containername - containerName = parsedUrl.getPath().split("/")[2]; - } - else { - // "https://customdomain.com/containername". - // .getPath() -> /containername - containerName = parsedUrl.getPath().split("/")[1]; - } - // decode the encoded containerName - to get all the special characters that might be present in it - containerName = decodeURIComponent(containerName); - if (!containerName) { - throw new Error("Provided containerName is invalid."); - } - return containerName; - } - catch (error) { - throw new Error("Unable to extract containerName with provided information."); - } - } /** - * Only available for ContainerClient constructed with a shared key credential. + * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential). * - * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties - * and parameters passed in. The SAS is signed by the shared key credential of the client. + * Retrieves a user delegation key for the Blob service. This is only a valid operation when using + * bearer token authentication. 
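// --- [editor's example] ------------------------------------------------------
// A minimal usage sketch for the listContainers() iterator implemented above.
// It assumes the public @azure/storage-blob and @azure/identity packages; the
// account URL is a placeholder.
const { BlobServiceClient } = require("@azure/storage-blob");
const { DefaultAzureCredential } = require("@azure/identity");

async function printContainers() {
  const client = new BlobServiceClient(
    "https://myaccount.blob.core.windows.net",
    new DefaultAzureCredential()
  );
  // Page through every container, soft-deleted ones included, 50 per page.
  for await (const page of client
    .listContainers({ includeDeleted: true, includeMetadata: true })
    .byPage({ maxPageSize: 50 })) {
    for (const container of page.containerItems || []) {
      console.log(container.name, container.deleted ? "(soft-deleted)" : "");
    }
  }
}
// ------------------------------------------------------------------------------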
* - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key * - * @param options - Optional parameters. - * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time + * @param expiresOn - The end time for the user delegation SAS. Must be within 7 days of the current time */ - generateSasUrl(options) { - return new Promise((resolve) => { - if (!(this.credential instanceof StorageSharedKeyCredential)) { - throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); - } - const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName }, options), this.credential).toString(); - resolve(appendToURLQuery(this.url, sas)); + async getUserDelegationKey(startsOn, expiresOn, options = {}) { + return tracingClient.withSpan("BlobServiceClient-getUserDelegationKey", options, async (updatedOptions) => { + const response = assertResponse(await this.serviceContext.getUserDelegationKey({ + startsOn: truncatedISO8061Date(startsOn, false), + expiresOn: truncatedISO8061Date(expiresOn, false), + }, { + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); + const userDelegationKey = { + signedObjectId: response.signedObjectId, + signedTenantId: response.signedTenantId, + signedStartsOn: new Date(response.signedStartsOn), + signedExpiresOn: new Date(response.signedExpiresOn), + signedService: response.signedService, + signedVersion: response.signedVersion, + value: response.value, + }; + const res = Object.assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey); + return res; }); } /** @@ -35673,1315 +30685,863 @@ class ContainerClient extends StorageClient { * * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch * - * @returns A new BlobBatchClient object for this container. + * @returns A new BlobBatchClient object for this service. */ getBlobBatchClient() { return new BlobBatchClient(this.url, this.pipeline); } + /** + * Only available for BlobServiceClient constructed with a shared key credential. + * + * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas + * + * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Default to an hour later if not provided. + * @param permissions - Specifies the list of permissions to be associated with the SAS. + * @param resourceTypes - Specifies the resource types associated with the shared access signature. + * @param options - Optional parameters. + * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. 
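// --- [editor's example] ------------------------------------------------------
// Hedged sketch: obtaining a user delegation key via getUserDelegationKey()
// (implemented above) and using it to sign a container-scoped SAS. Assumes the
// public @azure/storage-blob exports also visible in this bundle
// (generateBlobSASQueryParameters, ContainerSASPermissions); serviceClient,
// accountName and containerName are placeholders supplied by the caller.
const { generateBlobSASQueryParameters, ContainerSASPermissions } = require("@azure/storage-blob");

async function containerReadSas(serviceClient, accountName, containerName) {
  const startsOn = new Date();
  const expiresOn = new Date(startsOn.getTime() + 60 * 60 * 1000); // 1 hour; both times must stay within 7 days
  const userDelegationKey = await serviceClient.getUserDelegationKey(startsOn, expiresOn);
  return generateBlobSASQueryParameters(
    { containerName, permissions: ContainerSASPermissions.parse("r"), startsOn, expiresOn },
    userDelegationKey, // signs with the delegation key rather than a shared account key
    accountName
  ).toString();
}
// ------------------------------------------------------------------------------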
+ */ + generateAccountSasUrl(expiresOn, permissions = AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential"); + } + if (expiresOn === undefined) { + const now = new Date(); + expiresOn = new Date(now.getTime() + 3600 * 1000); + } + const sas = generateAccountSASQueryParameters(Object.assign({ permissions, + expiresOn, + resourceTypes, services: AccountSASServices.parse("b").toString() }, options), this.credential).toString(); + return appendToURLQuery(this.url, sas); + } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. +/** Known values of {@link EncryptionAlgorithmType} that the service accepts. */ +exports.KnownEncryptionAlgorithmType = void 0; +(function (KnownEncryptionAlgorithmType) { + KnownEncryptionAlgorithmType["AES256"] = "AES256"; +})(exports.KnownEncryptionAlgorithmType || (exports.KnownEncryptionAlgorithmType = {})); + +Object.defineProperty(exports, "RestError", ({ + enumerable: true, + get: function () { return coreRestPipeline.RestError; } +})); +exports.AccountSASPermissions = AccountSASPermissions; +exports.AccountSASResourceTypes = AccountSASResourceTypes; +exports.AccountSASServices = AccountSASServices; +exports.AnonymousCredential = AnonymousCredential; +exports.AnonymousCredentialPolicy = AnonymousCredentialPolicy; +exports.AppendBlobClient = AppendBlobClient; +exports.BaseRequestPolicy = BaseRequestPolicy; +exports.BlobBatch = BlobBatch; +exports.BlobBatchClient = BlobBatchClient; +exports.BlobClient = BlobClient; +exports.BlobLeaseClient = BlobLeaseClient; +exports.BlobSASPermissions = BlobSASPermissions; +exports.BlobServiceClient = BlobServiceClient; +exports.BlockBlobClient = BlockBlobClient; +exports.ContainerClient = ContainerClient; +exports.ContainerSASPermissions = ContainerSASPermissions; +exports.Credential = Credential; +exports.CredentialPolicy = CredentialPolicy; +exports.PageBlobClient = PageBlobClient; +exports.Pipeline = Pipeline; +exports.SASQueryParameters = SASQueryParameters; +exports.StorageBrowserPolicy = StorageBrowserPolicy; +exports.StorageBrowserPolicyFactory = StorageBrowserPolicyFactory; +exports.StorageOAuthScopes = StorageOAuthScopes; +exports.StorageRetryPolicy = StorageRetryPolicy; +exports.StorageRetryPolicyFactory = StorageRetryPolicyFactory; +exports.StorageSharedKeyCredential = StorageSharedKeyCredential; +exports.StorageSharedKeyCredentialPolicy = StorageSharedKeyCredentialPolicy; +exports.generateAccountSASQueryParameters = generateAccountSASQueryParameters; +exports.generateBlobSASQueryParameters = generateBlobSASQueryParameters; +exports.getBlobServiceAccountAudience = getBlobServiceAccountAudience; +exports.isPipelineLike = isPipelineLike; +exports.logger = logger; +exports.newPipeline = newPipeline; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 38426: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +const REGEX_IS_INSTALLATION_LEGACY = /^v1\./; +const REGEX_IS_INSTALLATION = /^ghs_/; +const REGEX_IS_USER_TO_SERVER = /^ghu_/; +async function auth(token) { + const isApp = token.split(/\./).length === 3; + const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token); + const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token); + const tokenType = isApp ? 
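// --- [editor's example] ------------------------------------------------------
// Sketch of generateAccountSasUrl() as defined above: it only works when the
// client was built with a StorageSharedKeyCredential, expiry defaults to one
// hour from now, permissions default to "r", and services are pinned to "b".
// Account name and key are placeholders.
const { BlobServiceClient, StorageSharedKeyCredential, AccountSASPermissions } = require("@azure/storage-blob");

const credential = new StorageSharedKeyCredential("myaccount", "<base64-account-key>");
const service = new BlobServiceClient("https://myaccount.blob.core.windows.net", credential);

// Read + list access across service, container and object resource types.
const sasUrl = service.generateAccountSasUrl(undefined, AccountSASPermissions.parse("rl"), "sco");
console.log(sasUrl); // https://myaccount.blob.core.windows.net?sv=...&sig=...
// ------------------------------------------------------------------------------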
"app" : isInstallation ? "installation" : isUserToServer ? "user-to-server" : "oauth"; + return { + type: "token", + token: token, + tokenType + }; +} + /** - * ONLY AVAILABLE IN NODE.JS RUNTIME. + * Prefix token for usage in the Authorization header * - * This is a helper class to construct a string representing the permissions granted by an AccountSAS. Setting a value - * to true means that any SAS which uses these permissions will grant permissions for that operation. Once all the - * values are set, this should be serialized with toString and set as the permissions field on an - * {@link AccountSASSignatureValues} object. It is possible to construct the permissions string without this class, but - * the order of the permissions is particular and this class guarantees correctness. + * @param token OAuth token or JSON Web Token */ -class AccountSASPermissions { - constructor() { - /** - * Permission to read resources and list queues and tables granted. - */ - this.read = false; - /** - * Permission to write resources granted. - */ - this.write = false; - /** - * Permission to create blobs and files granted. - */ - this.delete = false; - /** - * Permission to delete versions granted. - */ - this.deleteVersion = false; - /** - * Permission to list blob containers, blobs, shares, directories, and files granted. - */ - this.list = false; - /** - * Permission to add messages, table entities, and append to blobs granted. - */ - this.add = false; - /** - * Permission to create blobs and files granted. - */ - this.create = false; - /** - * Permissions to update messages and table entities granted. - */ - this.update = false; - /** - * Permission to get and delete messages granted. - */ - this.process = false; - /** - * Specfies Tag access granted. - */ - this.tag = false; - /** - * Permission to filter blobs. - */ - this.filter = false; - /** - * Permission to set immutability policy. - */ - this.setImmutabilityPolicy = false; - /** - * Specifies that Permanent Delete is permitted. - */ - this.permanentDelete = false; - } - /** - * Parse initializes the AccountSASPermissions fields from a string. - * - * @param permissions - - */ - static parse(permissions) { - const accountSASPermissions = new AccountSASPermissions(); - for (const c of permissions) { - switch (c) { - case "r": - accountSASPermissions.read = true; - break; - case "w": - accountSASPermissions.write = true; - break; - case "d": - accountSASPermissions.delete = true; - break; - case "x": - accountSASPermissions.deleteVersion = true; - break; - case "l": - accountSASPermissions.list = true; - break; - case "a": - accountSASPermissions.add = true; - break; - case "c": - accountSASPermissions.create = true; - break; - case "u": - accountSASPermissions.update = true; - break; - case "p": - accountSASPermissions.process = true; - break; - case "t": - accountSASPermissions.tag = true; - break; - case "f": - accountSASPermissions.filter = true; - break; - case "i": - accountSASPermissions.setImmutabilityPolicy = true; - break; - case "y": - accountSASPermissions.permanentDelete = true; - break; - default: - throw new RangeError(`Invalid permission character: ${c}`); - } - } - return accountSASPermissions; - } - /** - * Creates a {@link AccountSASPermissions} from a raw object which contains same keys as it - * and boolean values for them. 
- * - * @param permissionLike - - */ - static from(permissionLike) { - const accountSASPermissions = new AccountSASPermissions(); - if (permissionLike.read) { - accountSASPermissions.read = true; - } - if (permissionLike.write) { - accountSASPermissions.write = true; - } - if (permissionLike.delete) { - accountSASPermissions.delete = true; - } - if (permissionLike.deleteVersion) { - accountSASPermissions.deleteVersion = true; - } - if (permissionLike.filter) { - accountSASPermissions.filter = true; - } - if (permissionLike.tag) { - accountSASPermissions.tag = true; - } - if (permissionLike.list) { - accountSASPermissions.list = true; - } - if (permissionLike.add) { - accountSASPermissions.add = true; - } - if (permissionLike.create) { - accountSASPermissions.create = true; - } - if (permissionLike.update) { - accountSASPermissions.update = true; - } - if (permissionLike.process) { - accountSASPermissions.process = true; - } - if (permissionLike.setImmutabilityPolicy) { - accountSASPermissions.setImmutabilityPolicy = true; - } - if (permissionLike.permanentDelete) { - accountSASPermissions.permanentDelete = true; - } - return accountSASPermissions; - } - /** - * Produces the SAS permissions string for an Azure Storage account. - * Call this method to set AccountSASSignatureValues Permissions field. - * - * Using this method will guarantee the resource types are in - * an order accepted by the service. - * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas - * - */ - toString() { - // The order of the characters should be as specified here to ensure correctness: - // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas - // Use a string array instead of string concatenating += operator for performance - const permissions = []; - if (this.read) { - permissions.push("r"); - } - if (this.write) { - permissions.push("w"); - } - if (this.delete) { - permissions.push("d"); - } - if (this.deleteVersion) { - permissions.push("x"); - } - if (this.filter) { - permissions.push("f"); - } - if (this.tag) { - permissions.push("t"); - } - if (this.list) { - permissions.push("l"); - } - if (this.add) { - permissions.push("a"); - } - if (this.create) { - permissions.push("c"); - } - if (this.update) { - permissions.push("u"); - } - if (this.process) { - permissions.push("p"); - } - if (this.setImmutabilityPolicy) { - permissions.push("i"); - } - if (this.permanentDelete) { - permissions.push("y"); - } - return permissions.join(""); - } +function withAuthorizationPrefix(token) { + if (token.split(/\./).length === 3) { + return `bearer ${token}`; + } + + return `token ${token}`; } -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * - * This is a helper class to construct a string representing the resources accessible by an AccountSAS. Setting a value - * to true means that any SAS which uses these permissions will grant access to that resource type. Once all the - * values are set, this should be serialized with toString and set as the resources field on an - * {@link AccountSASSignatureValues} object. It is possible to construct the resources string without this class, but - * the order of the resources is particular and this class guarantees correctness. - */ -class AccountSASResourceTypes { - constructor() { - /** - * Permission to access service level APIs granted. 
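// --- [editor's example] ------------------------------------------------------
// The SAS helper classes above guarantee the character order the service
// expects, so parse() followed by toString() canonicalizes a permissions
// string. A quick sketch (these classes remain exported from
// @azure/storage-blob):
const { AccountSASPermissions } = require("@azure/storage-blob");
console.log(AccountSASPermissions.parse("lrw").toString()); // "rwl" — reordered per the service contract
// ------------------------------------------------------------------------------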
- */ - this.service = false; - /** - * Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted. - */ - this.container = false; - /** - * Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted. - */ - this.object = false; - } - /** - * Creates an {@link AccountSASResourceTypes} from the specified resource types string. This method will throw an - * Error if it encounters a character that does not correspond to a valid resource type. - * - * @param resourceTypes - - */ - static parse(resourceTypes) { - const accountSASResourceTypes = new AccountSASResourceTypes(); - for (const c of resourceTypes) { - switch (c) { - case "s": - accountSASResourceTypes.service = true; - break; - case "c": - accountSASResourceTypes.container = true; - break; - case "o": - accountSASResourceTypes.object = true; - break; - default: - throw new RangeError(`Invalid resource type: ${c}`); - } - } - return accountSASResourceTypes; - } - /** - * Converts the given resource types to a string. - * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas - * - */ - toString() { - const resourceTypes = []; - if (this.service) { - resourceTypes.push("s"); - } - if (this.container) { - resourceTypes.push("c"); - } - if (this.object) { - resourceTypes.push("o"); - } - return resourceTypes.join(""); - } +async function hook(token, request, route, parameters) { + const endpoint = request.endpoint.merge(route, parameters); + endpoint.headers.authorization = withAuthorizationPrefix(token); + return request(endpoint); } -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * - * This is a helper class to construct a string representing the services accessible by an AccountSAS. Setting a value - * to true means that any SAS which uses these permissions will grant access to that service. Once all the - * values are set, this should be serialized with toString and set as the services field on an - * {@link AccountSASSignatureValues} object. It is possible to construct the services string without this class, but - * the order of the services is particular and this class guarantees correctness. - */ -class AccountSASServices { - constructor() { - /** - * Permission to access blob resources granted. - */ - this.blob = false; - /** - * Permission to access file resources granted. - */ - this.file = false; - /** - * Permission to access queue resources granted. - */ - this.queue = false; - /** - * Permission to access table resources granted. - */ - this.table = false; - } - /** - * Creates an {@link AccountSASServices} from the specified services string. This method will throw an - * Error if it encounters a character that does not correspond to a valid service. - * - * @param services - - */ - static parse(services) { - const accountSASServices = new AccountSASServices(); - for (const c of services) { - switch (c) { - case "b": - accountSASServices.blob = true; - break; - case "f": - accountSASServices.file = true; - break; - case "q": - accountSASServices.queue = true; - break; - case "t": - accountSASServices.table = true; - break; - default: - throw new RangeError(`Invalid service character: ${c}`); - } - } - return accountSASServices; - } - /** - * Converts the given services to a string. 
- * - */ - toString() { - const services = []; - if (this.blob) { - services.push("b"); - } - if (this.table) { - services.push("t"); - } - if (this.queue) { - services.push("q"); - } - if (this.file) { - services.push("f"); - } - return services.join(""); - } +const createTokenAuth = function createTokenAuth(token) { + if (!token) { + throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); + } + + if (typeof token !== "string") { + throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string"); + } + + token = token.replace(/^(token|bearer) +/i, ""); + return Object.assign(auth.bind(null, token), { + hook: hook.bind(null, token) + }); +}; + +exports.createTokenAuth = createTokenAuth; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 6461: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +var universalUserAgent = __nccwpck_require__(37081); +var beforeAfterHook = __nccwpck_require__(73108); +var request = __nccwpck_require__(63986); +var graphql = __nccwpck_require__(51463); +var authToken = __nccwpck_require__(38426); + +function _objectWithoutPropertiesLoose(source, excluded) { + if (source == null) return {}; + var target = {}; + var sourceKeys = Object.keys(source); + var key, i; + + for (i = 0; i < sourceKeys.length; i++) { + key = sourceKeys[i]; + if (excluded.indexOf(key) >= 0) continue; + target[key] = source[key]; + } + + return target; } -// Copyright (c) Microsoft Corporation. -/** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * - * Generates a {@link SASQueryParameters} object which contains all SAS query parameters needed to make an actual - * REST request. - * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas - * - * @param accountSASSignatureValues - - * @param sharedKeyCredential - - */ -function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyCredential) { - const version = accountSASSignatureValues.version - ? 
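// --- [editor's example] ------------------------------------------------------
// Sketch of the token auth strategy defined above (@octokit/auth-token), using
// a placeholder installation token. tokenType is inferred from the token's
// shape: "ghs_"/"v1." => installation, "ghu_" => user-to-server, three
// dot-separated segments => app JWT, anything else => oauth.
const { createTokenAuth } = require("@octokit/auth-token");

async function demoAuth() {
  const auth = createTokenAuth("ghs_16C7e42F292c6912E7710c838347Ae178B4a");
  const { type, token, tokenType } = await auth();
  console.log(type, tokenType); // "token" "installation"
}
// ------------------------------------------------------------------------------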
accountSASSignatureValues.version - : SERVICE_VERSION; - if (accountSASSignatureValues.permissions && - accountSASSignatureValues.permissions.setImmutabilityPolicy && - version < "2020-08-04") { - throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); - } - if (accountSASSignatureValues.permissions && - accountSASSignatureValues.permissions.deleteVersion && - version < "2019-10-10") { - throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission."); - } - if (accountSASSignatureValues.permissions && - accountSASSignatureValues.permissions.permanentDelete && - version < "2019-10-10") { - throw RangeError("'version' must be >= '2019-10-10' when provided 'y' permission."); - } - if (accountSASSignatureValues.permissions && - accountSASSignatureValues.permissions.tag && - version < "2019-12-12") { - throw RangeError("'version' must be >= '2019-12-12' when provided 't' permission."); - } - if (accountSASSignatureValues.permissions && - accountSASSignatureValues.permissions.filter && - version < "2019-12-12") { - throw RangeError("'version' must be >= '2019-12-12' when provided 'f' permission."); +function _objectWithoutProperties(source, excluded) { + if (source == null) return {}; + + var target = _objectWithoutPropertiesLoose(source, excluded); + + var key, i; + + if (Object.getOwnPropertySymbols) { + var sourceSymbolKeys = Object.getOwnPropertySymbols(source); + + for (i = 0; i < sourceSymbolKeys.length; i++) { + key = sourceSymbolKeys[i]; + if (excluded.indexOf(key) >= 0) continue; + if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue; + target[key] = source[key]; } - if (accountSASSignatureValues.encryptionScope && version < "2020-12-06") { - throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + } + + return target; +} + +const VERSION = "3.6.0"; + +const _excluded = ["authStrategy"]; +class Octokit { + constructor(options = {}) { + const hook = new beforeAfterHook.Collection(); + const requestDefaults = { + baseUrl: request.request.endpoint.DEFAULTS.baseUrl, + headers: {}, + request: Object.assign({}, options.request, { + // @ts-ignore internal usage only, no need to type + hook: hook.bind(null, "request") + }), + mediaType: { + previews: [], + format: "" + } + }; // prepend default user agent with `options.userAgent` if set + + requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" "); + + if (options.baseUrl) { + requestDefaults.baseUrl = options.baseUrl; } - const parsedPermissions = AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString()); - const parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString(); - const parsedResourceTypes = AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString(); - let stringToSign; - if (version >= "2020-12-06") { - stringToSign = [ - sharedKeyCredential.accountName, - parsedPermissions, - parsedServices, - parsedResourceTypes, - accountSASSignatureValues.startsOn - ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) - : "", - truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), - accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", - accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", - version, - accountSASSignatureValues.encryptionScope ? 
accountSASSignatureValues.encryptionScope : "", - "", // Account SAS requires an additional newline character - ].join("\n"); + + if (options.previews) { + requestDefaults.mediaType.previews = options.previews; } - else { - stringToSign = [ - sharedKeyCredential.accountName, - parsedPermissions, - parsedServices, - parsedResourceTypes, - accountSASSignatureValues.startsOn - ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) - : "", - truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), - accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", - accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", - version, - "", // Account SAS requires an additional newline character - ].join("\n"); + + if (options.timeZone) { + requestDefaults.headers["time-zone"] = options.timeZone; } - const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); - return new SASQueryParameters(version, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, accountSASSignatureValues.encryptionScope); -} -/** - * A BlobServiceClient represents a Client to the Azure Storage Blob service allowing you - * to manipulate blob containers. - */ -class BlobServiceClient extends StorageClient { - constructor(url, credentialOrPipeline, - // Legacy, no fix for eslint error without breaking. Disable it for this interface. - /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ - options) { - let pipeline; - if (isPipelineLike(credentialOrPipeline)) { - pipeline = credentialOrPipeline; - } - else if ((coreHttp.isNode && credentialOrPipeline instanceof StorageSharedKeyCredential) || - credentialOrPipeline instanceof AnonymousCredential || - coreHttp.isTokenCredential(credentialOrPipeline)) { - pipeline = newPipeline(credentialOrPipeline, options); - } - else { - // The second parameter is undefined. Use anonymous credential - pipeline = newPipeline(new AnonymousCredential(), options); + this.request = request.request.defaults(requestDefaults); + this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults); + this.log = Object.assign({ + debug: () => {}, + info: () => {}, + warn: console.warn.bind(console), + error: console.error.bind(console) + }, options.log); + this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance + // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered. + // (2) If only `options.auth` is set, use the default token authentication strategy. + // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance. + // TODO: type `options.auth` based on `options.authStrategy`. 
+ + if (!options.authStrategy) { + if (!options.auth) { + // (1) + this.auth = async () => ({ + type: "unauthenticated" + }); + } else { + // (2) + const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯ + + hook.wrap("request", auth.hook); + this.auth = auth; + } + } else { + const { + authStrategy + } = options, + otherOptions = _objectWithoutProperties(options, _excluded); + + const auth = authStrategy(Object.assign({ + request: this.request, + log: this.log, + // we pass the current octokit instance as well as its constructor options + // to allow for authentication strategies that return a new octokit instance + // that shares the same internal state as the current one. The original + // requirement for this was the "event-octokit" authentication strategy + // of https://github.com/probot/octokit-auth-probot. + octokit: this, + octokitOptions: otherOptions + }, options.auth)); // @ts-ignore ¯\_(ツ)_/¯ + + hook.wrap("request", auth.hook); + this.auth = auth; + } // apply plugins + // https://stackoverflow.com/a/16345172 + + + const classConstructor = this.constructor; + classConstructor.plugins.forEach(plugin => { + Object.assign(this, plugin(this, options)); + }); + } + + static defaults(defaults) { + const OctokitWithDefaults = class extends this { + constructor(...args) { + const options = args[0] || {}; + + if (typeof defaults === "function") { + super(defaults(options)); + return; } - super(url, pipeline); - this.serviceContext = new Service(this.storageClientContext); + + super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? { + userAgent: `${options.userAgent} ${defaults.userAgent}` + } : null)); + } + + }; + return OctokitWithDefaults; + } + /** + * Attach a plugin (or many) to your Octokit instance. + * + * @example + * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) + */ + + + static plugin(...newPlugins) { + var _a; + + const currentPlugins = this.plugins; + const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a); + return NewOctokit; + } + +} +Octokit.VERSION = VERSION; +Octokit.plugins = []; + +exports.Octokit = Octokit; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 92995: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +var isPlainObject = __nccwpck_require__(63032); +var universalUserAgent = __nccwpck_require__(37081); + +function lowercaseKeys(object) { + if (!object) { + return {}; + } + + return Object.keys(object).reduce((newObj, key) => { + newObj[key.toLowerCase()] = object[key]; + return newObj; + }, {}); +} + +function mergeDeep(defaults, options) { + const result = Object.assign({}, defaults); + Object.keys(options).forEach(key => { + if (isPlainObject.isPlainObject(options[key])) { + if (!(key in defaults)) Object.assign(result, { + [key]: options[key] + });else result[key] = mergeDeep(defaults[key], options[key]); + } else { + Object.assign(result, { + [key]: options[key] + }); } - /** - * - * Creates an instance of BlobServiceClient from connection string. - * - * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. - * [ Note - Account connection string can only be used in NODE.JS runtime. 
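// --- [editor's example] ------------------------------------------------------
// Sketch of the Octokit core class exported above: .plugin() merges whatever a
// plugin function returns onto each instance, and .defaults() pre-fills
// constructor options. The plugin below is purely illustrative.
const { Octokit } = require("@octokit/core");

const MyOctokit = Octokit.plugin((octokit, options) => ({
  hello: () => `hello from ${options.userAgent}`,
})).defaults({ userAgent: "my-app/1.2.3" });

const octokit = new MyOctokit({ auth: "<token placeholder>" });
console.log(octokit.hello()); // "hello from my-app/1.2.3"
// ------------------------------------------------------------------------------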
] - * Account connection string example - - * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` - * SAS connection string example - - * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` - * @param options - Optional. Options to configure the HTTP pipeline. - */ - static fromConnectionString(connectionString, - // Legacy, no fix for eslint error without breaking. Disable it for this interface. - /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ - options) { - options = options || {}; - const extractedCreds = extractConnectionStringParts(connectionString); - if (extractedCreds.kind === "AccountConnString") { - if (coreHttp.isNode) { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - if (!options.proxyOptions) { - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); - } - const pipeline = newPipeline(sharedKeyCredential, options); - return new BlobServiceClient(extractedCreds.url, pipeline); - } - else { - throw new Error("Account connection string is only supported in Node.js environment"); - } - } - else if (extractedCreds.kind === "SASConnString") { - const pipeline = newPipeline(new AnonymousCredential(), options); - return new BlobServiceClient(extractedCreds.url + "?" + extractedCreds.accountSas, pipeline); - } - else { - throw new Error("Connection string must be either an Account connection string or a SAS connection string"); - } + }); + return result; +} + +function removeUndefinedProperties(obj) { + for (const key in obj) { + if (obj[key] === undefined) { + delete obj[key]; } - /** - * Creates a {@link ContainerClient} object - * - * @param containerName - A container name - * @returns A new ContainerClient object for the given container name. - * - * Example usage: - * - * ```js - * const containerClient = blobServiceClient.getContainerClient(""); - * ``` - */ - getContainerClient(containerName) { - return new ContainerClient(appendToURLPath(this.url, encodeURIComponent(containerName)), this.pipeline); + } + + return obj; +} + +function merge(defaults, route, options) { + if (typeof route === "string") { + let [method, url] = route.split(" "); + options = Object.assign(url ? { + method, + url + } : { + url: method + }, options); + } else { + options = Object.assign({}, route); + } // lowercase header names before merging with defaults to avoid duplicates + + + options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging + + removeUndefinedProperties(options); + removeUndefinedProperties(options.headers); + const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten + + if (defaults && defaults.mediaType.previews.length) { + mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews); + } + + mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, "")); + return mergedOptions; +} + +function addQueryParameters(url, parameters) { + const separator = /\?/.test(url) ? 
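// --- [editor's example] ------------------------------------------------------
// Sketch tying together the (older, core-http flavored) service-client methods
// being removed above — fromConnectionString(), getContainerClient() and
// createContainer(); all three still exist on the current BlobServiceClient.
// The connection string and container name are placeholders.
const { BlobServiceClient } = require("@azure/storage-blob");

async function createAndUse(connectionString, name) {
  const service = BlobServiceClient.fromConnectionString(connectionString);
  // createContainer() returns both the raw response and a ready-to-use client.
  const { containerClient, containerCreateResponse } = await service.createContainer(name);
  console.log("created at", containerCreateResponse.date);
  return containerClient;
}
// ------------------------------------------------------------------------------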
"&" : "?"; + const names = Object.keys(parameters); + + if (names.length === 0) { + return url; + } + + return url + separator + names.map(name => { + if (name === "q") { + return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); } - /** - * Create a Blob container. @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container - * - * @param containerName - Name of the container to create. - * @param options - Options to configure Container Create operation. - * @returns Container creation response and the corresponding container client. - */ - async createContainer(containerName, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-createContainer", options); - try { - const containerClient = this.getContainerClient(containerName); - const containerCreateResponse = await containerClient.create(updatedOptions); - return { - containerClient, - containerCreateResponse, - }; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + + return `${name}=${encodeURIComponent(parameters[name])}`; + }).join("&"); +} + +const urlVariableRegex = /\{[^}]+\}/g; + +function removeNonChars(variableName) { + return variableName.replace(/^\W+|\W+$/g, "").split(/,/); +} + +function extractUrlVariableNames(url) { + const matches = url.match(urlVariableRegex); + + if (!matches) { + return []; + } + + return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); +} + +function omit(object, keysToOmit) { + return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => { + obj[key] = object[key]; + return obj; + }, {}); +} + +// Based on https://github.com/bramstein/url-template, licensed under BSD +// TODO: create separate package. +// +// Copyright (c) 2012-2014, Bram Stein +// All rights reserved. +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions +// are met: +// 1. Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// 2. Redistributions in binary form must reproduce the above copyright +// notice, this list of conditions and the following disclaimer in the +// documentation and/or other materials provided with the distribution. +// 3. The name of the author may not be used to endorse or promote products +// derived from this software without specific prior written permission. +// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED +// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO +// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, +// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY +// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, +// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +/* istanbul ignore file */ +function encodeReserved(str) { + return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) { + if (!/%[0-9A-Fa-f]/.test(part)) { + part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); } - /** - * Deletes a Blob container. - * - * @param containerName - Name of the container to delete. - * @param options - Options to configure Container Delete operation. - * @returns Container deletion response. - */ - async deleteContainer(containerName, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-deleteContainer", options); - try { - const containerClient = this.getContainerClient(containerName); - return await containerClient.delete(updatedOptions); + + return part; + }).join(""); +} + +function encodeUnreserved(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); +} + +function encodeValue(operator, value, key) { + value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value); + + if (key) { + return encodeUnreserved(key) + "=" + value; + } else { + return value; + } +} + +function isDefined(value) { + return value !== undefined && value !== null; +} + +function isKeyOperator(operator) { + return operator === ";" || operator === "&" || operator === "?"; +} + +function getValues(context, operator, key, modifier) { + var value = context[key], + result = []; + + if (isDefined(value) && value !== "") { + if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { + value = value.toString(); + + if (modifier && modifier !== "*") { + value = value.substring(0, parseInt(modifier, 10)); + } + + result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); + } else { + if (modifier === "*") { + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function (value) { + result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); + }); + } else { + Object.keys(value).forEach(function (k) { + if (isDefined(value[k])) { + result.push(encodeValue(operator, value[k], k)); + } + }); } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; + } else { + const tmp = []; + + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function (value) { + tmp.push(encodeValue(operator, value)); + }); + } else { + Object.keys(value).forEach(function (k) { + if (isDefined(value[k])) { + tmp.push(encodeUnreserved(k)); + tmp.push(encodeValue(operator, value[k].toString())); + } + }); } - finally { - span.end(); + + if (isKeyOperator(operator)) { + result.push(encodeUnreserved(key) + "=" + tmp.join(",")); + } else if (tmp.length !== 0) { + result.push(tmp.join(",")); } + } } - /** - * Restore a previously deleted Blob container. - * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container. - * - * @param deletedContainerName - Name of the previously deleted container. - * @param deletedContainerVersion - Version of the previously deleted container, used to uniquely identify the deleted container. - * @param options - Options to configure Container Restore operation. - * @returns Container deletion response. 
- */ - async undeleteContainer(deletedContainerName, deletedContainerVersion, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-undeleteContainer", options); - try { - const containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName); - // Hack to access a protected member. - const containerContext = new Container(containerClient["storageClientContext"]); - const containerUndeleteResponse = await containerContext.restore(Object.assign({ deletedContainerName, - deletedContainerVersion }, updatedOptions)); - return { containerClient, containerUndeleteResponse }; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + } else { + if (operator === ";") { + if (isDefined(value)) { + result.push(encodeUnreserved(key)); + } + } else if (value === "" && (operator === "&" || operator === "?")) { + result.push(encodeUnreserved(key) + "="); + } else if (value === "") { + result.push(""); } - /** - * Rename an existing Blob Container. - * - * @param sourceContainerName - The name of the source container. - * @param destinationContainerName - The new name of the container. - * @param options - Options to configure Container Rename operation. - */ - /* eslint-disable-next-line @typescript-eslint/ban-ts-comment */ - // @ts-ignore Need to hide this interface for now. Make it public and turn on the live tests for it when the service is ready. - async renameContainer(sourceContainerName, destinationContainerName, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobServiceClient-renameContainer", options); - try { - const containerClient = this.getContainerClient(destinationContainerName); - // Hack to access a protected member. - const containerContext = new Container(containerClient["storageClientContext"]); - const containerRenameResponse = await containerContext.rename(sourceContainerName, Object.assign(Object.assign({}, updatedOptions), { sourceLeaseId: (_a = options.sourceCondition) === null || _a === void 0 ? void 0 : _a.leaseId })); - return { containerClient, containerRenameResponse }; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * Gets the properties of a storage account’s Blob service, including properties - * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties - * - * @param options - Options to the Service Get Properties operation. - * @returns Response data for the Service Get Properties operation. 
- */ - async getProperties(options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-getProperties", options); - try { - return await this.serviceContext.getProperties(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); + } + + return result; +} + +function parseUrl(template) { + return { + expand: expand.bind(null, template) + }; +} + +function expand(template, context) { + var operators = ["+", "#", ".", "/", ";", "?", "&"]; + return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) { + if (expression) { + let operator = ""; + const values = []; + + if (operators.indexOf(expression.charAt(0)) !== -1) { + operator = expression.charAt(0); + expression = expression.substr(1); + } + + expression.split(/,/g).forEach(function (variable) { + var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); + values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); + }); + + if (operator && operator !== "+") { + var separator = ","; + + if (operator === "?") { + separator = "&"; + } else if (operator !== "#") { + separator = operator; } + + return (values.length !== 0 ? operator : "") + values.join(separator); + } else { + return values.join(","); + } + } else { + return encodeReserved(literal); } - /** - * Sets properties for a storage account’s Blob service endpoint, including properties - * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties - * - * @param properties - - * @param options - Options to the Service Set Properties operation. - * @returns Response data for the Service Set Properties operation. - */ - async setProperties(properties, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-setProperties", options); - try { - return await this.serviceContext.setProperties(properties, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); +} + +function parse(options) { + // https://fetch.spec.whatwg.org/#methods + let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible + + let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); + let headers = Object.assign({}, options.headers); + let body; + let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later + + const urlVariableNames = extractUrlVariableNames(url); + url = parseUrl(url).expand(parameters); + + if (!/^http/.test(url)) { + url = options.baseUrl + url; + } + + const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl"); + const remainingParameters = omit(parameters, omittedParameters); + const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); + + if (!isBinaryRequest) { + if (options.mediaType.format) { + // e.g. 
application/vnd.github.v3+json => application/vnd.github.v3.raw + headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(","); } - /** - * Retrieves statistics related to replication for the Blob service. It is only - * available on the secondary location endpoint when read-access geo-redundant - * replication is enabled for the storage account. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats - * - * @param options - Options to the Service Get Statistics operation. - * @returns Response data for the Service Get Statistics operation. - */ - async getStatistics(options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-getStatistics", options); - try { - return await this.serviceContext.getStatistics(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + + if (options.mediaType.previews.length) { + const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; + headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => { + const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json"; + return `application/vnd.github.${preview}-preview${format}`; + }).join(","); } - /** - * The Get Account Information operation returns the sku name and account kind - * for the specified account. - * The Get Account Information operation is available on service versions beginning - * with version 2018-03-28. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information - * - * @param options - Options to the Service Get Account Info operation. - * @returns Response data for the Service Get Account Info operation. - */ - async getAccountInfo(options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-getAccountInfo", options); - try { - return await this.serviceContext.getAccountInfo(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + } // for GET/HEAD requests, set URL query parameters from remaining parameters + // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters + + + if (["GET", "HEAD"].includes(method)) { + url = addQueryParameters(url, remainingParameters); + } else { + if ("data" in remainingParameters) { + body = remainingParameters.data; + } else { + if (Object.keys(remainingParameters).length) { + body = remainingParameters; + } else { + headers["content-length"] = 0; + } } - /** - * Returns a list of the containers under the specified account. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2 - * - * @param marker - A string value that identifies the portion of - * the list of containers to be returned with the next listing operation. The - * operation returns the continuationToken value within the response body if the - * listing operation did not return all containers remaining to be listed - * with the current page. 
The continuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list - * items. The marker value is opaque to the client. - * @param options - Options to the Service List Container Segment operation. - * @returns Response data for the Service List Container Segment operation. - */ - async listContainersSegment(marker, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-listContainersSegment", options); - try { - return await this.serviceContext.listContainersSegment(Object.assign(Object.assign(Object.assign({ abortSignal: options.abortSignal, marker }, options), { include: typeof options.include === "string" ? [options.include] : options.include }), convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + } // default content-type for JSON if body is set + + + if (!headers["content-type"] && typeof body !== "undefined") { + headers["content-type"] = "application/json; charset=utf-8"; + } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body. + // fetch does not allow to set `content-length` header, but we can set body to an empty string + + + if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { + body = ""; + } // Only return body/request keys if present + + + return Object.assign({ + method, + url, + headers + }, typeof body !== "undefined" ? { + body + } : null, options.request ? { + request: options.request + } : null); +} + +function endpointWithDefaults(defaults, route, options) { + return parse(merge(defaults, route, options)); +} + +function withDefaults(oldDefaults, newDefaults) { + const DEFAULTS = merge(oldDefaults, newDefaults); + const endpoint = endpointWithDefaults.bind(null, DEFAULTS); + return Object.assign(endpoint, { + DEFAULTS, + defaults: withDefaults.bind(null, DEFAULTS), + merge: merge.bind(null, DEFAULTS), + parse + }); +} + +const VERSION = "6.0.12"; + +const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url. +// So we use RequestParameters and add method as additional required property. + +const DEFAULTS = { + method: "GET", + baseUrl: "https://api.github.com", + headers: { + accept: "application/vnd.github.v3+json", + "user-agent": userAgent + }, + mediaType: { + format: "", + previews: [] + } +}; + +const endpoint = withDefaults(null, DEFAULTS); + +exports.endpoint = endpoint; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 51463: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +var request = __nccwpck_require__(63986); +var universalUserAgent = __nccwpck_require__(37081); + +const VERSION = "4.8.0"; + +function _buildMessageForResponseErrors(data) { + return `Request failed due to following response errors:\n` + data.errors.map(e => ` - ${e.message}`).join("\n"); +} + +class GraphqlResponseError extends Error { + constructor(request, headers, response) { + super(_buildMessageForResponseErrors(response)); + this.request = request; + this.headers = headers; + this.response = response; + this.name = "GraphqlResponseError"; // Expose the errors and response data in their shorthand properties. 
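// --- [editor's example] ------------------------------------------------------
// Sketch of the endpoint() helper exported just above (@octokit/endpoint):
// route placeholders are expanded RFC 6570-style, and leftover parameters
// become query parameters for GET/HEAD requests or the JSON body otherwise.
const { endpoint } = require("@octokit/endpoint");

const requestOptions = endpoint("GET /repos/{owner}/{repo}/issues", {
  owner: "octocat",
  repo: "hello-world",
  per_page: 2,
});
console.log(requestOptions.method); // "GET"
console.log(requestOptions.url);    // "https://api.github.com/repos/octocat/hello-world/issues?per_page=2"
// ------------------------------------------------------------------------------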
+ + this.errors = response.errors; + this.data = response.data; // Maintains proper stack trace (only available on V8) + + /* istanbul ignore next */ + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); } - /** - * The Filter Blobs operation enables callers to list blobs across all containers whose tags - * match a given search expression. Filter blobs searches across all containers within a - * storage account but can be scoped within the expression to a single container. - * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. - * @param marker - A string value that identifies the portion of - * the list of blobs to be returned with the next listing operation. The - * operation returns the continuationToken value within the response body if the - * listing operation did not return all blobs remaining to be listed - * with the current page. The continuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list - * items. The marker value is opaque to the client. - * @param options - Options to find blobs by tags. - */ - async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-findBlobsByTagsSegment", options); - try { - const response = await this.serviceContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { - var _a; - let tagValue = ""; - if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { - tagValue = blob.tags.blobTagSet[0].value; - } - return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); - }) }); - return wrappedResponse; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + } + +} + +const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"]; +const FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; +const GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; +function graphql(request, query, options) { + if (options) { + if (typeof query === "string" && "query" in options) { + return Promise.reject(new Error(`[@octokit/graphql] "query" cannot be used as variable name`)); } - /** - * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse. - * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. 
- * @param marker - A string value that identifies the portion of - * the list of blobs to be returned with the next listing operation. The - * operation returns the continuationToken value within the response body if the - * listing operation did not return all blobs remaining to be listed - * with the current page. The continuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list - * items. The marker value is opaque to the client. - * @param options - Options to find blobs by tags. - */ - findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1() { - let response; - if (!!marker || marker === undefined) { - do { - response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options)); - response.blobs = response.blobs || []; - marker = response.continuationToken; - yield yield tslib.__await(response); - } while (marker); - } - }); + + for (const key in options) { + if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) continue; + return Promise.reject(new Error(`[@octokit/graphql] "${key}" cannot be used as variable name`)); } - /** - * Returns an AsyncIterableIterator for blobs. - * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. - * @param options - Options to findBlobsByTagsItems. - */ - findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1() { - var e_1, _a; - let marker; - try { - for (var _b = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { - const segment = _c.value; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); - } - } - catch (e_1_1) { e_1 = { error: e_1_1 }; } - finally { - try { - if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); - } - finally { if (e_1) throw e_1.error; } - } - }); + } + + const parsedOptions = typeof query === "string" ? Object.assign({ + query + }, options) : query; + const requestOptions = Object.keys(parsedOptions).reduce((result, key) => { + if (NON_VARIABLE_OPTIONS.includes(key)) { + result[key] = parsedOptions[key]; + return result; } - /** - * Returns an async iterable iterator to find all blobs with specified tag - * under the specified account. - * - * .byPage() returns an async iterable iterator to list the blobs in pages. 
- * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties - * - * Example using `for await` syntax: - * - * ```js - * let i = 1; - * for await (const blob of blobServiceClient.findBlobsByTags("tagkey='tagvalue'")) { - * console.log(`Blob ${i++}: ${container.name}`); - * } - * ``` - * - * Example using `iter.next()`: - * - * ```js - * let i = 1; - * const iter = blobServiceClient.findBlobsByTags("tagkey='tagvalue'"); - * let blobItem = await iter.next(); - * while (!blobItem.done) { - * console.log(`Blob ${i++}: ${blobItem.value.name}`); - * blobItem = await iter.next(); - * } - * ``` - * - * Example using `byPage()`: - * - * ```js - * // passing optional maxPageSize in the page settings - * let i = 1; - * for await (const response of blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { - * if (response.blobs) { - * for (const blob of response.blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * } - * } - * ``` - * - * Example using paging with a marker: - * - * ```js - * let i = 1; - * let iterator = blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); - * let response = (await iterator.next()).value; - * - * // Prints 2 blob names - * if (response.blobs) { - * for (const blob of response.blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * } - * - * // Gets next marker - * let marker = response.continuationToken; - * // Passing next marker as continuationToken - * iterator = blobServiceClient - * .findBlobsByTags("tagkey='tagvalue'") - * .byPage({ continuationToken: marker, maxPageSize: 10 }); - * response = (await iterator.next()).value; - * - * // Prints blob names - * if (response.blobs) { - * for (const blob of response.blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * } - * ``` - * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. - * @param options - Options to find blobs by tags. - */ - findBlobsByTags(tagFilterSqlExpression, options = {}) { - // AsyncIterableIterator to iterate over blobs - const listSegmentOptions = Object.assign({}, options); - const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); - return { - /** - * The next method, part of the iteration protocol - */ - next() { - return iter.next(); - }, - /** - * The connection to the async iterator, part of the iteration protocol - */ - [Symbol.asyncIterator]() { - return this; - }, - /** - * Return an AsyncIterableIterator that works a page at a time - */ - byPage: (settings = {}) => { - return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); - }, - }; + + if (!result.variables) { + result.variables = {}; } - /** - * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses - * - * @param marker - A string value that identifies the portion of - * the list of containers to be returned with the next listing operation. 
The - * operation returns the continuationToken value within the response body if the - * listing operation did not return all containers remaining to be listed - * with the current page. The continuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list - * items. The marker value is opaque to the client. - * @param options - Options to list containers operation. - */ - listSegments(marker, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* listSegments_1() { - let listContainersSegmentResponse; - if (!!marker || marker === undefined) { - do { - listContainersSegmentResponse = yield tslib.__await(this.listContainersSegment(marker, options)); - listContainersSegmentResponse.containerItems = - listContainersSegmentResponse.containerItems || []; - marker = listContainersSegmentResponse.continuationToken; - yield yield tslib.__await(yield tslib.__await(listContainersSegmentResponse)); - } while (marker); - } - }); - } - /** - * Returns an AsyncIterableIterator for Container Items - * - * @param options - Options to list containers operation. - */ - listItems(options = {}) { - return tslib.__asyncGenerator(this, arguments, function* listItems_1() { - var e_2, _a; - let marker; - try { - for (var _b = tslib.__asyncValues(this.listSegments(marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { - const segment = _c.value; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.containerItems))); - } - } - catch (e_2_1) { e_2 = { error: e_2_1 }; } - finally { - try { - if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); - } - finally { if (e_2) throw e_2.error; } - } - }); - } - /** - * Returns an async iterable iterator to list all the containers - * under the specified account. - * - * .byPage() returns an async iterable iterator to list the containers in pages. 
- * - * Example using `for await` syntax: - * - * ```js - * let i = 1; - * for await (const container of blobServiceClient.listContainers()) { - * console.log(`Container ${i++}: ${container.name}`); - * } - * ``` - * - * Example using `iter.next()`: - * - * ```js - * let i = 1; - * const iter = blobServiceClient.listContainers(); - * let containerItem = await iter.next(); - * while (!containerItem.done) { - * console.log(`Container ${i++}: ${containerItem.value.name}`); - * containerItem = await iter.next(); - * } - * ``` - * - * Example using `byPage()`: - * - * ```js - * // passing optional maxPageSize in the page settings - * let i = 1; - * for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) { - * if (response.containerItems) { - * for (const container of response.containerItems) { - * console.log(`Container ${i++}: ${container.name}`); - * } - * } - * } - * ``` - * - * Example using paging with a marker: - * - * ```js - * let i = 1; - * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 }); - * let response = (await iterator.next()).value; - * - * // Prints 2 container names - * if (response.containerItems) { - * for (const container of response.containerItems) { - * console.log(`Container ${i++}: ${container.name}`); - * } - * } - * - * // Gets next marker - * let marker = response.continuationToken; - * // Passing next marker as continuationToken - * iterator = blobServiceClient - * .listContainers() - * .byPage({ continuationToken: marker, maxPageSize: 10 }); - * response = (await iterator.next()).value; - * - * // Prints 10 container names - * if (response.containerItems) { - * for (const container of response.containerItems) { - * console.log(`Container ${i++}: ${container.name}`); - * } - * } - * ``` - * - * @param options - Options to list containers. - * @returns An asyncIterableIterator that supports paging. - */ - listContainers(options = {}) { - if (options.prefix === "") { - options.prefix = undefined; - } - const include = []; - if (options.includeDeleted) { - include.push("deleted"); - } - if (options.includeMetadata) { - include.push("metadata"); - } - if (options.includeSystem) { - include.push("system"); - } - // AsyncIterableIterator to iterate over containers - const listSegmentOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? { include } : {})); - const iter = this.listItems(listSegmentOptions); - return { - /** - * The next method, part of the iteration protocol - */ - next() { - return iter.next(); - }, - /** - * The connection to the async iterator, part of the iteration protocol - */ - [Symbol.asyncIterator]() { - return this; - }, - /** - * Return an AsyncIterableIterator that works a page at a time - */ - byPage: (settings = {}) => { - return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); - }, - }; - } - /** - * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential). - * - * Retrieves a user delegation key for the Blob service. This is only a valid operation when using - * bearer token authentication. - * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key - * - * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time - * @param expiresOn - The end time for the user delegation SAS. 
Must be within 7 days of the current time - */ - async getUserDelegationKey(startsOn, expiresOn, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-getUserDelegationKey", options); - try { - const response = await this.serviceContext.getUserDelegationKey({ - startsOn: truncatedISO8061Date(startsOn, false), - expiresOn: truncatedISO8061Date(expiresOn, false), - }, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - const userDelegationKey = { - signedObjectId: response.signedObjectId, - signedTenantId: response.signedTenantId, - signedStartsOn: new Date(response.signedStartsOn), - signedExpiresOn: new Date(response.signedExpiresOn), - signedService: response.signedService, - signedVersion: response.signedVersion, - value: response.value, - }; - const res = Object.assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey); - return res; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } - } - /** - * Creates a BlobBatchClient object to conduct batch operations. - * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch - * - * @returns A new BlobBatchClient object for this service. - */ - getBlobBatchClient() { - return new BlobBatchClient(this.url, this.pipeline); - } - /** - * Only available for BlobServiceClient constructed with a shared key credential. - * - * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties - * and parameters passed in. The SAS is signed by the shared key credential of the client. - * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas - * - * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Default to an hour later if not provided. - * @param permissions - Specifies the list of permissions to be associated with the SAS. - * @param resourceTypes - Specifies the resource types associated with the shared access signature. - * @param options - Optional parameters. - * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. 
- */ - generateAccountSasUrl(expiresOn, permissions = AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { - if (!(this.credential instanceof StorageSharedKeyCredential)) { - throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential"); - } - if (expiresOn === undefined) { - const now = new Date(); - expiresOn = new Date(now.getTime() + 3600 * 1000); - } - const sas = generateAccountSASQueryParameters(Object.assign({ permissions, - expiresOn, - resourceTypes, services: AccountSASServices.parse("b").toString() }, options), this.credential).toString(); - return appendToURLQuery(this.url, sas); + + result.variables[key] = parsedOptions[key]; + return result; + }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix + // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451 + + const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl; + + if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { + requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); + } + + return request(requestOptions).then(response => { + if (response.data.errors) { + const headers = {}; + + for (const key of Object.keys(response.headers)) { + headers[key] = response.headers[key]; + } + + throw new GraphqlResponseError(requestOptions, headers, response.data); } + + return response.data.data; + }); } -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** Known values of {@link EncryptionAlgorithmType} that the service accepts. */ -exports.KnownEncryptionAlgorithmType = void 0; -(function (KnownEncryptionAlgorithmType) { - KnownEncryptionAlgorithmType["AES256"] = "AES256"; -})(exports.KnownEncryptionAlgorithmType || (exports.KnownEncryptionAlgorithmType = {})); +function withDefaults(request$1, newDefaults) { + const newRequest = request$1.defaults(newDefaults); -Object.defineProperty(exports, "BaseRequestPolicy", ({ - enumerable: true, - get: function () { return coreHttp.BaseRequestPolicy; } -})); -Object.defineProperty(exports, "HttpHeaders", ({ - enumerable: true, - get: function () { return coreHttp.HttpHeaders; } -})); -Object.defineProperty(exports, "RequestPolicyOptions", ({ - enumerable: true, - get: function () { return coreHttp.RequestPolicyOptions; } -})); -Object.defineProperty(exports, "RestError", ({ - enumerable: true, - get: function () { return coreHttp.RestError; } -})); -Object.defineProperty(exports, "WebResource", ({ - enumerable: true, - get: function () { return coreHttp.WebResource; } -})); -Object.defineProperty(exports, "deserializationPolicy", ({ - enumerable: true, - get: function () { return coreHttp.deserializationPolicy; } -})); -exports.AccountSASPermissions = AccountSASPermissions; -exports.AccountSASResourceTypes = AccountSASResourceTypes; -exports.AccountSASServices = AccountSASServices; -exports.AnonymousCredential = AnonymousCredential; -exports.AnonymousCredentialPolicy = AnonymousCredentialPolicy; -exports.AppendBlobClient = AppendBlobClient; -exports.BlobBatch = BlobBatch; -exports.BlobBatchClient = BlobBatchClient; -exports.BlobClient = BlobClient; -exports.BlobLeaseClient = BlobLeaseClient; -exports.BlobSASPermissions = BlobSASPermissions; -exports.BlobServiceClient = BlobServiceClient; -exports.BlockBlobClient = BlockBlobClient; -exports.ContainerClient = ContainerClient; -exports.ContainerSASPermissions = ContainerSASPermissions; -exports.Credential = Credential; -exports.CredentialPolicy = CredentialPolicy; 
-exports.PageBlobClient = PageBlobClient; -exports.Pipeline = Pipeline; -exports.SASQueryParameters = SASQueryParameters; -exports.StorageBrowserPolicy = StorageBrowserPolicy; -exports.StorageBrowserPolicyFactory = StorageBrowserPolicyFactory; -exports.StorageOAuthScopes = StorageOAuthScopes; -exports.StorageRetryPolicy = StorageRetryPolicy; -exports.StorageRetryPolicyFactory = StorageRetryPolicyFactory; -exports.StorageSharedKeyCredential = StorageSharedKeyCredential; -exports.StorageSharedKeyCredentialPolicy = StorageSharedKeyCredentialPolicy; -exports.generateAccountSASQueryParameters = generateAccountSASQueryParameters; -exports.generateBlobSASQueryParameters = generateBlobSASQueryParameters; -exports.getBlobServiceAccountAudience = getBlobServiceAccountAudience; -exports.isPipelineLike = isPipelineLike; -exports.logger = logger; -exports.newPipeline = newPipeline; + const newApi = (query, options) => { + return graphql(newRequest, query, options); + }; + + return Object.assign(newApi, { + defaults: withDefaults.bind(null, newRequest), + endpoint: request.request.endpoint + }); +} + +const graphql$1 = withDefaults(request.request, { + headers: { + "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}` + }, + method: "POST", + url: "/graphql" +}); +function withCustomRequest(customRequest) { + return withDefaults(customRequest, { + method: "POST", + url: "/graphql" + }); +} + +exports.GraphqlResponseError = GraphqlResponseError; +exports.graphql = graphql$1; +exports.withCustomRequest = withCustomRequest; //# sourceMappingURL=index.js.map /***/ }), -/***/ 55441: +/***/ 49883: /***/ ((__unused_webpack_module, exports) => { "use strict"; @@ -36989,1079 +31549,308 @@ exports.newPipeline = newPipeline; Object.defineProperty(exports, "__esModule", ({ value: true })); -const REGEX_IS_INSTALLATION_LEGACY = /^v1\./; -const REGEX_IS_INSTALLATION = /^ghs_/; -const REGEX_IS_USER_TO_SERVER = /^ghu_/; -async function auth(token) { - const isApp = token.split(/\./).length === 3; - const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token); - const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token); - const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? "user-to-server" : "oauth"; - return { - type: "token", - token: token, - tokenType - }; -} +const VERSION = "2.21.3"; -/** - * Prefix token for usage in the Authorization header - * - * @param token OAuth token or JSON Web Token - */ -function withAuthorizationPrefix(token) { - if (token.split(/\./).length === 3) { - return `bearer ${token}`; +function ownKeys(object, enumerableOnly) { + var keys = Object.keys(object); + + if (Object.getOwnPropertySymbols) { + var symbols = Object.getOwnPropertySymbols(object); + enumerableOnly && (symbols = symbols.filter(function (sym) { + return Object.getOwnPropertyDescriptor(object, sym).enumerable; + })), keys.push.apply(keys, symbols); } - return `token ${token}`; + return keys; } -async function hook(token, request, route, parameters) { - const endpoint = request.endpoint.merge(route, parameters); - endpoint.headers.authorization = withAuthorizationPrefix(token); - return request(endpoint); +function _objectSpread2(target) { + for (var i = 1; i < arguments.length; i++) { + var source = null != arguments[i] ? arguments[i] : {}; + i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { + _defineProperty(target, key, source[key]); + }) : Object.getOwnPropertyDescriptors ? 
Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) {
+      Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));
+    });
+  }
+
+  return target;
}

-const createTokenAuth = function createTokenAuth(token) {
-  if (!token) {
-    throw new Error("[@octokit/auth-token] No token passed to createTokenAuth");
+function _defineProperty(obj, key, value) {
+  if (key in obj) {
+    Object.defineProperty(obj, key, {
+      value: value,
+      enumerable: true,
+      configurable: true,
+      writable: true
+    });
+  } else {
+    obj[key] = value;
   }

-  if (typeof token !== "string") {
-    throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string");
-  }
+  return obj;
+}

-  token = token.replace(/^(token|bearer) +/i, "");
-  return Object.assign(auth.bind(null, token), {
-    hook: hook.bind(null, token)
-  });
-};

+/**
+ * Some “list” responses that can be paginated have a different response structure
+ *
+ * They have a `total_count` key in the response (search also has `incomplete_results`,
+ * /installation/repositories also has `repository_selection`), as well as a key with
+ * the list of the items which name varies from endpoint to endpoint.
+ *
+ * Octokit normalizes these responses so that paginated results are always returned following
+ * the same structure. One challenge is that if the list response has only one page, no Link
+ * header is provided, so this header alone is not sufficient to check whether a response is
+ * paginated or not.
+ *
+ * We check if a "total_count" key is present in the response data, but also make sure that
+ * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would
+ * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref
+ */
+function normalizePaginatedListResponse(response) {
+  // endpoints can respond with 204 if repository is empty
+  if (!response.data) {
+    return _objectSpread2(_objectSpread2({}, response), {}, {
+      data: []
+    });
+  }

-exports.createTokenAuth = createTokenAuth;
-//# sourceMappingURL=index.js.map

+  const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data);
+  if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way
+  // to retrieve the same information.

+  const incompleteResults = response.data.incomplete_results;
+  const repositorySelection = response.data.repository_selection;
+  const totalCount = response.data.total_count;
+  delete response.data.incomplete_results;
+  delete response.data.repository_selection;
+  delete response.data.total_count;
+  const namespaceKey = Object.keys(response.data)[0];
+  const data = response.data[namespaceKey];
+  response.data = data;

-/***/ }),

+  if (typeof incompleteResults !== "undefined") {
+    response.data.incomplete_results = incompleteResults;
+  }

-/***/ 72762:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

+  if (typeof repositorySelection !== "undefined") {
+    response.data.repository_selection = repositorySelection;
+  }

-"use strict";

+  response.data.total_count = totalCount;
+  return response;
+}

+function iterator(octokit, route, parameters) {
+  const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);
+  const requestMethod = typeof route === "function" ? 
route : octokit.request; + const method = options.method; + const headers = options.headers; + let url = options.url; + return { + [Symbol.asyncIterator]: () => ({ + async next() { + if (!url) return { + done: true + }; -Object.defineProperty(exports, "__esModule", ({ value: true })); + try { + const response = await requestMethod({ + method, + url, + headers + }); + const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format: + // '; rel="next", ; rel="last"' + // sets `url` to undefined if "next" URL is not present or `link` header is not set -var universalUserAgent = __nccwpck_require__(38151); -var beforeAfterHook = __nccwpck_require__(5015); -var request = __nccwpck_require__(33825); -var graphql = __nccwpck_require__(84839); -var authToken = __nccwpck_require__(55441); + url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; + return { + value: normalizedResponse + }; + } catch (error) { + if (error.status !== 409) throw error; + url = ""; + return { + value: { + status: 200, + headers: {}, + data: [] + } + }; + } + } -function _objectWithoutPropertiesLoose(source, excluded) { - if (source == null) return {}; - var target = {}; - var sourceKeys = Object.keys(source); - var key, i; + }) + }; +} - for (i = 0; i < sourceKeys.length; i++) { - key = sourceKeys[i]; - if (excluded.indexOf(key) >= 0) continue; - target[key] = source[key]; +function paginate(octokit, route, parameters, mapFn) { + if (typeof parameters === "function") { + mapFn = parameters; + parameters = undefined; } - return target; + return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn); } -function _objectWithoutProperties(source, excluded) { - if (source == null) return {}; +function gather(octokit, results, iterator, mapFn) { + return iterator.next().then(result => { + if (result.done) { + return results; + } - var target = _objectWithoutPropertiesLoose(source, excluded); + let earlyExit = false; - var key, i; + function done() { + earlyExit = true; + } - if (Object.getOwnPropertySymbols) { - var sourceSymbolKeys = Object.getOwnPropertySymbols(source); + results = results.concat(mapFn ? 
mapFn(result.value, done) : result.value.data); - for (i = 0; i < sourceSymbolKeys.length; i++) { - key = sourceSymbolKeys[i]; - if (excluded.indexOf(key) >= 0) continue; - if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue; - target[key] = source[key]; + if (earlyExit) { + return results; } - } - return target; + return gather(octokit, results, iterator, mapFn); + }); } -const VERSION = "3.6.0"; +const composePaginateRest = Object.assign(paginate, { + iterator +}); -const _excluded = ["authStrategy"]; -class Octokit { - constructor(options = {}) { - const hook = new beforeAfterHook.Collection(); - const requestDefaults = { - baseUrl: request.request.endpoint.DEFAULTS.baseUrl, - headers: {}, - request: Object.assign({}, options.request, { - // @ts-ignore internal usage only, no need to type - hook: hook.bind(null, "request") - }), - mediaType: { - previews: [], - format: "" - } - }; // prepend default user agent with `options.userAgent` if set +const paginatingEndpoints = ["GET /app/hook/deliveries", "GET /app/installations", "GET /applications/grants", "GET /authorizations", "GET /enterprises/{enterprise}/actions/permissions/organizations", "GET /enterprises/{enterprise}/actions/runner-groups", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners", "GET /enterprises/{enterprise}/actions/runners", "GET /enterprises/{enterprise}/audit-log", "GET /enterprises/{enterprise}/secret-scanning/alerts", "GET /enterprises/{enterprise}/settings/billing/advanced-security", "GET /events", "GET /gists", "GET /gists/public", "GET /gists/starred", "GET /gists/{gist_id}/comments", "GET /gists/{gist_id}/commits", "GET /gists/{gist_id}/forks", "GET /installation/repositories", "GET /issues", "GET /licenses", "GET /marketplace_listing/plans", "GET /marketplace_listing/plans/{plan_id}/accounts", "GET /marketplace_listing/stubbed/plans", "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", "GET /networks/{owner}/{repo}/events", "GET /notifications", "GET /organizations", "GET /orgs/{org}/actions/cache/usage-by-repository", "GET /orgs/{org}/actions/permissions/repositories", "GET /orgs/{org}/actions/runner-groups", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners", "GET /orgs/{org}/actions/runners", "GET /orgs/{org}/actions/secrets", "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", "GET /orgs/{org}/audit-log", "GET /orgs/{org}/blocks", "GET /orgs/{org}/code-scanning/alerts", "GET /orgs/{org}/codespaces", "GET /orgs/{org}/credential-authorizations", "GET /orgs/{org}/dependabot/secrets", "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", "GET /orgs/{org}/events", "GET /orgs/{org}/external-groups", "GET /orgs/{org}/failed_invitations", "GET /orgs/{org}/hooks", "GET /orgs/{org}/hooks/{hook_id}/deliveries", "GET /orgs/{org}/installations", "GET /orgs/{org}/invitations", "GET /orgs/{org}/invitations/{invitation_id}/teams", "GET /orgs/{org}/issues", "GET /orgs/{org}/members", "GET /orgs/{org}/migrations", "GET /orgs/{org}/migrations/{migration_id}/repositories", "GET /orgs/{org}/outside_collaborators", "GET /orgs/{org}/packages", "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", "GET /orgs/{org}/projects", "GET /orgs/{org}/public_members", "GET /orgs/{org}/repos", "GET /orgs/{org}/secret-scanning/alerts", "GET 
/orgs/{org}/settings/billing/advanced-security", "GET /orgs/{org}/team-sync/groups", "GET /orgs/{org}/teams", "GET /orgs/{org}/teams/{team_slug}/discussions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/invitations", "GET /orgs/{org}/teams/{team_slug}/members", "GET /orgs/{org}/teams/{team_slug}/projects", "GET /orgs/{org}/teams/{team_slug}/repos", "GET /orgs/{org}/teams/{team_slug}/teams", "GET /projects/columns/{column_id}/cards", "GET /projects/{project_id}/collaborators", "GET /projects/{project_id}/columns", "GET /repos/{owner}/{repo}/actions/artifacts", "GET /repos/{owner}/{repo}/actions/caches", "GET /repos/{owner}/{repo}/actions/runners", "GET /repos/{owner}/{repo}/actions/runs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", "GET /repos/{owner}/{repo}/actions/secrets", "GET /repos/{owner}/{repo}/actions/workflows", "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", "GET /repos/{owner}/{repo}/assignees", "GET /repos/{owner}/{repo}/branches", "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", "GET /repos/{owner}/{repo}/code-scanning/alerts", "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", "GET /repos/{owner}/{repo}/code-scanning/analyses", "GET /repos/{owner}/{repo}/codespaces", "GET /repos/{owner}/{repo}/codespaces/devcontainers", "GET /repos/{owner}/{repo}/codespaces/secrets", "GET /repos/{owner}/{repo}/collaborators", "GET /repos/{owner}/{repo}/comments", "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/commits", "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", "GET /repos/{owner}/{repo}/commits/{ref}/status", "GET /repos/{owner}/{repo}/commits/{ref}/statuses", "GET /repos/{owner}/{repo}/contributors", "GET /repos/{owner}/{repo}/dependabot/secrets", "GET /repos/{owner}/{repo}/deployments", "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", "GET /repos/{owner}/{repo}/environments", "GET /repos/{owner}/{repo}/events", "GET /repos/{owner}/{repo}/forks", "GET /repos/{owner}/{repo}/git/matching-refs/{ref}", "GET /repos/{owner}/{repo}/hooks", "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", "GET /repos/{owner}/{repo}/invitations", "GET /repos/{owner}/{repo}/issues", "GET /repos/{owner}/{repo}/issues/comments", "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/issues/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", "GET /repos/{owner}/{repo}/issues/{issue_number}/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", "GET /repos/{owner}/{repo}/keys", "GET /repos/{owner}/{repo}/labels", "GET /repos/{owner}/{repo}/milestones", "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", "GET /repos/{owner}/{repo}/notifications", "GET 
/repos/{owner}/{repo}/pages/builds", "GET /repos/{owner}/{repo}/projects", "GET /repos/{owner}/{repo}/pulls", "GET /repos/{owner}/{repo}/pulls/comments", "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", "GET /repos/{owner}/{repo}/releases", "GET /repos/{owner}/{repo}/releases/{release_id}/assets", "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", "GET /repos/{owner}/{repo}/secret-scanning/alerts", "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", "GET /repos/{owner}/{repo}/stargazers", "GET /repos/{owner}/{repo}/subscribers", "GET /repos/{owner}/{repo}/tags", "GET /repos/{owner}/{repo}/teams", "GET /repos/{owner}/{repo}/topics", "GET /repositories", "GET /repositories/{repository_id}/environments/{environment_name}/secrets", "GET /search/code", "GET /search/commits", "GET /search/issues", "GET /search/labels", "GET /search/repositories", "GET /search/topics", "GET /search/users", "GET /teams/{team_id}/discussions", "GET /teams/{team_id}/discussions/{discussion_number}/comments", "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /teams/{team_id}/discussions/{discussion_number}/reactions", "GET /teams/{team_id}/invitations", "GET /teams/{team_id}/members", "GET /teams/{team_id}/projects", "GET /teams/{team_id}/repos", "GET /teams/{team_id}/teams", "GET /user/blocks", "GET /user/codespaces", "GET /user/codespaces/secrets", "GET /user/emails", "GET /user/followers", "GET /user/following", "GET /user/gpg_keys", "GET /user/installations", "GET /user/installations/{installation_id}/repositories", "GET /user/issues", "GET /user/keys", "GET /user/marketplace_purchases", "GET /user/marketplace_purchases/stubbed", "GET /user/memberships/orgs", "GET /user/migrations", "GET /user/migrations/{migration_id}/repositories", "GET /user/orgs", "GET /user/packages", "GET /user/packages/{package_type}/{package_name}/versions", "GET /user/public_emails", "GET /user/repos", "GET /user/repository_invitations", "GET /user/starred", "GET /user/subscriptions", "GET /user/teams", "GET /users", "GET /users/{username}/events", "GET /users/{username}/events/orgs/{org}", "GET /users/{username}/events/public", "GET /users/{username}/followers", "GET /users/{username}/following", "GET /users/{username}/gists", "GET /users/{username}/gpg_keys", "GET /users/{username}/keys", "GET /users/{username}/orgs", "GET /users/{username}/packages", "GET /users/{username}/projects", "GET /users/{username}/received_events", "GET /users/{username}/received_events/public", "GET /users/{username}/repos", "GET /users/{username}/starred", "GET /users/{username}/subscriptions"]; - requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" "); +function isPaginatingEndpoint(arg) { + if (typeof arg === "string") { + return paginatingEndpoints.includes(arg); + } else { + return false; + } +} - if (options.baseUrl) { - requestDefaults.baseUrl = options.baseUrl; - } - - if (options.previews) { - requestDefaults.mediaType.previews = options.previews; - } - - if (options.timeZone) { - 
requestDefaults.headers["time-zone"] = options.timeZone; - } - - this.request = request.request.defaults(requestDefaults); - this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults); - this.log = Object.assign({ - debug: () => {}, - info: () => {}, - warn: console.warn.bind(console), - error: console.error.bind(console) - }, options.log); - this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance - // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered. - // (2) If only `options.auth` is set, use the default token authentication strategy. - // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance. - // TODO: type `options.auth` based on `options.authStrategy`. - - if (!options.authStrategy) { - if (!options.auth) { - // (1) - this.auth = async () => ({ - type: "unauthenticated" - }); - } else { - // (2) - const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯ - - hook.wrap("request", auth.hook); - this.auth = auth; - } - } else { - const { - authStrategy - } = options, - otherOptions = _objectWithoutProperties(options, _excluded); - - const auth = authStrategy(Object.assign({ - request: this.request, - log: this.log, - // we pass the current octokit instance as well as its constructor options - // to allow for authentication strategies that return a new octokit instance - // that shares the same internal state as the current one. The original - // requirement for this was the "event-octokit" authentication strategy - // of https://github.com/probot/octokit-auth-probot. - octokit: this, - octokitOptions: otherOptions - }, options.auth)); // @ts-ignore ¯\_(ツ)_/¯ - - hook.wrap("request", auth.hook); - this.auth = auth; - } // apply plugins - // https://stackoverflow.com/a/16345172 - - - const classConstructor = this.constructor; - classConstructor.plugins.forEach(plugin => { - Object.assign(this, plugin(this, options)); - }); - } - - static defaults(defaults) { - const OctokitWithDefaults = class extends this { - constructor(...args) { - const options = args[0] || {}; - - if (typeof defaults === "function") { - super(defaults(options)); - return; - } - - super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? { - userAgent: `${options.userAgent} ${defaults.userAgent}` - } : null)); - } - - }; - return OctokitWithDefaults; - } - /** - * Attach a plugin (or many) to your Octokit instance. - * - * @example - * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) 
- */ - - - static plugin(...newPlugins) { - var _a; - - const currentPlugins = this.plugins; - const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a); - return NewOctokit; - } +/** + * @param octokit Octokit instance + * @param options Options passed to Octokit constructor + */ +function paginateRest(octokit) { + return { + paginate: Object.assign(paginate.bind(null, octokit), { + iterator: iterator.bind(null, octokit) + }) + }; } -Octokit.VERSION = VERSION; -Octokit.plugins = []; +paginateRest.VERSION = VERSION; -exports.Octokit = Octokit; +exports.composePaginateRest = composePaginateRest; +exports.isPaginatingEndpoint = isPaginatingEndpoint; +exports.paginateRest = paginateRest; +exports.paginatingEndpoints = paginatingEndpoints; //# sourceMappingURL=index.js.map /***/ }), -/***/ 67273: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 67823: +/***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -var isPlainObject = __nccwpck_require__(21671); -var universalUserAgent = __nccwpck_require__(38151); - -function lowercaseKeys(object) { - if (!object) { - return {}; - } +const VERSION = "1.0.4"; - return Object.keys(object).reduce((newObj, key) => { - newObj[key.toLowerCase()] = object[key]; - return newObj; - }, {}); -} +/** + * @param octokit Octokit instance + * @param options Options passed to Octokit constructor + */ -function mergeDeep(defaults, options) { - const result = Object.assign({}, defaults); - Object.keys(options).forEach(key => { - if (isPlainObject.isPlainObject(options[key])) { - if (!(key in defaults)) Object.assign(result, { - [key]: options[key] - });else result[key] = mergeDeep(defaults[key], options[key]); - } else { - Object.assign(result, { - [key]: options[key] - }); - } +function requestLog(octokit) { + octokit.hook.wrap("request", (request, options) => { + octokit.log.debug("request", options); + const start = Date.now(); + const requestOptions = octokit.request.endpoint.parse(options); + const path = requestOptions.url.replace(options.baseUrl, ""); + return request(options).then(response => { + octokit.log.info(`${requestOptions.method} ${path} - ${response.status} in ${Date.now() - start}ms`); + return response; + }).catch(error => { + octokit.log.info(`${requestOptions.method} ${path} - ${error.status} in ${Date.now() - start}ms`); + throw error; + }); }); - return result; -} - -function removeUndefinedProperties(obj) { - for (const key in obj) { - if (obj[key] === undefined) { - delete obj[key]; - } - } - - return obj; -} - -function merge(defaults, route, options) { - if (typeof route === "string") { - let [method, url] = route.split(" "); - options = Object.assign(url ? 
{ - method, - url - } : { - url: method - }, options); - } else { - options = Object.assign({}, route); - } // lowercase header names before merging with defaults to avoid duplicates - - - options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging - - removeUndefinedProperties(options); - removeUndefinedProperties(options.headers); - const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten - - if (defaults && defaults.mediaType.previews.length) { - mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews); - } - - mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, "")); - return mergedOptions; } +requestLog.VERSION = VERSION; -function addQueryParameters(url, parameters) { - const separator = /\?/.test(url) ? "&" : "?"; - const names = Object.keys(parameters); - - if (names.length === 0) { - return url; - } - - return url + separator + names.map(name => { - if (name === "q") { - return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); - } +exports.requestLog = requestLog; +//# sourceMappingURL=index.js.map - return `${name}=${encodeURIComponent(parameters[name])}`; - }).join("&"); -} -const urlVariableRegex = /\{[^}]+\}/g; +/***/ }), -function removeNonChars(variableName) { - return variableName.replace(/^\W+|\W+$/g, "").split(/,/); -} +/***/ 96752: +/***/ ((__unused_webpack_module, exports) => { -function extractUrlVariableNames(url) { - const matches = url.match(urlVariableRegex); +"use strict"; - if (!matches) { - return []; - } - return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); -} +Object.defineProperty(exports, "__esModule", ({ value: true })); -function omit(object, keysToOmit) { - return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => { - obj[key] = object[key]; - return obj; - }, {}); -} +function ownKeys(object, enumerableOnly) { + var keys = Object.keys(object); -// Based on https://github.com/bramstein/url-template, licensed under BSD -// TODO: create separate package. -// -// Copyright (c) 2012-2014, Bram Stein -// All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// 1. Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// 2. Redistributions in binary form must reproduce the above copyright -// notice, this list of conditions and the following disclaimer in the -// documentation and/or other materials provided with the distribution. -// 3. The name of the author may not be used to endorse or promote products -// derived from this software without specific prior written permission. -// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED -// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF -// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO -// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, -// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY -// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, -// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + if (Object.getOwnPropertySymbols) { + var symbols = Object.getOwnPropertySymbols(object); -/* istanbul ignore file */ -function encodeReserved(str) { - return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) { - if (!/%[0-9A-Fa-f]/.test(part)) { - part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); + if (enumerableOnly) { + symbols = symbols.filter(function (sym) { + return Object.getOwnPropertyDescriptor(object, sym).enumerable; + }); } - return part; - }).join(""); -} - -function encodeUnreserved(str) { - return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { - return "%" + c.charCodeAt(0).toString(16).toUpperCase(); - }); -} - -function encodeValue(operator, value, key) { - value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value); - - if (key) { - return encodeUnreserved(key) + "=" + value; - } else { - return value; - } -} - -function isDefined(value) { - return value !== undefined && value !== null; -} - -function isKeyOperator(operator) { - return operator === ";" || operator === "&" || operator === "?"; -} - -function getValues(context, operator, key, modifier) { - var value = context[key], - result = []; - - if (isDefined(value) && value !== "") { - if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { - value = value.toString(); - - if (modifier && modifier !== "*") { - value = value.substring(0, parseInt(modifier, 10)); - } - - result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); - } else { - if (modifier === "*") { - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function (value) { - result.push(encodeValue(operator, value, isKeyOperator(operator) ? 
key : "")); - }); - } else { - Object.keys(value).forEach(function (k) { - if (isDefined(value[k])) { - result.push(encodeValue(operator, value[k], k)); - } - }); - } - } else { - const tmp = []; - - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function (value) { - tmp.push(encodeValue(operator, value)); - }); - } else { - Object.keys(value).forEach(function (k) { - if (isDefined(value[k])) { - tmp.push(encodeUnreserved(k)); - tmp.push(encodeValue(operator, value[k].toString())); - } - }); - } - - if (isKeyOperator(operator)) { - result.push(encodeUnreserved(key) + "=" + tmp.join(",")); - } else if (tmp.length !== 0) { - result.push(tmp.join(",")); - } - } - } - } else { - if (operator === ";") { - if (isDefined(value)) { - result.push(encodeUnreserved(key)); - } - } else if (value === "" && (operator === "&" || operator === "?")) { - result.push(encodeUnreserved(key) + "="); - } else if (value === "") { - result.push(""); - } + keys.push.apply(keys, symbols); } - return result; -} - -function parseUrl(template) { - return { - expand: expand.bind(null, template) - }; + return keys; } -function expand(template, context) { - var operators = ["+", "#", ".", "/", ";", "?", "&"]; - return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) { - if (expression) { - let operator = ""; - const values = []; - - if (operators.indexOf(expression.charAt(0)) !== -1) { - operator = expression.charAt(0); - expression = expression.substr(1); - } +function _objectSpread2(target) { + for (var i = 1; i < arguments.length; i++) { + var source = arguments[i] != null ? arguments[i] : {}; - expression.split(/,/g).forEach(function (variable) { - var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); - values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); + if (i % 2) { + ownKeys(Object(source), true).forEach(function (key) { + _defineProperty(target, key, source[key]); }); - - if (operator && operator !== "+") { - var separator = ","; - - if (operator === "?") { - separator = "&"; - } else if (operator !== "#") { - separator = operator; - } - - return (values.length !== 0 ? operator : "") + values.join(separator); - } else { - return values.join(","); - } + } else if (Object.getOwnPropertyDescriptors) { + Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { - return encodeReserved(literal); + ownKeys(Object(source)).forEach(function (key) { + Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); + }); } - }); -} - -function parse(options) { - // https://fetch.spec.whatwg.org/#methods - let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible - - let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); - let headers = Object.assign({}, options.headers); - let body; - let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later - - const urlVariableNames = extractUrlVariableNames(url); - url = parseUrl(url).expand(parameters); - - if (!/^http/.test(url)) { - url = options.baseUrl + url; } - const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl"); - const remainingParameters = omit(parameters, omittedParameters); - const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); - - if (!isBinaryRequest) { - if (options.mediaType.format) { - // e.g. 
application/vnd.github.v3+json => application/vnd.github.v3.raw - headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(","); - } - - if (options.mediaType.previews.length) { - const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; - headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => { - const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json"; - return `application/vnd.github.${preview}-preview${format}`; - }).join(","); - } - } // for GET/HEAD requests, set URL query parameters from remaining parameters - // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters - - - if (["GET", "HEAD"].includes(method)) { - url = addQueryParameters(url, remainingParameters); - } else { - if ("data" in remainingParameters) { - body = remainingParameters.data; - } else { - if (Object.keys(remainingParameters).length) { - body = remainingParameters; - } else { - headers["content-length"] = 0; - } - } - } // default content-type for JSON if body is set - - - if (!headers["content-type"] && typeof body !== "undefined") { - headers["content-type"] = "application/json; charset=utf-8"; - } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body. - // fetch does not allow to set `content-length` header, but we can set body to an empty string - - - if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { - body = ""; - } // Only return body/request keys if present - - - return Object.assign({ - method, - url, - headers - }, typeof body !== "undefined" ? { - body - } : null, options.request ? { - request: options.request - } : null); + return target; } -function endpointWithDefaults(defaults, route, options) { - return parse(merge(defaults, route, options)); -} +function _defineProperty(obj, key, value) { + if (key in obj) { + Object.defineProperty(obj, key, { + value: value, + enumerable: true, + configurable: true, + writable: true + }); + } else { + obj[key] = value; + } -function withDefaults(oldDefaults, newDefaults) { - const DEFAULTS = merge(oldDefaults, newDefaults); - const endpoint = endpointWithDefaults.bind(null, DEFAULTS); - return Object.assign(endpoint, { - DEFAULTS, - defaults: withDefaults.bind(null, DEFAULTS), - merge: merge.bind(null, DEFAULTS), - parse - }); -} - -const VERSION = "6.0.12"; - -const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url. -// So we use RequestParameters and add method as additional required property. 
- -const DEFAULTS = { - method: "GET", - baseUrl: "https://api.github.com", - headers: { - accept: "application/vnd.github.v3+json", - "user-agent": userAgent - }, - mediaType: { - format: "", - previews: [] - } -}; - -const endpoint = withDefaults(null, DEFAULTS); - -exports.endpoint = endpoint; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 84839: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -var request = __nccwpck_require__(33825); -var universalUserAgent = __nccwpck_require__(38151); - -const VERSION = "4.8.0"; - -function _buildMessageForResponseErrors(data) { - return `Request failed due to following response errors:\n` + data.errors.map(e => ` - ${e.message}`).join("\n"); -} - -class GraphqlResponseError extends Error { - constructor(request, headers, response) { - super(_buildMessageForResponseErrors(response)); - this.request = request; - this.headers = headers; - this.response = response; - this.name = "GraphqlResponseError"; // Expose the errors and response data in their shorthand properties. - - this.errors = response.errors; - this.data = response.data; // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } - } - -} - -const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"]; -const FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; -const GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; -function graphql(request, query, options) { - if (options) { - if (typeof query === "string" && "query" in options) { - return Promise.reject(new Error(`[@octokit/graphql] "query" cannot be used as variable name`)); - } - - for (const key in options) { - if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) continue; - return Promise.reject(new Error(`[@octokit/graphql] "${key}" cannot be used as variable name`)); - } - } - - const parsedOptions = typeof query === "string" ? 
Object.assign({
-    query
-  }, options) : query;
-  const requestOptions = Object.keys(parsedOptions).reduce((result, key) => {
-    if (NON_VARIABLE_OPTIONS.includes(key)) {
-      result[key] = parsedOptions[key];
-      return result;
-    }
-
-    if (!result.variables) {
-      result.variables = {};
-    }
-
-    result.variables[key] = parsedOptions[key];
-    return result;
-  }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix
-  // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451
-
-  const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl;
-
-  if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {
-    requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql");
-  }
-
-  return request(requestOptions).then(response => {
-    if (response.data.errors) {
-      const headers = {};
-
-      for (const key of Object.keys(response.headers)) {
-        headers[key] = response.headers[key];
-      }
-
-      throw new GraphqlResponseError(requestOptions, headers, response.data);
-    }
-
-    return response.data.data;
-  });
-}
-
-function withDefaults(request$1, newDefaults) {
-  const newRequest = request$1.defaults(newDefaults);
-
-  const newApi = (query, options) => {
-    return graphql(newRequest, query, options);
-  };
-
-  return Object.assign(newApi, {
-    defaults: withDefaults.bind(null, newRequest),
-    endpoint: request.request.endpoint
-  });
-}
-
-const graphql$1 = withDefaults(request.request, {
-  headers: {
-    "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}`
-  },
-  method: "POST",
-  url: "/graphql"
-});
-function withCustomRequest(customRequest) {
-  return withDefaults(customRequest, {
-    method: "POST",
-    url: "/graphql"
-  });
-}
-
-exports.GraphqlResponseError = GraphqlResponseError;
-exports.graphql = graphql$1;
-exports.withCustomRequest = withCustomRequest;
-//# sourceMappingURL=index.js.map
-
-
-/***/ }),
-
-/***/ 22517:
-/***/ ((__unused_webpack_module, exports) => {
-
-"use strict";
-
-
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-
-const VERSION = "2.19.0";
-
-function ownKeys(object, enumerableOnly) {
-  var keys = Object.keys(object);
-
-  if (Object.getOwnPropertySymbols) {
-    var symbols = Object.getOwnPropertySymbols(object);
-    enumerableOnly && (symbols = symbols.filter(function (sym) {
-      return Object.getOwnPropertyDescriptor(object, sym).enumerable;
-    })), keys.push.apply(keys, symbols);
-  }
-
-  return keys;
-}
-
-function _objectSpread2(target) {
-  for (var i = 1; i < arguments.length; i++) {
-    var source = null != arguments[i] ? arguments[i] : {};
-    i % 2 ? ownKeys(Object(source), !0).forEach(function (key) {
-      _defineProperty(target, key, source[key]);
-    }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) {
-      Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));
-    });
-  }
-
-  return target;
-}
-
-function _defineProperty(obj, key, value) {
-  if (key in obj) {
-    Object.defineProperty(obj, key, {
-      value: value,
-      enumerable: true,
-      configurable: true,
-      writable: true
-    });
-  } else {
-    obj[key] = value;
-  }
-
-  return obj;
-}
-
-/**
- * Some “list” responses that can be paginated have a different response structure
- *
- * They have a `total_count` key in the response (search also has `incomplete_results`,
- * /installation/repositories also has `repository_selection`), as well as a key with
- * the list of items whose name varies from endpoint to endpoint.
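 *
 * For example (shapes only, sketched from the endpoints this comment names):
 * the search endpoints respond with
 *   { total_count, incomplete_results, items: [...] }
 * while `GET /installation/repositories` responds with
 *   { total_count, repository_selection, repositories: [...] }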
- *
- * Octokit normalizes these responses so that paginated results are always returned following
- * the same structure. One challenge is that if the list response has only one page, no Link
- * header is provided, so this header alone is not sufficient to check whether a response is
- * paginated or not.
- *
- * We check if a "total_count" key is present in the response data, but also make sure that
- * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would
- * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref
- */
-function normalizePaginatedListResponse(response) {
-  // endpoints can respond with 204 if repository is empty
-  if (!response.data) {
-    return _objectSpread2(_objectSpread2({}, response), {}, {
-      data: []
-    });
-  }
-
-  const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data);
-  if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way
-  // to retrieve the same information.
-
-  const incompleteResults = response.data.incomplete_results;
-  const repositorySelection = response.data.repository_selection;
-  const totalCount = response.data.total_count;
-  delete response.data.incomplete_results;
-  delete response.data.repository_selection;
-  delete response.data.total_count;
-  const namespaceKey = Object.keys(response.data)[0];
-  const data = response.data[namespaceKey];
-  response.data = data;
-
-  if (typeof incompleteResults !== "undefined") {
-    response.data.incomplete_results = incompleteResults;
-  }
-
-  if (typeof repositorySelection !== "undefined") {
-    response.data.repository_selection = repositorySelection;
-  }
-
-  response.data.total_count = totalCount;
-  return response;
-}
-
-function iterator(octokit, route, parameters) {
-  const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);
-  const requestMethod = typeof route === "function" ? route : octokit.request;
-  const method = options.method;
-  const headers = options.headers;
-  let url = options.url;
-  return {
-    [Symbol.asyncIterator]: () => ({
-      async next() {
-        if (!url) return {
-          done: true
-        };
-
-        try {
-          const response = await requestMethod({
-            method,
-            url,
-            headers
-          });
-          const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format:
-          // '<...>; rel="next", <...>; rel="last"'
-          // sets `url` to undefined if "next" URL is not present or `link` header is not set
-
-          url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1];
-          return {
-            value: normalizedResponse
-          };
-        } catch (error) {
-          if (error.status !== 409) throw error;
-          url = "";
-          return {
-            value: {
-              status: 200,
-              headers: {},
-              data: []
-            }
-          };
-        }
-      }
-
-    })
-  };
-}
-
-function paginate(octokit, route, parameters, mapFn) {
-  if (typeof parameters === "function") {
-    mapFn = parameters;
-    parameters = undefined;
-  }
-
-  return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn);
-}
-
-function gather(octokit, results, iterator, mapFn) {
-  return iterator.next().then(result => {
-    if (result.done) {
-      return results;
-    }
-
-    let earlyExit = false;
-
-    function done() {
-      earlyExit = true;
-    }
-
-    results = results.concat(mapFn ?
mapFn(result.value, done) : result.value.data); - - if (earlyExit) { - return results; - } - - return gather(octokit, results, iterator, mapFn); - }); -} - -const composePaginateRest = Object.assign(paginate, { - iterator -}); - -const paginatingEndpoints = ["GET /app/hook/deliveries", "GET /app/installations", "GET /applications/grants", "GET /authorizations", "GET /enterprises/{enterprise}/actions/permissions/organizations", "GET /enterprises/{enterprise}/actions/runner-groups", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners", "GET /enterprises/{enterprise}/actions/runners", "GET /enterprises/{enterprise}/actions/runners/downloads", "GET /enterprises/{enterprise}/actions/runners/{runner_id}/labels", "GET /enterprises/{enterprise}/secret-scanning/alerts", "GET /events", "GET /gists", "GET /gists/public", "GET /gists/starred", "GET /gists/{gist_id}/comments", "GET /gists/{gist_id}/commits", "GET /gists/{gist_id}/forks", "GET /installation/repositories", "GET /issues", "GET /marketplace_listing/plans", "GET /marketplace_listing/plans/{plan_id}/accounts", "GET /marketplace_listing/stubbed/plans", "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", "GET /networks/{owner}/{repo}/events", "GET /notifications", "GET /organizations", "GET /organizations/{organization_id}/custom_roles", "GET /orgs/{org}/actions/permissions/repositories", "GET /orgs/{org}/actions/runner-groups", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners", "GET /orgs/{org}/actions/runners", "GET /orgs/{org}/actions/runners/downloads", "GET /orgs/{org}/actions/runners/{runner_id}/labels", "GET /orgs/{org}/actions/secrets", "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", "GET /orgs/{org}/blocks", "GET /orgs/{org}/code-scanning/alerts", "GET /orgs/{org}/credential-authorizations", "GET /orgs/{org}/dependabot/secrets", "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", "GET /orgs/{org}/events", "GET /orgs/{org}/external-groups", "GET /orgs/{org}/failed_invitations", "GET /orgs/{org}/hooks", "GET /orgs/{org}/hooks/{hook_id}/deliveries", "GET /orgs/{org}/installations", "GET /orgs/{org}/invitations", "GET /orgs/{org}/invitations/{invitation_id}/teams", "GET /orgs/{org}/issues", "GET /orgs/{org}/members", "GET /orgs/{org}/migrations", "GET /orgs/{org}/migrations/{migration_id}/repositories", "GET /orgs/{org}/outside_collaborators", "GET /orgs/{org}/packages", "GET /orgs/{org}/projects", "GET /orgs/{org}/public_members", "GET /orgs/{org}/repos", "GET /orgs/{org}/secret-scanning/alerts", "GET /orgs/{org}/team-sync/groups", "GET /orgs/{org}/teams", "GET /orgs/{org}/teams/{team_slug}/discussions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/external-groups", "GET /orgs/{org}/teams/{team_slug}/invitations", "GET /orgs/{org}/teams/{team_slug}/members", "GET /orgs/{org}/teams/{team_slug}/projects", "GET /orgs/{org}/teams/{team_slug}/repos", "GET /orgs/{org}/teams/{team_slug}/team-sync/group-mappings", "GET /orgs/{org}/teams/{team_slug}/teams", "GET /projects/columns/{column_id}/cards", "GET /projects/{project_id}/collaborators", "GET 
/projects/{project_id}/columns", "GET /repos/{owner}/{repo}/actions/artifacts", "GET /repos/{owner}/{repo}/actions/runners", "GET /repos/{owner}/{repo}/actions/runners/downloads", "GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels", "GET /repos/{owner}/{repo}/actions/runs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", "GET /repos/{owner}/{repo}/actions/secrets", "GET /repos/{owner}/{repo}/actions/workflows", "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", "GET /repos/{owner}/{repo}/assignees", "GET /repos/{owner}/{repo}/autolinks", "GET /repos/{owner}/{repo}/branches", "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", "GET /repos/{owner}/{repo}/code-scanning/alerts", "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", "GET /repos/{owner}/{repo}/code-scanning/analyses", "GET /repos/{owner}/{repo}/codespaces", "GET /repos/{owner}/{repo}/codespaces/devcontainers", "GET /repos/{owner}/{repo}/codespaces/secrets", "GET /repos/{owner}/{repo}/collaborators", "GET /repos/{owner}/{repo}/comments", "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/commits", "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head", "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", "GET /repos/{owner}/{repo}/commits/{ref}/statuses", "GET /repos/{owner}/{repo}/contributors", "GET /repos/{owner}/{repo}/dependabot/secrets", "GET /repos/{owner}/{repo}/deployments", "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", "GET /repos/{owner}/{repo}/events", "GET /repos/{owner}/{repo}/forks", "GET /repos/{owner}/{repo}/git/matching-refs/{ref}", "GET /repos/{owner}/{repo}/hooks", "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", "GET /repos/{owner}/{repo}/invitations", "GET /repos/{owner}/{repo}/issues", "GET /repos/{owner}/{repo}/issues/comments", "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/issues/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", "GET /repos/{owner}/{repo}/issues/{issue_number}/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", "GET /repos/{owner}/{repo}/keys", "GET /repos/{owner}/{repo}/labels", "GET /repos/{owner}/{repo}/milestones", "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", "GET /repos/{owner}/{repo}/notifications", "GET /repos/{owner}/{repo}/pages/builds", "GET /repos/{owner}/{repo}/projects", "GET /repos/{owner}/{repo}/pulls", "GET /repos/{owner}/{repo}/pulls/comments", "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", "GET /repos/{owner}/{repo}/releases", "GET 
/repos/{owner}/{repo}/releases/{release_id}/assets", "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", "GET /repos/{owner}/{repo}/secret-scanning/alerts", "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", "GET /repos/{owner}/{repo}/stargazers", "GET /repos/{owner}/{repo}/subscribers", "GET /repos/{owner}/{repo}/tags", "GET /repos/{owner}/{repo}/tags/protection", "GET /repos/{owner}/{repo}/teams", "GET /repositories", "GET /repositories/{repository_id}/environments/{environment_name}/secrets", "GET /scim/v2/enterprises/{enterprise}/Groups", "GET /scim/v2/enterprises/{enterprise}/Users", "GET /scim/v2/organizations/{org}/Users", "GET /search/code", "GET /search/commits", "GET /search/issues", "GET /search/labels", "GET /search/repositories", "GET /search/topics", "GET /search/users", "GET /teams/{team_id}/discussions", "GET /teams/{team_id}/discussions/{discussion_number}/comments", "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /teams/{team_id}/discussions/{discussion_number}/reactions", "GET /teams/{team_id}/invitations", "GET /teams/{team_id}/members", "GET /teams/{team_id}/projects", "GET /teams/{team_id}/repos", "GET /teams/{team_id}/team-sync/group-mappings", "GET /teams/{team_id}/teams", "GET /user/blocks", "GET /user/codespaces", "GET /user/codespaces/secrets", "GET /user/codespaces/secrets/{secret_name}/repositories", "GET /user/emails", "GET /user/followers", "GET /user/following", "GET /user/gpg_keys", "GET /user/installations", "GET /user/installations/{installation_id}/repositories", "GET /user/issues", "GET /user/keys", "GET /user/marketplace_purchases", "GET /user/marketplace_purchases/stubbed", "GET /user/memberships/orgs", "GET /user/migrations", "GET /user/migrations/{migration_id}/repositories", "GET /user/orgs", "GET /user/packages", "GET /user/public_emails", "GET /user/repos", "GET /user/repository_invitations", "GET /user/starred", "GET /user/subscriptions", "GET /user/teams", "GET /users", "GET /users/{username}/events", "GET /users/{username}/events/orgs/{org}", "GET /users/{username}/events/public", "GET /users/{username}/followers", "GET /users/{username}/following", "GET /users/{username}/gists", "GET /users/{username}/gpg_keys", "GET /users/{username}/keys", "GET /users/{username}/orgs", "GET /users/{username}/packages", "GET /users/{username}/projects", "GET /users/{username}/received_events", "GET /users/{username}/received_events/public", "GET /users/{username}/repos", "GET /users/{username}/starred", "GET /users/{username}/subscriptions"]; - -function isPaginatingEndpoint(arg) { - if (typeof arg === "string") { - return paginatingEndpoints.includes(arg); - } else { - return false; - } -} - -/** - * @param octokit Octokit instance - * @param options Options passed to Octokit constructor - */ - -function paginateRest(octokit) { - return { - paginate: Object.assign(paginate.bind(null, octokit), { - iterator: iterator.bind(null, octokit) - }) - }; -} -paginateRest.VERSION = VERSION; - -exports.composePaginateRest = composePaginateRest; -exports.isPaginatingEndpoint = isPaginatingEndpoint; -exports.paginateRest = paginateRest; -exports.paginatingEndpoints = paginatingEndpoints; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 81160: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -const VERSION = "1.0.4"; - -/** - * @param octokit Octokit instance - * @param options Options passed 
to Octokit constructor - */ - -function requestLog(octokit) { - octokit.hook.wrap("request", (request, options) => { - octokit.log.debug("request", options); - const start = Date.now(); - const requestOptions = octokit.request.endpoint.parse(options); - const path = requestOptions.url.replace(options.baseUrl, ""); - return request(options).then(response => { - octokit.log.info(`${requestOptions.method} ${path} - ${response.status} in ${Date.now() - start}ms`); - return response; - }).catch(error => { - octokit.log.info(`${requestOptions.method} ${path} - ${error.status} in ${Date.now() - start}ms`); - throw error; - }); - }); -} -requestLog.VERSION = VERSION; - -exports.requestLog = requestLog; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 31478: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function ownKeys(object, enumerableOnly) { - var keys = Object.keys(object); - - if (Object.getOwnPropertySymbols) { - var symbols = Object.getOwnPropertySymbols(object); - - if (enumerableOnly) { - symbols = symbols.filter(function (sym) { - return Object.getOwnPropertyDescriptor(object, sym).enumerable; - }); - } - - keys.push.apply(keys, symbols); - } - - return keys; -} - -function _objectSpread2(target) { - for (var i = 1; i < arguments.length; i++) { - var source = arguments[i] != null ? arguments[i] : {}; - - if (i % 2) { - ownKeys(Object(source), true).forEach(function (key) { - _defineProperty(target, key, source[key]); - }); - } else if (Object.getOwnPropertyDescriptors) { - Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); - } else { - ownKeys(Object(source)).forEach(function (key) { - Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); - }); - } - } - - return target; -} - -function _defineProperty(obj, key, value) { - if (key in obj) { - Object.defineProperty(obj, key, { - value: value, - enumerable: true, - configurable: true, - writable: true - }); - } else { - obj[key] = value; - } - - return obj; + return obj; } const Endpoints = { @@ -38079,6 +31868,8 @@ const Endpoints = { createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"], createWorkflowDispatch: ["POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"], + deleteActionsCacheById: ["DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}"], + deleteActionsCacheByKey: ["DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}"], deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], deleteEnvironmentSecret: ["DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], @@ -38095,6 +31886,7 @@ const Endpoints = { downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], enableSelectedRepositoryGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories/{repository_id}"], enableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable"], + getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"], getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"], getActionsCacheUsageByRepoForOrg: ["GET /orgs/{org}/actions/cache/usage-by-repository"], getActionsCacheUsageForEnterprise: ["GET /enterprises/{enterprise}/actions/cache/usage"], @@ -38300,6 
+32092,7 @@ const Endpoints = { createWithPrForAuthenticatedUser: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces"], createWithRepoForAuthenticatedUser: ["POST /repos/{owner}/{repo}/codespaces"], deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"], + deleteFromOrganization: ["DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}"], deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"], deleteSecretForAuthenticatedUser: ["DELETE /user/codespaces/secrets/{secret_name}"], exportForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/exports"], @@ -38311,6 +32104,11 @@ const Endpoints = { getSecretForAuthenticatedUser: ["GET /user/codespaces/secrets/{secret_name}"], listDevcontainersInRepositoryForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces/devcontainers"], listForAuthenticatedUser: ["GET /user/codespaces"], + listInOrganization: ["GET /orgs/{org}/codespaces", {}, { + renamedParameters: { + org_id: "org" + } + }], listInRepositoryForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces"], listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"], listRepositoriesForSecretForAuthenticatedUser: ["GET /user/codespaces/secrets/{secret_name}/repositories"], @@ -38320,6 +32118,7 @@ const Endpoints = { setRepositoriesForSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}/repositories"], startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"], stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"], + stopInOrganization: ["POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop"], updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"] }, dependabot: { @@ -38339,6 +32138,7 @@ const Endpoints = { setSelectedReposForOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories"] }, dependencyGraph: { + createRepositorySnapshot: ["POST /repos/{owner}/{repo}/dependency-graph/snapshots"], diffRange: ["GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}"] }, emojis: { @@ -39002,7 +32802,7 @@ const Endpoints = { } }; -const VERSION = "5.15.0"; +const VERSION = "5.16.2"; function endpointsToMethods(octokit, endpointsMap) { const newMethods = {}; @@ -39107,7 +32907,7 @@ exports.restEndpointMethods = restEndpointMethods; /***/ }), -/***/ 35336: +/***/ 57293: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -39117,7 +32917,7 @@ Object.defineProperty(exports, "__esModule", ({ value: true })); function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } -var Bottleneck = _interopDefault(__nccwpck_require__(35553)); +var Bottleneck = _interopDefault(__nccwpck_require__(59111)); // @ts-ignore async function errorRequest(octokit, state, error, options) { @@ -39191,7 +32991,7 @@ exports.retry = retry; /***/ }), -/***/ 37382: +/***/ 63986: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -39201,93 +33001,11 @@ Object.defineProperty(exports, "__esModule", ({ value: true })); function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } -var deprecation = __nccwpck_require__(76604); -var once = _interopDefault(__nccwpck_require__(53183)); - -const logOnceCode = once(deprecation => console.warn(deprecation)); -const logOnceHeaders = once(deprecation => console.warn(deprecation)); -/** - * Error with extra properties to help with debugging - */ - -class RequestError extends Error { - constructor(message, statusCode, options) { - super(message); // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } - - this.name = "HttpError"; - this.status = statusCode; - let headers; - - if ("headers" in options && typeof options.headers !== "undefined") { - headers = options.headers; - } - - if ("response" in options) { - this.response = options.response; - headers = options.response.headers; - } // redact request credentials without mutating original request options - - - const requestCopy = Object.assign({}, options.request); - - if (options.request.headers.authorization) { - requestCopy.headers = Object.assign({}, options.request.headers, { - authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]") - }); - } - - requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit - // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications - .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended - // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header - .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); - this.request = requestCopy; // deprecations - - Object.defineProperty(this, "code", { - get() { - logOnceCode(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); - return statusCode; - } - - }); - Object.defineProperty(this, "headers", { - get() { - logOnceHeaders(new deprecation.Deprecation("[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.")); - return headers || {}; - } - - }); - } - -} - -exports.RequestError = RequestError; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 33825: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } - -var endpoint = __nccwpck_require__(67273); -var universalUserAgent = __nccwpck_require__(38151); -var isPlainObject = __nccwpck_require__(21671); -var nodeFetch = _interopDefault(__nccwpck_require__(26006)); -var requestError = __nccwpck_require__(37382); +var endpoint = __nccwpck_require__(92995); +var universalUserAgent = __nccwpck_require__(37081); +var isPlainObject = __nccwpck_require__(63032); +var nodeFetch = _interopDefault(__nccwpck_require__(38534)); +var requestError = __nccwpck_require__(88379); const VERSION = "5.6.3"; @@ -39458,10431 +33176,9540 @@ exports.request = request; /***/ }), -/***/ 14430: +/***/ 88379: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ + Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ContextAPI = void 0; -const NoopContextManager_1 = __nccwpck_require__(63571); -const global_utils_1 = __nccwpck_require__(27110); -const diag_1 = __nccwpck_require__(30156); -const API_NAME = 'context'; -const NOOP_CONTEXT_MANAGER = new NoopContextManager_1.NoopContextManager(); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } + +var deprecation = __nccwpck_require__(25800); +var once = _interopDefault(__nccwpck_require__(88666)); + +const logOnceCode = once(deprecation => console.warn(deprecation)); +const logOnceHeaders = once(deprecation => console.warn(deprecation)); /** - * Singleton object which represents the entry point to the OpenTelemetry Context API + * Error with extra properties to help with debugging */ -class ContextAPI { - /** Empty private constructor prevents end users from constructing a new instance of the API */ - constructor() { } - /** Get the singleton instance of the Context API */ - static getInstance() { - if (!this._instance) { - this._instance = new ContextAPI(); - } - return this._instance; - } - /** - * Set the current context manager. - * - * @returns true if the context manager was successfully registered, else false - */ - setGlobalContextManager(contextManager) { - return (0, global_utils_1.registerGlobal)(API_NAME, contextManager, diag_1.DiagAPI.instance()); - } - /** - * Get the currently active context - */ - active() { - return this._getContextManager().active(); - } - /** - * Execute a function with an active context - * - * @param context context to be active during function execution - * @param fn function to execute in a context - * @param thisArg optional receiver to be used for calling fn - * @param args optional arguments forwarded to fn - */ - with(context, fn, thisArg, ...args) { - return this._getContextManager().with(context, fn, thisArg, ...args); - } - /** - * Bind a context to a target function or event emitter - * - * @param context context to bind to the event emitter or function. 
Defaults to the currently active context - * @param target function or event emitter to bind - */ - bind(context, target) { - return this._getContextManager().bind(context, target); + +class RequestError extends Error { + constructor(message, statusCode, options) { + super(message); // Maintains proper stack trace (only available on V8) + + /* istanbul ignore next */ + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); } - _getContextManager() { - return (0, global_utils_1.getGlobal)(API_NAME) || NOOP_CONTEXT_MANAGER; + + this.name = "HttpError"; + this.status = statusCode; + let headers; + + if ("headers" in options && typeof options.headers !== "undefined") { + headers = options.headers; } - /** Disable and remove the global context manager */ - disable() { - this._getContextManager().disable(); - (0, global_utils_1.unregisterGlobal)(API_NAME, diag_1.DiagAPI.instance()); + + if ("response" in options) { + this.response = options.response; + headers = options.response.headers; + } // redact request credentials without mutating original request options + + + const requestCopy = Object.assign({}, options.request); + + if (options.request.headers.authorization) { + requestCopy.headers = Object.assign({}, options.request.headers, { + authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]") + }); } + + requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit + // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications + .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended + // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header + .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); + this.request = requestCopy; // deprecations + + Object.defineProperty(this, "code", { + get() { + logOnceCode(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); + return statusCode; + } + + }); + Object.defineProperty(this, "headers", { + get() { + logOnceHeaders(new deprecation.Deprecation("[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.")); + return headers || {}; + } + + }); + } + } -exports.ContextAPI = ContextAPI; -//# sourceMappingURL=context.js.map + +exports.RequestError = RequestError; +//# sourceMappingURL=index.js.map + /***/ }), -/***/ 30156: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 37686: +/***/ (function(__unused_webpack_module, exports) { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.DiagAPI = void 0; -const ComponentLogger_1 = __nccwpck_require__(47645); -const logLevelLogger_1 = __nccwpck_require__(3484); -const types_1 = __nccwpck_require__(60953); -const global_utils_1 = __nccwpck_require__(27110); -const API_NAME = 'diag'; +exports.ClientStreamingCall = void 0; /** - * Singleton object which represents the entry point to the OpenTelemetry internal - * diagnostic API + * A client streaming RPC call. This means that the clients sends 0, 1, or + * more messages to the server, and the server replies with exactly one + * message. */ -class DiagAPI { +class ClientStreamingCall { + constructor(method, requestHeaders, request, headers, response, status, trailers) { + this.method = method; + this.requestHeaders = requestHeaders; + this.requests = request; + this.headers = headers; + this.response = response; + this.status = status; + this.trailers = trailers; + } /** - * Private internal constructor - * @private + * Instead of awaiting the response status and trailers, you can + * just as well await this call itself to receive the server outcome. + * Note that it may still be valid to send more request messages. */ - constructor() { - function _logProxy(funcName) { - return function (...args) { - const logger = (0, global_utils_1.getGlobal)('diag'); - // shortcut if logger not set - if (!logger) - return; - return logger[funcName](...args); - }; - } - // Using self local variable for minification purposes as 'this' cannot be minified - const self = this; - // DiagAPI specific functions - const setLogger = (logger, optionsOrLogLevel = { logLevel: types_1.DiagLogLevel.INFO }) => { - var _a, _b, _c; - if (logger === self) { - // There isn't much we can do here. - // Logging to the console might break the user application. - // Try to log to self. If a logger was previously registered it will receive the log. - const err = new Error('Cannot use diag as the logger for itself. Please use a DiagLogger implementation like ConsoleDiagLogger or a custom implementation'); - self.error((_a = err.stack) !== null && _a !== void 0 ? _a : err.message); - return false; - } - if (typeof optionsOrLogLevel === 'number') { - optionsOrLogLevel = { - logLevel: optionsOrLogLevel, - }; - } - const oldLogger = (0, global_utils_1.getGlobal)('diag'); - const newLogger = (0, logLevelLogger_1.createLogLevelDiagLogger)((_b = optionsOrLogLevel.logLevel) !== null && _b !== void 0 ? _b : types_1.DiagLogLevel.INFO, logger); - // There already is an logger registered. We'll let it know before overwriting it. - if (oldLogger && !optionsOrLogLevel.suppressOverrideMessage) { - const stack = (_c = new Error().stack) !== null && _c !== void 0 ? 
_c : ''; - oldLogger.warn(`Current logger will be overwritten from ${stack}`); - newLogger.warn(`Current logger will overwrite one already registered from ${stack}`); - } - return (0, global_utils_1.registerGlobal)('diag', newLogger, self, true); - }; - self.setLogger = setLogger; - self.disable = () => { - (0, global_utils_1.unregisterGlobal)(API_NAME, self); - }; - self.createComponentLogger = (options) => { - return new ComponentLogger_1.DiagComponentLogger(options); - }; - self.verbose = _logProxy('verbose'); - self.debug = _logProxy('debug'); - self.info = _logProxy('info'); - self.warn = _logProxy('warn'); - self.error = _logProxy('error'); + then(onfulfilled, onrejected) { + return this.promiseFinished().then(value => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, reason => onrejected ? Promise.resolve(onrejected(reason)) : Promise.reject(reason)); } - /** Get the singleton instance of the DiagAPI API */ - static instance() { - if (!this._instance) { - this._instance = new DiagAPI(); - } - return this._instance; + promiseFinished() { + return __awaiter(this, void 0, void 0, function* () { + let [headers, response, status, trailers] = yield Promise.all([this.headers, this.response, this.status, this.trailers]); + return { + method: this.method, + requestHeaders: this.requestHeaders, + headers, + response, + status, + trailers + }; + }); } } -exports.DiagAPI = DiagAPI; -//# sourceMappingURL=diag.js.map +exports.ClientStreamingCall = ClientStreamingCall; + /***/ }), -/***/ 13856: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 13630: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.MetricsAPI = void 0; -const NoopMeterProvider_1 = __nccwpck_require__(99682); -const global_utils_1 = __nccwpck_require__(27110); -const diag_1 = __nccwpck_require__(30156); -const API_NAME = 'metrics'; +exports.Deferred = exports.DeferredState = void 0; +var DeferredState; +(function (DeferredState) { + DeferredState[DeferredState["PENDING"] = 0] = "PENDING"; + DeferredState[DeferredState["REJECTED"] = 1] = "REJECTED"; + DeferredState[DeferredState["RESOLVED"] = 2] = "RESOLVED"; +})(DeferredState = exports.DeferredState || (exports.DeferredState = {})); /** - * Singleton object which represents the entry point to the OpenTelemetry Metrics API + * A deferred promise. This is a "controller" for a promise, which lets you + * pass a promise around and reject or resolve it from the outside. + * + * Warning: This class is to be used with care. Using it can make code very + * difficult to read. It is intended for use in library code that exposes + * promises, not for regular business logic. 
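 *
 * A minimal usage sketch (`call` and `transport` are hypothetical stand-ins,
 * not part of this module):
 *
 *   const headers = new Deferred();
 *   call.headers = headers.promise;                   // consumers await this
 *   transport.onHeaders(h => headers.resolve(h));     // settle exactly once
 *   transport.onError(e => headers.rejectPending(e)); // no-op once settled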
*/ -class MetricsAPI { - /** Empty private constructor prevents end users from constructing a new instance of the API */ - constructor() { } - /** Get the singleton instance of the Metrics API */ - static getInstance() { - if (!this._instance) { - this._instance = new MetricsAPI(); - } - return this._instance; - } +class Deferred { /** - * Set the current global meter provider. - * Returns true if the meter provider was successfully registered, else false. + * @param preventUnhandledRejectionWarning - prevents the warning + * "Unhandled Promise rejection" by adding a noop rejection handler. + * Working with calls returned from the runtime-rpc package in an + * async function usually means awaiting one call property after + * the other. This means that the "status" is not being awaited when + * an earlier await for the "headers" is rejected. This causes the + * "unhandled promise reject" warning. A more correct behaviour for + * calls might be to become aware whether at least one of the + * promises is handled and swallow the rejection warning for the + * others. */ - setGlobalMeterProvider(provider) { - return (0, global_utils_1.registerGlobal)(API_NAME, provider, diag_1.DiagAPI.instance()); + constructor(preventUnhandledRejectionWarning = true) { + this._state = DeferredState.PENDING; + this._promise = new Promise((resolve, reject) => { + this._resolve = resolve; + this._reject = reject; + }); + if (preventUnhandledRejectionWarning) { + this._promise.catch(_ => { }); + } } /** - * Returns the global meter provider. + * Get the current state of the promise. */ - getMeterProvider() { - return (0, global_utils_1.getGlobal)(API_NAME) || NoopMeterProvider_1.NOOP_METER_PROVIDER; + get state() { + return this._state; } /** - * Returns a meter from the global meter provider. + * Get the deferred promise. */ - getMeter(name, version, options) { - return this.getMeterProvider().getMeter(name, version, options); - } - /** Remove the global meter provider */ - disable() { - (0, global_utils_1.unregisterGlobal)(API_NAME, diag_1.DiagAPI.instance()); - } -} -exports.MetricsAPI = MetricsAPI; -//# sourceMappingURL=metrics.js.map - -/***/ }), - -/***/ 2596: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.PropagationAPI = void 0; -const global_utils_1 = __nccwpck_require__(27110); -const NoopTextMapPropagator_1 = __nccwpck_require__(98319); -const TextMapPropagator_1 = __nccwpck_require__(60442); -const context_helpers_1 = __nccwpck_require__(73343); -const utils_1 = __nccwpck_require__(21922); -const diag_1 = __nccwpck_require__(30156); -const API_NAME = 'propagation'; -const NOOP_TEXT_MAP_PROPAGATOR = new NoopTextMapPropagator_1.NoopTextMapPropagator(); -/** - * Singleton object which represents the entry point to the OpenTelemetry Propagation API - */ -class PropagationAPI { - /** Empty private constructor prevents end users from constructing a new instance of the API */ - constructor() { - this.createBaggage = utils_1.createBaggage; - this.getBaggage = context_helpers_1.getBaggage; - this.getActiveBaggage = context_helpers_1.getActiveBaggage; - this.setBaggage = context_helpers_1.setBaggage; - this.deleteBaggage = context_helpers_1.deleteBaggage; - } - /** Get the singleton instance of the Propagator API */ - static getInstance() { - if (!this._instance) { - this._instance = new PropagationAPI(); - } - return this._instance; + get promise() { + return this._promise; } /** - * Set the current propagator. - * - * @returns true if the propagator was successfully registered, else false + * Resolve the promise. Throws if the promise is already resolved or rejected. */ - setGlobalPropagator(propagator) { - return (0, global_utils_1.registerGlobal)(API_NAME, propagator, diag_1.DiagAPI.instance()); + resolve(value) { + if (this.state !== DeferredState.PENDING) + throw new Error(`cannot resolve ${DeferredState[this.state].toLowerCase()}`); + this._resolve(value); + this._state = DeferredState.RESOLVED; } /** - * Inject context into a carrier to be propagated inter-process - * - * @param context Context carrying tracing data to inject - * @param carrier carrier to inject context into - * @param setter Function used to set values on the carrier + * Reject the promise. Throws if the promise is already resolved or rejected. */ - inject(context, carrier, setter = TextMapPropagator_1.defaultTextMapSetter) { - return this._getGlobalPropagator().inject(context, carrier, setter); + reject(reason) { + if (this.state !== DeferredState.PENDING) + throw new Error(`cannot reject ${DeferredState[this.state].toLowerCase()}`); + this._reject(reason); + this._state = DeferredState.REJECTED; } /** - * Extract context from a carrier - * - * @param context Context which the newly created context will inherit from - * @param carrier Carrier to extract context from - * @param getter Function used to extract keys from a carrier + * Resolve the promise. Ignore if not pending. */ - extract(context, carrier, getter = TextMapPropagator_1.defaultTextMapGetter) { - return this._getGlobalPropagator().extract(context, carrier, getter); + resolvePending(val) { + if (this._state === DeferredState.PENDING) + this.resolve(val); } /** - * Return a list of all fields which may be used by the propagator. + * Reject the promise. Ignore if not pending. 
*/ - fields() { - return this._getGlobalPropagator().fields(); - } - /** Remove the global propagator */ - disable() { - (0, global_utils_1.unregisterGlobal)(API_NAME, diag_1.DiagAPI.instance()); - } - _getGlobalPropagator() { - return (0, global_utils_1.getGlobal)(API_NAME) || NOOP_TEXT_MAP_PROPAGATOR; + rejectPending(reason) { + if (this._state === DeferredState.PENDING) + this.reject(reason); } } -exports.PropagationAPI = PropagationAPI; -//# sourceMappingURL=propagation.js.map +exports.Deferred = Deferred; + /***/ }), -/***/ 53675: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 77617: +/***/ (function(__unused_webpack_module, exports) { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.TraceAPI = void 0; -const global_utils_1 = __nccwpck_require__(27110); -const ProxyTracerProvider_1 = __nccwpck_require__(5064); -const spancontext_utils_1 = __nccwpck_require__(73312); -const context_utils_1 = __nccwpck_require__(59317); -const diag_1 = __nccwpck_require__(30156); -const API_NAME = 'trace'; -/** - * Singleton object which represents the entry point to the OpenTelemetry Tracing API - */ -class TraceAPI { - /** Empty private constructor prevents end users from constructing a new instance of the API */ - constructor() { - this._proxyTracerProvider = new ProxyTracerProvider_1.ProxyTracerProvider(); - this.wrapSpanContext = spancontext_utils_1.wrapSpanContext; - this.isSpanContextValid = spancontext_utils_1.isSpanContextValid; - this.deleteSpan = context_utils_1.deleteSpan; - this.getSpan = context_utils_1.getSpan; - this.getActiveSpan = context_utils_1.getActiveSpan; - this.getSpanContext = context_utils_1.getSpanContext; - this.setSpan = context_utils_1.setSpan; - this.setSpanContext = context_utils_1.setSpanContext; - } - /** Get the singleton instance of the Trace API */ - static getInstance() { - if (!this._instance) { - this._instance = new TraceAPI(); - } - return this._instance; - } - /** - * Set the current global tracer. 
- * - * @returns true if the tracer provider was successfully registered, else false - */ - setGlobalTracerProvider(provider) { - const success = (0, global_utils_1.registerGlobal)(API_NAME, this._proxyTracerProvider, diag_1.DiagAPI.instance()); - if (success) { - this._proxyTracerProvider.setDelegate(provider); - } - return success; - } - /** - * Returns the global tracer provider. - */ - getTracerProvider() { - return (0, global_utils_1.getGlobal)(API_NAME) || this._proxyTracerProvider; +exports.DuplexStreamingCall = void 0; +/** + * A duplex streaming RPC call. This means that the clients sends an + * arbitrary amount of messages to the server, while at the same time, + * the server sends an arbitrary amount of messages to the client. + */ +class DuplexStreamingCall { + constructor(method, requestHeaders, request, headers, response, status, trailers) { + this.method = method; + this.requestHeaders = requestHeaders; + this.requests = request; + this.headers = headers; + this.responses = response; + this.status = status; + this.trailers = trailers; } /** - * Returns a tracer from the global tracer provider. + * Instead of awaiting the response status and trailers, you can + * just as well await this call itself to receive the server outcome. + * Note that it may still be valid to send more request messages. */ - getTracer(name, version) { - return this.getTracerProvider().getTracer(name, version); + then(onfulfilled, onrejected) { + return this.promiseFinished().then(value => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, reason => onrejected ? Promise.resolve(onrejected(reason)) : Promise.reject(reason)); } - /** Remove the global tracer provider */ - disable() { - (0, global_utils_1.unregisterGlobal)(API_NAME, diag_1.DiagAPI.instance()); - this._proxyTracerProvider = new ProxyTracerProvider_1.ProxyTracerProvider(); + promiseFinished() { + return __awaiter(this, void 0, void 0, function* () { + let [headers, status, trailers] = yield Promise.all([this.headers, this.status, this.trailers]); + return { + method: this.method, + requestHeaders: this.requestHeaders, + headers, + status, + trailers, + }; + }); } } -exports.TraceAPI = TraceAPI; -//# sourceMappingURL=trace.js.map +exports.DuplexStreamingCall = DuplexStreamingCall; + /***/ }), -/***/ 73343: +/***/ 84183: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ +// Public API of the rpc runtime. 
+// Note: we do not use `export * from ...` to help tree shakers, +// webpack verbose output hints that this should be useful Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.deleteBaggage = exports.setBaggage = exports.getActiveBaggage = exports.getBaggage = void 0; -const context_1 = __nccwpck_require__(14430); -const context_2 = __nccwpck_require__(41544); -/** - * Baggage key - */ -const BAGGAGE_KEY = (0, context_2.createContextKey)('OpenTelemetry Baggage Key'); -/** - * Retrieve the current baggage from the given context - * - * @param {Context} Context that manage all context values - * @returns {Baggage} Extracted baggage from the context - */ -function getBaggage(context) { - return context.getValue(BAGGAGE_KEY) || undefined; -} -exports.getBaggage = getBaggage; -/** - * Retrieve the current baggage from the active/current context - * - * @returns {Baggage} Extracted baggage from the context - */ -function getActiveBaggage() { - return getBaggage(context_1.ContextAPI.getInstance().active()); -} -exports.getActiveBaggage = getActiveBaggage; -/** - * Store a baggage in the given context - * - * @param {Context} Context that manage all context values - * @param {Baggage} baggage that will be set in the actual context - */ -function setBaggage(context, baggage) { - return context.setValue(BAGGAGE_KEY, baggage); -} -exports.setBaggage = setBaggage; -/** - * Delete the baggage stored in the given context - * - * @param {Context} Context that manage all context values - */ -function deleteBaggage(context) { - return context.deleteValue(BAGGAGE_KEY); -} -exports.deleteBaggage = deleteBaggage; -//# sourceMappingURL=context-helpers.js.map +var service_type_1 = __nccwpck_require__(99754); +Object.defineProperty(exports, "ServiceType", ({ enumerable: true, get: function () { return service_type_1.ServiceType; } })); +var reflection_info_1 = __nccwpck_require__(45344); +Object.defineProperty(exports, "readMethodOptions", ({ enumerable: true, get: function () { return reflection_info_1.readMethodOptions; } })); +Object.defineProperty(exports, "readMethodOption", ({ enumerable: true, get: function () { return reflection_info_1.readMethodOption; } })); +Object.defineProperty(exports, "readServiceOption", ({ enumerable: true, get: function () { return reflection_info_1.readServiceOption; } })); +var rpc_error_1 = __nccwpck_require__(65207); +Object.defineProperty(exports, "RpcError", ({ enumerable: true, get: function () { return rpc_error_1.RpcError; } })); +var rpc_options_1 = __nccwpck_require__(32817); +Object.defineProperty(exports, "mergeRpcOptions", ({ enumerable: true, get: function () { return rpc_options_1.mergeRpcOptions; } })); +var rpc_output_stream_1 = __nccwpck_require__(98790); +Object.defineProperty(exports, "RpcOutputStreamController", ({ enumerable: true, get: function () { return rpc_output_stream_1.RpcOutputStreamController; } })); +var test_transport_1 = __nccwpck_require__(6945); +Object.defineProperty(exports, "TestTransport", ({ enumerable: true, get: function () { return test_transport_1.TestTransport; } })); +var deferred_1 = __nccwpck_require__(13630); +Object.defineProperty(exports, "Deferred", ({ enumerable: true, get: function () { return deferred_1.Deferred; } })); +Object.defineProperty(exports, "DeferredState", ({ enumerable: true, get: function () { return deferred_1.DeferredState; } })); +var duplex_streaming_call_1 = __nccwpck_require__(77617); +Object.defineProperty(exports, "DuplexStreamingCall", ({ enumerable: true, get: function () { 
return duplex_streaming_call_1.DuplexStreamingCall; } })); +var client_streaming_call_1 = __nccwpck_require__(37686); +Object.defineProperty(exports, "ClientStreamingCall", ({ enumerable: true, get: function () { return client_streaming_call_1.ClientStreamingCall; } })); +var server_streaming_call_1 = __nccwpck_require__(92403); +Object.defineProperty(exports, "ServerStreamingCall", ({ enumerable: true, get: function () { return server_streaming_call_1.ServerStreamingCall; } })); +var unary_call_1 = __nccwpck_require__(19685); +Object.defineProperty(exports, "UnaryCall", ({ enumerable: true, get: function () { return unary_call_1.UnaryCall; } })); +var rpc_interceptor_1 = __nccwpck_require__(67514); +Object.defineProperty(exports, "stackIntercept", ({ enumerable: true, get: function () { return rpc_interceptor_1.stackIntercept; } })); +Object.defineProperty(exports, "stackDuplexStreamingInterceptors", ({ enumerable: true, get: function () { return rpc_interceptor_1.stackDuplexStreamingInterceptors; } })); +Object.defineProperty(exports, "stackClientStreamingInterceptors", ({ enumerable: true, get: function () { return rpc_interceptor_1.stackClientStreamingInterceptors; } })); +Object.defineProperty(exports, "stackServerStreamingInterceptors", ({ enumerable: true, get: function () { return rpc_interceptor_1.stackServerStreamingInterceptors; } })); +Object.defineProperty(exports, "stackUnaryInterceptors", ({ enumerable: true, get: function () { return rpc_interceptor_1.stackUnaryInterceptors; } })); +var server_call_context_1 = __nccwpck_require__(41963); +Object.defineProperty(exports, "ServerCallContextController", ({ enumerable: true, get: function () { return server_call_context_1.ServerCallContextController; } })); + /***/ }), -/***/ 70011: -/***/ ((__unused_webpack_module, exports) => { +/***/ 45344: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.readServiceOption = exports.readMethodOption = exports.readMethodOptions = exports.normalizeMethodInfo = void 0; +const runtime_1 = __nccwpck_require__(82905); +/** + * Turns PartialMethodInfo into MethodInfo. + */ +function normalizeMethodInfo(method, service) { + var _a, _b, _c; + let m = method; + m.service = service; + m.localName = (_a = m.localName) !== null && _a !== void 0 ? _a : runtime_1.lowerCamelCase(m.name); + // noinspection PointlessBooleanExpressionJS + m.serverStreaming = !!m.serverStreaming; + // noinspection PointlessBooleanExpressionJS + m.clientStreaming = !!m.clientStreaming; + m.options = (_b = m.options) !== null && _b !== void 0 ? _b : {}; + m.idempotency = (_c = m.idempotency) !== null && _c !== void 0 ? _c : undefined; + return m; +} +exports.normalizeMethodInfo = normalizeMethodInfo; +/** + * Read custom method options from a generated service client. * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
+ * @deprecated use readMethodOption() */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.BaggageImpl = void 0; -class BaggageImpl { - constructor(entries) { - this._entries = entries ? new Map(entries) : new Map(); - } - getEntry(key) { - const entry = this._entries.get(key); - if (!entry) { - return undefined; - } - return Object.assign({}, entry); - } - getAllEntries() { - return Array.from(this._entries.entries()).map(([k, v]) => [k, v]); - } - setEntry(key, entry) { - const newBaggage = new BaggageImpl(this._entries); - newBaggage._entries.set(key, entry); - return newBaggage; +function readMethodOptions(service, methodName, extensionName, extensionType) { + var _a; + const options = (_a = service.methods.find((m, i) => m.localName === methodName || i === methodName)) === null || _a === void 0 ? void 0 : _a.options; + return options && options[extensionName] ? extensionType.fromJson(options[extensionName]) : undefined; +} +exports.readMethodOptions = readMethodOptions; +function readMethodOption(service, methodName, extensionName, extensionType) { + var _a; + const options = (_a = service.methods.find((m, i) => m.localName === methodName || i === methodName)) === null || _a === void 0 ? void 0 : _a.options; + if (!options) { + return undefined; } - removeEntry(key) { - const newBaggage = new BaggageImpl(this._entries); - newBaggage._entries.delete(key); - return newBaggage; + const optionVal = options[extensionName]; + if (optionVal === undefined) { + return optionVal; } - removeEntries(...keys) { - const newBaggage = new BaggageImpl(this._entries); - for (const key of keys) { - newBaggage._entries.delete(key); - } - return newBaggage; + return extensionType ? extensionType.fromJson(optionVal) : optionVal; +} +exports.readMethodOption = readMethodOption; +function readServiceOption(service, extensionName, extensionType) { + const options = service.options; + if (!options) { + return undefined; } - clear() { - return new BaggageImpl(); + const optionVal = options[extensionName]; + if (optionVal === undefined) { + return optionVal; } + return extensionType ? extensionType.fromJson(optionVal) : optionVal; } -exports.BaggageImpl = BaggageImpl; -//# sourceMappingURL=baggage-impl.js.map +exports.readServiceOption = readServiceOption; + /***/ }), -/***/ 3292: +/***/ 65207: /***/ ((__unused_webpack_module, exports) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.baggageEntryMetadataSymbol = void 0; +exports.RpcError = void 0; /** - * Symbol used to make BaggageEntryMetadata an opaque type - */ -exports.baggageEntryMetadataSymbol = Symbol('BaggageEntryMetadata'); -//# sourceMappingURL=symbol.js.map - -/***/ }), - -/***/ 21922: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * An error that occurred while calling a RPC method. */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.baggageEntryMetadataFromString = exports.createBaggage = void 0; -const diag_1 = __nccwpck_require__(30156); -const baggage_impl_1 = __nccwpck_require__(70011); -const symbol_1 = __nccwpck_require__(3292); -const diag = diag_1.DiagAPI.instance(); -/** - * Create a new Baggage with optional entries - * - * @param entries An array of baggage entries the new baggage should contain - */ -function createBaggage(entries = {}) { - return new baggage_impl_1.BaggageImpl(new Map(Object.entries(entries))); -} -exports.createBaggage = createBaggage; -/** - * Create a serializable BaggageEntryMetadata object from a string. - * - * @param str string metadata. Format is currently not defined by the spec and has no special meaning. - * - */ -function baggageEntryMetadataFromString(str) { - if (typeof str !== 'string') { - diag.error(`Cannot create baggage metadata from unknown type: ${typeof str}`); - str = ''; +class RpcError extends Error { + constructor(message, code = 'UNKNOWN', meta) { + super(message); + this.name = 'RpcError'; + // see https://www.typescriptlang.org/docs/handbook/release-notes/typescript-2-2.html#example + Object.setPrototypeOf(this, new.target.prototype); + this.code = code; + this.meta = meta !== null && meta !== void 0 ? meta : {}; + } + toString() { + const l = [this.name + ': ' + this.message]; + if (this.code) { + l.push(''); + l.push('Code: ' + this.code); + } + if (this.serviceName && this.methodName) { + l.push('Method: ' + this.serviceName + '/' + this.methodName); + } + let m = Object.entries(this.meta); + if (m.length) { + l.push(''); + l.push('Meta:'); + for (let [k, v] of m) { + l.push(` ${k}: ${v}`); + } + } + return l.join('\n'); } - return { - __TYPE__: symbol_1.baggageEntryMetadataSymbol, - toString() { - return str; - }, - }; } -exports.baggageEntryMetadataFromString = baggageEntryMetadataFromString; -//# sourceMappingURL=utils.js.map - -/***/ }), - -/***/ 37543: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; +exports.RpcError = RpcError; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.context = void 0; -// Split module-level variable definition into separate files to allow -// tree-shaking on each api instance. -const context_1 = __nccwpck_require__(14430); -/** Entrypoint for context API */ -exports.context = context_1.ContextAPI.getInstance(); -//# sourceMappingURL=context-api.js.map /***/ }), -/***/ 63571: +/***/ 67514: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.NoopContextManager = void 0; -const context_1 = __nccwpck_require__(41544); -class NoopContextManager { - active() { - return context_1.ROOT_CONTEXT; - } - with(_context, fn, thisArg, ...args) { - return fn.call(thisArg, ...args); +exports.stackDuplexStreamingInterceptors = exports.stackClientStreamingInterceptors = exports.stackServerStreamingInterceptors = exports.stackUnaryInterceptors = exports.stackIntercept = void 0; +const runtime_1 = __nccwpck_require__(82905); +/** + * Creates a "stack" of of all interceptors specified in the given `RpcOptions`. + * Used by generated client implementations. + * @internal + */ +function stackIntercept(kind, transport, method, options, input) { + var _a, _b, _c, _d; + if (kind == "unary") { + let tail = (mtd, inp, opt) => transport.unary(mtd, inp, opt); + for (const curr of ((_a = options.interceptors) !== null && _a !== void 0 ? _a : []).filter(i => i.interceptUnary).reverse()) { + const next = tail; + tail = (mtd, inp, opt) => curr.interceptUnary(next, mtd, inp, opt); + } + return tail(method, input, options); } - bind(_context, target) { - return target; + if (kind == "serverStreaming") { + let tail = (mtd, inp, opt) => transport.serverStreaming(mtd, inp, opt); + for (const curr of ((_b = options.interceptors) !== null && _b !== void 0 ? _b : []).filter(i => i.interceptServerStreaming).reverse()) { + const next = tail; + tail = (mtd, inp, opt) => curr.interceptServerStreaming(next, mtd, inp, opt); + } + return tail(method, input, options); } - enable() { - return this; + if (kind == "clientStreaming") { + let tail = (mtd, opt) => transport.clientStreaming(mtd, opt); + for (const curr of ((_c = options.interceptors) !== null && _c !== void 0 ? 
_c : []).filter(i => i.interceptClientStreaming).reverse()) { + const next = tail; + tail = (mtd, opt) => curr.interceptClientStreaming(next, mtd, opt); + } + return tail(method, options); } - disable() { - return this; + if (kind == "duplex") { + let tail = (mtd, opt) => transport.duplex(mtd, opt); + for (const curr of ((_d = options.interceptors) !== null && _d !== void 0 ? _d : []).filter(i => i.interceptDuplex).reverse()) { + const next = tail; + tail = (mtd, opt) => curr.interceptDuplex(next, mtd, opt); + } + return tail(method, options); } + runtime_1.assertNever(kind); } -exports.NoopContextManager = NoopContextManager; -//# sourceMappingURL=NoopContextManager.js.map - -/***/ }), - -/***/ 41544: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. +exports.stackIntercept = stackIntercept; +/** + * @deprecated replaced by `stackIntercept()`, still here to support older generated code */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ROOT_CONTEXT = exports.createContextKey = void 0; -/** Get a key to uniquely identify a context value */ -function createContextKey(description) { - // The specification states that for the same input, multiple calls should - // return different keys. Due to the nature of the JS dependency management - // system, this creates problems where multiple versions of some package - // could hold different keys for the same property. - // - // Therefore, we use Symbol.for which returns the same key for the same input. - return Symbol.for(description); +function stackUnaryInterceptors(transport, method, input, options) { + return stackIntercept("unary", transport, method, options, input); } -exports.createContextKey = createContextKey; -class BaseContext { - /** - * Construct a new context which inherits values from an optional parent context. - * - * @param parentContext a context from which to inherit values - */ - constructor(parentContext) { - // for minification - const self = this; - self._currentContext = parentContext ? 
new Map(parentContext) : new Map(); - self.getValue = (key) => self._currentContext.get(key); - self.setValue = (key, value) => { - const context = new BaseContext(self._currentContext); - context._currentContext.set(key, value); - return context; - }; - self.deleteValue = (key) => { - const context = new BaseContext(self._currentContext); - context._currentContext.delete(key); - return context; - }; - } +exports.stackUnaryInterceptors = stackUnaryInterceptors; +/** + * @deprecated replaced by `stackIntercept()`, still here to support older generated code + */ +function stackServerStreamingInterceptors(transport, method, input, options) { + return stackIntercept("serverStreaming", transport, method, options, input); } -/** The root context is used as the default parent context when there is no active context */ -exports.ROOT_CONTEXT = new BaseContext(); -//# sourceMappingURL=context.js.map - -/***/ }), - -/***/ 94713: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. +exports.stackServerStreamingInterceptors = stackServerStreamingInterceptors; +/** + * @deprecated replaced by `stackIntercept()`, still here to support older generated code */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.diag = void 0; -// Split module-level variable definition into separate files to allow -// tree-shaking on each api instance. -const diag_1 = __nccwpck_require__(30156); +function stackClientStreamingInterceptors(transport, method, options) { + return stackIntercept("clientStreaming", transport, method, options); +} +exports.stackClientStreamingInterceptors = stackClientStreamingInterceptors; /** - * Entrypoint for Diag API. - * Defines Diagnostic handler used for internal diagnostic logging operations. - * The default provides a Noop DiagLogger implementation which may be changed via the - * diag.setLogger(logger: DiagLogger) function. + * @deprecated replaced by `stackIntercept()`, still here to support older generated code */ -exports.diag = diag_1.DiagAPI.instance(); -//# sourceMappingURL=diag-api.js.map +function stackDuplexStreamingInterceptors(transport, method, options) { + return stackIntercept("duplex", transport, method, options); +} +exports.stackDuplexStreamingInterceptors = stackDuplexStreamingInterceptors; + /***/ }), -/***/ 47645: +/***/ 32817: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.mergeRpcOptions = void 0; +const runtime_1 = __nccwpck_require__(82905); +/** + * Merges custom RPC options with defaults. Returns a new instance and keeps + * the "defaults" and the "options" unmodified. * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at + * Merges `RpcMetadata` "meta", overwriting values from "defaults" with + * values from "options". Does not append values to existing entries. * - * https://www.apache.org/licenses/LICENSE-2.0 + * Merges "jsonOptions", including "jsonOptions.typeRegistry", by creating + * a new array that contains types from "options.jsonOptions.typeRegistry" + * first, then types from "defaults.jsonOptions.typeRegistry". * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.DiagComponentLogger = void 0; -const global_utils_1 = __nccwpck_require__(27110); -/** - * Component Logger which is meant to be used as part of any component which - * will add automatically additional namespace in front of the log message. - * It will then forward all message to global diag logger - * @example - * const cLogger = diag.createComponentLogger({ namespace: '@opentelemetry/instrumentation-http' }); - * cLogger.debug('test'); - * // @opentelemetry/instrumentation-http test + * Merges "binaryOptions". + * + * Merges "interceptors" by creating a new array that contains interceptors + * from "defaults" first, then interceptors from "options". + * + * Works with objects that extend `RpcOptions`, but only if the added + * properties are of type Date, primitive like string, boolean, or Array + * of primitives. If you have other property types, you have to merge them + * yourself. */ -class DiagComponentLogger { - constructor(props) { - this._namespace = props.namespace || 'DiagComponentLogger'; - } - debug(...args) { - return logProxy('debug', this._namespace, args); - } - error(...args) { - return logProxy('error', this._namespace, args); - } - info(...args) { - return logProxy('info', this._namespace, args); - } - warn(...args) { - return logProxy('warn', this._namespace, args); - } - verbose(...args) { - return logProxy('verbose', this._namespace, args); +function mergeRpcOptions(defaults, options) { + if (!options) + return defaults; + let o = {}; + copy(defaults, o); + copy(options, o); + for (let key of Object.keys(options)) { + let val = options[key]; + switch (key) { + case "jsonOptions": + o.jsonOptions = runtime_1.mergeJsonOptions(defaults.jsonOptions, o.jsonOptions); + break; + case "binaryOptions": + o.binaryOptions = runtime_1.mergeBinaryOptions(defaults.binaryOptions, o.binaryOptions); + break; + case "meta": + o.meta = {}; + copy(defaults.meta, o.meta); + copy(options.meta, o.meta); + break; + case "interceptors": + o.interceptors = defaults.interceptors ? 
defaults.interceptors.concat(val) : val.concat(); + break; + } } + return o; } -exports.DiagComponentLogger = DiagComponentLogger; -function logProxy(funcName, namespace, args) { - const logger = (0, global_utils_1.getGlobal)('diag'); - // shortcut if logger not set - if (!logger) { +exports.mergeRpcOptions = mergeRpcOptions; +function copy(a, into) { + if (!a) return; + let c = into; + for (let [k, v] of Object.entries(a)) { + if (v instanceof Date) + c[k] = new Date(v.getTime()); + else if (Array.isArray(v)) + c[k] = v.concat(); + else + c[k] = v; } - args.unshift(namespace); - return logger[funcName](...args); } -//# sourceMappingURL=ComponentLogger.js.map + /***/ }), -/***/ 54800: -/***/ ((__unused_webpack_module, exports) => { +/***/ 98790: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.DiagConsoleLogger = void 0; -const consoleMap = [ - { n: 'error', c: 'error' }, - { n: 'warn', c: 'warn' }, - { n: 'info', c: 'info' }, - { n: 'debug', c: 'debug' }, - { n: 'verbose', c: 'trace' }, -]; +exports.RpcOutputStreamController = void 0; +const deferred_1 = __nccwpck_require__(13630); +const runtime_1 = __nccwpck_require__(82905); /** - * A simple Immutable Console based diagnostic logger which will output any messages to the Console. - * If you want to limit the amount of logging to a specific level or lower use the - * {@link createLogLevelDiagLogger} + * A `RpcOutputStream` that you control. */ -class DiagConsoleLogger { +class RpcOutputStreamController { constructor() { - function _consoleFunc(funcName) { - return function (...args) { - if (console) { - // Some environments only expose the console when the F12 developer console is open - // eslint-disable-next-line no-console - let theFunc = console[funcName]; - if (typeof theFunc !== 'function') { - // Not all environments support all functions - // eslint-disable-next-line no-console - theFunc = console.log; - } - // One last final check - if (typeof theFunc === 'function') { - return theFunc.apply(console, args); - } - } - }; + this._lis = { + nxt: [], + msg: [], + err: [], + cmp: [], + }; + this._closed = false; + } + // --- RpcOutputStream callback API + onNext(callback) { + return this.addLis(callback, this._lis.nxt); + } + onMessage(callback) { + return this.addLis(callback, this._lis.msg); + } + onError(callback) { + return this.addLis(callback, this._lis.err); + } + onComplete(callback) { + return this.addLis(callback, this._lis.cmp); + } + addLis(callback, list) { + list.push(callback); + return () => { + let i = list.indexOf(callback); + if (i >= 0) + list.splice(i, 1); + }; + } + // remove all listeners + clearLis() { + for (let l of Object.values(this._lis)) + l.splice(0, l.length); + } + // --- Controller API + /** + * Is this stream already closed by a completion or error? 
+ */ + get closed() { + return this._closed !== false; + } + /** + * Emit message, close with error, or close successfully, but only one + * at a time. + * Can be used to wrap a stream by using the other stream's `onNext`. + */ + notifyNext(message, error, complete) { + runtime_1.assert((message ? 1 : 0) + (error ? 1 : 0) + (complete ? 1 : 0) <= 1, 'only one emission at a time'); + if (message) + this.notifyMessage(message); + if (error) + this.notifyError(error); + if (complete) + this.notifyComplete(); + } + /** + * Emits a new message. Throws if stream is closed. + * + * Triggers onNext and onMessage callbacks. + */ + notifyMessage(message) { + runtime_1.assert(!this.closed, 'stream is closed'); + this.pushIt({ value: message, done: false }); + this._lis.msg.forEach(l => l(message)); + this._lis.nxt.forEach(l => l(message, undefined, false)); + } + /** + * Closes the stream with an error. Throws if stream is closed. + * + * Triggers onNext and onError callbacks. + */ + notifyError(error) { + runtime_1.assert(!this.closed, 'stream is closed'); + this._closed = error; + this.pushIt(error); + this._lis.err.forEach(l => l(error)); + this._lis.nxt.forEach(l => l(undefined, error, false)); + this.clearLis(); + } + /** + * Closes the stream successfully. Throws if stream is closed. + * + * Triggers onNext and onComplete callbacks. + */ + notifyComplete() { + runtime_1.assert(!this.closed, 'stream is closed'); + this._closed = true; + this.pushIt({ value: null, done: true }); + this._lis.cmp.forEach(l => l()); + this._lis.nxt.forEach(l => l(undefined, undefined, true)); + this.clearLis(); + } + /** + * Creates an async iterator (that can be used with `for await {...}`) + * to consume the stream. + * + * Some things to note: + * - If an error occurs, the `for await` will throw it. + * - If an error occurred before the `for await` was started, `for await` + * will re-throw it. + * - If the stream is already complete, the `for await` will be empty. + * - If your `for await` consumes slower than the stream produces, + * for example because you are relaying messages in a slow operation, + * messages are queued. + */ + [Symbol.asyncIterator]() { + // init the iterator state, enabling pushIt() + if (!this._itState) { + this._itState = { q: [] }; + } + // if we are closed, we are definitely not receiving any more messages. + // but we can't let the iterator get stuck. we want to either: + // a) finish the new iterator immediately, because we are completed + // b) reject the new iterator, because we errored + if (this._closed === true) + this.pushIt({ value: null, done: true }); + else if (this._closed !== false) + this.pushIt(this._closed); + // the async iterator + return { + next: () => { + let state = this._itState; + runtime_1.assert(state, "bad state"); // if we don't have a state here, code is broken + // there should be no pending result. + // did the consumer call next() before we resolved our previous result promise? + runtime_1.assert(!state.p, "iterator contract broken"); + // did we produce faster than the iterator consumed? + // return the oldest result from the queue. + let first = state.q.shift(); + if (first) + return ("value" in first) ? Promise.resolve(first) : Promise.reject(first); + // we have no result ATM, but we promise one. + // as soon as we have a result, we must resolve promise. + state.p = new deferred_1.Deferred(); + return state.p.promise; + }, + }; + } + // "push" a new iterator result. + // this either resolves a pending promise, or enqueues the result. 
+ pushIt(result) { + let state = this._itState; + if (!state) + return; + // is the consumer waiting for us? + if (state.p) { + // yes, consumer is waiting for this promise. + const p = state.p; + runtime_1.assert(p.state == deferred_1.DeferredState.PENDING, "iterator contract broken"); + // resolve the promise + ("value" in result) ? p.resolve(result) : p.reject(result); + // must cleanup, otherwise iterator.next() would pick it up again. + delete state.p; } - for (let i = 0; i < consoleMap.length; i++) { - this[consoleMap[i].n] = _consoleFunc(consoleMap[i].c); + else { + // we are producing faster than the iterator consumes. + // push result onto queue. + state.q.push(result); } } } -exports.DiagConsoleLogger = DiagConsoleLogger; -//# sourceMappingURL=consoleLogger.js.map +exports.RpcOutputStreamController = RpcOutputStreamController; + /***/ }), -/***/ 3484: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 41963: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.createLogLevelDiagLogger = void 0; -const types_1 = __nccwpck_require__(60953); -function createLogLevelDiagLogger(maxLevel, logger) { - if (maxLevel < types_1.DiagLogLevel.NONE) { - maxLevel = types_1.DiagLogLevel.NONE; - } - else if (maxLevel > types_1.DiagLogLevel.ALL) { - maxLevel = types_1.DiagLogLevel.ALL; +exports.ServerCallContextController = void 0; +class ServerCallContextController { + constructor(method, headers, deadline, sendResponseHeadersFn, defaultStatus = { code: 'OK', detail: '' }) { + this._cancelled = false; + this._listeners = []; + this.method = method; + this.headers = headers; + this.deadline = deadline; + this.trailers = {}; + this._sendRH = sendResponseHeadersFn; + this.status = defaultStatus; } - // In case the logger is null or undefined - logger = logger || {}; - function _filterFunc(funcName, theLevel) { - const theFunc = logger[funcName]; - if (typeof theFunc === 'function' && maxLevel >= theLevel) { - return theFunc.bind(logger); + /** + * Set the call cancelled. + * + * Invokes all callbacks registered with onCancel() and + * sets `cancelled = true`. + */ + notifyCancelled() { + if (!this._cancelled) { + this._cancelled = true; + for (let l of this._listeners) { + l(); + } } - return function () { }; } - return { - error: _filterFunc('error', types_1.DiagLogLevel.ERROR), - warn: _filterFunc('warn', types_1.DiagLogLevel.WARN), - info: _filterFunc('info', types_1.DiagLogLevel.INFO), - debug: _filterFunc('debug', types_1.DiagLogLevel.DEBUG), - verbose: _filterFunc('verbose', types_1.DiagLogLevel.VERBOSE), - }; + /** + * Send response headers. + */ + sendResponseHeaders(data) { + this._sendRH(data); + } + /** + * Is the call cancelled? + * + * When the client closes the connection before the server + * is done, the call is cancelled. 
+ * + * If you want to cancel a request on the server, throw a + * RpcError with the CANCELLED status code. + */ + get cancelled() { + return this._cancelled; + } + /** + * Add a callback for cancellation. + */ + onCancel(callback) { + const l = this._listeners; + l.push(callback); + return () => { + let i = l.indexOf(callback); + if (i >= 0) + l.splice(i, 1); + }; + } } -exports.createLogLevelDiagLogger = createLogLevelDiagLogger; -//# sourceMappingURL=logLevelLogger.js.map +exports.ServerCallContextController = ServerCallContextController; + /***/ }), -/***/ 60953: -/***/ ((__unused_webpack_module, exports) => { +/***/ 92403: +/***/ (function(__unused_webpack_module, exports) { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.DiagLogLevel = void 0; +exports.ServerStreamingCall = void 0; /** - * Defines the available internal logging levels for the diagnostic logger, the numeric values - * of the levels are defined to match the original values from the initial LogLevel to avoid - * compatibility/migration issues for any implementation that assume the numeric ordering. + * A server streaming RPC call. The client provides exactly one input message + * but the server may respond with 0, 1, or more messages. */ -var DiagLogLevel; -(function (DiagLogLevel) { - /** Diagnostic Logging level setting to disable all logging (except and forced logs) */ - DiagLogLevel[DiagLogLevel["NONE"] = 0] = "NONE"; - /** Identifies an error scenario */ - DiagLogLevel[DiagLogLevel["ERROR"] = 30] = "ERROR"; - /** Identifies a warning scenario */ - DiagLogLevel[DiagLogLevel["WARN"] = 50] = "WARN"; - /** General informational log message */ - DiagLogLevel[DiagLogLevel["INFO"] = 60] = "INFO"; - /** General debug log message */ - DiagLogLevel[DiagLogLevel["DEBUG"] = 70] = "DEBUG"; +class ServerStreamingCall { + constructor(method, requestHeaders, request, headers, response, status, trailers) { + this.method = method; + this.requestHeaders = requestHeaders; + this.request = request; + this.headers = headers; + this.responses = response; + this.status = status; + this.trailers = trailers; + } /** - * Detailed trace level logging should only be used for development, should only be set - * in a development environment. 
+ * Instead of awaiting the response status and trailers, you can + * just as well await this call itself to receive the server outcome. + * You should first setup some listeners to the `request` to + * see the actual messages the server replied with. */ - DiagLogLevel[DiagLogLevel["VERBOSE"] = 80] = "VERBOSE"; - /** Used to set the logging level to include all logging */ - DiagLogLevel[DiagLogLevel["ALL"] = 9999] = "ALL"; -})(DiagLogLevel = exports.DiagLogLevel || (exports.DiagLogLevel = {})); -//# sourceMappingURL=types.js.map + then(onfulfilled, onrejected) { + return this.promiseFinished().then(value => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, reason => onrejected ? Promise.resolve(onrejected(reason)) : Promise.reject(reason)); + } + promiseFinished() { + return __awaiter(this, void 0, void 0, function* () { + let [headers, status, trailers] = yield Promise.all([this.headers, this.status, this.trailers]); + return { + method: this.method, + requestHeaders: this.requestHeaders, + request: this.request, + headers, + status, + trailers, + }; + }); + } +} +exports.ServerStreamingCall = ServerStreamingCall; + /***/ }), -/***/ 32451: +/***/ 99754: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.trace = exports.propagation = exports.metrics = exports.diag = exports.context = exports.INVALID_SPAN_CONTEXT = exports.INVALID_TRACEID = exports.INVALID_SPANID = exports.isValidSpanId = exports.isValidTraceId = exports.isSpanContextValid = exports.createTraceState = exports.TraceFlags = exports.SpanStatusCode = exports.SpanKind = exports.SamplingDecision = exports.ProxyTracerProvider = exports.ProxyTracer = exports.defaultTextMapSetter = exports.defaultTextMapGetter = exports.ValueType = exports.createNoopMeter = exports.DiagLogLevel = exports.DiagConsoleLogger = exports.ROOT_CONTEXT = exports.createContextKey = exports.baggageEntryMetadataFromString = void 0; -var utils_1 = __nccwpck_require__(21922); -Object.defineProperty(exports, "baggageEntryMetadataFromString", ({ enumerable: true, get: function () { return utils_1.baggageEntryMetadataFromString; } })); -// Context APIs -var context_1 = __nccwpck_require__(41544); -Object.defineProperty(exports, "createContextKey", ({ enumerable: true, get: function () { return context_1.createContextKey; } })); -Object.defineProperty(exports, "ROOT_CONTEXT", ({ enumerable: true, get: function () { return context_1.ROOT_CONTEXT; } })); -// Diag APIs -var consoleLogger_1 = __nccwpck_require__(54800); -Object.defineProperty(exports, "DiagConsoleLogger", ({ enumerable: true, get: function () { return consoleLogger_1.DiagConsoleLogger; } })); -var types_1 = __nccwpck_require__(60953); -Object.defineProperty(exports, "DiagLogLevel", ({ enumerable: true, get: function () { return types_1.DiagLogLevel; } })); -// Metrics APIs -var NoopMeter_1 = __nccwpck_require__(2882); -Object.defineProperty(exports, "createNoopMeter", ({ enumerable: true, get: function () { return NoopMeter_1.createNoopMeter; } })); -var Metric_1 = __nccwpck_require__(79540); -Object.defineProperty(exports, "ValueType", ({ enumerable: true, get: function () { return Metric_1.ValueType; } })); -// Propagation APIs -var TextMapPropagator_1 = __nccwpck_require__(60442); -Object.defineProperty(exports, "defaultTextMapGetter", ({ enumerable: true, get: function () { return TextMapPropagator_1.defaultTextMapGetter; } })); -Object.defineProperty(exports, "defaultTextMapSetter", ({ enumerable: true, get: function () { return TextMapPropagator_1.defaultTextMapSetter; } })); -var ProxyTracer_1 = __nccwpck_require__(48754); -Object.defineProperty(exports, "ProxyTracer", ({ enumerable: true, get: function () { return ProxyTracer_1.ProxyTracer; } })); -var ProxyTracerProvider_1 = __nccwpck_require__(5064); -Object.defineProperty(exports, "ProxyTracerProvider", ({ enumerable: true, get: function () { return ProxyTracerProvider_1.ProxyTracerProvider; } })); -var SamplingResult_1 = __nccwpck_require__(26330); -Object.defineProperty(exports, "SamplingDecision", ({ enumerable: true, get: function () { return SamplingResult_1.SamplingDecision; } })); -var span_kind_1 = __nccwpck_require__(71645); -Object.defineProperty(exports, "SpanKind", ({ enumerable: true, get: function () { return span_kind_1.SpanKind; } })); -var status_1 = __nccwpck_require__(34483); -Object.defineProperty(exports, "SpanStatusCode", ({ enumerable: true, get: function () { return status_1.SpanStatusCode; } })); -var trace_flags_1 = __nccwpck_require__(56140); -Object.defineProperty(exports, "TraceFlags", ({ enumerable: true, get: function () { return trace_flags_1.TraceFlags; } })); -var utils_2 = __nccwpck_require__(9840); 
-Object.defineProperty(exports, "createTraceState", ({ enumerable: true, get: function () { return utils_2.createTraceState; } })); -var spancontext_utils_1 = __nccwpck_require__(73312); -Object.defineProperty(exports, "isSpanContextValid", ({ enumerable: true, get: function () { return spancontext_utils_1.isSpanContextValid; } })); -Object.defineProperty(exports, "isValidTraceId", ({ enumerable: true, get: function () { return spancontext_utils_1.isValidTraceId; } })); -Object.defineProperty(exports, "isValidSpanId", ({ enumerable: true, get: function () { return spancontext_utils_1.isValidSpanId; } })); -var invalid_span_constants_1 = __nccwpck_require__(12999); -Object.defineProperty(exports, "INVALID_SPANID", ({ enumerable: true, get: function () { return invalid_span_constants_1.INVALID_SPANID; } })); -Object.defineProperty(exports, "INVALID_TRACEID", ({ enumerable: true, get: function () { return invalid_span_constants_1.INVALID_TRACEID; } })); -Object.defineProperty(exports, "INVALID_SPAN_CONTEXT", ({ enumerable: true, get: function () { return invalid_span_constants_1.INVALID_SPAN_CONTEXT; } })); -// Split module-level variable definition into separate files to allow -// tree-shaking on each api instance. -const context_api_1 = __nccwpck_require__(37543); -Object.defineProperty(exports, "context", ({ enumerable: true, get: function () { return context_api_1.context; } })); -const diag_api_1 = __nccwpck_require__(94713); -Object.defineProperty(exports, "diag", ({ enumerable: true, get: function () { return diag_api_1.diag; } })); -const metrics_api_1 = __nccwpck_require__(23700); -Object.defineProperty(exports, "metrics", ({ enumerable: true, get: function () { return metrics_api_1.metrics; } })); -const propagation_api_1 = __nccwpck_require__(71241); -Object.defineProperty(exports, "propagation", ({ enumerable: true, get: function () { return propagation_api_1.propagation; } })); -const trace_api_1 = __nccwpck_require__(10665); -Object.defineProperty(exports, "trace", ({ enumerable: true, get: function () { return trace_api_1.trace; } })); -// Default export. -exports["default"] = { - context: context_api_1.context, - diag: diag_api_1.diag, - metrics: metrics_api_1.metrics, - propagation: propagation_api_1.propagation, - trace: trace_api_1.trace, -}; -//# sourceMappingURL=index.js.map - -/***/ }), - -/***/ 27110: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.unregisterGlobal = exports.getGlobal = exports.registerGlobal = void 0; -const platform_1 = __nccwpck_require__(43743); -const version_1 = __nccwpck_require__(70628); -const semver_1 = __nccwpck_require__(81146); -const major = version_1.VERSION.split('.')[0]; -const GLOBAL_OPENTELEMETRY_API_KEY = Symbol.for(`opentelemetry.js.api.${major}`); -const _global = platform_1._globalThis; -function registerGlobal(type, instance, diag, allowOverride = false) { - var _a; - const api = (_global[GLOBAL_OPENTELEMETRY_API_KEY] = (_a = _global[GLOBAL_OPENTELEMETRY_API_KEY]) !== null && _a !== void 0 ? _a : { - version: version_1.VERSION, - }); - if (!allowOverride && api[type]) { - // already registered an API of this type - const err = new Error(`@opentelemetry/api: Attempted duplicate registration of API: ${type}`); - diag.error(err.stack || err.message); - return false; - } - if (api.version !== version_1.VERSION) { - // All registered APIs must be of the same version exactly - const err = new Error(`@opentelemetry/api: Registration of version v${api.version} for ${type} does not match previously registered API v${version_1.VERSION}`); - diag.error(err.stack || err.message); - return false; - } - api[type] = instance; - diag.debug(`@opentelemetry/api: Registered a global for ${type} v${version_1.VERSION}.`); - return true; -} -exports.registerGlobal = registerGlobal; -function getGlobal(type) { - var _a, _b; - const globalVersion = (_a = _global[GLOBAL_OPENTELEMETRY_API_KEY]) === null || _a === void 0 ? void 0 : _a.version; - if (!globalVersion || !(0, semver_1.isCompatible)(globalVersion)) { - return; - } - return (_b = _global[GLOBAL_OPENTELEMETRY_API_KEY]) === null || _b === void 0 ? void 0 : _b[type]; -} -exports.getGlobal = getGlobal; -function unregisterGlobal(type, diag) { - diag.debug(`@opentelemetry/api: Unregistering a global for ${type} v${version_1.VERSION}.`); - const api = _global[GLOBAL_OPENTELEMETRY_API_KEY]; - if (api) { - delete api[type]; +exports.ServiceType = void 0; +const reflection_info_1 = __nccwpck_require__(45344); +class ServiceType { + constructor(typeName, methods, options) { + this.typeName = typeName; + this.methods = methods.map(i => reflection_info_1.normalizeMethodInfo(i, this)); + this.options = options !== null && options !== void 0 ? options : {}; } } -exports.unregisterGlobal = unregisterGlobal; -//# sourceMappingURL=global-utils.js.map +exports.ServiceType = ServiceType; + /***/ }), -/***/ 81146: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 6945: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.isCompatible = exports._makeCompatibilityCheck = void 0; -const version_1 = __nccwpck_require__(70628); -const re = /^(\d+)\.(\d+)\.(\d+)(-(.+))?$/; -/** - * Create a function to test an API version to see if it is compatible with the provided ownVersion. - * - * The returned function has the following semantics: - * - Exact match is always compatible - * - Major versions must match exactly - * - 1.x package cannot use global 2.x package - * - 2.x package cannot use global 1.x package - * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API - * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects - * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 - * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor - * - Patch and build tag differences are not considered at this time - * - * @param ownVersion version which should be checked against - */ -function _makeCompatibilityCheck(ownVersion) { - const acceptedVersions = new Set([ownVersion]); - const rejectedVersions = new Set(); - const myVersionMatch = ownVersion.match(re); - if (!myVersionMatch) { - // we cannot guarantee compatibility so we always return noop - return () => false; - } - const ownVersionParsed = { - major: +myVersionMatch[1], - minor: +myVersionMatch[2], - patch: +myVersionMatch[3], - prerelease: myVersionMatch[4], - }; - // if ownVersion has a prerelease tag, versions must match exactly - if (ownVersionParsed.prerelease != null) { - return function isExactmatch(globalVersion) { - return globalVersion === ownVersion; - }; +exports.TestTransport = void 0; +const rpc_error_1 = __nccwpck_require__(65207); +const runtime_1 = __nccwpck_require__(82905); +const rpc_output_stream_1 = __nccwpck_require__(98790); +const rpc_options_1 = __nccwpck_require__(32817); +const unary_call_1 = __nccwpck_require__(19685); +const server_streaming_call_1 = __nccwpck_require__(92403); +const client_streaming_call_1 = __nccwpck_require__(37686); +const duplex_streaming_call_1 = __nccwpck_require__(77617); +/** + * Transport for testing. + */ +class TestTransport { + /** + * Initialize with mock data. Omitted fields have default value. + */ + constructor(data) { + /** + * Suppress warning / error about uncaught rejections of + * "status" and "trailers". + */ + this.suppressUncaughtRejections = true; + this.headerDelay = 10; + this.responseDelay = 50; + this.betweenResponseDelay = 10; + this.afterResponseDelay = 10; + this.data = data !== null && data !== void 0 ? data : {}; + } + /** + * Sent message(s) during the last operation. 
+ */ + get sentMessages() { + if (this.lastInput instanceof TestInputStream) { + return this.lastInput.sent; + } + else if (typeof this.lastInput == "object") { + return [this.lastInput.single]; + } + return []; } - function _reject(v) { - rejectedVersions.add(v); + /** + * Sending message(s) completed? + */ + get sendComplete() { + if (this.lastInput instanceof TestInputStream) { + return this.lastInput.completed; + } + else if (typeof this.lastInput == "object") { + return true; + } return false; } - function _accept(v) { - acceptedVersions.add(v); - return true; + // Creates a promise for response headers from the mock data. + promiseHeaders() { + var _a; + const headers = (_a = this.data.headers) !== null && _a !== void 0 ? _a : TestTransport.defaultHeaders; + return headers instanceof rpc_error_1.RpcError + ? Promise.reject(headers) + : Promise.resolve(headers); } - return function isCompatible(globalVersion) { - if (acceptedVersions.has(globalVersion)) { - return true; + // Creates a promise for a single, valid, message from the mock data. + promiseSingleResponse(method) { + if (this.data.response instanceof rpc_error_1.RpcError) { + return Promise.reject(this.data.response); } - if (rejectedVersions.has(globalVersion)) { - return false; + let r; + if (Array.isArray(this.data.response)) { + runtime_1.assert(this.data.response.length > 0); + r = this.data.response[0]; } - const globalVersionMatch = globalVersion.match(re); - if (!globalVersionMatch) { - // cannot parse other version - // we cannot guarantee compatibility so we always noop - return _reject(globalVersion); - } - const globalVersionParsed = { - major: +globalVersionMatch[1], - minor: +globalVersionMatch[2], - patch: +globalVersionMatch[3], - prerelease: globalVersionMatch[4], - }; - // if globalVersion has a prerelease tag, versions must match exactly - if (globalVersionParsed.prerelease != null) { - return _reject(globalVersion); + else if (this.data.response !== undefined) { + r = this.data.response; + } + else { + r = method.O.create(); + } + runtime_1.assert(method.O.is(r)); + return Promise.resolve(r); + } + /** + * Pushes response messages from the mock data to the output stream. + * If an error response, status or trailers are mocked, the stream is + * closed with the respective error. + * Otherwise, stream is completed successfully. + * + * The returned promise resolves when the stream is closed. It should + * not reject. If it does, code is broken. + */ + streamResponses(method, stream, abort) { + return __awaiter(this, void 0, void 0, function* () { + // normalize "data.response" into an array of valid output messages + const messages = []; + if (this.data.response === undefined) { + messages.push(method.O.create()); + } + else if (Array.isArray(this.data.response)) { + for (let msg of this.data.response) { + runtime_1.assert(method.O.is(msg)); + messages.push(msg); + } + } + else if (!(this.data.response instanceof rpc_error_1.RpcError)) { + runtime_1.assert(method.O.is(this.data.response)); + messages.push(this.data.response); + } + // start the stream with an initial delay. + // if the request is cancelled, notify() error and exit. + try { + yield delay(this.responseDelay, abort)(undefined); + } + catch (error) { + stream.notifyError(error); + return; + } + // if error response was mocked, notify() error (stream is now closed with error) and exit. + if (this.data.response instanceof rpc_error_1.RpcError) { + stream.notifyError(this.data.response); + return; + } + // regular response messages were mocked. 
notify() them.
+            for (let msg of messages) {
+                stream.notifyMessage(msg);
+                // add a short delay between responses
+                // if the request is cancelled, notify() error and exit.
+                try {
+                    yield delay(this.betweenResponseDelay, abort)(undefined);
+                }
+                catch (error) {
+                    stream.notifyError(error);
+                    return;
+                }
+            }
+            // error status was mocked, notify() error (stream is now closed with error) and exit.
+            if (this.data.status instanceof rpc_error_1.RpcError) {
+                stream.notifyError(this.data.status);
+                return;
+            }
+            // error trailers were mocked, notify() error (stream is now closed with error) and exit.
+            if (this.data.trailers instanceof rpc_error_1.RpcError) {
+                stream.notifyError(this.data.trailers);
+                return;
+            }
+            // stream completed successfully
+            stream.notifyComplete();
+        });
+    }
+    // Creates a promise for response status from the mock data.
+    promiseStatus() {
+        var _a;
+        const status = (_a = this.data.status) !== null && _a !== void 0 ? _a : TestTransport.defaultStatus;
+        return status instanceof rpc_error_1.RpcError
+            ? Promise.reject(status)
+            : Promise.resolve(status);
+    }
+    // Creates a promise for response trailers from the mock data.
+    promiseTrailers() {
+        var _a;
+        const trailers = (_a = this.data.trailers) !== null && _a !== void 0 ? _a : TestTransport.defaultTrailers;
+        return trailers instanceof rpc_error_1.RpcError
+            ? Promise.reject(trailers)
+            : Promise.resolve(trailers);
+    }
+    maybeSuppressUncaught(...promise) {
+        if (this.suppressUncaughtRejections) {
+            for (let p of promise) {
+                p.catch(() => {
+                });
+            }
        }
-    // major versions must match
-    if (ownVersionParsed.major !== globalVersionParsed.major) {
-        return _reject(globalVersion);
+    }
+    mergeOptions(options) {
+        return rpc_options_1.mergeRpcOptions({}, options);
+    }
+    unary(method, input, options) {
+        var _a;
+        const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? _a : {}, headersPromise = this.promiseHeaders()
+            .then(delay(this.headerDelay, options.abort)), responsePromise = headersPromise
+            .catch(_ => {
+        })
+            .then(delay(this.responseDelay, options.abort))
+            .then(_ => this.promiseSingleResponse(method)), statusPromise = responsePromise
+            .catch(_ => {
+        })
+            .then(delay(this.afterResponseDelay, options.abort))
+            .then(_ => this.promiseStatus()), trailersPromise = responsePromise
+            .catch(_ => {
+        })
+            .then(delay(this.afterResponseDelay, options.abort))
+            .then(_ => this.promiseTrailers());
+        this.maybeSuppressUncaught(statusPromise, trailersPromise);
+        this.lastInput = { single: input };
+        return new unary_call_1.UnaryCall(method, requestHeaders, input, headersPromise, responsePromise, statusPromise, trailersPromise);
+    }
+    serverStreaming(method, input, options) {
+        var _a;
+        const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? _a : {}, headersPromise = this.promiseHeaders()
+            .then(delay(this.headerDelay, options.abort)), outputStream = new rpc_output_stream_1.RpcOutputStreamController(), responseStreamClosedPromise = headersPromise
+            .then(delay(this.responseDelay, options.abort))
+            .catch(() => {
+        })
+            .then(() => this.streamResponses(method, outputStream, options.abort))
+            .then(delay(this.afterResponseDelay, options.abort)), statusPromise = responseStreamClosedPromise
+            .then(() => this.promiseStatus()), trailersPromise = responseStreamClosedPromise
+            .then(() => this.promiseTrailers());
+        this.maybeSuppressUncaught(statusPromise, trailersPromise);
+        this.lastInput = { single: input };
+        return new server_streaming_call_1.ServerStreamingCall(method, requestHeaders, input, headersPromise, outputStream, statusPromise, trailersPromise);
+    }
+    clientStreaming(method, options) {
+        var _a;
+        const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? _a : {}, headersPromise = this.promiseHeaders()
+            .then(delay(this.headerDelay, options.abort)), responsePromise = headersPromise
+            .catch(_ => {
+        })
+            .then(delay(this.responseDelay, options.abort))
+            .then(_ => this.promiseSingleResponse(method)), statusPromise = responsePromise
+            .catch(_ => {
+        })
+            .then(delay(this.afterResponseDelay, options.abort))
+            .then(_ => this.promiseStatus()), trailersPromise = responsePromise
+            .catch(_ => {
+        })
+            .then(delay(this.afterResponseDelay, options.abort))
+            .then(_ => this.promiseTrailers());
+        this.maybeSuppressUncaught(statusPromise, trailersPromise);
+        this.lastInput = new TestInputStream(this.data, options.abort);
+        return new client_streaming_call_1.ClientStreamingCall(method, requestHeaders, this.lastInput, headersPromise, responsePromise, statusPromise, trailersPromise);
+    }
+    duplex(method, options) {
+        var _a;
+        const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? _a : {}, headersPromise = this.promiseHeaders()
+            .then(delay(this.headerDelay, options.abort)), outputStream = new rpc_output_stream_1.RpcOutputStreamController(), responseStreamClosedPromise = headersPromise
+            .then(delay(this.responseDelay, options.abort))
+            .catch(() => {
+        })
+            .then(() => this.streamResponses(method, outputStream, options.abort))
+            .then(delay(this.afterResponseDelay, options.abort)), statusPromise = responseStreamClosedPromise
+            .then(() => this.promiseStatus()), trailersPromise = responseStreamClosedPromise
+            .then(() => this.promiseTrailers());
+        this.maybeSuppressUncaught(statusPromise, trailersPromise);
+        this.lastInput = new TestInputStream(this.data, options.abort);
+        return new duplex_streaming_call_1.DuplexStreamingCall(method, requestHeaders, this.lastInput, headersPromise, outputStream, statusPromise, trailersPromise);
+    }
+}
+exports.TestTransport = TestTransport;
+TestTransport.defaultHeaders = {
+    responseHeader: "test"
+};
+TestTransport.defaultStatus = {
+    code: "OK", detail: "all good"
+};
+TestTransport.defaultTrailers = {
+    responseTrailer: "test"
+};
+function delay(ms, abort) {
+    return (v) => new Promise((resolve, reject) => {
+        if (abort === null || abort === void 0 ? void 0 : abort.aborted) {
+            reject(new rpc_error_1.RpcError("user cancel", "CANCELLED"));
        }
-    if (ownVersionParsed.major === 0) {
-        if (ownVersionParsed.minor === globalVersionParsed.minor &&
-            ownVersionParsed.patch <= globalVersionParsed.patch) {
-            return _accept(globalVersion);
+        else {
+            const id = setTimeout(() => resolve(v), ms);
+            if (abort) {
+                abort.addEventListener("abort", ev => {
+                    clearTimeout(id);
+                    reject(new rpc_error_1.RpcError("user cancel", "CANCELLED"));
+                });
            }
-            return _reject(globalVersion);
        }
-        if (ownVersionParsed.minor <= globalVersionParsed.minor) {
-            return _accept(globalVersion);
+    });
+}
+class TestInputStream {
+    constructor(data, abort) {
+        this._completed = false;
+        this._sent = [];
+        this.data = data;
+        this.abort = abort;
+    }
+    get sent() {
+        return this._sent;
+    }
+    get completed() {
+        return this._completed;
+    }
+    send(message) {
+        if (this.data.inputMessage instanceof rpc_error_1.RpcError) {
+            return Promise.reject(this.data.inputMessage);
+        }
+        const delayMs = this.data.inputMessage === undefined
+            ? 10
+            : this.data.inputMessage;
+        return Promise.resolve(undefined)
+            .then(() => {
+            this._sent.push(message);
+        })
+            .then(delay(delayMs, this.abort));
+    }
+    complete() {
+        if (this.data.inputComplete instanceof rpc_error_1.RpcError) {
+            return Promise.reject(this.data.inputComplete);
        }
-        return _reject(globalVersion);
-    };
+        const delayMs = this.data.inputComplete === undefined
+            ? 10
+            : this.data.inputComplete;
+        return Promise.resolve(undefined)
+            .then(() => {
+            this._completed = true;
+        })
+            .then(delay(delayMs, this.abort));
+    }
}
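// A minimal usage sketch for the delay(ms, abort) helper above (illustrative
// only, not part of the bundled output). The helper is curried so it can sit
// inside a promise chain: it passes its input through after `ms` milliseconds,
// and rejects with a CANCELLED RpcError as soon as the AbortSignal fires:
//
//   const abort = new AbortController();
//   Promise.resolve("payload")
//       .then(delay(50, abort.signal))
//       .catch(e => console.log(e.code)); // "CANCELLED" if abort() wins the race
//   abort.abort();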
-exports._makeCompatibilityCheck = _makeCompatibilityCheck;
-/**
- * Test an API version to see if it is compatible with this API.
- *
- * - Exact match is always compatible
- * - Major versions must match exactly
- *   - 1.x package cannot use global 2.x package
- *   - 2.x package cannot use global 1.x package
- * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API
- *   - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects
- *   - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3
- * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor
- * - Patch and build tag differences are not considered at this time
- *
- * @param version version of the API requesting an instance of the global API
- */
-exports.isCompatible = _makeCompatibilityCheck(version_1.VERSION);
-//# sourceMappingURL=semver.js.map
-
-/***/ }),
-
-/***/ 23700:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-"use strict";
-
-/*
- * Copyright The OpenTelemetry Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.metrics = void 0;
-// Split module-level variable definition into separate files to allow
-// tree-shaking on each api instance.
-const metrics_1 = __nccwpck_require__(13856);
-/** Entrypoint for metrics API */
-exports.metrics = metrics_1.MetricsAPI.getInstance();
-//# sourceMappingURL=metrics-api.js.map
-
-/***/ }),
-
-/***/ 79540:
-/***/ ((__unused_webpack_module, exports) => {
-
-"use strict";
-
-/*
- * Copyright The OpenTelemetry Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.ValueType = void 0;
-/** The Type of value. It describes how the data is reported. */
-var ValueType;
-(function (ValueType) {
-    ValueType[ValueType["INT"] = 0] = "INT";
-    ValueType[ValueType["DOUBLE"] = 1] = "DOUBLE";
-})(ValueType = exports.ValueType || (exports.ValueType = {}));
-//# sourceMappingURL=Metric.js.map

/***/ }),

-/***/ 2882:
-/***/ ((__unused_webpack_module, exports) => {
+/***/ 19685:
+/***/ (function(__unused_webpack_module, exports) {

"use strict";

-/*
- * Copyright The OpenTelemetry Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.createNoopMeter = exports.NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC = exports.NOOP_OBSERVABLE_GAUGE_METRIC = exports.NOOP_OBSERVABLE_COUNTER_METRIC = exports.NOOP_UP_DOWN_COUNTER_METRIC = exports.NOOP_HISTOGRAM_METRIC = exports.NOOP_COUNTER_METRIC = exports.NOOP_METER = exports.NoopObservableUpDownCounterMetric = exports.NoopObservableGaugeMetric = exports.NoopObservableCounterMetric = exports.NoopObservableMetric = exports.NoopHistogramMetric = exports.NoopUpDownCounterMetric = exports.NoopCounterMetric = exports.NoopMetric = exports.NoopMeter = void 0;
+exports.UnaryCall = void 0;
/**
- * NoopMeter is a noop implementation of the {@link Meter} interface. It reuses
- * constant NoopMetrics for all of its methods.
+ * A unary RPC call. Unary means there is exactly one input message and
+ * exactly one output message unless an error occurred.
 */
-class NoopMeter {
-    constructor() { }
-    /**
-     * @see {@link Meter.createHistogram}
-     */
-    createHistogram(_name, _options) {
-        return exports.NOOP_HISTOGRAM_METRIC;
-    }
-    /**
-     * @see {@link Meter.createCounter}
-     */
-    createCounter(_name, _options) {
-        return exports.NOOP_COUNTER_METRIC;
-    }
-    /**
-     * @see {@link Meter.createUpDownCounter}
-     */
-    createUpDownCounter(_name, _options) {
-        return exports.NOOP_UP_DOWN_COUNTER_METRIC;
-    }
-    /**
-     * @see {@link Meter.createObservableGauge}
-     */
-    createObservableGauge(_name, _options) {
-        return exports.NOOP_OBSERVABLE_GAUGE_METRIC;
+class UnaryCall {
+    constructor(method, requestHeaders, request, headers, response, status, trailers) {
+        this.method = method;
+        this.requestHeaders = requestHeaders;
+        this.request = request;
+        this.headers = headers;
+        this.response = response;
+        this.status = status;
+        this.trailers = trailers;
    }
    /**
-     * @see {@link Meter.createObservableCounter}
+     * If you are only interested in the final outcome of this call,
+     * you can await it to receive a `FinishedUnaryCall`.
     */
-    createObservableCounter(_name, _options) {
-        return exports.NOOP_OBSERVABLE_COUNTER_METRIC;
+    then(onfulfilled, onrejected) {
+        return this.promiseFinished().then(value => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, reason => onrejected ? Promise.resolve(onrejected(reason)) : Promise.reject(reason));
    }
-    /**
-     * @see {@link Meter.createObservableUpDownCounter}
-     */
-    createObservableUpDownCounter(_name, _options) {
-        return exports.NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC;
+    promiseFinished() {
+        return __awaiter(this, void 0, void 0, function* () {
+            let [headers, response, status, trailers] = yield Promise.all([this.headers, this.response, this.status, this.trailers]);
+            return {
+                method: this.method,
+                requestHeaders: this.requestHeaders,
+                request: this.request,
+                headers,
+                response,
+                status,
+                trailers
+            };
+        });
    }
-    /**
-     * @see {@link Meter.addBatchObservableCallback}
-     */
-    addBatchObservableCallback(_callback, _observables) { }
-    /**
-     * @see {@link Meter.removeBatchObservableCallback}
-     */
-    removeBatchObservableCallback(_callback) { }
-}
-exports.NoopMeter = NoopMeter;
-class NoopMetric {
-}
-exports.NoopMetric = NoopMetric;
-class NoopCounterMetric extends NoopMetric {
-    add(_value, _attributes) { }
-}
-exports.NoopCounterMetric = NoopCounterMetric;
-class NoopUpDownCounterMetric extends NoopMetric {
-    add(_value, _attributes) { }
-}
-exports.NoopUpDownCounterMetric = NoopUpDownCounterMetric;
-class NoopHistogramMetric extends NoopMetric {
-    record(_value, _attributes) { }
}
-exports.NoopHistogramMetric = NoopHistogramMetric;
-class NoopObservableMetric {
-    addCallback(_callback) { }
-    removeCallback(_callback) { }
-}
-exports.NoopObservableMetric = NoopObservableMetric;
-class NoopObservableCounterMetric extends NoopObservableMetric {
-}
-exports.NoopObservableCounterMetric = NoopObservableCounterMetric;
-class NoopObservableGaugeMetric extends NoopObservableMetric {
-}
-exports.NoopObservableGaugeMetric = NoopObservableGaugeMetric;
-class NoopObservableUpDownCounterMetric extends NoopObservableMetric {
-}
-exports.NoopObservableUpDownCounterMetric = NoopObservableUpDownCounterMetric;
-exports.NOOP_METER = new NoopMeter();
-// Synchronous instruments
-exports.NOOP_COUNTER_METRIC = new NoopCounterMetric();
-exports.NOOP_HISTOGRAM_METRIC = new NoopHistogramMetric();
-exports.NOOP_UP_DOWN_COUNTER_METRIC = new NoopUpDownCounterMetric();
-// Asynchronous instruments
-exports.NOOP_OBSERVABLE_COUNTER_METRIC = new NoopObservableCounterMetric();
-exports.NOOP_OBSERVABLE_GAUGE_METRIC = new NoopObservableGaugeMetric();
-exports.NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC = new NoopObservableUpDownCounterMetric();
-/**
- * Create a no-op Meter
- */
-function createNoopMeter() {
-    return exports.NOOP_METER;
-}
-exports.createNoopMeter = createNoopMeter;
-//# sourceMappingURL=NoopMeter.js.map
+exports.UnaryCall = UnaryCall;
+

/***/ }),
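// A usage sketch for the UnaryCall above (illustrative only; `client` and
// `getThing` are hypothetical names for a generated client and one of its
// unary methods). Because UnaryCall implements then(), awaiting the call
// yields the finished call with headers, response, status and trailers:
//
//   const finished = await client.getThing({ id: 1 }); // FinishedUnaryCall
//   console.log(finished.status.code, finished.response);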
-/***/ 99682:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+/***/ 31589:
+/***/ ((__unused_webpack_module, exports) => {

"use strict";

-/*
- * Copyright The OpenTelemetry Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.NOOP_METER_PROVIDER = exports.NoopMeterProvider = void 0;
-const NoopMeter_1 = __nccwpck_require__(2882);
+exports.assertFloat32 = exports.assertUInt32 = exports.assertInt32 = exports.assertNever = exports.assert = void 0;
/**
- * An implementation of the {@link MeterProvider} which returns an impotent Meter
- * for all calls to `getMeter`
+ * assert that condition is true or throw error (with message)
 */
-class NoopMeterProvider {
-    getMeter(_name, _version, _options) {
-        return NoopMeter_1.NOOP_METER;
+function assert(condition, msg) {
+    if (!condition) {
+        throw new Error(msg);
    }
}
-exports.NoopMeterProvider = NoopMeterProvider;
-exports.NOOP_METER_PROVIDER = new NoopMeterProvider();
-//# sourceMappingURL=NoopMeterProvider.js.map
+exports.assert = assert;
+/**
+ * assert that value cannot exist = type `never`. throw runtime error if it does.
+ */
+function assertNever(value, msg) {
+    throw new Error(msg !== null && msg !== void 0 ? msg : 'Unexpected object: ' + value);
+}
+exports.assertNever = assertNever;
+const FLOAT32_MAX = 3.4028234663852886e+38, FLOAT32_MIN = -3.4028234663852886e+38, UINT32_MAX = 0xFFFFFFFF, INT32_MAX = 0X7FFFFFFF, INT32_MIN = -0X80000000;
+function assertInt32(arg) {
+    if (typeof arg !== "number")
+        throw new Error('invalid int 32: ' + typeof arg);
+    if (!Number.isInteger(arg) || arg > INT32_MAX || arg < INT32_MIN)
+        throw new Error('invalid int 32: ' + arg);
+}
+exports.assertInt32 = assertInt32;
+function assertUInt32(arg) {
+    if (typeof arg !== "number")
+        throw new Error('invalid uint 32: ' + typeof arg);
+    if (!Number.isInteger(arg) || arg > UINT32_MAX || arg < 0)
+        throw new Error('invalid uint 32: ' + arg);
+}
+exports.assertUInt32 = assertUInt32;
+function assertFloat32(arg) {
+    if (typeof arg !== "number")
+        throw new Error('invalid float 32: ' + typeof arg);
+    if (!Number.isFinite(arg))
+        return;
+    if (arg > FLOAT32_MAX || arg < FLOAT32_MIN)
+        throw new Error('invalid float 32: ' + arg);
+}
+exports.assertFloat32 = assertFloat32;
+

/***/ }),
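// A small usage sketch for the assertion helpers above (illustrative only,
// not part of the bundled output):
//
//   assertInt32(123);          // ok
//   assertUInt32(0xFFFFFFFF);  // ok, largest uint32
//   assertInt32(0x80000000);   // throws: 2147483648 > INT32_MAX (0x7FFFFFFF)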
-/***/ 43743:
-/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+/***/ 25081:
+/***/ ((__unused_webpack_module, exports) => {

"use strict";

-/*
- * Copyright The OpenTelemetry Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __exportStar = (this && this.__exportStar) || function(m, exports) {
-    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
-};
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-__exportStar(__nccwpck_require__(10309), exports);
-//# sourceMappingURL=index.js.map
-
-/***/ }),
-
-/***/ 43640:
-/***/ ((__unused_webpack_module, exports) => {
-
-"use strict";
-
-/*
- * Copyright The OpenTelemetry Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports._globalThis = void 0;
-/** only globals that common to node and browsers are allowed */
-// eslint-disable-next-line node/no-unsupported-features/es-builtins
-exports._globalThis = typeof globalThis === 'object' ? globalThis : global;
-//# sourceMappingURL=globalThis.js.map
-
-/***/ }),
-
-/***/ 10309:
-/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
-
-"use strict";
-
-/*
- * Copyright The OpenTelemetry Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __exportStar = (this && this.__exportStar) || function(m, exports) {
-    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
-};
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-__exportStar(__nccwpck_require__(43640), exports);
-//# sourceMappingURL=index.js.map
-
-/***/ }),
-
-/***/ 71241:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-"use strict";
-
-/*
- * Copyright The OpenTelemetry Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.propagation = void 0;
-// Split module-level variable definition into separate files to allow
-// tree-shaking on each api instance.
-const propagation_1 = __nccwpck_require__(2596);
-/** Entrypoint for propagation API */
-exports.propagation = propagation_1.PropagationAPI.getInstance();
-//# sourceMappingURL=propagation-api.js.map
-
-/***/ }),
-
-/***/ 98319:
-/***/ ((__unused_webpack_module, exports) => {
-
-"use strict";
-
-/*
- * Copyright The OpenTelemetry Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
+exports.base64encode = exports.base64decode = void 0;
+// lookup table from base64 character to byte
+let encTable = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'.split('');
+// lookup table from base64 character *code* to byte because lookup by number is fast
+let decTable = [];
+for (let i = 0; i < encTable.length; i++)
+    decTable[encTable[i].charCodeAt(0)] = i;
+// support base64url variants
+decTable["-".charCodeAt(0)] = encTable.indexOf("+");
+decTable["_".charCodeAt(0)] = encTable.indexOf("/");
+/**
+ * Decodes a base64 string to a byte array.
 *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * - ignores white-space, including line breaks and tabs
+ * - allows inner padding (can decode concatenated base64 strings)
+ * - does not require padding
+ * - understands base64url encoding:
+ *   "-" instead of "+",
+ *   "_" instead of "/",
+ *   no padding
 */
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.NoopTextMapPropagator = void 0;
+function base64decode(base64Str) {
+    // estimate byte size, not accounting for inner padding and whitespace
+    let es = base64Str.length * 3 / 4;
+    // if (es % 3 !== 0)
+    // throw new Error('invalid base64 string');
+    if (base64Str[base64Str.length - 2] == '=')
+        es -= 2;
+    else if (base64Str[base64Str.length - 1] == '=')
+        es -= 1;
+    let bytes = new Uint8Array(es), bytePos = 0, // position in byte array
+    groupPos = 0, // position in base64 group
+    b, // current byte
+    p = 0 // previous byte
+    ;
+    for (let i = 0; i < base64Str.length; i++) {
+        b = decTable[base64Str.charCodeAt(i)];
+        if (b === undefined) {
+            // noinspection FallThroughInSwitchStatementJS
+            switch (base64Str[i]) {
+                case '=':
+                    groupPos = 0; // reset state when padding found
+                case '\n':
+                case '\r':
+                case '\t':
+                case ' ':
+                    continue; // skip white-space, and padding
+                default:
+                    throw Error(`invalid base64 string.`);
+            }
+        }
+        switch (groupPos) {
+            case 0:
+                p = b;
+                groupPos = 1;
+                break;
+            case 1:
+                bytes[bytePos++] = p << 2 | (b & 48) >> 4;
+                p = b;
+                groupPos = 2;
+                break;
+            case 2:
+                bytes[bytePos++] = (p & 15) << 4 | (b & 60) >> 2;
+                p = b;
+                groupPos = 3;
+                break;
+            case 3:
+                bytes[bytePos++] = (p & 3) << 6 | b;
+                groupPos = 0;
+                break;
+        }
+    }
+    if (groupPos == 1)
+        throw Error(`invalid base64 string.`);
+    return bytes.subarray(0, bytePos);
+}
+exports.base64decode = base64decode;
/**
- * No-op implementations of {@link TextMapPropagator}.
+ * Encodes a byte array to a base64 string.
+ * Adds padding at the end.
+ * Does not insert newlines.
 */
-class NoopTextMapPropagator {
-    /** Noop inject function does nothing */
-    inject(_context, _carrier) { }
-    /** Noop extract function does nothing and returns the input context */
-    extract(context, _carrier) {
-        return context;
+function base64encode(bytes) {
+    let base64 = '', groupPos = 0, // position in base64 group
+    b, // current byte
+    p = 0; // carry over from previous byte
+    for (let i = 0; i < bytes.length; i++) {
+        b = bytes[i];
+        switch (groupPos) {
+            case 0:
+                base64 += encTable[b >> 2];
+                p = (b & 3) << 4;
+                groupPos = 1;
+                break;
+            case 1:
+                base64 += encTable[p | b >> 4];
+                p = (b & 15) << 2;
+                groupPos = 2;
+                break;
+            case 2:
+                base64 += encTable[p | b >> 6];
+                base64 += encTable[b & 63];
+                groupPos = 0;
+                break;
+        }
    }
-    fields() {
-        return [];
+    // padding required?
+    if (groupPos) {
+        base64 += encTable[p];
+        base64 += '=';
+        if (groupPos == 1)
+            base64 += '=';
    }
+    return base64;
}
-exports.NoopTextMapPropagator = NoopTextMapPropagator;
-//# sourceMappingURL=NoopTextMapPropagator.js.map
-
-/***/ }),
-
-/***/ 60442:
-/***/ ((__unused_webpack_module, exports) => {
-
-"use strict";
-
-/*
- * Copyright The OpenTelemetry Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.defaultTextMapSetter = exports.defaultTextMapGetter = void 0;
-exports.defaultTextMapGetter = {
-    get(carrier, key) {
-        if (carrier == null) {
-            return undefined;
-        }
-        return carrier[key];
-    },
-    keys(carrier) {
-        if (carrier == null) {
-            return [];
-        }
-        return Object.keys(carrier);
-    },
-};
-exports.defaultTextMapSetter = {
-    set(carrier, key, value) {
-        if (carrier == null) {
-            return;
-        }
-        carrier[key] = value;
-    },
-};
-//# sourceMappingURL=TextMapPropagator.js.map
+exports.base64encode = base64encode;
+

/***/ }),
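// A round-trip sketch for the helpers above (illustrative only, not part of
// the bundled output). base64decode also accepts the base64url alphabet and
// unpadded input, while base64encode always pads:
//
//   const bytes = base64decode("aGVsbG8");  // Uint8Array [104,101,108,108,111]
//   const str = base64encode(bytes);        // "aGVsbG8=" (padded)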
-/***/ 10665:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+/***/ 46158:
+/***/ ((__unused_webpack_module, exports) => {

"use strict";

-/*
- * Copyright The OpenTelemetry Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.trace = void 0;
-// Split module-level variable definition into separate files to allow
-// tree-shaking on each api instance.
-const trace_1 = __nccwpck_require__(53675);
-/** Entrypoint for trace API */
-exports.trace = trace_1.TraceAPI.getInstance();
-//# sourceMappingURL=trace-api.js.map
-
-/***/ }),
-
-/***/ 59894:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-"use strict";
-
-/*
- * Copyright The OpenTelemetry Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.NonRecordingSpan = void 0;
-const invalid_span_constants_1 = __nccwpck_require__(12999);
+exports.WireType = exports.mergeBinaryOptions = exports.UnknownFieldHandler = void 0;
+/**
+ * This handler implements the default behaviour for unknown fields.
+ * When reading data, unknown fields are stored on the message, in a
+ * symbol property.
+ * When writing data, the symbol property is queried and unknown fields
+ * are serialized into the output again.
+ */
+var UnknownFieldHandler;
+(function (UnknownFieldHandler) {
+    /**
+     * The symbol used to store unknown fields for a message.
+     * The property must conform to `UnknownFieldContainer`.
+     */
+    UnknownFieldHandler.symbol = Symbol.for("protobuf-ts/unknown");
+    /**
+     * Store an unknown field during binary read directly on the message.
+     * This method is compatible with `BinaryReadOptions.readUnknownField`.
+     */
+    UnknownFieldHandler.onRead = (typeName, message, fieldNo, wireType, data) => {
+        let container = is(message) ? message[UnknownFieldHandler.symbol] : message[UnknownFieldHandler.symbol] = [];
+        container.push({ no: fieldNo, wireType, data });
+    };
+    /**
+     * Write unknown fields stored for the message to the writer.
+     * This method is compatible with `BinaryWriteOptions.writeUnknownFields`.
+     */
+    UnknownFieldHandler.onWrite = (typeName, message, writer) => {
+        for (let { no, wireType, data } of UnknownFieldHandler.list(message))
+            writer.tag(no, wireType).raw(data);
+    };
+    /**
+     * List unknown fields stored for the message.
+     * Note that there may be multiple fields with the same number.
+     */
+    UnknownFieldHandler.list = (message, fieldNo) => {
+        if (is(message)) {
+            let all = message[UnknownFieldHandler.symbol];
+            return fieldNo ? all.filter(uf => uf.no == fieldNo) : all;
+        }
+        return [];
+    };
+    /**
+     * Returns the last unknown field by field number.
+     */
+    UnknownFieldHandler.last = (message, fieldNo) => UnknownFieldHandler.list(message, fieldNo).slice(-1)[0];
+    const is = (message) => message && Array.isArray(message[UnknownFieldHandler.symbol]);
+})(UnknownFieldHandler = exports.UnknownFieldHandler || (exports.UnknownFieldHandler = {}));
/**
- * The NonRecordingSpan is the default {@link Span} that is used when no Span
- * implementation is available. All operations are no-op including context
- * propagation.
+ * Merges binary write or read options. Later values override earlier values.
 */
-class NonRecordingSpan {
-    constructor(_spanContext = invalid_span_constants_1.INVALID_SPAN_CONTEXT) {
-        this._spanContext = _spanContext;
-    }
-    // Returns a SpanContext.
-    spanContext() {
-        return this._spanContext;
-    }
-    // By default does nothing
-    setAttribute(_key, _value) {
-        return this;
-    }
-    // By default does nothing
-    setAttributes(_attributes) {
-        return this;
-    }
-    // By default does nothing
-    addEvent(_name, _attributes) {
-        return this;
-    }
-    // By default does nothing
-    setStatus(_status) {
-        return this;
-    }
-    // By default does nothing
-    updateName(_name) {
-        return this;
-    }
-    // By default does nothing
-    end(_endTime) { }
-    // isRecording always returns false for NonRecordingSpan.
-    isRecording() {
-        return false;
-    }
-    // By default does nothing
-    recordException(_exception, _time) { }
+function mergeBinaryOptions(a, b) {
+    return Object.assign(Object.assign({}, a), b);
}
-exports.NonRecordingSpan = NonRecordingSpan;
-//# sourceMappingURL=NonRecordingSpan.js.map
+exports.mergeBinaryOptions = mergeBinaryOptions;
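// A sketch of how these pieces fit together (illustrative only, not part of
// the bundled output): wiring UnknownFieldHandler.onRead into the read options
// makes unrecognized fields survive a read-modify-write cycle, since onWrite
// can re-emit them later.
//
//   const opts = mergeBinaryOptions(someReadOptions,
//       { readUnknownField: UnknownFieldHandler.onRead });
//   // after reading, UnknownFieldHandler.list(message) returns the stored
//   // { no, wireType, data } entries.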
+/**
+ * Protobuf binary format wire types.
 *
+ * A wire type provides just enough information to find the length of the
+ * following value.
 *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * See https://developers.google.com/protocol-buffers/docs/encoding#structure
 */
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.NoopTracer = void 0;
-const context_1 = __nccwpck_require__(14430);
-const context_utils_1 = __nccwpck_require__(59317);
-const NonRecordingSpan_1 = __nccwpck_require__(59894);
-const spancontext_utils_1 = __nccwpck_require__(73312);
-const contextApi = context_1.ContextAPI.getInstance();
-/**
- * No-op implementations of {@link Tracer}.
- */
-class NoopTracer {
-    // startSpan starts a noop span.
-    startSpan(name, options, context = contextApi.active()) {
-        const root = Boolean(options === null || options === void 0 ? void 0 : options.root);
-        if (root) {
-            return new NonRecordingSpan_1.NonRecordingSpan();
-        }
-        const parentFromContext = context && (0, context_utils_1.getSpanContext)(context);
-        if (isSpanContext(parentFromContext) &&
-            (0, spancontext_utils_1.isSpanContextValid)(parentFromContext)) {
-            return new NonRecordingSpan_1.NonRecordingSpan(parentFromContext);
-        }
-        else {
-            return new NonRecordingSpan_1.NonRecordingSpan();
-        }
-    }
-    startActiveSpan(name, arg2, arg3, arg4) {
-        let opts;
-        let ctx;
-        let fn;
-        if (arguments.length < 2) {
-            return;
-        }
-        else if (arguments.length === 2) {
-            fn = arg2;
-        }
-        else if (arguments.length === 3) {
-            opts = arg2;
-            fn = arg3;
-        }
-        else {
-            opts = arg2;
-            ctx = arg3;
-            fn = arg4;
-        }
-        const parentContext = ctx !== null && ctx !== void 0 ? ctx : contextApi.active();
-        const span = this.startSpan(name, opts, parentContext);
-        const contextWithSpanSet = (0, context_utils_1.setSpan)(parentContext, span);
-        return contextApi.with(contextWithSpanSet, fn, undefined, span);
-    }
-}
-exports.NoopTracer = NoopTracer;
-function isSpanContext(spanContext) {
-    return (typeof spanContext === 'object' &&
-        typeof spanContext['spanId'] === 'string' &&
-        typeof spanContext['traceId'] === 'string' &&
-        typeof spanContext['traceFlags'] === 'number');
-}
-//# sourceMappingURL=NoopTracer.js.map
+var WireType;
+(function (WireType) {
+    /**
+     * Used for int32, int64, uint32, uint64, sint32, sint64, bool, enum
+     */
+    WireType[WireType["Varint"] = 0] = "Varint";
+    /**
+     * Used for fixed64, sfixed64, double.
+     * Always 8 bytes with little-endian byte order.
+     */
+    WireType[WireType["Bit64"] = 1] = "Bit64";
+    /**
+     * Used for string, bytes, embedded messages, packed repeated fields
+     *
+     * Only repeated numeric types (types which use the varint, 32-bit,
+     * or 64-bit wire types) can be packed. In proto3, such fields are
+     * packed by default.
+     */
+    WireType[WireType["LengthDelimited"] = 2] = "LengthDelimited";
+    /**
+     * Used for groups
+     * @deprecated
+     */
+    WireType[WireType["StartGroup"] = 3] = "StartGroup";
+    /**
+     * Used for groups
+     * @deprecated
+     */
+    WireType[WireType["EndGroup"] = 4] = "EndGroup";
+    /**
+     * Used for fixed32, sfixed32, float.
+     * Always 4 bytes with little-endian byte order.
+     */
+    WireType[WireType["Bit32"] = 5] = "Bit32";
+})(WireType = exports.WireType || (exports.WireType = {}));

/***/ }),

-/***/ 13801:
+/***/ 94672:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";

-/*
- * Copyright The OpenTelemetry Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.NoopTracerProvider = void 0;
-const NoopTracer_1 = __nccwpck_require__(68427);
-/**
- * An implementation of the {@link TracerProvider} which returns an impotent
- * Tracer for all calls to `getTracer`.
- *
- * All operations are no-op.
- */
-class NoopTracerProvider {
-    getTracer(_name, _version, _options) {
-        return new NoopTracer_1.NoopTracer();
-    }
-}
-exports.NoopTracerProvider = NoopTracerProvider;
-//# sourceMappingURL=NoopTracerProvider.js.map
-
-/***/ }),
-
-/***/ 48754:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-"use strict";
-
-/*
- * Copyright The OpenTelemetry Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.ProxyTracer = void 0;
-const NoopTracer_1 = __nccwpck_require__(68427);
-const NOOP_TRACER = new NoopTracer_1.NoopTracer();
+exports.BinaryReader = exports.binaryReadOptions = void 0;
+const binary_format_contract_1 = __nccwpck_require__(46158);
+const pb_long_1 = __nccwpck_require__(9569);
+const goog_varint_1 = __nccwpck_require__(56568);
+const defaultsRead = {
+    readUnknownField: true,
+    readerFactory: bytes => new BinaryReader(bytes),
+};
/**
- * Proxy tracer provided by the proxy tracer provider
+ * Make options for reading binary data from partial options.
 */
-class ProxyTracer {
-    constructor(_provider, name, version, options) {
-        this._provider = _provider;
-        this.name = name;
-        this.version = version;
-        this.options = options;
-    }
-    startSpan(name, options, context) {
-        return this._getTracer().startSpan(name, options, context);
+function binaryReadOptions(options) {
+    return options ? Object.assign(Object.assign({}, defaultsRead), options) : defaultsRead;
+}
+exports.binaryReadOptions = binaryReadOptions;
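// A worked example of the tag encoding this reader deals with (illustrative
// only, not part of the bundled output). A tag packs the field number and
// wire type into one varint as (fieldNo << 3 | wireType):
//
//   field no 1, wire type LengthDelimited(2): (1 << 3) | 2 = 0x0A
//   tag() reverses this: fieldNo = tag >>> 3, wireType = tag & 7.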
+class BinaryReader {
+    constructor(buf, textDecoder) {
+        this.varint64 = goog_varint_1.varint64read; // dirty cast for `this`
+        /**
+         * Read a `uint32` field, an unsigned 32 bit varint.
+         */
+        this.uint32 = goog_varint_1.varint32read; // dirty cast for `this` and access to protected `buf`
+        this.buf = buf;
+        this.len = buf.length;
+        this.pos = 0;
+        this.view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength);
+        this.textDecoder = textDecoder !== null && textDecoder !== void 0 ? textDecoder : new TextDecoder("utf-8", {
+            fatal: true,
+            ignoreBOM: true,
+        });
    }
-    startActiveSpan(_name, _options, _context, _fn) {
-        const tracer = this._getTracer();
-        return Reflect.apply(tracer.startActiveSpan, tracer, arguments);
+    /**
+     * Reads a tag - field number and wire type.
+     */
+    tag() {
+        let tag = this.uint32(), fieldNo = tag >>> 3, wireType = tag & 7;
+        if (fieldNo <= 0 || wireType < 0 || wireType > 5)
+            throw new Error("illegal tag: field no " + fieldNo + " wire type " + wireType);
+        return [fieldNo, wireType];
    }
    /**
-     * Try to get a tracer from the proxy tracer provider.
-     * If the proxy tracer provider has no delegate, return a noop tracer.
+     * Skip one element on the wire and return the skipped data.
+     * Supports WireType.StartGroup since v2.0.0-alpha.23.
     */
-    _getTracer() {
-        if (this._delegate) {
-            return this._delegate;
-        }
-        const tracer = this._provider.getDelegateTracer(this.name, this.version, this.options);
-        if (!tracer) {
-            return NOOP_TRACER;
+    skip(wireType) {
+        let start = this.pos;
+        // noinspection FallThroughInSwitchStatementJS
+        switch (wireType) {
+            case binary_format_contract_1.WireType.Varint:
+                while (this.buf[this.pos++] & 0x80) {
+                    // ignore
+                }
+                break;
+            case binary_format_contract_1.WireType.Bit64:
+                this.pos += 4;
+            case binary_format_contract_1.WireType.Bit32:
+                this.pos += 4;
+                break;
+            case binary_format_contract_1.WireType.LengthDelimited:
+                let len = this.uint32();
+                this.pos += len;
+                break;
+            case binary_format_contract_1.WireType.StartGroup:
+                // From descriptor.proto: Group type is deprecated, not supported in proto3.
+                // But we must still be able to parse and treat as unknown.
+                let t;
+                while ((t = this.tag()[1]) !== binary_format_contract_1.WireType.EndGroup) {
+                    this.skip(t);
+                }
+                break;
+            default:
+                throw new Error("cant skip wire type " + wireType);
        }
-        this._delegate = tracer;
-        return this._delegate;
+        this.assertBounds();
+        return this.buf.subarray(start, this.pos);
    }
-}
-exports.ProxyTracer = ProxyTracer;
-//# sourceMappingURL=ProxyTracer.js.map
-
-/***/ }),
-
-/***/ 5064:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-"use strict";
-
-/*
- * Copyright The OpenTelemetry Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.ProxyTracerProvider = void 0;
-const ProxyTracer_1 = __nccwpck_require__(48754);
-const NoopTracerProvider_1 = __nccwpck_require__(13801);
-const NOOP_TRACER_PROVIDER = new NoopTracerProvider_1.NoopTracerProvider();
-/**
- * Tracer provider which provides {@link ProxyTracer}s.
- *
- * Before a delegate is set, tracers provided are NoOp.
- * When a delegate is set, traces are provided from the delegate.
- * When a delegate is set after tracers have already been provided,
- * all tracers already provided will use the provided delegate implementation.
- */
-class ProxyTracerProvider {
    /**
-     * Get a {@link ProxyTracer}
+     * Throws error if position in byte array is out of range.
     */
-    getTracer(name, version, options) {
-        var _a;
-        return ((_a = this.getDelegateTracer(name, version, options)) !== null && _a !== void 0 ? _a : new ProxyTracer_1.ProxyTracer(this, name, version, options));
+    assertBounds() {
+        if (this.pos > this.len)
+            throw new RangeError("premature EOF");
    }
-    getDelegate() {
-        var _a;
-        return (_a = this._delegate) !== null && _a !== void 0 ? _a : NOOP_TRACER_PROVIDER;
+    /**
+     * Read a `int32` field, a signed 32 bit varint.
+     */
+    int32() {
+        return this.uint32() | 0;
    }
    /**
-     * Set the delegate tracer provider
+     * Read a `sint32` field, a signed, zigzag-encoded 32-bit varint.
     */
-    setDelegate(delegate) {
-        this._delegate = delegate;
+    sint32() {
+        let zze = this.uint32();
+        // decode zigzag
+        return (zze >>> 1) ^ -(zze & 1);
    }
-    getDelegateTracer(name, version, options) {
-        var _a;
-        return (_a = this._delegate) === null || _a === void 0 ? void 0 : _a.getTracer(name, version, options);
+    /**
+     * Read a `int64` field, a signed 64-bit varint.
+     */
+    int64() {
+        return new pb_long_1.PbLong(...this.varint64());
    }
-}
-exports.ProxyTracerProvider = ProxyTracerProvider;
-//# sourceMappingURL=ProxyTracerProvider.js.map
-
-/***/ }),
-
-/***/ 26330:
-/***/ ((__unused_webpack_module, exports) => {
-
-"use strict";
-
-/*
- * Copyright The OpenTelemetry Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.SamplingDecision = void 0;
-/**
- * @deprecated use the one declared in @opentelemetry/sdk-trace-base instead.
- * A sampling decision that determines how a {@link Span} will be recorded
- * and collected.
- */
-var SamplingDecision;
-(function (SamplingDecision) {
    /**
-     * `Span.isRecording() === false`, span will not be recorded and all events
-     * and attributes will be dropped.
+     * Read a `uint64` field, an unsigned 64-bit varint.
     */
-    SamplingDecision[SamplingDecision["NOT_RECORD"] = 0] = "NOT_RECORD";
+    uint64() {
+        return new pb_long_1.PbULong(...this.varint64());
+    }
    /**
-     * `Span.isRecording() === true`, but `Sampled` flag in {@link TraceFlags}
-     * MUST NOT be set.
+     * Read a `sint64` field, a signed, zig-zag-encoded 64-bit varint.
     */
-    SamplingDecision[SamplingDecision["RECORD"] = 1] = "RECORD";
+    sint64() {
+        let [lo, hi] = this.varint64();
+        // decode zig zag
+        let s = -(lo & 1);
+        lo = ((lo >>> 1 | (hi & 1) << 31) ^ s);
+        hi = (hi >>> 1 ^ s);
+        return new pb_long_1.PbLong(lo, hi);
+    }
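// A worked example of the zigzag decoding used by sint32()/sint64() above
// (illustrative only, not part of the bundled output). Zigzag maps unsigned
// varints back to signed integers:
//
//   decode(zze) = (zze >>> 1) ^ -(zze & 1)
//   0 -> 0, 1 -> -1, 2 -> 1, 3 -> -2, 4 -> 2, ...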
+    /**
+     * Read a `bool` field, a varint.
     */
-    SamplingDecision[SamplingDecision["RECORD_AND_SAMPLED"] = 2] = "RECORD_AND_SAMPLED";
-})(SamplingDecision = exports.SamplingDecision || (exports.SamplingDecision = {}));
-//# sourceMappingURL=SamplingResult.js.map

/***/ }),

-/***/ 59317:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-"use strict";
-
-/*
- * Copyright The OpenTelemetry Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.getSpanContext = exports.setSpanContext = exports.deleteSpan = exports.setSpan = exports.getActiveSpan = exports.getSpan = void 0;
-const context_1 = __nccwpck_require__(41544);
-const NonRecordingSpan_1 = __nccwpck_require__(59894);
-const context_2 = __nccwpck_require__(14430);
-/**
- * span key
- */
-const SPAN_KEY = (0, context_1.createContextKey)('OpenTelemetry Context Key SPAN');
-/**
- * Return the span if one exists
- *
- * @param context context to get span from
- */
-function getSpan(context) {
-    return context.getValue(SPAN_KEY) || undefined;
-}
-exports.getSpan = getSpan;
-/**
- * Gets the span from the current context, if one exists.
- */
-function getActiveSpan() {
-    return getSpan(context_2.ContextAPI.getInstance().active());
-}
-exports.getActiveSpan = getActiveSpan;
-/**
- * Set the span on a context
- *
- * @param context context to use as parent
- * @param span span to set active
- */
-function setSpan(context, span) {
-    return context.setValue(SPAN_KEY, span);
-}
-exports.setSpan = setSpan;
-/**
- * Remove current span stored in the context
- *
- * @param context context to delete span from
- */
-function deleteSpan(context) {
-    return context.deleteValue(SPAN_KEY);
-}
-exports.deleteSpan = deleteSpan;
-/**
- * Wrap span context in a NoopSpan and set as span in a new
- * context
- *
- * @param context context to set active span on
- * @param spanContext span context to be wrapped
- */
-function setSpanContext(context, spanContext) {
-    return setSpan(context, new NonRecordingSpan_1.NonRecordingSpan(spanContext));
-}
-exports.setSpanContext = setSpanContext;
-/**
- * Get the span context of the span if it exists.
- *
- * @param context context to get values from
- */
-function getSpanContext(context) {
-    var _a;
-    return (_a = getSpan(context)) === null || _a === void 0 ? void 0 : _a.spanContext();
-}
-exports.getSpanContext = getSpanContext;
-//# sourceMappingURL=context-utils.js.map
-
-/***/ }),
-
-/***/ 25760:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-"use strict";
-
-/*
- * Copyright The OpenTelemetry Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.TraceStateImpl = void 0;
-const tracestate_validators_1 = __nccwpck_require__(87655);
-const MAX_TRACE_STATE_ITEMS = 32;
-const MAX_TRACE_STATE_LEN = 512;
-const LIST_MEMBERS_SEPARATOR = ',';
-const LIST_MEMBER_KEY_VALUE_SPLITTER = '=';
-/**
- * TraceState must be a class and not a simple object type because of the spec
- * requirement (https://www.w3.org/TR/trace-context/#tracestate-field).
- *
- * Here is the list of allowed mutations:
- * - New key-value pair should be added into the beginning of the list
- * - The value of any key can be updated. Modified keys MUST be moved to the
- *   beginning of the list.
- */
-class TraceStateImpl {
-    constructor(rawTraceState) {
-        this._internalState = new Map();
-        if (rawTraceState)
-            this._parse(rawTraceState);
-    }
-    set(key, value) {
-        // TODO: Benchmark the different approaches(map vs list) and
-        // use the faster one.
-        const traceState = this._clone();
-        if (traceState._internalState.has(key)) {
-            traceState._internalState.delete(key);
-        }
-        traceState._internalState.set(key, value);
-        return traceState;
-    }
-    unset(key) {
-        const traceState = this._clone();
-        traceState._internalState.delete(key);
-        return traceState;
-    }
-    get(key) {
-        return this._internalState.get(key);
-    }
-    serialize() {
-        return this._keys()
-            .reduce((agg, key) => {
-            agg.push(key + LIST_MEMBER_KEY_VALUE_SPLITTER + this.get(key));
-            return agg;
-        }, [])
-            .join(LIST_MEMBERS_SEPARATOR);
-    }
-    _parse(rawTraceState) {
-        if (rawTraceState.length > MAX_TRACE_STATE_LEN)
-            return;
-        this._internalState = rawTraceState
-            .split(LIST_MEMBERS_SEPARATOR)
-            .reverse() // Store in reverse so new keys (.set(...)) will be placed at the beginning
-            .reduce((agg, part) => {
-            const listMember = part.trim(); // Optional Whitespace (OWS) handling
-            const i = listMember.indexOf(LIST_MEMBER_KEY_VALUE_SPLITTER);
-            if (i !== -1) {
-                const key = listMember.slice(0, i);
-                const value = listMember.slice(i + 1, part.length);
-                if ((0, tracestate_validators_1.validateKey)(key) && (0, tracestate_validators_1.validateValue)(value)) {
-                    agg.set(key, value);
-                }
-                else {
-                    // TODO: Consider to add warning log
-                }
-            }
-            return agg;
-        }, new Map());
-        // Because of the reverse() requirement, trunc must be done after map is created
-        if (this._internalState.size > MAX_TRACE_STATE_ITEMS) {
-            this._internalState = new Map(Array.from(this._internalState.entries())
-                .reverse() // Use reverse same as original tracestate parse chain
-                .slice(0, MAX_TRACE_STATE_ITEMS));
-        }
    }
-    _keys() {
-        return Array.from(this._internalState.keys()).reverse();
+    bool() {
+        let [lo, hi] = this.varint64();
+        return lo !== 0 || hi !== 0;
    }
-    _clone() {
-        const traceState = new TraceStateImpl();
-        traceState._internalState = new Map(this._internalState);
-        return traceState;
+    /**
+     * Read a `fixed32` field, an unsigned, fixed-length 32-bit integer.
+     */
+    fixed32() {
+        return this.view.getUint32((this.pos += 4) - 4, true);
+    }
+    /**
+     * Read a `sfixed32` field, a signed, fixed-length 32-bit integer.
+     */
+    sfixed32() {
+        return this.view.getInt32((this.pos += 4) - 4, true);
+    }
+    /**
+     * Read a `fixed64` field, an unsigned, fixed-length 64 bit integer.
+     */
+    fixed64() {
+        return new pb_long_1.PbULong(this.sfixed32(), this.sfixed32());
+    }
+    /**
+     * Read a `sfixed64` field, a signed, fixed-length 64-bit integer.
+     */
+    sfixed64() {
+        return new pb_long_1.PbLong(this.sfixed32(), this.sfixed32());
    }
-}
-exports.TraceStateImpl = TraceStateImpl;
-//# sourceMappingURL=tracestate-impl.js.map
-
-/***/ }),
-
-/***/ 87655:
-/***/ ((__unused_webpack_module, exports) => {
-
-"use strict";
-
-/*
- * Copyright The OpenTelemetry Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.validateValue = exports.validateKey = void 0;
-const VALID_KEY_CHAR_RANGE = '[_0-9a-z-*/]';
-const VALID_KEY = `[a-z]${VALID_KEY_CHAR_RANGE}{0,255}`;
-const VALID_VENDOR_KEY = `[a-z0-9]${VALID_KEY_CHAR_RANGE}{0,240}@[a-z]${VALID_KEY_CHAR_RANGE}{0,13}`;
-const VALID_KEY_REGEX = new RegExp(`^(?:${VALID_KEY}|${VALID_VENDOR_KEY})$`);
-const VALID_VALUE_BASE_REGEX = /^[ -~]{0,255}[!-~]$/;
-const INVALID_VALUE_COMMA_EQUAL_REGEX = /,|=/;
-/**
- * Key is opaque string up to 256 characters printable. It MUST begin with a
- * lowercase letter, and can only contain lowercase letters a-z, digits 0-9,
- * underscores _, dashes -, asterisks *, and forward slashes /
- * For multi-tenant vendor scenarios, an at sign (@) can be used to prefix the
- * vendor name. Vendors SHOULD set the tenant ID at the beginning of the key.
- * see https://www.w3.org/TR/trace-context/#key
- */
-function validateKey(key) {
-    return VALID_KEY_REGEX.test(key);
-}
-exports.validateKey = validateKey;
-/**
- * Value is opaque string up to 256 characters printable ASCII RFC0020
- * characters (i.e., the range 0x20 to 0x7E) except comma , and =.
- */
-function validateValue(value) {
-    return (VALID_VALUE_BASE_REGEX.test(value) &&
-        !INVALID_VALUE_COMMA_EQUAL_REGEX.test(value));
-}
-exports.validateValue = validateValue;
-//# sourceMappingURL=tracestate-validators.js.map
-
-/***/ }),
-
-/***/ 9840:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-"use strict";
-
-/*
- * Copyright The OpenTelemetry Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.createTraceState = void 0; -const tracestate_impl_1 = __nccwpck_require__(25760); -function createTraceState(rawTraceState) { - return new tracestate_impl_1.TraceStateImpl(rawTraceState); -} -exports.createTraceState = createTraceState; -//# sourceMappingURL=utils.js.map - -/***/ }), - -/***/ 12999: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.INVALID_SPAN_CONTEXT = exports.INVALID_TRACEID = exports.INVALID_SPANID = void 0; -const trace_flags_1 = __nccwpck_require__(56140); -exports.INVALID_SPANID = '0000000000000000'; -exports.INVALID_TRACEID = '00000000000000000000000000000000'; -exports.INVALID_SPAN_CONTEXT = { - traceId: exports.INVALID_TRACEID, - spanId: exports.INVALID_SPANID, - traceFlags: trace_flags_1.TraceFlags.NONE, -}; -//# sourceMappingURL=invalid-span-constants.js.map - -/***/ }), - -/***/ 71645: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.SpanKind = void 0; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -var SpanKind; -(function (SpanKind) { - /** Default value. Indicates that the span is used internally. */ - SpanKind[SpanKind["INTERNAL"] = 0] = "INTERNAL"; /** - * Indicates that the span covers server-side handling of an RPC or other - * remote request. + * Read a `float` field, 32-bit floating point number. */ - SpanKind[SpanKind["SERVER"] = 1] = "SERVER"; + float() { + return this.view.getFloat32((this.pos += 4) - 4, true); + } /** - * Indicates that the span covers the client-side wrapper around an RPC or - * other remote request. + * Read a `double` field, a 64-bit floating point number. */ - SpanKind[SpanKind["CLIENT"] = 2] = "CLIENT"; + double() { + return this.view.getFloat64((this.pos += 8) - 8, true); + } /** - * Indicates that the span describes producer sending a message to a - * broker. Unlike client and server, there is no direct critical path latency - * relationship between producer and consumer spans. + * Read a `bytes` field, length-delimited arbitrary data. 
*/ - SpanKind[SpanKind["PRODUCER"] = 3] = "PRODUCER"; + bytes() { + let len = this.uint32(); + let start = this.pos; + this.pos += len; + this.assertBounds(); + return this.buf.subarray(start, start + len); + } /** - * Indicates that the span describes consumer receiving a message from a - * broker. Unlike client and server, there is no direct critical path latency - * relationship between producer and consumer spans. + * Read a `string` field, length-delimited data converted to UTF-8 text. */ - SpanKind[SpanKind["CONSUMER"] = 4] = "CONSUMER"; -})(SpanKind = exports.SpanKind || (exports.SpanKind = {})); -//# sourceMappingURL=span_kind.js.map + string() { + return this.textDecoder.decode(this.bytes()); + } +} +exports.BinaryReader = BinaryReader; + /***/ }), -/***/ 73312: +/***/ 34474: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.wrapSpanContext = exports.isSpanContextValid = exports.isValidSpanId = exports.isValidTraceId = void 0; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -const invalid_span_constants_1 = __nccwpck_require__(12999); -const NonRecordingSpan_1 = __nccwpck_require__(59894); -const VALID_TRACEID_REGEX = /^([0-9a-f]{32})$/i; -const VALID_SPANID_REGEX = /^[0-9a-f]{16}$/i; -function isValidTraceId(traceId) { - return VALID_TRACEID_REGEX.test(traceId) && traceId !== invalid_span_constants_1.INVALID_TRACEID; -} -exports.isValidTraceId = isValidTraceId; -function isValidSpanId(spanId) { - return VALID_SPANID_REGEX.test(spanId) && spanId !== invalid_span_constants_1.INVALID_SPANID; -} -exports.isValidSpanId = isValidSpanId; -/** - * Returns true if this {@link SpanContext} is valid. - * @return true if this {@link SpanContext} is valid. - */ -function isSpanContextValid(spanContext) { - return (isValidTraceId(spanContext.traceId) && isValidSpanId(spanContext.spanId)); -} -exports.isSpanContextValid = isSpanContextValid; +exports.BinaryWriter = exports.binaryWriteOptions = void 0; +const pb_long_1 = __nccwpck_require__(9569); +const goog_varint_1 = __nccwpck_require__(56568); +const assert_1 = __nccwpck_require__(31589); +const defaultsWrite = { + writeUnknownFields: true, + writerFactory: () => new BinaryWriter(), +}; /** - * Wrap the given {@link SpanContext} in a new non-recording {@link Span} - * - * @param spanContext span context to be wrapped - * @returns a new non-recording {@link Span} with the provided context + * Make options for writing binary data from partial options. */ -function wrapSpanContext(spanContext) { - return new NonRecordingSpan_1.NonRecordingSpan(spanContext); +function binaryWriteOptions(options) { + return options ?
Object.assign(Object.assign({}, defaultsWrite), options) : defaultsWrite; } -exports.wrapSpanContext = wrapSpanContext; -//# sourceMappingURL=spancontext-utils.js.map - -/***/ }), - -/***/ 34483: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.SpanStatusCode = void 0; -/** - * An enumeration of status codes. - */ -var SpanStatusCode; -(function (SpanStatusCode) { +exports.binaryWriteOptions = binaryWriteOptions; +class BinaryWriter { + constructor(textEncoder) { + /** + * Previous fork states. + */ + this.stack = []; + this.textEncoder = textEncoder !== null && textEncoder !== void 0 ? textEncoder : new TextEncoder(); + this.chunks = []; + this.buf = []; + } /** - * The default status. + * Return all bytes written and reset this writer. */ - SpanStatusCode[SpanStatusCode["UNSET"] = 0] = "UNSET"; + finish() { + this.chunks.push(new Uint8Array(this.buf)); // flush the buffer + let len = 0; + for (let i = 0; i < this.chunks.length; i++) + len += this.chunks[i].length; + let bytes = new Uint8Array(len); + let offset = 0; + for (let i = 0; i < this.chunks.length; i++) { + bytes.set(this.chunks[i], offset); + offset += this.chunks[i].length; + } + this.chunks = []; + return bytes; + } /** - * The operation has been validated by an Application developer or - * Operator to have completed successfully. + * Start a new fork for length-delimited data like a message + * or a packed repeated field. + * + * Must be joined later with `join()`. */ - SpanStatusCode[SpanStatusCode["OK"] = 1] = "OK"; + fork() { + this.stack.push({ chunks: this.chunks, buf: this.buf }); + this.chunks = []; + this.buf = []; + return this; + } /** - * The operation contains an error. + * Join the last fork. Write its length and bytes, then + * return to the previous state. */ - SpanStatusCode[SpanStatusCode["ERROR"] = 2] = "ERROR"; -})(SpanStatusCode = exports.SpanStatusCode || (exports.SpanStatusCode = {})); -//# sourceMappingURL=status.js.map - -/***/ }), - -/***/ 56140: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.TraceFlags = void 0; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -var TraceFlags; -(function (TraceFlags) { - /** Represents no flag set. */ - TraceFlags[TraceFlags["NONE"] = 0] = "NONE"; - /** Bit to represent whether trace is sampled in trace flags. */ - TraceFlags[TraceFlags["SAMPLED"] = 1] = "SAMPLED"; -})(TraceFlags = exports.TraceFlags || (exports.TraceFlags = {})); -//# sourceMappingURL=trace_flags.js.map - -/***/ }), - -/***/ 70628: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.VERSION = void 0; -// this is autogenerated file, see scripts/version-update.js -exports.VERSION = '1.8.0'; -//# sourceMappingURL=version.js.map - -/***/ }), - -/***/ 8376: -/***/ (function(__unused_webpack_module, exports) { - -"use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ClientStreamingCall = void 0; -/** - * A client streaming RPC call. This means that the clients sends 0, 1, or - * more messages to the server, and the server replies with exactly one - * message. - */ -class ClientStreamingCall { - constructor(method, requestHeaders, request, headers, response, status, trailers) { - this.method = method; - this.requestHeaders = requestHeaders; - this.requests = request; - this.headers = headers; - this.response = response; - this.status = status; - this.trailers = trailers; + join() { + // get chunk of fork + let chunk = this.finish(); + // restore previous state + let prev = this.stack.pop(); + if (!prev) + throw new Error('invalid state, fork stack empty'); + this.chunks = prev.chunks; + this.buf = prev.buf; + // write length of chunk as varint + this.uint32(chunk.byteLength); + return this.raw(chunk); } /** - * Instead of awaiting the response status and trailers, you can - * just as well await this call itself to receive the server outcome. - * Note that it may still be valid to send more request messages. + * Writes a tag (field number and wire type). + * + * Equivalent to `uint32( (fieldNo << 3 | type) >>> 0 )`. + * + * Generated code should compute the tag ahead of time and call `uint32()`. */ - then(onfulfilled, onrejected) { - return this.promiseFinished().then(value => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, reason => onrejected ? Promise.resolve(onrejected(reason)) : Promise.reject(reason)); + tag(fieldNo, type) { + return this.uint32((fieldNo << 3 | type) >>> 0); } - promiseFinished() { - return __awaiter(this, void 0, void 0, function* () { - let [headers, response, status, trailers] = yield Promise.all([this.headers, this.response, this.status, this.trailers]); - return { - method: this.method, - requestHeaders: this.requestHeaders, - headers, - response, - status, - trailers - }; - }); + /** + * Write a chunk of raw bytes. 
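+ * Note (descriptive comment, added): any bytes collected in the internal buffer
+ * so far are flushed into a chunk first, so raw data is appended in write order.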
+ */ + raw(chunk) { + if (this.buf.length) { + this.chunks.push(new Uint8Array(this.buf)); + this.buf = []; + } + this.chunks.push(chunk); + return this; } -} -exports.ClientStreamingCall = ClientStreamingCall; - - -/***/ }), - -/***/ 56649: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Deferred = exports.DeferredState = void 0; -var DeferredState; -(function (DeferredState) { - DeferredState[DeferredState["PENDING"] = 0] = "PENDING"; - DeferredState[DeferredState["REJECTED"] = 1] = "REJECTED"; - DeferredState[DeferredState["RESOLVED"] = 2] = "RESOLVED"; -})(DeferredState = exports.DeferredState || (exports.DeferredState = {})); -/** - * A deferred promise. This is a "controller" for a promise, which lets you - * pass a promise around and reject or resolve it from the outside. - * - * Warning: This class is to be used with care. Using it can make code very - * difficult to read. It is intended for use in library code that exposes - * promises, not for regular business logic. - */ -class Deferred { /** - * @param preventUnhandledRejectionWarning - prevents the warning - * "Unhandled Promise rejection" by adding a noop rejection handler. - * Working with calls returned from the runtime-rpc package in an - * async function usually means awaiting one call property after - * the other. This means that the "status" is not being awaited when - * an earlier await for the "headers" is rejected. This causes the - * "unhandled promise reject" warning. A more correct behaviour for - * calls might be to become aware whether at least one of the - * promises is handled and swallow the rejection warning for the - * others. + * Write a `uint32` value, an unsigned 32 bit varint. */ - constructor(preventUnhandledRejectionWarning = true) { - this._state = DeferredState.PENDING; - this._promise = new Promise((resolve, reject) => { - this._resolve = resolve; - this._reject = reject; - }); - if (preventUnhandledRejectionWarning) { - this._promise.catch(_ => { }); + uint32(value) { + assert_1.assertUInt32(value); + // write value as varint 32, inlined for speed + while (value > 0x7f) { + this.buf.push((value & 0x7f) | 0x80); + value = value >>> 7; } + this.buf.push(value); + return this; } /** - * Get the current state of the promise. + * Write a `int32` value, a signed 32 bit varint. */ - get state() { - return this._state; + int32(value) { + assert_1.assertInt32(value); + goog_varint_1.varint32write(value, this.buf); + return this; } /** - * Get the deferred promise. + * Write a `bool` value, a varint. */ - get promise() { - return this._promise; + bool(value) { + this.buf.push(value ? 1 : 0); + return this; } /** - * Resolve the promise. Throws if the promise is already resolved or rejected. + * Write a `bytes` value, length-delimited arbitrary data. */ - resolve(value) { - if (this.state !== DeferredState.PENDING) - throw new Error(`cannot resolve ${DeferredState[this.state].toLowerCase()}`); - this._resolve(value); - this._state = DeferredState.RESOLVED; + bytes(value) { + this.uint32(value.byteLength); // write length of chunk as varint + return this.raw(value); } /** - * Reject the promise. Throws if the promise is already resolved or rejected. + * Write a `string` value, length-delimited data converted to UTF-8 text.
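+ * The text is encoded with TextEncoder and prefixed with its byte length as a
+ * varint. A minimal usage sketch (added comment), assuming the WireType enum
+ * exported by this runtime:
+ * `new BinaryWriter().tag(1, WireType.LengthDelimited).string("hi").finish()`.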
*/ - reject(reason) { - if (this.state !== DeferredState.PENDING) - throw new Error(`cannot reject ${DeferredState[this.state].toLowerCase()}`); - this._reject(reason); - this._state = DeferredState.REJECTED; + string(value) { + let chunk = this.textEncoder.encode(value); + this.uint32(chunk.byteLength); // write length of chunk as varint + return this.raw(chunk); } /** - * Resolve the promise. Ignore if not pending. + * Write a `float` value, 32-bit floating point number. */ - resolvePending(val) { - if (this._state === DeferredState.PENDING) - this.resolve(val); + float(value) { + assert_1.assertFloat32(value); + let chunk = new Uint8Array(4); + new DataView(chunk.buffer).setFloat32(0, value, true); + return this.raw(chunk); } /** - * Reject the promise. Ignore if not pending. + * Write a `double` value, a 64-bit floating point number. */ - rejectPending(reason) { - if (this._state === DeferredState.PENDING) - this.reject(reason); + double(value) { + let chunk = new Uint8Array(8); + new DataView(chunk.buffer).setFloat64(0, value, true); + return this.raw(chunk); } -} -exports.Deferred = Deferred; - - -/***/ }), - -/***/ 76574: -/***/ (function(__unused_webpack_module, exports) { - -"use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.DuplexStreamingCall = void 0; -/** - * A duplex streaming RPC call. This means that the clients sends an - * arbitrary amount of messages to the server, while at the same time, - * the server sends an arbitrary amount of messages to the client. - */ -class DuplexStreamingCall { - constructor(method, requestHeaders, request, headers, response, status, trailers) { - this.method = method; - this.requestHeaders = requestHeaders; - this.requests = request; - this.headers = headers; - this.responses = response; - this.status = status; - this.trailers = trailers; + /** + * Write a `fixed32` value, an unsigned, fixed-length 32-bit integer. + */ + fixed32(value) { + assert_1.assertUInt32(value); + let chunk = new Uint8Array(4); + new DataView(chunk.buffer).setUint32(0, value, true); + return this.raw(chunk); } /** - * Instead of awaiting the response status and trailers, you can - * just as well await this call itself to receive the server outcome. - * Note that it may still be valid to send more request messages. + * Write a `sfixed32` value, a signed, fixed-length 32-bit integer. */ - then(onfulfilled, onrejected) { - return this.promiseFinished().then(value => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, reason => onrejected ? 
Promise.resolve(onrejected(reason)) : Promise.reject(reason)); + sfixed32(value) { + assert_1.assertInt32(value); + let chunk = new Uint8Array(4); + new DataView(chunk.buffer).setInt32(0, value, true); + return this.raw(chunk); } - promiseFinished() { - return __awaiter(this, void 0, void 0, function* () { - let [headers, status, trailers] = yield Promise.all([this.headers, this.status, this.trailers]); - return { - method: this.method, - requestHeaders: this.requestHeaders, - headers, - status, - trailers, - }; - }); - } -} -exports.DuplexStreamingCall = DuplexStreamingCall; - - -/***/ }), - -/***/ 40543: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -// Public API of the rpc runtime. -// Note: we do not use `export * from ...` to help tree shakers, -// webpack verbose output hints that this should be useful -Object.defineProperty(exports, "__esModule", ({ value: true })); -var service_type_1 = __nccwpck_require__(6706); -Object.defineProperty(exports, "ServiceType", ({ enumerable: true, get: function () { return service_type_1.ServiceType; } })); -var reflection_info_1 = __nccwpck_require__(50748); -Object.defineProperty(exports, "readMethodOptions", ({ enumerable: true, get: function () { return reflection_info_1.readMethodOptions; } })); -Object.defineProperty(exports, "readMethodOption", ({ enumerable: true, get: function () { return reflection_info_1.readMethodOption; } })); -Object.defineProperty(exports, "readServiceOption", ({ enumerable: true, get: function () { return reflection_info_1.readServiceOption; } })); -var rpc_error_1 = __nccwpck_require__(98785); -Object.defineProperty(exports, "RpcError", ({ enumerable: true, get: function () { return rpc_error_1.RpcError; } })); -var rpc_options_1 = __nccwpck_require__(15580); -Object.defineProperty(exports, "mergeRpcOptions", ({ enumerable: true, get: function () { return rpc_options_1.mergeRpcOptions; } })); -var rpc_output_stream_1 = __nccwpck_require__(56092); -Object.defineProperty(exports, "RpcOutputStreamController", ({ enumerable: true, get: function () { return rpc_output_stream_1.RpcOutputStreamController; } })); -var test_transport_1 = __nccwpck_require__(51527); -Object.defineProperty(exports, "TestTransport", ({ enumerable: true, get: function () { return test_transport_1.TestTransport; } })); -var deferred_1 = __nccwpck_require__(56649); -Object.defineProperty(exports, "Deferred", ({ enumerable: true, get: function () { return deferred_1.Deferred; } })); -Object.defineProperty(exports, "DeferredState", ({ enumerable: true, get: function () { return deferred_1.DeferredState; } })); -var duplex_streaming_call_1 = __nccwpck_require__(76574); -Object.defineProperty(exports, "DuplexStreamingCall", ({ enumerable: true, get: function () { return duplex_streaming_call_1.DuplexStreamingCall; } })); -var client_streaming_call_1 = __nccwpck_require__(8376); -Object.defineProperty(exports, "ClientStreamingCall", ({ enumerable: true, get: function () { return client_streaming_call_1.ClientStreamingCall; } })); -var server_streaming_call_1 = __nccwpck_require__(81564); -Object.defineProperty(exports, "ServerStreamingCall", ({ enumerable: true, get: function () { return server_streaming_call_1.ServerStreamingCall; } })); -var unary_call_1 = __nccwpck_require__(34882); -Object.defineProperty(exports, "UnaryCall", ({ enumerable: true, get: function () { return unary_call_1.UnaryCall; } })); -var rpc_interceptor_1 = __nccwpck_require__(92039); -Object.defineProperty(exports, "stackIntercept", ({ 
enumerable: true, get: function () { return rpc_interceptor_1.stackIntercept; } })); -Object.defineProperty(exports, "stackDuplexStreamingInterceptors", ({ enumerable: true, get: function () { return rpc_interceptor_1.stackDuplexStreamingInterceptors; } })); -Object.defineProperty(exports, "stackClientStreamingInterceptors", ({ enumerable: true, get: function () { return rpc_interceptor_1.stackClientStreamingInterceptors; } })); -Object.defineProperty(exports, "stackServerStreamingInterceptors", ({ enumerable: true, get: function () { return rpc_interceptor_1.stackServerStreamingInterceptors; } })); -Object.defineProperty(exports, "stackUnaryInterceptors", ({ enumerable: true, get: function () { return rpc_interceptor_1.stackUnaryInterceptors; } })); -var server_call_context_1 = __nccwpck_require__(80579); -Object.defineProperty(exports, "ServerCallContextController", ({ enumerable: true, get: function () { return server_call_context_1.ServerCallContextController; } })); - - -/***/ }), - -/***/ 50748: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.readServiceOption = exports.readMethodOption = exports.readMethodOptions = exports.normalizeMethodInfo = void 0; -const runtime_1 = __nccwpck_require__(35435); -/** - * Turns PartialMethodInfo into MethodInfo. - */ -function normalizeMethodInfo(method, service) { - var _a, _b, _c; - let m = method; - m.service = service; - m.localName = (_a = m.localName) !== null && _a !== void 0 ? _a : runtime_1.lowerCamelCase(m.name); - // noinspection PointlessBooleanExpressionJS - m.serverStreaming = !!m.serverStreaming; - // noinspection PointlessBooleanExpressionJS - m.clientStreaming = !!m.clientStreaming; - m.options = (_b = m.options) !== null && _b !== void 0 ? _b : {}; - m.idempotency = (_c = m.idempotency) !== null && _c !== void 0 ? _c : undefined; - return m; -} -exports.normalizeMethodInfo = normalizeMethodInfo; -/** - * Read custom method options from a generated service client. - * - * @deprecated use readMethodOption() - */ -function readMethodOptions(service, methodName, extensionName, extensionType) { - var _a; - const options = (_a = service.methods.find((m, i) => m.localName === methodName || i === methodName)) === null || _a === void 0 ? void 0 : _a.options; - return options && options[extensionName] ? extensionType.fromJson(options[extensionName]) : undefined; -} -exports.readMethodOptions = readMethodOptions; -function readMethodOption(service, methodName, extensionName, extensionType) { - var _a; - const options = (_a = service.methods.find((m, i) => m.localName === methodName || i === methodName)) === null || _a === void 0 ? void 0 : _a.options; - if (!options) { - return undefined; + /** + * Write a `sint32` value, a signed, zigzag-encoded 32-bit varint. + */ + sint32(value) { + assert_1.assertInt32(value); + // zigzag encode + value = ((value << 1) ^ (value >> 31)) >>> 0; + goog_varint_1.varint32write(value, this.buf); + return this; } - const optionVal = options[extensionName]; - if (optionVal === undefined) { - return optionVal; + /** + * Write a `sfixed64` value, a signed, fixed-length 64-bit integer. + */ + sfixed64(value) { + let chunk = new Uint8Array(8); + let view = new DataView(chunk.buffer); + let long = pb_long_1.PbLong.from(value); + view.setInt32(0, long.lo, true); + view.setInt32(4, long.hi, true); + return this.raw(chunk); } - return extensionType ?
extensionType.fromJson(optionVal) : optionVal; -} -exports.readMethodOption = readMethodOption; -function readServiceOption(service, extensionName, extensionType) { - const options = service.options; - if (!options) { - return undefined; + /** + * Write a `fixed64` value, an unsigned, fixed-length 64 bit integer. + */ + fixed64(value) { + let chunk = new Uint8Array(8); + let view = new DataView(chunk.buffer); + let long = pb_long_1.PbULong.from(value); + view.setInt32(0, long.lo, true); + view.setInt32(4, long.hi, true); + return this.raw(chunk); } - const optionVal = options[extensionName]; - if (optionVal === undefined) { - return optionVal; + /** + * Write a `int64` value, a signed 64-bit varint. + */ + int64(value) { + let long = pb_long_1.PbLong.from(value); + goog_varint_1.varint64write(long.lo, long.hi, this.buf); + return this; } - return extensionType ? extensionType.fromJson(optionVal) : optionVal; -} -exports.readServiceOption = readServiceOption; - - -/***/ }), - -/***/ 98785: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.RpcError = void 0; -/** - * An error that occurred while calling a RPC method. - */ -class RpcError extends Error { - constructor(message, code = 'UNKNOWN', meta) { - super(message); - this.name = 'RpcError'; - // see https://www.typescriptlang.org/docs/handbook/release-notes/typescript-2-2.html#example - Object.setPrototypeOf(this, new.target.prototype); - this.code = code; - this.meta = meta !== null && meta !== void 0 ? meta : {}; + /** + * Write a `sint64` value, a signed, zig-zag-encoded 64-bit varint. + */ + sint64(value) { + let long = pb_long_1.PbLong.from(value), + // zigzag encode + sign = long.hi >> 31, lo = (long.lo << 1) ^ sign, hi = ((long.hi << 1) | (long.lo >>> 31)) ^ sign; + goog_varint_1.varint64write(lo, hi, this.buf); + return this; } - toString() { - const l = [this.name + ': ' + this.message]; - if (this.code) { - l.push(''); - l.push('Code: ' + this.code); - } - if (this.serviceName && this.methodName) { - l.push('Method: ' + this.serviceName + '/' + this.methodName); - } - let m = Object.entries(this.meta); - if (m.length) { - l.push(''); - l.push('Meta:'); - for (let [k, v] of m) { - l.push(` ${k}: ${v}`); - } - } - return l.join('\n'); + /** + * Write a `uint64` value, an unsigned 64-bit varint. + */ + uint64(value) { + let long = pb_long_1.PbULong.from(value); + goog_varint_1.varint64write(long.lo, long.hi, this.buf); + return this; } } -exports.RpcError = RpcError; +exports.BinaryWriter = BinaryWriter; /***/ }), -/***/ 92039: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 76802: +/***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.stackDuplexStreamingInterceptors = exports.stackClientStreamingInterceptors = exports.stackServerStreamingInterceptors = exports.stackUnaryInterceptors = exports.stackIntercept = void 0; -const runtime_1 = __nccwpck_require__(35435); +exports.listEnumNumbers = exports.listEnumNames = exports.listEnumValues = exports.isEnumObject = void 0; /** - * Creates a "stack" of of all interceptors specified in the given `RpcOptions`. - * Used by generated client implementations. - * @internal + * Is this a lookup object generated by Typescript, for a Typescript enum + * generated by protobuf-ts? + * + * - No `const enum` (enum must not be inlined, we need reverse mapping). 
+ * - No string enum (we need int32 for protobuf). + * - Must have a value for 0 (otherwise, we would need to support custom default values). */ -function stackIntercept(kind, transport, method, options, input) { - var _a, _b, _c, _d; - if (kind == "unary") { - let tail = (mtd, inp, opt) => transport.unary(mtd, inp, opt); - for (const curr of ((_a = options.interceptors) !== null && _a !== void 0 ? _a : []).filter(i => i.interceptUnary).reverse()) { - const next = tail; - tail = (mtd, inp, opt) => curr.interceptUnary(next, mtd, inp, opt); - } - return tail(method, input, options); +function isEnumObject(arg) { + if (typeof arg != 'object' || arg === null) { + return false; } - if (kind == "serverStreaming") { - let tail = (mtd, inp, opt) => transport.serverStreaming(mtd, inp, opt); - for (const curr of ((_b = options.interceptors) !== null && _b !== void 0 ? _b : []).filter(i => i.interceptServerStreaming).reverse()) { - const next = tail; - tail = (mtd, inp, opt) => curr.interceptServerStreaming(next, mtd, inp, opt); - } - return tail(method, input, options); + if (!arg.hasOwnProperty(0)) { + return false; } - if (kind == "clientStreaming") { - let tail = (mtd, opt) => transport.clientStreaming(mtd, opt); - for (const curr of ((_c = options.interceptors) !== null && _c !== void 0 ? _c : []).filter(i => i.interceptClientStreaming).reverse()) { - const next = tail; - tail = (mtd, opt) => curr.interceptClientStreaming(next, mtd, opt); + for (let k of Object.keys(arg)) { + let num = parseInt(k); + if (!Number.isNaN(num)) { + // is there a name for the number? + let nam = arg[num]; + if (nam === undefined) + return false; + // does the name resolve back to the number? + if (arg[nam] !== num) + return false; } - return tail(method, options); - } - if (kind == "duplex") { - let tail = (mtd, opt) => transport.duplex(mtd, opt); - for (const curr of ((_d = options.interceptors) !== null && _d !== void 0 ? _d : []).filter(i => i.interceptDuplex).reverse()) { - const next = tail; - tail = (mtd, opt) => curr.interceptDuplex(next, mtd, opt); + else { + // is there a number for the name? + let num = arg[k]; + if (num === undefined) + return false; + // is it a string enum? + if (typeof num !== 'number') + return false; + // do we know the number? + if (arg[num] === undefined) + return false; } - return tail(method, options); } - runtime_1.assertNever(kind); -} -exports.stackIntercept = stackIntercept; -/** - * @deprecated replaced by `stackIntercept()`, still here to support older generated code - */ -function stackUnaryInterceptors(transport, method, input, options) { - return stackIntercept("unary", transport, method, options, input); + return true; } -exports.stackUnaryInterceptors = stackUnaryInterceptors; +exports.isEnumObject = isEnumObject; /** - * @deprecated replaced by `stackIntercept()`, still here to support older generated code + * Lists all values of a Typescript enum, as an array of objects with a "name" + * property and a "number" property. + * + * Note that it is possible that a number appears more than once, because it is + * possible to have aliases in an enum. + * + * Throws if the enum does not adhere to the rules of enums generated by + * protobuf-ts. See `isEnumObject()`. 
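+ * For example (added comment), the reverse-mapped object
+ * {UNSET: 0, OK: 1, 0: "UNSET", 1: "OK"} yields
+ * [{name: "UNSET", number: 0}, {name: "OK", number: 1}].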
*/ -function stackServerStreamingInterceptors(transport, method, input, options) { - return stackIntercept("serverStreaming", transport, method, options, input); +function listEnumValues(enumObject) { + if (!isEnumObject(enumObject)) + throw new Error("not a typescript enum object"); + let values = []; + for (let [name, number] of Object.entries(enumObject)) + if (typeof number == "number") + values.push({ name, number }); + return values; } -exports.stackServerStreamingInterceptors = stackServerStreamingInterceptors; +exports.listEnumValues = listEnumValues; /** - * @deprecated replaced by `stackIntercept()`, still here to support older generated code + * Lists the names of a Typescript enum. + * + * Throws if the enum does not adhere to the rules of enums generated by + * protobuf-ts. See `isEnumObject()`. */ -function stackClientStreamingInterceptors(transport, method, options) { - return stackIntercept("clientStreaming", transport, method, options); +function listEnumNames(enumObject) { + return listEnumValues(enumObject).map(val => val.name); } -exports.stackClientStreamingInterceptors = stackClientStreamingInterceptors; +exports.listEnumNames = listEnumNames; /** - * @deprecated replaced by `stackIntercept()`, still here to support older generated code + * Lists the numbers of a Typescript enum. + * + * Throws if the enum does not adhere to the rules of enums generated by + * protobuf-ts. See `isEnumObject()`. */ -function stackDuplexStreamingInterceptors(transport, method, options) { - return stackIntercept("duplex", transport, method, options); +function listEnumNumbers(enumObject) { + return listEnumValues(enumObject) + .map(val => val.number) + .filter((num, index, arr) => arr.indexOf(num) == index); } -exports.stackDuplexStreamingInterceptors = stackDuplexStreamingInterceptors; +exports.listEnumNumbers = listEnumNumbers; /***/ }), -/***/ 15580: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 56568: +/***/ ((__unused_webpack_module, exports) => { "use strict"; +// Copyright 2008 Google Inc. All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Code generated by the Protocol Buffer compiler is owned by the owner +// of the input file used when generating it. This code is not +// standalone and requires a support library to be linked with it. This +// support library is itself covered by the above license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.varint32read = exports.varint32write = exports.int64toString = exports.int64fromString = exports.varint64write = exports.varint64read = void 0; /** - * Merges custom RPC options with defaults. Returns a new instance and keeps - * the "defaults" and the "options" unmodified. - * - * Merges `RpcMetadata` "meta", overwriting values from "defaults" with - * values from "options". Does not append values to existing entries. - * - * Merges "jsonOptions", including "jsonOptions.typeRegistry", by creating - * a new array that contains types from "options.jsonOptions.typeRegistry" - * first, then types from "defaults.jsonOptions.typeRegistry". + * Read a 64 bit varint as two JS numbers. * - * Merges "binaryOptions". + * Returns tuple: + * [0]: low bits + * [1]: high bits * - * Merges "interceptors" by creating a new array that contains interceptors - * from "defaults" first, then interceptors from "options". + * Copyright 2008 Google Inc. All rights reserved. * - * Works with objects that extend `RpcOptions`, but only if the added - * properties are of type Date, primitive like string, boolean, or Array - * of primitives. If you have other property types, you have to merge them - * yourself. + * See https://github.com/protocolbuffers/protobuf/blob/8a71927d74a4ce34efe2d8769fda198f52d20d12/js/experimental/runtime/kernel/buffer_decoder.js#L175 */ -function mergeRpcOptions(defaults, options) { - if (!options) - return defaults; - let o = {}; - copy(defaults, o); - copy(options, o); - for (let key of Object.keys(options)) { - let val = options[key]; - switch (key) { - case "jsonOptions": - o.jsonOptions = runtime_1.mergeJsonOptions(defaults.jsonOptions, o.jsonOptions); - break; - case "binaryOptions": - o.binaryOptions = runtime_1.mergeBinaryOptions(defaults.binaryOptions, o.binaryOptions); - break; - case "meta": - o.meta = {}; - copy(defaults.meta, o.meta); - copy(options.meta, o.meta); - break; - case "interceptors": - o.interceptors = defaults.interceptors ?
defaults.interceptors.concat(val) : val.concat(); - break; +function varint64read() { + let lowBits = 0; + let highBits = 0; + for (let shift = 0; shift < 28; shift += 7) { + let b = this.buf[this.pos++]; + lowBits |= (b & 0x7F) << shift; + if ((b & 0x80) == 0) { + this.assertBounds(); + return [lowBits, highBits]; } } - return o; -} -exports.mergeRpcOptions = mergeRpcOptions; -function copy(a, into) { - if (!a) - return; - let c = into; - for (let [k, v] of Object.entries(a)) { - if (v instanceof Date) - c[k] = new Date(v.getTime()); - else if (Array.isArray(v)) - c[k] = v.concat(); - else - c[k] = v; + let middleByte = this.buf[this.pos++]; + // last four bits of the first 32 bit number + lowBits |= (middleByte & 0x0F) << 28; + // 3 upper bits are part of the next 32 bit number + highBits = (middleByte & 0x70) >> 4; + if ((middleByte & 0x80) == 0) { + this.assertBounds(); + return [lowBits, highBits]; + } + for (let shift = 3; shift <= 31; shift += 7) { + let b = this.buf[this.pos++]; + highBits |= (b & 0x7F) << shift; + if ((b & 0x80) == 0) { + this.assertBounds(); + return [lowBits, highBits]; + } } + throw new Error('invalid varint'); } - - -/***/ }), - -/***/ 56092: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.RpcOutputStreamController = void 0; -const deferred_1 = __nccwpck_require__(56649); -const runtime_1 = __nccwpck_require__(35435); +exports.varint64read = varint64read; /** - * A `RpcOutputStream` that you control. + * Write a 64 bit varint, given as two JS numbers, to the given bytes array. + * + * Copyright 2008 Google Inc. All rights reserved. + * + * See https://github.com/protocolbuffers/protobuf/blob/8a71927d74a4ce34efe2d8769fda198f52d20d12/js/experimental/runtime/kernel/writer.js#L344 */ -class RpcOutputStreamController { - constructor() { - this._lis = { - nxt: [], - msg: [], - err: [], - cmp: [], - }; - this._closed = false; - } - // --- RpcOutputStream callback API - onNext(callback) { - return this.addLis(callback, this._lis.nxt); - } - onMessage(callback) { - return this.addLis(callback, this._lis.msg); - } - onError(callback) { - return this.addLis(callback, this._lis.err); - } - onComplete(callback) { - return this.addLis(callback, this._lis.cmp); - } - addLis(callback, list) { - list.push(callback); - return () => { - let i = list.indexOf(callback); - if (i >= 0) - list.splice(i, 1); - }; +function varint64write(lo, hi, bytes) { + for (let i = 0; i < 28; i = i + 7) { + const shift = lo >>> i; + const hasNext = !((shift >>> 7) == 0 && hi == 0); + const byte = (hasNext ? shift | 0x80 : shift) & 0xFF; + bytes.push(byte); + if (!hasNext) { + return; + } } - // remove all listeners - clearLis() { - for (let l of Object.values(this._lis)) - l.splice(0, l.length); + const splitBits = ((lo >>> 28) & 0x0F) | ((hi & 0x07) << 4); + const hasMoreBits = !((hi >> 3) == 0); + bytes.push((hasMoreBits ? splitBits | 0x80 : splitBits) & 0xFF); + if (!hasMoreBits) { + return; } - // --- Controller API - /** - * Is this stream already closed by a completion or error? - */ - get closed() { - return this._closed !== false; + for (let i = 3; i < 31; i = i + 7) { + const shift = hi >>> i; + const hasNext = !((shift >>> 7) == 0); + const byte = (hasNext ? shift | 0x80 : shift) & 0xFF; + bytes.push(byte); + if (!hasNext) { + return; + } } - /** - * Emit message, close with error, or close successfully, but only one - * at a time. 
- * Can be used to wrap a stream by using the other stream's `onNext`. - */ - notifyNext(message, error, complete) { - runtime_1.assert((message ? 1 : 0) + (error ? 1 : 0) + (complete ? 1 : 0) <= 1, 'only one emission at a time'); - if (message) - this.notifyMessage(message); - if (error) - this.notifyError(error); - if (complete) - this.notifyComplete(); + bytes.push((hi >>> 31) & 0x01); +} +exports.varint64write = varint64write; +// constants for binary math +const TWO_PWR_32_DBL = (1 << 16) * (1 << 16); +/** + * Parse decimal string of 64 bit integer value as two JS numbers. + * + * Returns tuple: + * [0]: minus sign? + * [1]: low bits + * [2]: high bits + * + * Copyright 2008 Google Inc. + */ +function int64fromString(dec) { + // Check for minus sign. + let minus = dec[0] == '-'; + if (minus) + dec = dec.slice(1); + // Work 6 decimal digits at a time, acting like we're converting base 1e6 + // digits to binary. This is safe to do with floating point math because + // Number.isSafeInteger(ALL_32_BITS * 1e6) == true. + const base = 1e6; + let lowBits = 0; + let highBits = 0; + function add1e6digit(begin, end) { + // Note: Number('') is 0. + const digit1e6 = Number(dec.slice(begin, end)); + highBits *= base; + lowBits = lowBits * base + digit1e6; + // Carry bits from lowBits to highBits + if (lowBits >= TWO_PWR_32_DBL) { + highBits = highBits + ((lowBits / TWO_PWR_32_DBL) | 0); + lowBits = lowBits % TWO_PWR_32_DBL; + } } - /** - * Emits a new message. Throws if stream is closed. - * - * Triggers onNext and onMessage callbacks. - */ - notifyMessage(message) { - runtime_1.assert(!this.closed, 'stream is closed'); - this.pushIt({ value: message, done: false }); - this._lis.msg.forEach(l => l(message)); - this._lis.nxt.forEach(l => l(message, undefined, false)); + add1e6digit(-24, -18); + add1e6digit(-18, -12); + add1e6digit(-12, -6); + add1e6digit(-6); + return [minus, lowBits, highBits]; +} +exports.int64fromString = int64fromString; +/** + * Format 64 bit integer value (as two JS numbers) to decimal string. + * + * Copyright 2008 Google Inc. + */ +function int64toString(bitsLow, bitsHigh) { + // Skip the expensive conversion if the number is small enough to use the + // built-in conversions. + if ((bitsHigh >>> 0) <= 0x1FFFFF) { + return '' + (TWO_PWR_32_DBL * bitsHigh + (bitsLow >>> 0)); } - /** - * Closes the stream with an error. Throws if stream is closed. - * - * Triggers onNext and onError callbacks. - */ - notifyError(error) { - runtime_1.assert(!this.closed, 'stream is closed'); - this._closed = error; - this.pushIt(error); - this._lis.err.forEach(l => l(error)); - this._lis.nxt.forEach(l => l(undefined, error, false)); - this.clearLis(); + // What this code is doing is essentially converting the input number from + // base-2 to base-1e7, which allows us to represent the 64-bit range with + // only 3 (very large) digits. Those digits are then trivial to convert to + // a base-10 string. + // The magic numbers used here are - + // 2^24 = 16777216 = (1,6777216) in base-1e7. + // 2^48 = 281474976710656 = (2,8147497,6710656) in base-1e7. + // Split 32:32 representation into 16:24:24 representation so our + // intermediate digits don't overflow. + let low = bitsLow & 0xFFFFFF; + let mid = (((bitsLow >>> 24) | (bitsHigh << 8)) >>> 0) & 0xFFFFFF; + let high = (bitsHigh >> 16) & 0xFFFF; + // Assemble our three base-1e7 digits, ignoring carries. The maximum + // value in a digit at this step is representable as a 48-bit integer, which + // can be stored in a 64-bit floating point number. 
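+ // Worked example (added comment, arithmetic only): 2^53 arrives as bitsLow = 0,
+ // bitsHigh = 0x200000; the split below gives low = 0, mid = 0, high = 32, and
+ // after the carries the base-1e7 digits are (90, 719925, 4740992), printing
+ // as "9007199254740992".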
+ let digitA = low + (mid * 6777216) + (high * 6710656); + let digitB = mid + (high * 8147497); + let digitC = (high * 2); + // Apply carries from A to B and from B to C. + let base = 10000000; + if (digitA >= base) { + digitB += Math.floor(digitA / base); + digitA %= base; } - /** - * Closes the stream successfully. Throws if stream is closed. - * - * Triggers onNext and onComplete callbacks. - */ - notifyComplete() { - runtime_1.assert(!this.closed, 'stream is closed'); - this._closed = true; - this.pushIt({ value: null, done: true }); - this._lis.cmp.forEach(l => l()); - this._lis.nxt.forEach(l => l(undefined, undefined, true)); - this.clearLis(); + if (digitB >= base) { + digitC += Math.floor(digitB / base); + digitB %= base; } - /** - * Creates an async iterator (that can be used with `for await {...}`) - * to consume the stream. - * - * Some things to note: - * - If an error occurs, the `for await` will throw it. - * - If an error occurred before the `for await` was started, `for await` - * will re-throw it. - * - If the stream is already complete, the `for await` will be empty. - * - If your `for await` consumes slower than the stream produces, - * for example because you are relaying messages in a slow operation, - * messages are queued. - */ - [Symbol.asyncIterator]() { - // init the iterator state, enabling pushIt() - if (!this._itState) { - this._itState = { q: [] }; + // Convert base-1e7 digits to base-10, with optional leading zeroes. + function decimalFrom1e7(digit1e7, needLeadingZeros) { + let partial = digit1e7 ? String(digit1e7) : ''; + if (needLeadingZeros) { + return '0000000'.slice(partial.length) + partial; } - // if we are closed, we are definitely not receiving any more messages. - // but we can't let the iterator get stuck. we want to either: - // a) finish the new iterator immediately, because we are completed - // b) reject the new iterator, because we errored - if (this._closed === true) - this.pushIt({ value: null, done: true }); - else if (this._closed !== false) - this.pushIt(this._closed); - // the async iterator - return { - next: () => { - let state = this._itState; - runtime_1.assert(state, "bad state"); // if we don't have a state here, code is broken - // there should be no pending result. - // did the consumer call next() before we resolved our previous result promise? - runtime_1.assert(!state.p, "iterator contract broken"); - // did we produce faster than the iterator consumed? - // return the oldest result from the queue. - let first = state.q.shift(); - if (first) - return ("value" in first) ? Promise.resolve(first) : Promise.reject(first); - // we have no result ATM, but we promise one. - // as soon as we have a result, we must resolve promise. - state.p = new deferred_1.Deferred(); - return state.p.promise; - }, - }; + return partial; } - // "push" a new iterator result. - // this either resolves a pending promise, or enqueues the result. - pushIt(result) { - let state = this._itState; - if (!state) - return; - // is the consumer waiting for us? - if (state.p) { - // yes, consumer is waiting for this promise. - const p = state.p; - runtime_1.assert(p.state == deferred_1.DeferredState.PENDING, "iterator contract broken"); - // resolve the promise - ("value" in result) ? p.resolve(result) : p.reject(result); - // must cleanup, otherwise iterator.next() would pick it up again. 
- delete state.p; + return decimalFrom1e7(digitC, /*needLeadingZeros=*/ 0) + + decimalFrom1e7(digitB, /*needLeadingZeros=*/ digitC) + + // If the final 1e7 digit didn't need leading zeros, we would have + // returned via the trivial code path at the top. + decimalFrom1e7(digitA, /*needLeadingZeros=*/ 1); +} +exports.int64toString = int64toString; +/** + * Write a 32 bit varint, signed or unsigned. Same as `varint64write(0, value, bytes)` + * + * Copyright 2008 Google Inc. All rights reserved. + * + * See https://github.com/protocolbuffers/protobuf/blob/1b18833f4f2a2f681f4e4a25cdf3b0a43115ec26/js/binary/encoder.js#L144 + */ +function varint32write(value, bytes) { + if (value >= 0) { + // write value as varint 32 + while (value > 0x7f) { + bytes.push((value & 0x7f) | 0x80); + value = value >>> 7; } - else { - // we are producing faster than the iterator consumes. - // push result onto queue. - state.q.push(result); + bytes.push(value); + } + else { + for (let i = 0; i < 9; i++) { + bytes.push(value & 127 | 128); + value = value >> 7; } + bytes.push(1); } } -exports.RpcOutputStreamController = RpcOutputStreamController; +exports.varint32write = varint32write; +/** + * Read an unsigned 32 bit varint. + * + * See https://github.com/protocolbuffers/protobuf/blob/8a71927d74a4ce34efe2d8769fda198f52d20d12/js/experimental/runtime/kernel/buffer_decoder.js#L220 + */ +function varint32read() { + let b = this.buf[this.pos++]; + let result = b & 0x7F; + if ((b & 0x80) == 0) { + this.assertBounds(); + return result; + } + b = this.buf[this.pos++]; + result |= (b & 0x7F) << 7; + if ((b & 0x80) == 0) { + this.assertBounds(); + return result; + } + b = this.buf[this.pos++]; + result |= (b & 0x7F) << 14; + if ((b & 0x80) == 0) { + this.assertBounds(); + return result; + } + b = this.buf[this.pos++]; + result |= (b & 0x7F) << 21; + if ((b & 0x80) == 0) { + this.assertBounds(); + return result; + } + // Extract only last 4 bits + b = this.buf[this.pos++]; + result |= (b & 0x0F) << 28; + for (let readBytes = 5; ((b & 0x80) !== 0) && readBytes < 10; readBytes++) + b = this.buf[this.pos++]; + if ((b & 0x80) != 0) + throw new Error('invalid varint'); + this.assertBounds(); + // Result can have 32 bits, convert it to unsigned + return result >>> 0; +} +exports.varint32read = varint32read; /***/ }), -/***/ 80579: -/***/ ((__unused_webpack_module, exports) => { +/***/ 82905: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +// Public API of the protobuf-ts runtime. +// Note: we do not use `export * from ...` to help tree shakers, +// webpack verbose output hints that this should be useful Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ServerCallContextController = void 0; -class ServerCallContextController { - constructor(method, headers, deadline, sendResponseHeadersFn, defaultStatus = { code: 'OK', detail: '' }) { - this._cancelled = false; - this._listeners = []; - this.method = method; - this.headers = headers; - this.deadline = deadline; - this.trailers = {}; - this._sendRH = sendResponseHeadersFn; - this.status = defaultStatus; - } - /** - * Set the call cancelled. - * - * Invokes all callbacks registered with onCancel() and - * sets `cancelled = true`. - */ - notifyCancelled() { - if (!this._cancelled) { - this._cancelled = true; - for (let l of this._listeners) { - l(); - } - } - } - /** - * Send response headers. - */ - sendResponseHeaders(data) { - this._sendRH(data); - } - /** - * Is the call cancelled? 
- * - * When the client closes the connection before the server - * is done, the call is cancelled. - * - * If you want to cancel a request on the server, throw a - * RpcError with the CANCELLED status code. - */ - get cancelled() { - return this._cancelled; - } - /** - * Add a callback for cancellation. - */ - onCancel(callback) { - const l = this._listeners; - l.push(callback); - return () => { - let i = l.indexOf(callback); - if (i >= 0) - l.splice(i, 1); - }; - } -} -exports.ServerCallContextController = ServerCallContextController; +// Convenience JSON typings and corresponding type guards +var json_typings_1 = __nccwpck_require__(66015); +Object.defineProperty(exports, "typeofJsonValue", ({ enumerable: true, get: function () { return json_typings_1.typeofJsonValue; } })); +Object.defineProperty(exports, "isJsonObject", ({ enumerable: true, get: function () { return json_typings_1.isJsonObject; } })); +// Base 64 encoding +var base64_1 = __nccwpck_require__(25081); +Object.defineProperty(exports, "base64decode", ({ enumerable: true, get: function () { return base64_1.base64decode; } })); +Object.defineProperty(exports, "base64encode", ({ enumerable: true, get: function () { return base64_1.base64encode; } })); +// UTF8 encoding +var protobufjs_utf8_1 = __nccwpck_require__(7465); +Object.defineProperty(exports, "utf8read", ({ enumerable: true, get: function () { return protobufjs_utf8_1.utf8read; } })); +// Binary format contracts, options for reading and writing, for example +var binary_format_contract_1 = __nccwpck_require__(46158); +Object.defineProperty(exports, "WireType", ({ enumerable: true, get: function () { return binary_format_contract_1.WireType; } })); +Object.defineProperty(exports, "mergeBinaryOptions", ({ enumerable: true, get: function () { return binary_format_contract_1.mergeBinaryOptions; } })); +Object.defineProperty(exports, "UnknownFieldHandler", ({ enumerable: true, get: function () { return binary_format_contract_1.UnknownFieldHandler; } })); +// Standard IBinaryReader implementation +var binary_reader_1 = __nccwpck_require__(94672); +Object.defineProperty(exports, "BinaryReader", ({ enumerable: true, get: function () { return binary_reader_1.BinaryReader; } })); +Object.defineProperty(exports, "binaryReadOptions", ({ enumerable: true, get: function () { return binary_reader_1.binaryReadOptions; } })); +// Standard IBinaryWriter implementation +var binary_writer_1 = __nccwpck_require__(34474); +Object.defineProperty(exports, "BinaryWriter", ({ enumerable: true, get: function () { return binary_writer_1.BinaryWriter; } })); +Object.defineProperty(exports, "binaryWriteOptions", ({ enumerable: true, get: function () { return binary_writer_1.binaryWriteOptions; } })); +// Int64 and UInt64 implementations required for the binary format +var pb_long_1 = __nccwpck_require__(9569); +Object.defineProperty(exports, "PbLong", ({ enumerable: true, get: function () { return pb_long_1.PbLong; } })); +Object.defineProperty(exports, "PbULong", ({ enumerable: true, get: function () { return pb_long_1.PbULong; } })); +// JSON format contracts, options for reading and writing, for example +var json_format_contract_1 = __nccwpck_require__(53975); +Object.defineProperty(exports, "jsonReadOptions", ({ enumerable: true, get: function () { return json_format_contract_1.jsonReadOptions; } })); +Object.defineProperty(exports, "jsonWriteOptions", ({ enumerable: true, get: function () { return json_format_contract_1.jsonWriteOptions; } })); +Object.defineProperty(exports, 
"mergeJsonOptions", ({ enumerable: true, get: function () { return json_format_contract_1.mergeJsonOptions; } })); +// Message type contract +var message_type_contract_1 = __nccwpck_require__(28185); +Object.defineProperty(exports, "MESSAGE_TYPE", ({ enumerable: true, get: function () { return message_type_contract_1.MESSAGE_TYPE; } })); +// Message type implementation via reflection +var message_type_1 = __nccwpck_require__(22845); +Object.defineProperty(exports, "MessageType", ({ enumerable: true, get: function () { return message_type_1.MessageType; } })); +// Reflection info, generated by the plugin, exposed to the user, used by reflection ops +var reflection_info_1 = __nccwpck_require__(49354); +Object.defineProperty(exports, "ScalarType", ({ enumerable: true, get: function () { return reflection_info_1.ScalarType; } })); +Object.defineProperty(exports, "LongType", ({ enumerable: true, get: function () { return reflection_info_1.LongType; } })); +Object.defineProperty(exports, "RepeatType", ({ enumerable: true, get: function () { return reflection_info_1.RepeatType; } })); +Object.defineProperty(exports, "normalizeFieldInfo", ({ enumerable: true, get: function () { return reflection_info_1.normalizeFieldInfo; } })); +Object.defineProperty(exports, "readFieldOptions", ({ enumerable: true, get: function () { return reflection_info_1.readFieldOptions; } })); +Object.defineProperty(exports, "readFieldOption", ({ enumerable: true, get: function () { return reflection_info_1.readFieldOption; } })); +Object.defineProperty(exports, "readMessageOption", ({ enumerable: true, get: function () { return reflection_info_1.readMessageOption; } })); +// Message operations via reflection +var reflection_type_check_1 = __nccwpck_require__(67737); +Object.defineProperty(exports, "ReflectionTypeCheck", ({ enumerable: true, get: function () { return reflection_type_check_1.ReflectionTypeCheck; } })); +var reflection_create_1 = __nccwpck_require__(93844); +Object.defineProperty(exports, "reflectionCreate", ({ enumerable: true, get: function () { return reflection_create_1.reflectionCreate; } })); +var reflection_scalar_default_1 = __nccwpck_require__(40809); +Object.defineProperty(exports, "reflectionScalarDefault", ({ enumerable: true, get: function () { return reflection_scalar_default_1.reflectionScalarDefault; } })); +var reflection_merge_partial_1 = __nccwpck_require__(44302); +Object.defineProperty(exports, "reflectionMergePartial", ({ enumerable: true, get: function () { return reflection_merge_partial_1.reflectionMergePartial; } })); +var reflection_equals_1 = __nccwpck_require__(819); +Object.defineProperty(exports, "reflectionEquals", ({ enumerable: true, get: function () { return reflection_equals_1.reflectionEquals; } })); +var reflection_binary_reader_1 = __nccwpck_require__(59879); +Object.defineProperty(exports, "ReflectionBinaryReader", ({ enumerable: true, get: function () { return reflection_binary_reader_1.ReflectionBinaryReader; } })); +var reflection_binary_writer_1 = __nccwpck_require__(39224); +Object.defineProperty(exports, "ReflectionBinaryWriter", ({ enumerable: true, get: function () { return reflection_binary_writer_1.ReflectionBinaryWriter; } })); +var reflection_json_reader_1 = __nccwpck_require__(81926); +Object.defineProperty(exports, "ReflectionJsonReader", ({ enumerable: true, get: function () { return reflection_json_reader_1.ReflectionJsonReader; } })); +var reflection_json_writer_1 = __nccwpck_require__(86596); +Object.defineProperty(exports, "ReflectionJsonWriter", ({ 
enumerable: true, get: function () { return reflection_json_writer_1.ReflectionJsonWriter; } })); +var reflection_contains_message_type_1 = __nccwpck_require__(3051); +Object.defineProperty(exports, "containsMessageType", ({ enumerable: true, get: function () { return reflection_contains_message_type_1.containsMessageType; } })); +// Oneof helpers +var oneof_1 = __nccwpck_require__(76391); +Object.defineProperty(exports, "isOneofGroup", ({ enumerable: true, get: function () { return oneof_1.isOneofGroup; } })); +Object.defineProperty(exports, "setOneofValue", ({ enumerable: true, get: function () { return oneof_1.setOneofValue; } })); +Object.defineProperty(exports, "getOneofValue", ({ enumerable: true, get: function () { return oneof_1.getOneofValue; } })); +Object.defineProperty(exports, "clearOneofValue", ({ enumerable: true, get: function () { return oneof_1.clearOneofValue; } })); +Object.defineProperty(exports, "getSelectedOneofValue", ({ enumerable: true, get: function () { return oneof_1.getSelectedOneofValue; } })); +// Enum object type guard and reflection util, may be interesting to the user. +var enum_object_1 = __nccwpck_require__(76802); +Object.defineProperty(exports, "listEnumValues", ({ enumerable: true, get: function () { return enum_object_1.listEnumValues; } })); +Object.defineProperty(exports, "listEnumNames", ({ enumerable: true, get: function () { return enum_object_1.listEnumNames; } })); +Object.defineProperty(exports, "listEnumNumbers", ({ enumerable: true, get: function () { return enum_object_1.listEnumNumbers; } })); +Object.defineProperty(exports, "isEnumObject", ({ enumerable: true, get: function () { return enum_object_1.isEnumObject; } })); +// lowerCamelCase() is exported for plugin, rpc-runtime and other rpc packages +var lower_camel_case_1 = __nccwpck_require__(69108); +Object.defineProperty(exports, "lowerCamelCase", ({ enumerable: true, get: function () { return lower_camel_case_1.lowerCamelCase; } })); +// assertion functions are exported for plugin, may also be useful to user +var assert_1 = __nccwpck_require__(31589); +Object.defineProperty(exports, "assert", ({ enumerable: true, get: function () { return assert_1.assert; } })); +Object.defineProperty(exports, "assertNever", ({ enumerable: true, get: function () { return assert_1.assertNever; } })); +Object.defineProperty(exports, "assertInt32", ({ enumerable: true, get: function () { return assert_1.assertInt32; } })); +Object.defineProperty(exports, "assertUInt32", ({ enumerable: true, get: function () { return assert_1.assertUInt32; } })); +Object.defineProperty(exports, "assertFloat32", ({ enumerable: true, get: function () { return assert_1.assertFloat32; } })); /***/ }), -/***/ 81564: -/***/ (function(__unused_webpack_module, exports) { +/***/ 53975: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ServerStreamingCall = void 0; +exports.mergeJsonOptions = exports.jsonWriteOptions = exports.jsonReadOptions = void 0; +const defaultsWrite = { + emitDefaultValues: false, + enumAsInteger: false, + useProtoFieldName: false, + prettySpaces: 0, +}, defaultsRead = { + ignoreUnknownFields: false, +}; /** - * A server streaming RPC call. The client provides exactly one input message - * but the server may respond with 0, 1, or more messages. + * Make options for reading JSON data from partial options. */ -class ServerStreamingCall { - constructor(method, requestHeaders, request, headers, response, status, trailers) { - this.method = method; - this.requestHeaders = requestHeaders; - this.request = request; - this.headers = headers; - this.responses = response; - this.status = status; - this.trailers = trailers; - } - /** - * Instead of awaiting the response status and trailers, you can - * just as well await this call itself to receive the server outcome. - * You should first setup some listeners to the `request` to - * see the actual messages the server replied with. - */ - then(onfulfilled, onrejected) { - return this.promiseFinished().then(value => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, reason => onrejected ? Promise.resolve(onrejected(reason)) : Promise.reject(reason)); - } - promiseFinished() { - return __awaiter(this, void 0, void 0, function* () { - let [headers, status, trailers] = yield Promise.all([this.headers, this.status, this.trailers]); - return { - method: this.method, - requestHeaders: this.requestHeaders, - request: this.request, - headers, - status, - trailers, - }; - }); - } +function jsonReadOptions(options) { + return options ? Object.assign(Object.assign({}, defaultsRead), options) : defaultsRead; } -exports.ServerStreamingCall = ServerStreamingCall; +exports.jsonReadOptions = jsonReadOptions; +/** + * Make options for writing JSON data from partial options. + */ +function jsonWriteOptions(options) { + return options ? Object.assign(Object.assign({}, defaultsWrite), options) : defaultsWrite; +} +exports.jsonWriteOptions = jsonWriteOptions; +/** + * Merges JSON write or read options. Later values override earlier values. Type registries are merged. + */ +function mergeJsonOptions(a, b) { + var _a, _b; + let c = Object.assign(Object.assign({}, a), b); + c.typeRegistry = [...((_a = a === null || a === void 0 ? void 0 : a.typeRegistry) !== null && _a !== void 0 ? _a : []), ...((_b = b === null || b === void 0 ? void 0 : b.typeRegistry) !== null && _b !== void 0 ? _b : [])]; + return c; +} +exports.mergeJsonOptions = mergeJsonOptions; /***/ }), -/***/ 6706: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 66015: +/***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ServiceType = void 0; -const reflection_info_1 = __nccwpck_require__(50748); -class ServiceType { - constructor(typeName, methods, options) { - this.typeName = typeName; - this.methods = methods.map(i => reflection_info_1.normalizeMethodInfo(i, this)); - this.options = options !== null && options !== void 0 ? options : {}; +exports.isJsonObject = exports.typeofJsonValue = void 0; +/** + * Get the type of a JSON value. 
+ * Distinguishes between array, null and object. + */ +function typeofJsonValue(value) { + let t = typeof value; + if (t == "object") { + if (Array.isArray(value)) + return "array"; + if (value === null) + return "null"; } + return t; } -exports.ServiceType = ServiceType; +exports.typeofJsonValue = typeofJsonValue; +/** + * Is this a JSON object (instead of an array or null)? + */ +function isJsonObject(value) { + return value !== null && typeof value == "object" && !Array.isArray(value); +} +exports.isJsonObject = isJsonObject; /***/ }), -/***/ 51527: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { +/***/ 69108: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.TestTransport = void 0; -const rpc_error_1 = __nccwpck_require__(98785); -const runtime_1 = __nccwpck_require__(35435); -const rpc_output_stream_1 = __nccwpck_require__(56092); -const rpc_options_1 = __nccwpck_require__(15580); -const unary_call_1 = __nccwpck_require__(34882); -const server_streaming_call_1 = __nccwpck_require__(81564); -const client_streaming_call_1 = __nccwpck_require__(8376); -const duplex_streaming_call_1 = __nccwpck_require__(76574); +exports.lowerCamelCase = void 0; /** - * Transport for testing. + * Converts snake_case to lowerCamelCase. + * + * Should behave like protoc: + * https://github.com/protocolbuffers/protobuf/blob/e8ae137c96444ea313485ed1118c5e43b2099cf1/src/google/protobuf/compiler/java/java_helpers.cc#L118 */ -class TestTransport { - /** - * Initialize with mock data. Omitted fields have default value. - */ - constructor(data) { - /** - * Suppress warning / error about uncaught rejections of - * "status" and "trailers". - */ - this.suppressUncaughtRejections = true; - this.headerDelay = 10; - this.responseDelay = 50; - this.betweenResponseDelay = 10; - this.afterResponseDelay = 10; - this.data = data !== null && data !== void 0 ? data : {}; - } - /** - * Sent message(s) during the last operation. - */ - get sentMessages() { - if (this.lastInput instanceof TestInputStream) { - return this.lastInput.sent; +function lowerCamelCase(snakeCase) { + let capNext = false; + const sb = []; + for (let i = 0; i < snakeCase.length; i++) { + let next = snakeCase.charAt(i); + if (next == '_') { + capNext = true; } - else if (typeof this.lastInput == "object") { - return [this.lastInput.single]; + else if (/\d/.test(next)) { + sb.push(next); + capNext = true; } - return []; - } - /** - * Sending message(s) completed? 
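// --- Editorial usage sketch (illustrative only, not part of the bundled diff) ---
// How the JSON option and JSON typing helpers added above behave, assuming they
// are consumed via the runtime re-exports shown in the index module earlier.
const exampleWriteOpts = jsonWriteOptions({ emitDefaultValues: true });
// -> { emitDefaultValues: true, enumAsInteger: false, useProtoFieldName: false, prettySpaces: 0 }
// Later options win; typeRegistry arrays are concatenated:
const exampleMerged = mergeJsonOptions({ enumAsInteger: true }, { enumAsInteger: false });
// -> { enumAsInteger: false, typeRegistry: [] }
// typeofJsonValue() distinguishes "array" and "null" from plain "object":
typeofJsonValue([]);    // "array"
typeofJsonValue(null);  // "null"
isJsonObject({ a: 1 }); // true
isJsonObject([]);       // false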
- */ - get sendComplete() { - if (this.lastInput instanceof TestInputStream) { - return this.lastInput.completed; + else if (capNext) { + sb.push(next.toUpperCase()); + capNext = false; } - else if (typeof this.lastInput == "object") { - return true; + else if (i == 0) { + sb.push(next.toLowerCase()); + } + else { + sb.push(next); } - return false; } - // Creates a promise for response headers from the mock data. - promiseHeaders() { - var _a; - const headers = (_a = this.data.headers) !== null && _a !== void 0 ? _a : TestTransport.defaultHeaders; - return headers instanceof rpc_error_1.RpcError - ? Promise.reject(headers) - : Promise.resolve(headers); + return sb.join(''); +} +exports.lowerCamelCase = lowerCamelCase; + + +/***/ }), + +/***/ 28185: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.MESSAGE_TYPE = void 0; +/** + * The symbol used as a key on message objects to store the message type. + * + * Note that this is an experimental feature - it is here to stay, but + * implementation details may change without notice. + */ +exports.MESSAGE_TYPE = Symbol.for("protobuf-ts/message-type"); + + +/***/ }), + +/***/ 22845: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.MessageType = void 0; +const message_type_contract_1 = __nccwpck_require__(28185); +const reflection_info_1 = __nccwpck_require__(49354); +const reflection_type_check_1 = __nccwpck_require__(67737); +const reflection_json_reader_1 = __nccwpck_require__(81926); +const reflection_json_writer_1 = __nccwpck_require__(86596); +const reflection_binary_reader_1 = __nccwpck_require__(59879); +const reflection_binary_writer_1 = __nccwpck_require__(39224); +const reflection_create_1 = __nccwpck_require__(93844); +const reflection_merge_partial_1 = __nccwpck_require__(44302); +const json_typings_1 = __nccwpck_require__(66015); +const json_format_contract_1 = __nccwpck_require__(53975); +const reflection_equals_1 = __nccwpck_require__(819); +const binary_writer_1 = __nccwpck_require__(34474); +const binary_reader_1 = __nccwpck_require__(94672); +const baseDescriptors = Object.getOwnPropertyDescriptors(Object.getPrototypeOf({})); +/** + * This standard message type provides reflection-based + * operations to work with a message. + */ +class MessageType { + constructor(name, fields, options) { + this.defaultCheckDepth = 16; + this.typeName = name; + this.fields = fields.map(reflection_info_1.normalizeFieldInfo); + this.options = options !== null && options !== void 0 ? options : {}; + this.messagePrototype = Object.create(null, Object.assign(Object.assign({}, baseDescriptors), { [message_type_contract_1.MESSAGE_TYPE]: { value: this } })); + this.refTypeCheck = new reflection_type_check_1.ReflectionTypeCheck(this); + this.refJsonReader = new reflection_json_reader_1.ReflectionJsonReader(this); + this.refJsonWriter = new reflection_json_writer_1.ReflectionJsonWriter(this); + this.refBinReader = new reflection_binary_reader_1.ReflectionBinaryReader(this); + this.refBinWriter = new reflection_binary_writer_1.ReflectionBinaryWriter(this); } - // Creates a promise for a single, valid, message from the mock data. 
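// --- Editorial usage sketch (illustrative only, not part of the bundled diff) ---
// The lowerCamelCase() helper defined in the module above mirrors protoc's
// snake_case conversion; an underscore or a digit forces the next character
// to upper case, and the first character is always lowered.
lowerCamelCase("snake_case_field"); // "snakeCaseField"
lowerCamelCase("field_1name");      // "field1Name"
lowerCamelCase("Already");          // "already"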
- promiseSingleResponse(method) { - if (this.data.response instanceof rpc_error_1.RpcError) { - return Promise.reject(this.data.response); - } - let r; - if (Array.isArray(this.data.response)) { - runtime_1.assert(this.data.response.length > 0); - r = this.data.response[0]; - } - else if (this.data.response !== undefined) { - r = this.data.response; - } - else { - r = method.O.create(); + create(value) { + let message = reflection_create_1.reflectionCreate(this); + if (value !== undefined) { + reflection_merge_partial_1.reflectionMergePartial(this, message, value); } - runtime_1.assert(method.O.is(r)); - return Promise.resolve(r); + return message; } /** - * Pushes response messages from the mock data to the output stream. - * If an error response, status or trailers are mocked, the stream is - * closed with the respective error. - * Otherwise, stream is completed successfully. + * Clone the message. * - * The returned promise resolves when the stream is closed. It should - * not reject. If it does, code is broken. + * Unknown fields are discarded. */ - streamResponses(method, stream, abort) { - return __awaiter(this, void 0, void 0, function* () { - // normalize "data.response" into an array of valid output messages - const messages = []; - if (this.data.response === undefined) { - messages.push(method.O.create()); - } - else if (Array.isArray(this.data.response)) { - for (let msg of this.data.response) { - runtime_1.assert(method.O.is(msg)); - messages.push(msg); - } - } - else if (!(this.data.response instanceof rpc_error_1.RpcError)) { - runtime_1.assert(method.O.is(this.data.response)); - messages.push(this.data.response); - } - // start the stream with an initial delay. - // if the request is cancelled, notify() error and exit. - try { - yield delay(this.responseDelay, abort)(undefined); - } - catch (error) { - stream.notifyError(error); - return; - } - // if error response was mocked, notify() error (stream is now closed with error) and exit. - if (this.data.response instanceof rpc_error_1.RpcError) { - stream.notifyError(this.data.response); - return; - } - // regular response messages were mocked. notify() them. - for (let msg of messages) { - stream.notifyMessage(msg); - // add a short delay between responses - // if the request is cancelled, notify() error and exit. - try { - yield delay(this.betweenResponseDelay, abort)(undefined); - } - catch (error) { - stream.notifyError(error); - return; - } - } - // error status was mocked, notify() error (stream is now closed with error) and exit. - if (this.data.status instanceof rpc_error_1.RpcError) { - stream.notifyError(this.data.status); - return; - } - // error trailers were mocked, notify() error (stream is now closed with error) and exit. - if (this.data.trailers instanceof rpc_error_1.RpcError) { - stream.notifyError(this.data.trailers); - return; - } - // stream completed successfully - stream.notifyComplete(); - }); + clone(message) { + let copy = this.create(); + reflection_merge_partial_1.reflectionMergePartial(this, copy, message); + return copy; } - // Creates a promise for response status from the mock data. - promiseStatus() { - var _a; - const status = (_a = this.data.status) !== null && _a !== void 0 ? _a : TestTransport.defaultStatus; - return status instanceof rpc_error_1.RpcError - ? Promise.reject(status) - : Promise.resolve(status); + /** + * Determines whether two messages of the same type have the same field values. 
+ * Checks for deep equality, traversing repeated fields, oneof groups, maps + * and messages recursively. + * Will also return true if both messages are `undefined`. + */ + equals(a, b) { + return reflection_equals_1.reflectionEquals(this, a, b); } - // Creates a promise for response trailers from the mock data. - promiseTrailers() { - var _a; - const trailers = (_a = this.data.trailers) !== null && _a !== void 0 ? _a : TestTransport.defaultTrailers; - return trailers instanceof rpc_error_1.RpcError - ? Promise.reject(trailers) - : Promise.resolve(trailers); + /** + * Is the given value assignable to our message type + * and contains no [excess properties](https://www.typescriptlang.org/docs/handbook/interfaces.html#excess-property-checks)? + */ + is(arg, depth = this.defaultCheckDepth) { + return this.refTypeCheck.is(arg, depth, false); } - maybeSuppressUncaught(...promise) { - if (this.suppressUncaughtRejections) { - for (let p of promise) { - p.catch(() => { - }); - } - } + /** + * Is the given value assignable to our message type, + * regardless of [excess properties](https://www.typescriptlang.org/docs/handbook/interfaces.html#excess-property-checks)? + */ + isAssignable(arg, depth = this.defaultCheckDepth) { + return this.refTypeCheck.is(arg, depth, true); } - mergeOptions(options) { - return rpc_options_1.mergeRpcOptions({}, options); + /** + * Copy partial data into the target message. + */ + mergePartial(target, source) { + reflection_merge_partial_1.reflectionMergePartial(this, target, source); } - unary(method, input, options) { - var _a; - const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? _a : {}, headersPromise = this.promiseHeaders() - .then(delay(this.headerDelay, options.abort)), responsePromise = headersPromise - .catch(_ => { - }) - .then(delay(this.responseDelay, options.abort)) - .then(_ => this.promiseSingleResponse(method)), statusPromise = responsePromise - .catch(_ => { - }) - .then(delay(this.afterResponseDelay, options.abort)) - .then(_ => this.promiseStatus()), trailersPromise = responsePromise - .catch(_ => { - }) - .then(delay(this.afterResponseDelay, options.abort)) - .then(_ => this.promiseTrailers()); - this.maybeSuppressUncaught(statusPromise, trailersPromise); - this.lastInput = { single: input }; - return new unary_call_1.UnaryCall(method, requestHeaders, input, headersPromise, responsePromise, statusPromise, trailersPromise); + /** + * Create a new message from binary format. + */ + fromBinary(data, options) { + let opt = binary_reader_1.binaryReadOptions(options); + return this.internalBinaryRead(opt.readerFactory(data), data.byteLength, opt); } - serverStreaming(method, input, options) { - var _a; - const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? 
_a : {}, headersPromise = this.promiseHeaders() - .then(delay(this.headerDelay, options.abort)), outputStream = new rpc_output_stream_1.RpcOutputStreamController(), responseStreamClosedPromise = headersPromise - .then(delay(this.responseDelay, options.abort)) - .catch(() => { - }) - .then(() => this.streamResponses(method, outputStream, options.abort)) - .then(delay(this.afterResponseDelay, options.abort)), statusPromise = responseStreamClosedPromise - .then(() => this.promiseStatus()), trailersPromise = responseStreamClosedPromise - .then(() => this.promiseTrailers()); - this.maybeSuppressUncaught(statusPromise, trailersPromise); - this.lastInput = { single: input }; - return new server_streaming_call_1.ServerStreamingCall(method, requestHeaders, input, headersPromise, outputStream, statusPromise, trailersPromise); + /** + * Read a new message from a JSON value. + */ + fromJson(json, options) { + return this.internalJsonRead(json, json_format_contract_1.jsonReadOptions(options)); } - clientStreaming(method, options) { - var _a; - const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? _a : {}, headersPromise = this.promiseHeaders() - .then(delay(this.headerDelay, options.abort)), responsePromise = headersPromise - .catch(_ => { - }) - .then(delay(this.responseDelay, options.abort)) - .then(_ => this.promiseSingleResponse(method)), statusPromise = responsePromise - .catch(_ => { - }) - .then(delay(this.afterResponseDelay, options.abort)) - .then(_ => this.promiseStatus()), trailersPromise = responsePromise - .catch(_ => { - }) - .then(delay(this.afterResponseDelay, options.abort)) - .then(_ => this.promiseTrailers()); - this.maybeSuppressUncaught(statusPromise, trailersPromise); - this.lastInput = new TestInputStream(this.data, options.abort); - return new client_streaming_call_1.ClientStreamingCall(method, requestHeaders, this.lastInput, headersPromise, responsePromise, statusPromise, trailersPromise); + /** + * Read a new message from a JSON string. + * This is equivalent to `T.fromJson(JSON.parse(json))`. + */ + fromJsonString(json, options) { + let value = JSON.parse(json); + return this.fromJson(value, options); } - duplex(method, options) { + /** + * Write the message to canonical JSON value. + */ + toJson(message, options) { + return this.internalJsonWrite(message, json_format_contract_1.jsonWriteOptions(options)); + } + /** + * Convert the message to canonical JSON string. + * This is equivalent to `JSON.stringify(T.toJson(t))` + */ + toJsonString(message, options) { var _a; - const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? 
_a : {}, headersPromise = this.promiseHeaders() - .then(delay(this.headerDelay, options.abort)), outputStream = new rpc_output_stream_1.RpcOutputStreamController(), responseStreamClosedPromise = headersPromise - .then(delay(this.responseDelay, options.abort)) - .catch(() => { - }) - .then(() => this.streamResponses(method, outputStream, options.abort)) - .then(delay(this.afterResponseDelay, options.abort)), statusPromise = responseStreamClosedPromise - .then(() => this.promiseStatus()), trailersPromise = responseStreamClosedPromise - .then(() => this.promiseTrailers()); - this.maybeSuppressUncaught(statusPromise, trailersPromise); - this.lastInput = new TestInputStream(this.data, options.abort); - return new duplex_streaming_call_1.DuplexStreamingCall(method, requestHeaders, this.lastInput, headersPromise, outputStream, statusPromise, trailersPromise); + let value = this.toJson(message, options); + return JSON.stringify(value, null, (_a = options === null || options === void 0 ? void 0 : options.prettySpaces) !== null && _a !== void 0 ? _a : 0); } -} -exports.TestTransport = TestTransport; -TestTransport.defaultHeaders = { - responseHeader: "test" -}; -TestTransport.defaultStatus = { - code: "OK", detail: "all good" -}; -TestTransport.defaultTrailers = { - responseTrailer: "test" -}; -function delay(ms, abort) { - return (v) => new Promise((resolve, reject) => { - if (abort === null || abort === void 0 ? void 0 : abort.aborted) { - reject(new rpc_error_1.RpcError("user cancel", "CANCELLED")); - } - else { - const id = setTimeout(() => resolve(v), ms); - if (abort) { - abort.addEventListener("abort", ev => { - clearTimeout(id); - reject(new rpc_error_1.RpcError("user cancel", "CANCELLED")); - }); - } - } - }); -} -class TestInputStream { - constructor(data, abort) { - this._completed = false; - this._sent = []; - this.data = data; - this.abort = abort; + /** + * Write the message to binary format. + */ + toBinary(message, options) { + let opt = binary_writer_1.binaryWriteOptions(options); + return this.internalBinaryWrite(message, opt.writerFactory(), opt).finish(); } - get sent() { - return this._sent; + /** + * This is an internal method. If you just want to read a message from + * JSON, use `fromJson()` or `fromJsonString()`. + * + * Reads JSON value and merges the fields into the target + * according to protobuf rules. If the target is omitted, + * a new instance is created first. + */ + internalJsonRead(json, options, target) { + if (json !== null && typeof json == "object" && !Array.isArray(json)) { + let message = target !== null && target !== void 0 ? target : this.create(); + this.refJsonReader.read(json, message, options); + return message; + } + throw new Error(`Unable to parse message ${this.typeName} from JSON ${json_typings_1.typeofJsonValue(json)}.`); } - get completed() { - return this._completed; + /** + * This is an internal method. If you just want to write a message + * to JSON, use `toJson()` or `toJsonString()`. + * + * Writes JSON value and returns it. + */ + internalJsonWrite(message, options) { + return this.refJsonWriter.write(message, options); } - send(message) { - if (this.data.inputMessage instanceof rpc_error_1.RpcError) { - return Promise.reject(this.data.inputMessage); - } - const delayMs = this.data.inputMessage === undefined - ? 10 - : this.data.inputMessage; - return Promise.resolve(undefined) - .then(() => { - this._sent.push(message); - }) - .then(delay(delayMs, this.abort)); + /** + * This is an internal method. 
If you just want to write a message + * in binary format, use `toBinary()`. + * + * Serializes the message in binary format and appends it to the given + * writer. Returns passed writer. + */ + internalBinaryWrite(message, writer, options) { + this.refBinWriter.write(message, writer, options); + return writer; } - complete() { - if (this.data.inputComplete instanceof rpc_error_1.RpcError) { - return Promise.reject(this.data.inputComplete); - } - const delayMs = this.data.inputComplete === undefined - ? 10 - : this.data.inputComplete; - return Promise.resolve(undefined) - .then(() => { - this._completed = true; - }) - .then(delay(delayMs, this.abort)); + /** + * This is an internal method. If you just want to read a message from + * binary data, use `fromBinary()`. + * + * Reads data from binary format and merges the fields into + * the target according to protobuf rules. If the target is + * omitted, a new instance is created first. + */ + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(); + this.refBinReader.read(reader, message, options, length); + return message; } } +exports.MessageType = MessageType; /***/ }), -/***/ 34882: -/***/ (function(__unused_webpack_module, exports) { +/***/ 76391: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.UnaryCall = void 0; +exports.getSelectedOneofValue = exports.clearOneofValue = exports.setUnknownOneofValue = exports.setOneofValue = exports.getOneofValue = exports.isOneofGroup = void 0; /** - * A unary RPC call. Unary means there is exactly one input message and - * exactly one output message unless an error occurred. + * Is the given value a valid oneof group? + * + * We represent protobuf `oneof` as algebraic data types (ADT) in generated + * code. But when working with messages of unknown type, the ADT does not + * help us. + * + * This type guard checks if the given object adheres to the ADT rules, which + * are as follows: + * + * 1) Must be an object. + * + * 2) Must have a "oneofKind" discriminator property. + * + * 3) If "oneofKind" is `undefined`, no member field is selected. The object + * must not have any other properties. + * + * 4) If "oneofKind" is a `string`, the member field with this name is + * selected. + * + * 5) If a member field is selected, the object must have a second property + * with this name. The property must not be `undefined`. + * + * 6) No extra properties are allowed. The object has either one property + * (no selection) or two properties (selection). 
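// --- Editorial usage sketch (illustrative only, not part of the bundled diff) ---
// A minimal, hypothetical use of the MessageType class completed above. The
// type name "example.ExampleMsg" and its two fields are invented for
// illustration; the descriptors follow the shape normalizeFieldInfo() expects,
// and MessageType, ScalarType and MESSAGE_TYPE are assumed to come from the
// runtime re-exports shown in the index module earlier.
const ExampleMsg = new MessageType("example.ExampleMsg", [
    { no: 1, name: "name", kind: "scalar", T: ScalarType.STRING },
    { no: 2, name: "count", kind: "scalar", T: ScalarType.INT32 },
]);
// create() builds defaults, then merges the partial input:
const exampleMsg = ExampleMsg.create({ name: "hello" });
// Messages remember their type via the MESSAGE_TYPE symbol placed on the
// message prototype in the constructor above:
console.assert(exampleMsg[MESSAGE_TYPE] === ExampleMsg);
// Binary and JSON round trips go through the reflection readers/writers:
const exampleBytes = ExampleMsg.toBinary(exampleMsg);
console.assert(ExampleMsg.equals(exampleMsg, ExampleMsg.fromBinary(exampleBytes)));
console.log(ExampleMsg.toJsonString(exampleMsg)); // {"name":"hello"}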
+ * */ -class UnaryCall { - constructor(method, requestHeaders, request, headers, response, status, trailers) { - this.method = method; - this.requestHeaders = requestHeaders; - this.request = request; - this.headers = headers; - this.response = response; - this.status = status; - this.trailers = trailers; - } - /** - * If you are only interested in the final outcome of this call, - * you can await it to receive a `FinishedUnaryCall`. - */ - then(onfulfilled, onrejected) { - return this.promiseFinished().then(value => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, reason => onrejected ? Promise.resolve(onrejected(reason)) : Promise.reject(reason)); - } - promiseFinished() { - return __awaiter(this, void 0, void 0, function* () { - let [headers, response, status, trailers] = yield Promise.all([this.headers, this.response, this.status, this.trailers]); - return { - method: this.method, - requestHeaders: this.requestHeaders, - request: this.request, - headers, - response, - status, - trailers - }; - }); +function isOneofGroup(any) { + if (typeof any != 'object' || any === null || !any.hasOwnProperty('oneofKind')) { + return false; } -} -exports.UnaryCall = UnaryCall; - - -/***/ }), - -/***/ 88501: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.assertFloat32 = exports.assertUInt32 = exports.assertInt32 = exports.assertNever = exports.assert = void 0; -/** - * assert that condition is true or throw error (with message) - */ -function assert(condition, msg) { - if (!condition) { - throw new Error(msg); + switch (typeof any.oneofKind) { + case "string": + if (any[any.oneofKind] === undefined) + return false; + return Object.keys(any).length == 2; + case "undefined": + return Object.keys(any).length == 1; + default: + return false; } } -exports.assert = assert; +exports.isOneofGroup = isOneofGroup; /** - * assert that value cannot exist = type `never`. throw runtime error if it does. + * Returns the value of the given field in a oneof group. */ -function assertNever(value, msg) { - throw new Error(msg !== null && msg !== void 0 ? 
msg : 'Unexpected object: ' + value); -} -exports.assertNever = assertNever; -const FLOAT32_MAX = 3.4028234663852886e+38, FLOAT32_MIN = -3.4028234663852886e+38, UINT32_MAX = 0xFFFFFFFF, INT32_MAX = 0X7FFFFFFF, INT32_MIN = -0X80000000; -function assertInt32(arg) { - if (typeof arg !== "number") - throw new Error('invalid int 32: ' + typeof arg); - if (!Number.isInteger(arg) || arg > INT32_MAX || arg < INT32_MIN) - throw new Error('invalid int 32: ' + arg); +function getOneofValue(oneof, kind) { + return oneof[kind]; } -exports.assertInt32 = assertInt32; -function assertUInt32(arg) { - if (typeof arg !== "number") - throw new Error('invalid uint 32: ' + typeof arg); - if (!Number.isInteger(arg) || arg > UINT32_MAX || arg < 0) - throw new Error('invalid uint 32: ' + arg); +exports.getOneofValue = getOneofValue; +function setOneofValue(oneof, kind, value) { + if (oneof.oneofKind !== undefined) { + delete oneof[oneof.oneofKind]; + } + oneof.oneofKind = kind; + if (value !== undefined) { + oneof[kind] = value; + } } -exports.assertUInt32 = assertUInt32; -function assertFloat32(arg) { - if (typeof arg !== "number") - throw new Error('invalid float 32: ' + typeof arg); - if (!Number.isFinite(arg)) - return; - if (arg > FLOAT32_MAX || arg < FLOAT32_MIN) - throw new Error('invalid float 32: ' + arg); +exports.setOneofValue = setOneofValue; +function setUnknownOneofValue(oneof, kind, value) { + if (oneof.oneofKind !== undefined) { + delete oneof[oneof.oneofKind]; + } + oneof.oneofKind = kind; + if (value !== undefined && kind !== undefined) { + oneof[kind] = value; + } } -exports.assertFloat32 = assertFloat32; - - -/***/ }), - -/***/ 90943: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.base64encode = exports.base64decode = void 0; -// lookup table from base64 character to byte -let encTable = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'.split(''); -// lookup table from base64 character *code* to byte because lookup by number is fast -let decTable = []; -for (let i = 0; i < encTable.length; i++) - decTable[encTable[i].charCodeAt(0)] = i; -// support base64url variants -decTable["-".charCodeAt(0)] = encTable.indexOf("+"); -decTable["_".charCodeAt(0)] = encTable.indexOf("/"); +exports.setUnknownOneofValue = setUnknownOneofValue; /** - * Decodes a base64 string to a byte array. + * Removes the selected field in a oneof group. 
* - * - ignores white-space, including line breaks and tabs - * - allows inner padding (can decode concatenated base64 strings) - * - does not require padding - * - understands base64url encoding: - * "-" instead of "+", - * "_" instead of "/", - * no padding + * Note that the recommended way to modify a oneof group is to set + * a new object: + * + * ```ts + * message.result = { oneofKind: undefined }; + * ``` */ -function base64decode(base64Str) { - // estimate byte size, not accounting for inner padding and whitespace - let es = base64Str.length * 3 / 4; - // if (es % 3 !== 0) - // throw new Error('invalid base64 string'); - if (base64Str[base64Str.length - 2] == '=') - es -= 2; - else if (base64Str[base64Str.length - 1] == '=') - es -= 1; - let bytes = new Uint8Array(es), bytePos = 0, // position in byte array - groupPos = 0, // position in base64 group - b, // current byte - p = 0 // previous byte - ; - for (let i = 0; i < base64Str.length; i++) { - b = decTable[base64Str.charCodeAt(i)]; - if (b === undefined) { - // noinspection FallThroughInSwitchStatementJS - switch (base64Str[i]) { - case '=': - groupPos = 0; // reset state when padding found - case '\n': - case '\r': - case '\t': - case ' ': - continue; // skip white-space, and padding - default: - throw Error(`invalid base64 string.`); - } - } - switch (groupPos) { - case 0: - p = b; - groupPos = 1; - break; - case 1: - bytes[bytePos++] = p << 2 | (b & 48) >> 4; - p = b; - groupPos = 2; - break; - case 2: - bytes[bytePos++] = (p & 15) << 4 | (b & 60) >> 2; - p = b; - groupPos = 3; - break; - case 3: - bytes[bytePos++] = (p & 3) << 6 | b; - groupPos = 0; - break; - } +function clearOneofValue(oneof) { + if (oneof.oneofKind !== undefined) { + delete oneof[oneof.oneofKind]; } - if (groupPos == 1) - throw Error(`invalid base64 string.`); - return bytes.subarray(0, bytePos); + oneof.oneofKind = undefined; } -exports.base64decode = base64decode; +exports.clearOneofValue = clearOneofValue; /** - * Encodes a byte array to a base64 string. - * Adds padding at the end. - * Does not insert newlines. + * Returns the selected value of the given oneof group. + * + * Note that the recommended way to access a oneof group is to check + * the "oneofKind" property and let TypeScript narrow down the union + * type for you: + * + * ```ts + * if (message.result.oneofKind === "error") { + * message.result.error; // string + * } + * ``` + * + * In the rare case you just need the value, and do not care about + * which protobuf field is selected, you can use this function + * for convenience. */ -function base64encode(bytes) { - let base64 = '', groupPos = 0, // position in base64 group - b, // current byte - p = 0; // carry over from previous byte - for (let i = 0; i < bytes.length; i++) { - b = bytes[i]; - switch (groupPos) { - case 0: - base64 += encTable[b >> 2]; - p = (b & 3) << 4; - groupPos = 1; - break; - case 1: - base64 += encTable[p | b >> 4]; - p = (b & 15) << 2; - groupPos = 2; - break; - case 2: - base64 += encTable[p | b >> 6]; - base64 += encTable[b & 63]; - groupPos = 0; - break; - } - } - // padding required? 
- if (groupPos) { - base64 += encTable[p]; - base64 += '='; - if (groupPos == 1) - base64 += '='; +function getSelectedOneofValue(oneof) { + if (oneof.oneofKind === undefined) { + return undefined; } - return base64; + return oneof[oneof.oneofKind]; } -exports.base64encode = base64encode; +exports.getSelectedOneofValue = getSelectedOneofValue; /***/ }), -/***/ 24290: -/***/ ((__unused_webpack_module, exports) => { +/***/ 9569: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.WireType = exports.mergeBinaryOptions = exports.UnknownFieldHandler = void 0; -/** - * This handler implements the default behaviour for unknown fields. - * When reading data, unknown fields are stored on the message, in a - * symbol property. - * When writing data, the symbol property is queried and unknown fields - * are serialized into the output again. - */ -var UnknownFieldHandler; -(function (UnknownFieldHandler) { +exports.PbLong = exports.PbULong = exports.detectBi = void 0; +const goog_varint_1 = __nccwpck_require__(56568); +let BI; +function detectBi() { + const dv = new DataView(new ArrayBuffer(8)); + const ok = globalThis.BigInt !== undefined + && typeof dv.getBigInt64 === "function" + && typeof dv.getBigUint64 === "function" + && typeof dv.setBigInt64 === "function" + && typeof dv.setBigUint64 === "function"; + BI = ok ? { + MIN: BigInt("-9223372036854775808"), + MAX: BigInt("9223372036854775807"), + UMIN: BigInt("0"), + UMAX: BigInt("18446744073709551615"), + C: BigInt, + V: dv, + } : undefined; +} +exports.detectBi = detectBi; +detectBi(); +function assertBi(bi) { + if (!bi) + throw new Error("BigInt unavailable, see https://github.com/timostamm/protobuf-ts/blob/v1.0.8/MANUAL.md#bigint-support"); +} +// used to validate from(string) input (when bigint is unavailable) +const RE_DECIMAL_STR = /^-?[0-9]+$/; +// constants for binary math +const TWO_PWR_32_DBL = 0x100000000; +const HALF_2_PWR_32 = 0x080000000; +// base class for PbLong and PbULong provides shared code +class SharedPbLong { /** - * The symbol used to store unknown fields for a message. - * The property must conform to `UnknownFieldContainer`. + * Create a new instance with the given bits. */ - UnknownFieldHandler.symbol = Symbol.for("protobuf-ts/unknown"); + constructor(lo, hi) { + this.lo = lo | 0; + this.hi = hi | 0; + } /** - * Store an unknown field during binary read directly on the message. - * This method is compatible with `BinaryReadOptions.readUnknownField`. + * Is this instance equal to 0? */ - UnknownFieldHandler.onRead = (typeName, message, fieldNo, wireType, data) => { - let container = is(message) ? message[UnknownFieldHandler.symbol] : message[UnknownFieldHandler.symbol] = []; - container.push({ no: fieldNo, wireType, data }); - }; + isZero() { + return this.lo == 0 && this.hi == 0; + } /** - * Write unknown fields stored for the message to the writer. - * This method is compatible with `BinaryWriteOptions.writeUnknownFields`. + * Convert to a native number. */ - UnknownFieldHandler.onWrite = (typeName, message, writer) => { - for (let { no, wireType, data } of UnknownFieldHandler.list(message)) - writer.tag(no, wireType).raw(data); - }; + toNumber() { + let result = this.hi * TWO_PWR_32_DBL + (this.lo >>> 0); + if (!Number.isSafeInteger(result)) + throw new Error("cannot convert to safe number"); + return result; + } +} +/** + * 64-bit unsigned integer as two 32-bit values. 
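// --- Editorial usage sketch (illustrative only, not part of the bundled diff) ---
// The oneof ADT helpers completed above, applied to a hypothetical `result`
// group with an `error` member. An unselected group has exactly one property;
// a selected group has exactly two.
const exampleResult = { oneofKind: undefined };
console.assert(isOneofGroup(exampleResult));
setOneofValue(exampleResult, "error", "boom");
console.assert(exampleResult.oneofKind === "error");
console.assert(getSelectedOneofValue(exampleResult) === "boom");
clearOneofValue(exampleResult);
console.assert(exampleResult.oneofKind === undefined && !("error" in exampleResult));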
+ * Converts between `string`, `number` and `bigint` representations. + */ +class PbULong extends SharedPbLong { /** - * List unknown fields stored for the message. - * Note that there may be multiples fields with the same number. + * Create instance from a `string`, `number` or `bigint`. */ - UnknownFieldHandler.list = (message, fieldNo) => { - if (is(message)) { - let all = message[UnknownFieldHandler.symbol]; - return fieldNo ? all.filter(uf => uf.no == fieldNo) : all; - } - return []; - }; + static from(value) { + if (BI) + // noinspection FallThroughInSwitchStatementJS + switch (typeof value) { + case "string": + if (value == "0") + return this.ZERO; + if (value == "") + throw new Error('string is no integer'); + value = BI.C(value); + case "number": + if (value === 0) + return this.ZERO; + value = BI.C(value); + case "bigint": + if (!value) + return this.ZERO; + if (value < BI.UMIN) + throw new Error('signed value for ulong'); + if (value > BI.UMAX) + throw new Error('ulong too large'); + BI.V.setBigUint64(0, value, true); + return new PbULong(BI.V.getInt32(0, true), BI.V.getInt32(4, true)); + } + else + switch (typeof value) { + case "string": + if (value == "0") + return this.ZERO; + value = value.trim(); + if (!RE_DECIMAL_STR.test(value)) + throw new Error('string is no integer'); + let [minus, lo, hi] = goog_varint_1.int64fromString(value); + if (minus) + throw new Error('signed value for ulong'); + return new PbULong(lo, hi); + case "number": + if (value == 0) + return this.ZERO; + if (!Number.isSafeInteger(value)) + throw new Error('number is no integer'); + if (value < 0) + throw new Error('signed value for ulong'); + return new PbULong(value, value / TWO_PWR_32_DBL); + } + throw new Error('unknown value ' + typeof value); + } /** - * Returns the last unknown field by field number. + * Convert to decimal string. */ - UnknownFieldHandler.last = (message, fieldNo) => UnknownFieldHandler.list(message, fieldNo).slice(-1)[0]; - const is = (message) => message && Array.isArray(message[UnknownFieldHandler.symbol]); -})(UnknownFieldHandler = exports.UnknownFieldHandler || (exports.UnknownFieldHandler = {})); + toString() { + return BI ? this.toBigInt().toString() : goog_varint_1.int64toString(this.lo, this.hi); + } + /** + * Convert to native bigint. + */ + toBigInt() { + assertBi(BI); + BI.V.setInt32(0, this.lo, true); + BI.V.setInt32(4, this.hi, true); + return BI.V.getBigUint64(0, true); + } +} +exports.PbULong = PbULong; /** - * Merges binary write or read options. Later values override earlier values. + * ulong 0 singleton. */ -function mergeBinaryOptions(a, b) { - return Object.assign(Object.assign({}, a), b); -} -exports.mergeBinaryOptions = mergeBinaryOptions; +PbULong.ZERO = new PbULong(0, 0); /** - * Protobuf binary format wire types. - * - * A wire type provides just enough information to find the length of the - * following value. - * - * See https://developers.google.com/protocol-buffers/docs/encoding#structure + * 64-bit signed integer as two 32-bit values. + * Converts between `string`, `number` and `bigint` representations. */ -var WireType; -(function (WireType) { - /** - * Used for int32, int64, uint32, uint64, sint32, sint64, bool, enum - */ - WireType[WireType["Varint"] = 0] = "Varint"; +class PbLong extends SharedPbLong { /** - * Used for fixed64, sfixed64, double. - * Always 8 bytes with little-endian byte order. + * Create instance from a `string`, `number` or `bigint`. 
*/ - WireType[WireType["Bit64"] = 1] = "Bit64"; + static from(value) { + if (BI) + // noinspection FallThroughInSwitchStatementJS + switch (typeof value) { + case "string": + if (value == "0") + return this.ZERO; + if (value == "") + throw new Error('string is no integer'); + value = BI.C(value); + case "number": + if (value === 0) + return this.ZERO; + value = BI.C(value); + case "bigint": + if (!value) + return this.ZERO; + if (value < BI.MIN) + throw new Error('signed long too small'); + if (value > BI.MAX) + throw new Error('signed long too large'); + BI.V.setBigInt64(0, value, true); + return new PbLong(BI.V.getInt32(0, true), BI.V.getInt32(4, true)); + } + else + switch (typeof value) { + case "string": + if (value == "0") + return this.ZERO; + value = value.trim(); + if (!RE_DECIMAL_STR.test(value)) + throw new Error('string is no integer'); + let [minus, lo, hi] = goog_varint_1.int64fromString(value); + if (minus) { + if (hi > HALF_2_PWR_32 || (hi == HALF_2_PWR_32 && lo != 0)) + throw new Error('signed long too small'); + } + else if (hi >= HALF_2_PWR_32) + throw new Error('signed long too large'); + let pbl = new PbLong(lo, hi); + return minus ? pbl.negate() : pbl; + case "number": + if (value == 0) + return this.ZERO; + if (!Number.isSafeInteger(value)) + throw new Error('number is no integer'); + return value > 0 + ? new PbLong(value, value / TWO_PWR_32_DBL) + : new PbLong(-value, -value / TWO_PWR_32_DBL).negate(); + } + throw new Error('unknown value ' + typeof value); + } /** - * Used for string, bytes, embedded messages, packed repeated fields - * - * Only repeated numeric types (types which use the varint, 32-bit, - * or 64-bit wire types) can be packed. In proto3, such fields are - * packed by default. + * Do we have a minus sign? */ - WireType[WireType["LengthDelimited"] = 2] = "LengthDelimited"; + isNegative() { + return (this.hi & HALF_2_PWR_32) !== 0; + } /** - * Used for groups - * @deprecated + * Negate two's complement. + * Invert all the bits and add one to the result. */ - WireType[WireType["StartGroup"] = 3] = "StartGroup"; + negate() { + let hi = ~this.hi, lo = this.lo; + if (lo) + lo = ~lo + 1; + else + hi += 1; + return new PbLong(lo, hi); + } /** - * Used for groups - * @deprecated + * Convert to decimal string. */ - WireType[WireType["EndGroup"] = 4] = "EndGroup"; - /** - * Used for fixed32, sfixed32, float. - * Always 4 bytes with little-endian byte order. + toString() { + if (BI) + return this.toBigInt().toString(); + if (this.isNegative()) { + let n = this.negate(); + return '-' + goog_varint_1.int64toString(n.lo, n.hi); + } + return goog_varint_1.int64toString(this.lo, this.hi); + } + /** + * Convert to native bigint. */ - WireType[WireType["Bit32"] = 5] = "Bit32"; -})(WireType = exports.WireType || (exports.WireType = {})); + toBigInt() { + assertBi(BI); + BI.V.setInt32(0, this.lo, true); + BI.V.setInt32(4, this.hi, true); + return BI.V.getBigInt64(0, true); + } +} +exports.PbLong = PbLong; +/** + * long 0 singleton. + */ +PbLong.ZERO = new PbLong(0, 0); /***/ }), -/***/ 15963: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 7465: +/***/ ((__unused_webpack_module, exports) => { "use strict"; +// Copyright (c) 2016, Daniel Wirtz All rights reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above copyright +// notice, this list of conditions and the following disclaimer in the +// documentation and/or other materials provided with the distribution. +// * Neither the name of its author, nor the names of its contributors +// may be used to endorse or promote products derived from this software +// without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.BinaryReader = exports.binaryReadOptions = void 0; -const binary_format_contract_1 = __nccwpck_require__(24290); -const pb_long_1 = __nccwpck_require__(19823); -const goog_varint_1 = __nccwpck_require__(58378); -const defaultsRead = { - readUnknownField: true, - readerFactory: bytes => new BinaryReader(bytes), -}; +exports.utf8read = void 0; +const fromCharCodes = (chunk) => String.fromCharCode.apply(String, chunk); /** - * Make options for reading binary data form partial options. + * @deprecated This function will no longer be exported with the next major + * release, since protobuf-ts has switched to TextDecoder API. If you need this + * function, please migrate to @protobufjs/utf8. For context, see + * https://github.com/timostamm/protobuf-ts/issues/184 + * + * Reads UTF8 bytes as a string. + * + * See [protobufjs / utf8](https://github.com/protobufjs/protobuf.js/blob/9893e35b854621cce64af4bf6be2cff4fb892796/lib/utf8/index.js#L40) + * + * Copyright (c) 2016, Daniel Wirtz */ -function binaryReadOptions(options) { - return options ? Object.assign(Object.assign({}, defaultsRead), options) : defaultsRead; -} -exports.binaryReadOptions = binaryReadOptions; -class BinaryReader { - constructor(buf, textDecoder) { - this.varint64 = goog_varint_1.varint64read; // dirty cast for `this` - /** - * Read a `uint32` field, an unsigned 32 bit varint. - */ - this.uint32 = goog_varint_1.varint32read; // dirty cast for `this` and access to protected `buf` - this.buf = buf; - this.len = buf.length; - this.pos = 0; - this.view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength); - this.textDecoder = textDecoder !== null && textDecoder !== void 0 ? textDecoder : new TextDecoder("utf-8", { - fatal: true, - ignoreBOM: true, - }); - } +function utf8read(bytes) { + if (bytes.length < 1) + return ""; + let pos = 0, // position in bytes + parts = [], chunk = [], i = 0, // char offset + t; // temporary + let len = bytes.length; + while (pos < len) { + t = bytes[pos++]; + if (t < 128) + chunk[i++] = t; + else if (t > 191 && t < 224) + chunk[i++] = (t & 31) << 6 | bytes[pos++] & 63; + else if (t > 239 && t < 365) { + t = ((t & 7) << 18 | (bytes[pos++] & 63) << 12 | (bytes[pos++] & 63) << 6 | bytes[pos++] & 63) - 0x10000; + chunk[i++] = 0xD800 + (t >> 10); + chunk[i++] = 0xDC00 + (t & 1023); + } + else + chunk[i++] = (t & 15) << 12 | (bytes[pos++] & 63) << 6 | bytes[pos++] & 63; + if (i > 8191) { + parts.push(fromCharCodes(chunk)); + i = 0; } - /** - * Reads a tag - field number and wire type. 
- */ - tag() { - let tag = this.uint32(), fieldNo = tag >>> 3, wireType = tag & 7; - if (fieldNo <= 0 || wireType < 0 || wireType > 5) - throw new Error("illegal tag: field no " + fieldNo + " wire type " + wireType); - return [fieldNo, wireType]; - } - /** - * Skip one element on the wire and return the skipped data. - * Supports WireType.StartGroup since v2.0.0-alpha.23. - */ - skip(wireType) { - let start = this.pos; - // noinspection FallThroughInSwitchStatementJS - switch (wireType) { - case binary_format_contract_1.WireType.Varint: - while (this.buf[this.pos++] & 0x80) { - // ignore - } - break; - case binary_format_contract_1.WireType.Bit64: - this.pos += 4; - case binary_format_contract_1.WireType.Bit32: - this.pos += 4; - break; - case binary_format_contract_1.WireType.LengthDelimited: - let len = this.uint32(); - this.pos += len; - break; - case binary_format_contract_1.WireType.StartGroup: - // From descriptor.proto: Group type is deprecated, not supported in proto3. - // But we must still be able to parse and treat as unknown. - let t; - while ((t = this.tag()[1]) !== binary_format_contract_1.WireType.EndGroup) { - this.skip(t); - } - break; - default: - throw new Error("cant skip wire type " + wireType); +function utf8read(bytes) { + if (bytes.length < 1) + return ""; + let pos = 0, // position in bytes + parts = [], chunk = [], i = 0, // char offset + t; // temporary + let len = bytes.length; + while (pos < len) { + t = bytes[pos++]; + if (t < 128) + chunk[i++] = t; + else if (t > 191 && t < 224) + chunk[i++] = (t & 31) << 6 | bytes[pos++] & 63; + else if (t > 239 && t < 365) { + t = ((t & 7) << 18 | (bytes[pos++] & 63) << 12 | (bytes[pos++] & 63) << 6 | bytes[pos++] & 63) - 0x10000; + chunk[i++] = 0xD800 + (t >> 10); + chunk[i++] = 0xDC00 + (t & 1023); + } + else + chunk[i++] = (t & 15) << 12 | (bytes[pos++] & 63) << 6 | bytes[pos++] & 63; + if (i > 8191) { + parts.push(fromCharCodes(chunk)); + i = 0; } - this.assertBounds(); - return this.buf.subarray(start, this.pos); - } - /** - * Throws error if position in byte array is out of range. - */ - assertBounds() { - if (this.pos > this.len) - throw new RangeError("premature EOF"); - } - /** - * Read a `int32` field, a signed 32 bit varint. - */ - int32() { - return this.uint32() | 0; - } - /** - * Read a `sint32` field, a signed, zigzag-encoded 32-bit varint. - */ - sint32() { - let zze = this.uint32(); - // decode zigzag - return (zze >>> 1) ^ -(zze & 1); - } - /** - * Read a `int64` field, a signed 64-bit varint. - */ - int64() { - return new pb_long_1.PbLong(...this.varint64()); - } - /** - * Read a `uint64` field, an unsigned 64-bit varint. - */ - uint64() { - return new pb_long_1.PbULong(...this.varint64()); - } - /** - * Read a `sint64` field, a signed, zig-zag-encoded 64-bit varint. - */ - sint64() { - let [lo, hi] = this.varint64(); - // decode zig zag - let s = -(lo & 1); - lo = ((lo >>> 1 | (hi & 1) << 31) ^ s); - hi = (hi >>> 1 ^ s); - return new pb_long_1.PbLong(lo, hi); - } - /** - * Read a `bool` field, a variant. - */ - bool() { - let [lo, hi] = this.varint64(); - return lo !== 0 || hi !== 0; - } - /** - * Read a `fixed32` field, an unsigned, fixed-length 32-bit integer. - */ - fixed32() { - return this.view.getUint32((this.pos += 4) - 4, true); - } - /** - * Read a `sfixed32` field, a signed, fixed-length 32-bit integer. - */ - sfixed32() { - return this.view.getInt32((this.pos += 4) - 4, true); - } - /** - * Read a `fixed64` field, an unsigned, fixed-length 64 bit integer. 
- */ - fixed64() { - return new pb_long_1.PbULong(this.sfixed32(), this.sfixed32()); - } - /** - * Read a `fixed64` field, a signed, fixed-length 64-bit integer. - */ - sfixed64() { - return new pb_long_1.PbLong(this.sfixed32(), this.sfixed32()); - } - /** - * Read a `float` field, 32-bit floating point number. - */ - float() { - return this.view.getFloat32((this.pos += 4) - 4, true); - } - /** - * Read a `double` field, a 64-bit floating point number. - */ - double() { - return this.view.getFloat64((this.pos += 8) - 8, true); - } - /** - * Read a `bytes` field, length-delimited arbitrary data. - */ - bytes() { - let len = this.uint32(); - let start = this.pos; - this.pos += len; - this.assertBounds(); - return this.buf.subarray(start, start + len); } - /** - * Read a `string` field, length-delimited data converted to UTF-8 text. - */ - string() { - return this.textDecoder.decode(this.bytes()); + if (parts.length) { + if (i) + parts.push(fromCharCodes(chunk.slice(0, i))); + return parts.join(""); } + return fromCharCodes(chunk.slice(0, i)); } -exports.BinaryReader = BinaryReader; +exports.utf8read = utf8read; /***/ }), -/***/ 11547: +/***/ 59879: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.BinaryWriter = exports.binaryWriteOptions = void 0; -const pb_long_1 = __nccwpck_require__(19823); -const goog_varint_1 = __nccwpck_require__(58378); -const assert_1 = __nccwpck_require__(88501); -const defaultsWrite = { - writeUnknownFields: true, - writerFactory: () => new BinaryWriter(), -}; +exports.ReflectionBinaryReader = void 0; +const binary_format_contract_1 = __nccwpck_require__(46158); +const reflection_info_1 = __nccwpck_require__(49354); +const reflection_long_convert_1 = __nccwpck_require__(98572); +const reflection_scalar_default_1 = __nccwpck_require__(40809); /** - * Make options for writing binary data form partial options. + * Reads proto3 messages in binary format using reflection information. + * + * https://developers.google.com/protocol-buffers/docs/encoding */ -function binaryWriteOptions(options) { - return options ? Object.assign(Object.assign({}, defaultsWrite), options) : defaultsWrite; -} -exports.binaryWriteOptions = binaryWriteOptions; -class BinaryWriter { - constructor(textEncoder) { - /** - * Previous fork states. - */ - this.stack = []; - this.textEncoder = textEncoder !== null && textEncoder !== void 0 ? textEncoder : new TextEncoder(); - this.chunks = []; - this.buf = []; +class ReflectionBinaryReader { + constructor(info) { + this.info = info; } - /** - * Return all bytes written and reset this writer. - */ - finish() { - this.chunks.push(new Uint8Array(this.buf)); // flush the buffer - let len = 0; - for (let i = 0; i < this.chunks.length; i++) - len += this.chunks[i].length; - let bytes = new Uint8Array(len); - let offset = 0; - for (let i = 0; i < this.chunks.length; i++) { - bytes.set(this.chunks[i], offset); - offset += this.chunks[i].length; + prepare() { + var _a; + if (!this.fieldNoToField) { + const fieldsInput = (_a = this.info.fields) !== null && _a !== void 0 ? _a : []; + this.fieldNoToField = new Map(fieldsInput.map(field => [field.no, field])); } - this.chunks = []; - return bytes; - } - /** - * Start a new fork for length-delimited data like a message - * or a packed repeated field. - * - * Must be joined later with `join()`. 
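// --- Editorial usage sketch (illustrative only, not part of the bundled diff) ---
// The (deprecated) utf8read() helper completed above decodes UTF-8 bytes,
// emitting surrogate pairs for code points beyond U+FFFF.
utf8read(new Uint8Array([104, 105]));               // "hi"
utf8read(new Uint8Array([0xF0, 0x9F, 0x98, 0x80])); // "\u{1F600}", decoded as the pair 0xD83D 0xDE00
utf8read(new Uint8Array([]));                       // ""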
- */ - fork() { - this.stack.push({ chunks: this.chunks, buf: this.buf }); - this.chunks = []; - this.buf = []; - return this; - } - /** - * Join the last fork. Write its length and bytes, then - * return to the previous state. - */ - join() { - // get chunk of fork - let chunk = this.finish(); - // restore previous state - let prev = this.stack.pop(); - if (!prev) - throw new Error('invalid state, fork stack empty'); - this.chunks = prev.chunks; - this.buf = prev.buf; - // write length of chunk as varint - this.uint32(chunk.byteLength); - return this.raw(chunk); } /** - * Writes a tag (field number and wire type). + * Reads a message from binary format into the target message. * - * Equivalent to `uint32( (fieldNo << 3 | type) >>> 0 )`. + * Repeated fields are appended. Map entries are added, overwriting + * existing keys. * - * Generated code should compute the tag ahead of time and call `uint32()`. + * If a message field is already present, it will be merged with the + * new data. */ - tag(fieldNo, type) { - return this.uint32((fieldNo << 3 | type) >>> 0); + read(reader, message, options, length) { + this.prepare(); + const end = length === undefined ? reader.len : reader.pos + length; + while (reader.pos < end) { + // read the tag and find the field + const [fieldNo, wireType] = reader.tag(), field = this.fieldNoToField.get(fieldNo); + if (!field) { + let u = options.readUnknownField; + if (u == "throw") + throw new Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.info.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? binary_format_contract_1.UnknownFieldHandler.onRead : u)(this.info.typeName, message, fieldNo, wireType, d); + continue; + } + // target object for the field we are reading + let target = message, repeated = field.repeat, localName = field.localName; + // if field is member of oneof ADT, use ADT as target + if (field.oneof) { + target = target[field.oneof]; + // if other oneof member selected, set new ADT + if (target.oneofKind !== localName) + target = message[field.oneof] = { + oneofKind: localName + }; + } + // we have handled oneof above, we just have read the value into `target[localName]` + switch (field.kind) { + case "scalar": + case "enum": + let T = field.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.T; + let L = field.kind == "scalar" ? field.L : undefined; + if (repeated) { + let arr = target[localName]; // safe to assume presence of array, oneof cannot contain repeated values + if (wireType == binary_format_contract_1.WireType.LengthDelimited && T != reflection_info_1.ScalarType.STRING && T != reflection_info_1.ScalarType.BYTES) { + let e = reader.uint32() + reader.pos; + while (reader.pos < e) + arr.push(this.scalar(reader, T, L)); + } + else + arr.push(this.scalar(reader, T, L)); + } + else + target[localName] = this.scalar(reader, T, L); + break; + case "message": + if (repeated) { + let arr = target[localName]; // safe to assume presence of array, oneof cannot contain repeated values + let msg = field.T().internalBinaryRead(reader, reader.uint32(), options); + arr.push(msg); + } + else + target[localName] = field.T().internalBinaryRead(reader, reader.uint32(), options, target[localName]); + break; + case "map": + let [mapKey, mapVal] = this.mapEntry(field, reader, options); + // safe to assume presence of map object, oneof cannot contain repeated values + target[localName][mapKey] = mapVal; + break; + } + } } /** - * Write a chunk of raw bytes. 
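// Sketch of how fork()/join() above frame a nested, length-delimited value
// (assuming the BinaryWriter and WireType exports of this runtime): join()
// pops the fork, writes the forked payload's varint length, then the payload
// itself, back in the parent scope.
const w = new BinaryWriter();
w.tag(3, WireType.LengthDelimited) // field 3, wire type 2
    .fork()                        // open nested scope
    .tag(1, WireType.Varint)
    .int32(150)
    .join();                       // emits length 0x03, then 0x08 0x96 0x01
w.finish();                        // -> [0x1A, 0x03, 0x08, 0x96, 0x01]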
+ * Read a map field, expecting key field = 1, value field = 2 */ - raw(chunk) { - if (this.buf.length) { - this.chunks.push(new Uint8Array(this.buf)); - this.buf = []; + mapEntry(field, reader, options) { + let length = reader.uint32(); + let end = reader.pos + length; + let key = undefined; // javascript only allows number or string for object properties + let val = undefined; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case 1: + if (field.K == reflection_info_1.ScalarType.BOOL) + key = reader.bool().toString(); + else + // long types are read as string, number types are okay as number + key = this.scalar(reader, field.K, reflection_info_1.LongType.STRING); + break; + case 2: + switch (field.V.kind) { + case "scalar": + val = this.scalar(reader, field.V.T, field.V.L); + break; + case "enum": + val = reader.int32(); + break; + case "message": + val = field.V.T().internalBinaryRead(reader, reader.uint32(), options); + break; + } + break; + default: + throw new Error(`Unknown field ${fieldNo} (wire type ${wireType}) in map entry for ${this.info.typeName}#${field.name}`); + } } - this.chunks.push(chunk); - return this; + if (key === undefined) { + let keyRaw = reflection_scalar_default_1.reflectionScalarDefault(field.K); + key = field.K == reflection_info_1.ScalarType.BOOL ? keyRaw.toString() : keyRaw; + } + if (val === undefined) + switch (field.V.kind) { + case "scalar": + val = reflection_scalar_default_1.reflectionScalarDefault(field.V.T, field.V.L); + break; + case "enum": + val = 0; + break; + case "message": + val = field.V.T().create(); + break; + } + return [key, val]; } - /** - * Write a `uint32` value, an unsigned 32 bit varint. - */ - uint32(value) { - assert_1.assertUInt32(value); - // write value as varint 32, inlined for speed - while (value > 0x7f) { - this.buf.push((value & 0x7f) | 0x80); - value = value >>> 7; + scalar(reader, type, longType) { + switch (type) { + case reflection_info_1.ScalarType.INT32: + return reader.int32(); + case reflection_info_1.ScalarType.STRING: + return reader.string(); + case reflection_info_1.ScalarType.BOOL: + return reader.bool(); + case reflection_info_1.ScalarType.DOUBLE: + return reader.double(); + case reflection_info_1.ScalarType.FLOAT: + return reader.float(); + case reflection_info_1.ScalarType.INT64: + return reflection_long_convert_1.reflectionLongConvert(reader.int64(), longType); + case reflection_info_1.ScalarType.UINT64: + return reflection_long_convert_1.reflectionLongConvert(reader.uint64(), longType); + case reflection_info_1.ScalarType.FIXED64: + return reflection_long_convert_1.reflectionLongConvert(reader.fixed64(), longType); + case reflection_info_1.ScalarType.FIXED32: + return reader.fixed32(); + case reflection_info_1.ScalarType.BYTES: + return reader.bytes(); + case reflection_info_1.ScalarType.UINT32: + return reader.uint32(); + case reflection_info_1.ScalarType.SFIXED32: + return reader.sfixed32(); + case reflection_info_1.ScalarType.SFIXED64: + return reflection_long_convert_1.reflectionLongConvert(reader.sfixed64(), longType); + case reflection_info_1.ScalarType.SINT32: + return reader.sint32(); + case reflection_info_1.ScalarType.SINT64: + return reflection_long_convert_1.reflectionLongConvert(reader.sint64(), longType); } - this.buf.push(value); - return this; } - /** - * Write a `int32` value, a signed 32 bit varint. 
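// The uint32() loop above emits base-128 varints, low 7 bits first, with the
// top bit marking continuation. A standalone sketch (hypothetical helper,
// mirroring the inlined loop):
function encodeVarint32(value) {
    const out = [];
    while (value > 0x7f) {
        out.push((value & 0x7f) | 0x80); // 7 payload bits + continuation bit
        value = value >>> 7;
    }
    out.push(value);
    return out;
}
console.assert(encodeVarint32(300).join(",") === [0xac, 0x02].join(","));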
- */ - int32(value) { - assert_1.assertInt32(value); - goog_varint_1.varint32write(value, this.buf); - return this; - } - /** - * Write a `bool` value, a variant. - */ - bool(value) { - this.buf.push(value ? 1 : 0); - return this; - } - /** - * Write a `bytes` value, length-delimited arbitrary data. - */ - bytes(value) { - this.uint32(value.byteLength); // write length of chunk as varint - return this.raw(value); - } - /** - * Write a `string` value, length-delimited data converted to UTF-8 text. - */ - string(value) { - let chunk = this.textEncoder.encode(value); - this.uint32(chunk.byteLength); // write length of chunk as varint - return this.raw(chunk); - } - /** - * Write a `float` value, 32-bit floating point number. - */ - float(value) { - assert_1.assertFloat32(value); - let chunk = new Uint8Array(4); - new DataView(chunk.buffer).setFloat32(0, value, true); - return this.raw(chunk); - } - /** - * Write a `double` value, a 64-bit floating point number. - */ - double(value) { - let chunk = new Uint8Array(8); - new DataView(chunk.buffer).setFloat64(0, value, true); - return this.raw(chunk); - } - /** - * Write a `fixed32` value, an unsigned, fixed-length 32-bit integer. - */ - fixed32(value) { - assert_1.assertUInt32(value); - let chunk = new Uint8Array(4); - new DataView(chunk.buffer).setUint32(0, value, true); - return this.raw(chunk); +} +exports.ReflectionBinaryReader = ReflectionBinaryReader; + + +/***/ }), + +/***/ 39224: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ReflectionBinaryWriter = void 0; +const binary_format_contract_1 = __nccwpck_require__(46158); +const reflection_info_1 = __nccwpck_require__(49354); +const assert_1 = __nccwpck_require__(31589); +const pb_long_1 = __nccwpck_require__(9569); +/** + * Writes proto3 messages in binary format using reflection information. + * + * https://developers.google.com/protocol-buffers/docs/encoding + */ +class ReflectionBinaryWriter { + constructor(info) { + this.info = info; } - /** - * Write a `sfixed32` value, a signed, fixed-length 32-bit integer. - */ - sfixed32(value) { - assert_1.assertInt32(value); - let chunk = new Uint8Array(4); - new DataView(chunk.buffer).setInt32(0, value, true); - return this.raw(chunk); + prepare() { + if (!this.fields) { + const fieldsInput = this.info.fields ? this.info.fields.concat() : []; + this.fields = fieldsInput.sort((a, b) => a.no - b.no); + } } /** - * Write a `sint32` value, a signed, zigzag-encoded 32-bit varint. + * Writes the message to binary format. */ - sint32(value) { - assert_1.assertInt32(value); - // zigzag encode - value = ((value << 1) ^ (value >> 31)) >>> 0; - goog_varint_1.varint32write(value, this.buf); - return this; + write(message, writer, options) { + this.prepare(); + for (const field of this.fields) { + let value, // this will be our field value, whether it is member of a oneof or not + emitDefault, // whether we emit the default value (only true for oneof members) + repeated = field.repeat, localName = field.localName; + // handle oneof ADT + if (field.oneof) { + const group = message[field.oneof]; + if (group.oneofKind !== localName) + continue; // if field is not selected, skip + value = group[localName]; + emitDefault = true; + } + else { + value = message[localName]; + emitDefault = false; + } + // we have handled oneof above. we just have to honor `emitDefault`. 
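// (Illustration of the rule above: for `oneof result { int32 code = 1; }`,
//  a selected `code: 0` is still serialized because oneof members set
//  emitDefault = true; a plain non-optional `int32 code = 1` holding 0 is
//  omitted, per proto3 default-value semantics.)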
+ switch (field.kind) { + case "scalar": + case "enum": + let T = field.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.T; + if (repeated) { + assert_1.assert(Array.isArray(value)); + if (repeated == reflection_info_1.RepeatType.PACKED) + this.packed(writer, T, field.no, value); + else + for (const item of value) + this.scalar(writer, T, field.no, item, true); + } + else if (value === undefined) + assert_1.assert(field.opt); + else + this.scalar(writer, T, field.no, value, emitDefault || field.opt); + break; + case "message": + if (repeated) { + assert_1.assert(Array.isArray(value)); + for (const item of value) + this.message(writer, options, field.T(), field.no, item); + } + else { + this.message(writer, options, field.T(), field.no, value); + } + break; + case "map": + assert_1.assert(typeof value == 'object' && value !== null); + for (const [key, val] of Object.entries(value)) + this.mapEntry(writer, options, field, key, val); + break; + } + } + let u = options.writeUnknownFields; + if (u !== false) + (u === true ? binary_format_contract_1.UnknownFieldHandler.onWrite : u)(this.info.typeName, message, writer); } - /** - * Write a `fixed64` value, a signed, fixed-length 64-bit integer. - */ - sfixed64(value) { - let chunk = new Uint8Array(8); - let view = new DataView(chunk.buffer); - let long = pb_long_1.PbLong.from(value); - view.setInt32(0, long.lo, true); - view.setInt32(4, long.hi, true); - return this.raw(chunk); + mapEntry(writer, options, field, key, value) { + writer.tag(field.no, binary_format_contract_1.WireType.LengthDelimited); + writer.fork(); + // javascript only allows number or string for object properties + // we convert from our representation to the protobuf type + let keyValue = key; + switch (field.K) { + case reflection_info_1.ScalarType.INT32: + case reflection_info_1.ScalarType.FIXED32: + case reflection_info_1.ScalarType.UINT32: + case reflection_info_1.ScalarType.SFIXED32: + case reflection_info_1.ScalarType.SINT32: + keyValue = Number.parseInt(key); + break; + case reflection_info_1.ScalarType.BOOL: + assert_1.assert(key == 'true' || key == 'false'); + keyValue = key == 'true'; + break; + } + // write key, expecting key field number = 1 + this.scalar(writer, field.K, 1, keyValue, true); + // write value, expecting value field number = 2 + switch (field.V.kind) { + case 'scalar': + this.scalar(writer, field.V.T, 2, value, true); + break; + case 'enum': + this.scalar(writer, reflection_info_1.ScalarType.INT32, 2, value, true); + break; + case 'message': + this.message(writer, options, field.V.T(), 2, value); + break; + } + writer.join(); } - /** - * Write a `fixed64` value, an unsigned, fixed-length 64 bit integer. - */ - fixed64(value) { - let chunk = new Uint8Array(8); - let view = new DataView(chunk.buffer); - let long = pb_long_1.PbULong.from(value); - view.setInt32(0, long.lo, true); - view.setInt32(4, long.hi, true); - return this.raw(chunk); + message(writer, options, handler, fieldNo, value) { + if (value === undefined) + return; + handler.internalBinaryWrite(value, writer.tag(fieldNo, binary_format_contract_1.WireType.LengthDelimited).fork(), options); + writer.join(); } /** - * Write a `int64` value, a signed 64-bit varint. + * Write a single scalar value. 
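// Wire layout produced by packed() below — one tag, one length, then the
// bare values. For `repeated int32 f = 4` holding [3, 270, 86942] (the
// example from the protobuf encoding guide):
//   0x22             tag: field 4, wire type 2 (length-delimited)
//   0x06             payload length: 6 bytes
//   0x03             3
//   0x8E 0x02        270
//   0x9E 0xA7 0x05   86942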
*/ - int64(value) { - let long = pb_long_1.PbLong.from(value); - goog_varint_1.varint64write(long.lo, long.hi, this.buf); - return this; + scalar(writer, type, fieldNo, value, emitDefault) { + let [wireType, method, isDefault] = this.scalarInfo(type, value); + if (!isDefault || emitDefault) { + writer.tag(fieldNo, wireType); + writer[method](value); + } } /** - * Write a `sint64` value, a signed, zig-zag-encoded 64-bit varint. + * Write an array of scalar values in packed format. */ - sint64(value) { - let long = pb_long_1.PbLong.from(value), - // zigzag encode - sign = long.hi >> 31, lo = (long.lo << 1) ^ sign, hi = ((long.hi << 1) | (long.lo >>> 31)) ^ sign; - goog_varint_1.varint64write(lo, hi, this.buf); - return this; + packed(writer, type, fieldNo, value) { + if (!value.length) + return; + assert_1.assert(type !== reflection_info_1.ScalarType.BYTES && type !== reflection_info_1.ScalarType.STRING); + // write tag + writer.tag(fieldNo, binary_format_contract_1.WireType.LengthDelimited); + // begin length-delimited + writer.fork(); + // write values without tags + let [, method,] = this.scalarInfo(type); + for (let i = 0; i < value.length; i++) + writer[method](value[i]); + // end length delimited + writer.join(); } /** - * Write a `uint64` value, an unsigned 64-bit varint. + * Get information for writing a scalar value. + * + * Returns tuple: + * [0]: appropriate WireType + * [1]: name of the appropriate method of IBinaryWriter + * [2]: whether the given value is a default value + * + * If argument `value` is omitted, [2] is always false. */ - uint64(value) { - let long = pb_long_1.PbULong.from(value); - goog_varint_1.varint64write(long.lo, long.hi, this.buf); - return this; + scalarInfo(type, value) { + let t = binary_format_contract_1.WireType.Varint; + let m; + let i = value === undefined; + let d = value === 0; + switch (type) { + case reflection_info_1.ScalarType.INT32: + m = "int32"; + break; + case reflection_info_1.ScalarType.STRING: + d = i || !value.length; + t = binary_format_contract_1.WireType.LengthDelimited; + m = "string"; + break; + case reflection_info_1.ScalarType.BOOL: + d = value === false; + m = "bool"; + break; + case reflection_info_1.ScalarType.UINT32: + m = "uint32"; + break; + case reflection_info_1.ScalarType.DOUBLE: + t = binary_format_contract_1.WireType.Bit64; + m = "double"; + break; + case reflection_info_1.ScalarType.FLOAT: + t = binary_format_contract_1.WireType.Bit32; + m = "float"; + break; + case reflection_info_1.ScalarType.INT64: + d = i || pb_long_1.PbLong.from(value).isZero(); + m = "int64"; + break; + case reflection_info_1.ScalarType.UINT64: + d = i || pb_long_1.PbULong.from(value).isZero(); + m = "uint64"; + break; + case reflection_info_1.ScalarType.FIXED64: + d = i || pb_long_1.PbULong.from(value).isZero(); + t = binary_format_contract_1.WireType.Bit64; + m = "fixed64"; + break; + case reflection_info_1.ScalarType.BYTES: + d = i || !value.byteLength; + t = binary_format_contract_1.WireType.LengthDelimited; + m = "bytes"; + break; + case reflection_info_1.ScalarType.FIXED32: + t = binary_format_contract_1.WireType.Bit32; + m = "fixed32"; + break; + case reflection_info_1.ScalarType.SFIXED32: + t = binary_format_contract_1.WireType.Bit32; + m = "sfixed32"; + break; + case reflection_info_1.ScalarType.SFIXED64: + d = i || pb_long_1.PbLong.from(value).isZero(); + t = binary_format_contract_1.WireType.Bit64; + m = "sfixed64"; + break; + case reflection_info_1.ScalarType.SINT32: + m = "sint32"; + break; + case 
reflection_info_1.ScalarType.SINT64: + d = i || pb_long_1.PbLong.from(value).isZero(); + m = "sint64"; + break; + } + return [t, m, i || d]; } } -exports.BinaryWriter = BinaryWriter; +exports.ReflectionBinaryWriter = ReflectionBinaryWriter; /***/ }), -/***/ 12923: -/***/ ((__unused_webpack_module, exports) => { +/***/ 3051: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.listEnumNumbers = exports.listEnumNames = exports.listEnumValues = exports.isEnumObject = void 0; -/** - * Is this a lookup object generated by Typescript, for a Typescript enum - * generated by protobuf-ts? - * - * - No `const enum` (enum must not be inlined, we need reverse mapping). - * - No string enum (we need int32 for protobuf). - * - Must have a value for 0 (otherwise, we would need to support custom default values). - */ -function isEnumObject(arg) { - if (typeof arg != 'object' || arg === null) { - return false; - } - if (!arg.hasOwnProperty(0)) { - return false; - } - for (let k of Object.keys(arg)) { - let num = parseInt(k); - if (!Number.isNaN(num)) { - // is there a name for the number? - let nam = arg[num]; - if (nam === undefined) - return false; - // does the name resolve back to the number? - if (arg[nam] !== num) - return false; - } - else { - // is there a number for the name? - let num = arg[k]; - if (num === undefined) - return false; - // is it a string enum? - if (typeof num !== 'number') - return false; - // do we know the number? - if (arg[num] === undefined) - return false; - } - } - return true; -} -exports.isEnumObject = isEnumObject; -/** - * Lists all values of a Typescript enum, as an array of objects with a "name" - * property and a "number" property. - * - * Note that it is possible that a number appears more than once, because it is - * possible to have aliases in an enum. - * - * Throws if the enum does not adhere to the rules of enums generated by - * protobuf-ts. See `isEnumObject()`. - */ -function listEnumValues(enumObject) { - if (!isEnumObject(enumObject)) - throw new Error("not a typescript enum object"); - let values = []; - for (let [name, number] of Object.entries(enumObject)) - if (typeof number == "number") - values.push({ name, number }); - return values; -} -exports.listEnumValues = listEnumValues; +exports.containsMessageType = void 0; +const message_type_contract_1 = __nccwpck_require__(28185); /** - * Lists the names of a Typescript enum. + * Check if the provided object is a proto message. * - * Throws if the enum does not adhere to the rules of enums generated by - * protobuf-ts. See `isEnumObject()`. + * Note that this is an experimental feature - it is here to stay, but + * implementation details may change without notice. */ -function listEnumNames(enumObject) { - return listEnumValues(enumObject).map(val => val.name); +function containsMessageType(msg) { + return msg[message_type_contract_1.MESSAGE_TYPE] != null; } -exports.listEnumNames = listEnumNames; +exports.containsMessageType = containsMessageType; + + +/***/ }), + +/***/ 93844: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.reflectionCreate = void 0; +const reflection_scalar_default_1 = __nccwpck_require__(40809); +const message_type_contract_1 = __nccwpck_require__(28185); /** - * Lists the numbers of a Typescript enum. 
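// Shape these helpers expect — a TypeScript enum compiled with its reverse
// mapping (sketch; `Role` is a made-up enum):
//   enum Role { USER = 0, ADMIN = 1 }
// compiles to { 0: "USER", 1: "ADMIN", USER: 0, ADMIN: 1 }, so that:
//   listEnumValues(Role)  -> [{ name: "USER", number: 0 }, { name: "ADMIN", number: 1 }]
//   listEnumNames(Role)   -> ["USER", "ADMIN"]
//   listEnumNumbers(Role) -> [0, 1]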
- * - * Throws if the enum does not adhere to the rules of enums generated by - * protobuf-ts. See `isEnumObject()`. + * Creates an instance of the generic message, using the field + * information. */ -function listEnumNumbers(enumObject) { - return listEnumValues(enumObject) - .map(val => val.number) - .filter((num, index, arr) => arr.indexOf(num) == index); +function reflectionCreate(type) { + /** + * This ternary can be removed in the next major version. + * The `Object.create()` code path utilizes a new `messagePrototype` + * property on the `IMessageType` which has this same `MESSAGE_TYPE` + * non-enumerable property on it. Doing it this way means that we only + * pay the cost of `Object.defineProperty()` once per `IMessageType` + * class of once per "instance". The falsy code path is only provided + * for backwards compatibility in cases where the runtime library is + * updated without also updating the generated code. + */ + const msg = type.messagePrototype + ? Object.create(type.messagePrototype) + : Object.defineProperty({}, message_type_contract_1.MESSAGE_TYPE, { value: type }); + for (let field of type.fields) { + let name = field.localName; + if (field.opt) + continue; + if (field.oneof) + msg[field.oneof] = { oneofKind: undefined }; + else if (field.repeat) + msg[name] = []; + else + switch (field.kind) { + case "scalar": + msg[name] = reflection_scalar_default_1.reflectionScalarDefault(field.T, field.L); + break; + case "enum": + // we require 0 to be default value for all enums + msg[name] = 0; + break; + case "map": + msg[name] = {}; + break; + } + } + return msg; } -exports.listEnumNumbers = listEnumNumbers; +exports.reflectionCreate = reflectionCreate; /***/ }), -/***/ 58378: -/***/ ((__unused_webpack_module, exports) => { +/***/ 819: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -// Copyright 2008 Google Inc. All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
-// -// Code generated by the Protocol Buffer compiler is owned by the owner -// of the input file used when generating it. This code is not -// standalone and requires a support library to be linked with it. This -// support library is itself covered by the above license. Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.varint32read = exports.varint32write = exports.int64toString = exports.int64fromString = exports.varint64write = exports.varint64read = void 0; +exports.reflectionEquals = void 0; +const reflection_info_1 = __nccwpck_require__(49354); /** - * Read a 64 bit varint as two JS numbers. - * - * Returns tuple: - * [0]: low bits - * [0]: high bits - * - * Copyright 2008 Google Inc. All rights reserved. - * - * See https://github.com/protocolbuffers/protobuf/blob/8a71927d74a4ce34efe2d8769fda198f52d20d12/js/experimental/runtime/kernel/buffer_decoder.js#L175 - */ -function varint64read() { - let lowBits = 0; - let highBits = 0; - for (let shift = 0; shift < 28; shift += 7) { - let b = this.buf[this.pos++]; - lowBits |= (b & 0x7F) << shift; - if ((b & 0x80) == 0) { - this.assertBounds(); - return [lowBits, highBits]; - } - } - let middleByte = this.buf[this.pos++]; - // last four bits of the first 32 bit number - lowBits |= (middleByte & 0x0F) << 28; - // 3 upper bits are part of the next 32 bit number - highBits = (middleByte & 0x70) >> 4; - if ((middleByte & 0x80) == 0) { - this.assertBounds(); - return [lowBits, highBits]; - } - for (let shift = 3; shift <= 31; shift += 7) { - let b = this.buf[this.pos++]; - highBits |= (b & 0x7F) << shift; - if ((b & 0x80) == 0) { - this.assertBounds(); - return [lowBits, highBits]; + * Determines whether two message of the same type have the same field values. + * Checks for deep equality, traversing repeated fields, oneof groups, maps + * and messages recursively. + * Will also return true if both messages are `undefined`. + */ +function reflectionEquals(info, a, b) { + if (a === b) + return true; + if (!a || !b) + return false; + for (let field of info.fields) { + let localName = field.localName; + let val_a = field.oneof ? a[field.oneof][localName] : a[localName]; + let val_b = field.oneof ? b[field.oneof][localName] : b[localName]; + switch (field.kind) { + case "enum": + case "scalar": + let t = field.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.T; + if (!(field.repeat + ? repeatedPrimitiveEq(t, val_a, val_b) + : primitiveEq(t, val_a, val_b))) + return false; + break; + case "map": + if (!(field.V.kind == "message" + ? repeatedMsgEq(field.V.T(), objectValues(val_a), objectValues(val_b)) + : repeatedPrimitiveEq(field.V.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.V.T, objectValues(val_a), objectValues(val_b)))) + return false; + break; + case "message": + let T = field.T(); + if (!(field.repeat + ? 
repeatedMsgEq(T, val_a, val_b) + : T.equals(val_a, val_b))) + return false; + break; } } - throw new Error('invalid varint'); + return true; } -exports.varint64read = varint64read; +exports.reflectionEquals = reflectionEquals; +const objectValues = Object.values; +function primitiveEq(type, a, b) { + if (a === b) + return true; + if (type !== reflection_info_1.ScalarType.BYTES) + return false; + let ba = a; + let bb = b; + if (ba.length !== bb.length) + return false; + for (let i = 0; i < ba.length; i++) + if (ba[i] != bb[i]) + return false; + return true; +} +function repeatedPrimitiveEq(type, a, b) { + if (a.length !== b.length) + return false; + for (let i = 0; i < a.length; i++) + if (!primitiveEq(type, a[i], b[i])) + return false; + return true; +} +function repeatedMsgEq(type, a, b) { + if (a.length !== b.length) + return false; + for (let i = 0; i < a.length; i++) + if (!type.equals(a[i], b[i])) + return false; + return true; +} + + +/***/ }), + +/***/ 49354: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.readMessageOption = exports.readFieldOption = exports.readFieldOptions = exports.normalizeFieldInfo = exports.RepeatType = exports.LongType = exports.ScalarType = void 0; +const lower_camel_case_1 = __nccwpck_require__(69108); /** - * Write a 64 bit varint, given as two JS numbers, to the given bytes array. - * - * Copyright 2008 Google Inc. All rights reserved. - * - * See https://github.com/protocolbuffers/protobuf/blob/8a71927d74a4ce34efe2d8769fda198f52d20d12/js/experimental/runtime/kernel/writer.js#L344 + * Scalar value types. This is a subset of field types declared by protobuf + * enum google.protobuf.FieldDescriptorProto.Type The types GROUP and MESSAGE + * are omitted, but the numerical values are identical. */ -function varint64write(lo, hi, bytes) { - for (let i = 0; i < 28; i = i + 7) { - const shift = lo >>> i; - const hasNext = !((shift >>> 7) == 0 && hi == 0); - const byte = (hasNext ? shift | 0x80 : shift) & 0xFF; - bytes.push(byte); - if (!hasNext) { - return; - } - } - const splitBits = ((lo >>> 28) & 0x0F) | ((hi & 0x07) << 4); - const hasMoreBits = !((hi >> 3) == 0); - bytes.push((hasMoreBits ? splitBits | 0x80 : splitBits) & 0xFF); - if (!hasMoreBits) { - return; - } - for (let i = 3; i < 31; i = i + 7) { - const shift = hi >>> i; - const hasNext = !((shift >>> 7) == 0); - const byte = (hasNext ? shift | 0x80 : shift) & 0xFF; - bytes.push(byte); - if (!hasNext) { - return; - } - } - bytes.push((hi >>> 31) & 0x01); -} -exports.varint64write = varint64write; -// constants for binary math -const TWO_PWR_32_DBL = (1 << 16) * (1 << 16); +var ScalarType; +(function (ScalarType) { + // 0 is reserved for errors. + // Order is weird for historical reasons. + ScalarType[ScalarType["DOUBLE"] = 1] = "DOUBLE"; + ScalarType[ScalarType["FLOAT"] = 2] = "FLOAT"; + // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + // negative values are likely. + ScalarType[ScalarType["INT64"] = 3] = "INT64"; + ScalarType[ScalarType["UINT64"] = 4] = "UINT64"; + // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + // negative values are likely. + ScalarType[ScalarType["INT32"] = 5] = "INT32"; + ScalarType[ScalarType["FIXED64"] = 6] = "FIXED64"; + ScalarType[ScalarType["FIXED32"] = 7] = "FIXED32"; + ScalarType[ScalarType["BOOL"] = 8] = "BOOL"; + ScalarType[ScalarType["STRING"] = 9] = "STRING"; + // Tag-delimited aggregate. 
+ // Group type is deprecated and not supported in proto3. However, Proto3 + // implementations should still be able to parse the group wire format and + // treat group fields as unknown fields. + // TYPE_GROUP = 10, + // TYPE_MESSAGE = 11, // Length-delimited aggregate. + // New in version 2. + ScalarType[ScalarType["BYTES"] = 12] = "BYTES"; + ScalarType[ScalarType["UINT32"] = 13] = "UINT32"; + // TYPE_ENUM = 14, + ScalarType[ScalarType["SFIXED32"] = 15] = "SFIXED32"; + ScalarType[ScalarType["SFIXED64"] = 16] = "SFIXED64"; + ScalarType[ScalarType["SINT32"] = 17] = "SINT32"; + ScalarType[ScalarType["SINT64"] = 18] = "SINT64"; +})(ScalarType = exports.ScalarType || (exports.ScalarType = {})); /** - * Parse decimal string of 64 bit integer value as two JS numbers. + * JavaScript representation of 64 bit integral types. Equivalent to the + * field option "jstype". * - * Returns tuple: - * [0]: minus sign? - * [1]: low bits - * [2]: high bits + * By default, protobuf-ts represents 64 bit types as `bigint`. * - * Copyright 2008 Google Inc. - */ -function int64fromString(dec) { - // Check for minus sign. - let minus = dec[0] == '-'; - if (minus) - dec = dec.slice(1); - // Work 6 decimal digits at a time, acting like we're converting base 1e6 - // digits to binary. This is safe to do with floating point math because - // Number.isSafeInteger(ALL_32_BITS * 1e6) == true. - const base = 1e6; - let lowBits = 0; - let highBits = 0; - function add1e6digit(begin, end) { - // Note: Number('') is 0. - const digit1e6 = Number(dec.slice(begin, end)); - highBits *= base; - lowBits = lowBits * base + digit1e6; - // Carry bits from lowBits to highBits - if (lowBits >= TWO_PWR_32_DBL) { - highBits = highBits + ((lowBits / TWO_PWR_32_DBL) | 0); - lowBits = lowBits % TWO_PWR_32_DBL; - } - } - add1e6digit(-24, -18); - add1e6digit(-18, -12); - add1e6digit(-12, -6); - add1e6digit(-6); - return [minus, lowBits, highBits]; -} -exports.int64fromString = int64fromString; -/** - * Format 64 bit integer value (as two JS numbers) to decimal string. + * You can change the default behaviour by enabling the plugin parameter + * `long_type_string`, which will represent 64 bit types as `string`. * - * Copyright 2008 Google Inc. + * Alternatively, you can change the behaviour for individual fields + * with the field option "jstype": + * + * ```protobuf + * uint64 my_field = 1 [jstype = JS_STRING]; + * uint64 other_field = 2 [jstype = JS_NUMBER]; + * ``` */ -function int64toString(bitsLow, bitsHigh) { - // Skip the expensive conversion if the number is small enough to use the - // built-in conversions. - if ((bitsHigh >>> 0) <= 0x1FFFFF) { - return '' + (TWO_PWR_32_DBL * bitsHigh + (bitsLow >>> 0)); - } - // What this code is doing is essentially converting the input number from - // base-2 to base-1e7, which allows us to represent the 64-bit range with - // only 3 (very large) digits. Those digits are then trivial to convert to - // a base-10 string. - // The magic numbers used here are - - // 2^24 = 16777216 = (1,6777216) in base-1e7. - // 2^48 = 281474976710656 = (2,8147497,6710656) in base-1e7. - // Split 32:32 representation into 16:24:24 representation so our - // intermediate digits don't overflow. - let low = bitsLow & 0xFFFFFF; - let mid = (((bitsLow >>> 24) | (bitsHigh << 8)) >>> 0) & 0xFFFFFF; - let high = (bitsHigh >> 16) & 0xFFFF; - // Assemble our three base-1e7 digits, ignoring carries. 
The maximum
-    // value in a digit at this step is representable as a 48-bit integer, which
-    // can be stored in a 64-bit floating point number.
-    let digitA = low + (mid * 6777216) + (high * 6710656);
-    let digitB = mid + (high * 8147497);
-    let digitC = (high * 2);
-    // Apply carries from A to B and from B to C.
-    let base = 10000000;
-    if (digitA >= base) {
-        digitB += Math.floor(digitA / base);
-        digitA %= base;
-    }
-    if (digitB >= base) {
-        digitC += Math.floor(digitB / base);
-        digitB %= base;
-    }
-    // Convert base-1e7 digits to base-10, with optional leading zeroes.
-    function decimalFrom1e7(digit1e7, needLeadingZeros) {
-        let partial = digit1e7 ? String(digit1e7) : '';
-        if (needLeadingZeros) {
-            return '0000000'.slice(partial.length) + partial;
-        }
-        return partial;
-    }
-    return decimalFrom1e7(digitC, /*needLeadingZeros=*/ 0) +
-        decimalFrom1e7(digitB, /*needLeadingZeros=*/ digitC) +
-        // If the final 1e7 digit didn't need leading zeros, we would have
-        // returned via the trivial code path at the top.
-        decimalFrom1e7(digitA, /*needLeadingZeros=*/ 1);
-}
-exports.int64toString = int64toString;
+var LongType;
+(function (LongType) {
+    /**
+     * Use JavaScript `bigint`.
+     *
+     * Field option `[jstype = JS_NORMAL]`.
+     */
+    LongType[LongType["BIGINT"] = 0] = "BIGINT";
+    /**
+     * Use JavaScript `string`.
+     *
+     * Field option `[jstype = JS_STRING]`.
+     */
+    LongType[LongType["STRING"] = 1] = "STRING";
+    /**
+     * Use JavaScript `number`.
+     *
+     * Large values will lose precision.
+     *
+     * Field option `[jstype = JS_NUMBER]`.
+     */
+    LongType[LongType["NUMBER"] = 2] = "NUMBER";
+})(LongType = exports.LongType || (exports.LongType = {}));
 /**
+ * Protobuf 2.1.0 introduced packed repeated fields.
+ * Setting the field option `[packed = true]` enables packing.
+ *
+ * In proto3, all repeated fields are packed by default.
+ * Setting the field option `[packed = false]` disables packing.
+ *
+ * Packed repeated fields are encoded with a single tag,
+ * then a length-delimiter, then the element values.
+ *
+ * Unpacked repeated fields are encoded with a tag and
+ * value for each element.
+ *
+ * `bytes` and `string` cannot be packed.
+ */
+var RepeatType;
+(function (RepeatType) {
+    /**
+     * The field is not repeated.
+     */
+    RepeatType[RepeatType["NO"] = 0] = "NO";
+    /**
+     * The field is repeated and should be packed.
+     * Invalid for `bytes` and `string`, they cannot be packed.
+     */
+    RepeatType[RepeatType["PACKED"] = 1] = "PACKED";
+    /**
+     * The field is repeated but should not be packed.
+     * The only valid repeat type for repeated `bytes` and `string`.
+     */
+    RepeatType[RepeatType["UNPACKED"] = 2] = "UNPACKED";
+})(RepeatType = exports.RepeatType || (exports.RepeatType = {}));
+/**
+ * Turns PartialFieldInfo into FieldInfo.
+ */
+function normalizeFieldInfo(field) {
+    var _a, _b, _c, _d;
+    field.localName = (_a = field.localName) !== null && _a !== void 0 ?
_a : lower_camel_case_1.lowerCamelCase(field.name); + field.jsonName = (_b = field.jsonName) !== null && _b !== void 0 ? _b : lower_camel_case_1.lowerCamelCase(field.name); + field.repeat = (_c = field.repeat) !== null && _c !== void 0 ? _c : RepeatType.NO; + field.opt = (_d = field.opt) !== null && _d !== void 0 ? _d : (field.repeat ? false : field.oneof ? false : field.kind == "message"); + return field; } -exports.varint32write = varint32write; +exports.normalizeFieldInfo = normalizeFieldInfo; /** - * Read an unsigned 32 bit varint. + * Read custom field options from a generated message type. * - * See https://github.com/protocolbuffers/protobuf/blob/8a71927d74a4ce34efe2d8769fda198f52d20d12/js/experimental/runtime/kernel/buffer_decoder.js#L220 + * @deprecated use readFieldOption() */ -function varint32read() { - let b = this.buf[this.pos++]; - let result = b & 0x7F; - if ((b & 0x80) == 0) { - this.assertBounds(); - return result; - } - b = this.buf[this.pos++]; - result |= (b & 0x7F) << 7; - if ((b & 0x80) == 0) { - this.assertBounds(); - return result; +function readFieldOptions(messageType, fieldName, extensionName, extensionType) { + var _a; + const options = (_a = messageType.fields.find((m, i) => m.localName == fieldName || i == fieldName)) === null || _a === void 0 ? void 0 : _a.options; + return options && options[extensionName] ? extensionType.fromJson(options[extensionName]) : undefined; +} +exports.readFieldOptions = readFieldOptions; +function readFieldOption(messageType, fieldName, extensionName, extensionType) { + var _a; + const options = (_a = messageType.fields.find((m, i) => m.localName == fieldName || i == fieldName)) === null || _a === void 0 ? void 0 : _a.options; + if (!options) { + return undefined; } - b = this.buf[this.pos++]; - result |= (b & 0x7F) << 14; - if ((b & 0x80) == 0) { - this.assertBounds(); - return result; + const optionVal = options[extensionName]; + if (optionVal === undefined) { + return optionVal; } - b = this.buf[this.pos++]; - result |= (b & 0x7F) << 21; - if ((b & 0x80) == 0) { - this.assertBounds(); - return result; + return extensionType ? extensionType.fromJson(optionVal) : optionVal; +} +exports.readFieldOption = readFieldOption; +function readMessageOption(messageType, extensionName, extensionType) { + const options = messageType.options; + const optionVal = options[extensionName]; + if (optionVal === undefined) { + return optionVal; } - // Extract only last 4 bits - b = this.buf[this.pos++]; - result |= (b & 0x0F) << 28; - for (let readBytes = 5; ((b & 0x80) !== 0) && readBytes < 10; readBytes++) - b = this.buf[this.pos++]; - if ((b & 0x80) != 0) - throw new Error('invalid varint'); - this.assertBounds(); - // Result can have 32 bits, convert it to unsigned - return result >>> 0; + return extensionType ? extensionType.fromJson(optionVal) : optionVal; } -exports.varint32read = varint32read; +exports.readMessageOption = readMessageOption; /***/ }), -/***/ 35435: +/***/ 81926: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; -// Public API of the protobuf-ts runtime. 
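// Usage sketch for the option readers above (the extension name and the
// message types are hypothetical; readFieldOption returns the raw JSON
// option value unless an extensionType is given to decode it):
//   readFieldOption(MyMessage, "myField", "my.custom.option")
//     -> JsonValue | undefined
//   readFieldOption(MyMessage, "myField", "my.custom.option", MyOptionType)
//     -> decoded MyOptionType message | undefined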
-// Note: we do not use `export * from ...` to help tree shakers, -// webpack verbose output hints that this should be useful -Object.defineProperty(exports, "__esModule", ({ value: true })); -// Convenience JSON typings and corresponding type guards -var json_typings_1 = __nccwpck_require__(26982); -Object.defineProperty(exports, "typeofJsonValue", ({ enumerable: true, get: function () { return json_typings_1.typeofJsonValue; } })); -Object.defineProperty(exports, "isJsonObject", ({ enumerable: true, get: function () { return json_typings_1.isJsonObject; } })); -// Base 64 encoding -var base64_1 = __nccwpck_require__(90943); -Object.defineProperty(exports, "base64decode", ({ enumerable: true, get: function () { return base64_1.base64decode; } })); -Object.defineProperty(exports, "base64encode", ({ enumerable: true, get: function () { return base64_1.base64encode; } })); -// UTF8 encoding -var protobufjs_utf8_1 = __nccwpck_require__(9910); -Object.defineProperty(exports, "utf8read", ({ enumerable: true, get: function () { return protobufjs_utf8_1.utf8read; } })); -// Binary format contracts, options for reading and writing, for example -var binary_format_contract_1 = __nccwpck_require__(24290); -Object.defineProperty(exports, "WireType", ({ enumerable: true, get: function () { return binary_format_contract_1.WireType; } })); -Object.defineProperty(exports, "mergeBinaryOptions", ({ enumerable: true, get: function () { return binary_format_contract_1.mergeBinaryOptions; } })); -Object.defineProperty(exports, "UnknownFieldHandler", ({ enumerable: true, get: function () { return binary_format_contract_1.UnknownFieldHandler; } })); -// Standard IBinaryReader implementation -var binary_reader_1 = __nccwpck_require__(15963); -Object.defineProperty(exports, "BinaryReader", ({ enumerable: true, get: function () { return binary_reader_1.BinaryReader; } })); -Object.defineProperty(exports, "binaryReadOptions", ({ enumerable: true, get: function () { return binary_reader_1.binaryReadOptions; } })); -// Standard IBinaryWriter implementation -var binary_writer_1 = __nccwpck_require__(11547); -Object.defineProperty(exports, "BinaryWriter", ({ enumerable: true, get: function () { return binary_writer_1.BinaryWriter; } })); -Object.defineProperty(exports, "binaryWriteOptions", ({ enumerable: true, get: function () { return binary_writer_1.binaryWriteOptions; } })); -// Int64 and UInt64 implementations required for the binary format -var pb_long_1 = __nccwpck_require__(19823); -Object.defineProperty(exports, "PbLong", ({ enumerable: true, get: function () { return pb_long_1.PbLong; } })); -Object.defineProperty(exports, "PbULong", ({ enumerable: true, get: function () { return pb_long_1.PbULong; } })); -// JSON format contracts, options for reading and writing, for example -var json_format_contract_1 = __nccwpck_require__(71426); -Object.defineProperty(exports, "jsonReadOptions", ({ enumerable: true, get: function () { return json_format_contract_1.jsonReadOptions; } })); -Object.defineProperty(exports, "jsonWriteOptions", ({ enumerable: true, get: function () { return json_format_contract_1.jsonWriteOptions; } })); -Object.defineProperty(exports, "mergeJsonOptions", ({ enumerable: true, get: function () { return json_format_contract_1.mergeJsonOptions; } })); -// Message type contract -var message_type_contract_1 = __nccwpck_require__(87554); -Object.defineProperty(exports, "MESSAGE_TYPE", ({ enumerable: true, get: function () { return message_type_contract_1.MESSAGE_TYPE; } })); -// Message type 
implementation via reflection -var message_type_1 = __nccwpck_require__(28923); -Object.defineProperty(exports, "MessageType", ({ enumerable: true, get: function () { return message_type_1.MessageType; } })); -// Reflection info, generated by the plugin, exposed to the user, used by reflection ops -var reflection_info_1 = __nccwpck_require__(48980); -Object.defineProperty(exports, "ScalarType", ({ enumerable: true, get: function () { return reflection_info_1.ScalarType; } })); -Object.defineProperty(exports, "LongType", ({ enumerable: true, get: function () { return reflection_info_1.LongType; } })); -Object.defineProperty(exports, "RepeatType", ({ enumerable: true, get: function () { return reflection_info_1.RepeatType; } })); -Object.defineProperty(exports, "normalizeFieldInfo", ({ enumerable: true, get: function () { return reflection_info_1.normalizeFieldInfo; } })); -Object.defineProperty(exports, "readFieldOptions", ({ enumerable: true, get: function () { return reflection_info_1.readFieldOptions; } })); -Object.defineProperty(exports, "readFieldOption", ({ enumerable: true, get: function () { return reflection_info_1.readFieldOption; } })); -Object.defineProperty(exports, "readMessageOption", ({ enumerable: true, get: function () { return reflection_info_1.readMessageOption; } })); -// Message operations via reflection -var reflection_type_check_1 = __nccwpck_require__(22526); -Object.defineProperty(exports, "ReflectionTypeCheck", ({ enumerable: true, get: function () { return reflection_type_check_1.ReflectionTypeCheck; } })); -var reflection_create_1 = __nccwpck_require__(86727); -Object.defineProperty(exports, "reflectionCreate", ({ enumerable: true, get: function () { return reflection_create_1.reflectionCreate; } })); -var reflection_scalar_default_1 = __nccwpck_require__(28441); -Object.defineProperty(exports, "reflectionScalarDefault", ({ enumerable: true, get: function () { return reflection_scalar_default_1.reflectionScalarDefault; } })); -var reflection_merge_partial_1 = __nccwpck_require__(57541); -Object.defineProperty(exports, "reflectionMergePartial", ({ enumerable: true, get: function () { return reflection_merge_partial_1.reflectionMergePartial; } })); -var reflection_equals_1 = __nccwpck_require__(19439); -Object.defineProperty(exports, "reflectionEquals", ({ enumerable: true, get: function () { return reflection_equals_1.reflectionEquals; } })); -var reflection_binary_reader_1 = __nccwpck_require__(46554); -Object.defineProperty(exports, "ReflectionBinaryReader", ({ enumerable: true, get: function () { return reflection_binary_reader_1.ReflectionBinaryReader; } })); -var reflection_binary_writer_1 = __nccwpck_require__(43632); -Object.defineProperty(exports, "ReflectionBinaryWriter", ({ enumerable: true, get: function () { return reflection_binary_writer_1.ReflectionBinaryWriter; } })); -var reflection_json_reader_1 = __nccwpck_require__(49661); -Object.defineProperty(exports, "ReflectionJsonReader", ({ enumerable: true, get: function () { return reflection_json_reader_1.ReflectionJsonReader; } })); -var reflection_json_writer_1 = __nccwpck_require__(94400); -Object.defineProperty(exports, "ReflectionJsonWriter", ({ enumerable: true, get: function () { return reflection_json_writer_1.ReflectionJsonWriter; } })); -var reflection_contains_message_type_1 = __nccwpck_require__(92609); -Object.defineProperty(exports, "containsMessageType", ({ enumerable: true, get: function () { return reflection_contains_message_type_1.containsMessageType; } })); -// Oneof helpers -var 
oneof_1 = __nccwpck_require__(52468); -Object.defineProperty(exports, "isOneofGroup", ({ enumerable: true, get: function () { return oneof_1.isOneofGroup; } })); -Object.defineProperty(exports, "setOneofValue", ({ enumerable: true, get: function () { return oneof_1.setOneofValue; } })); -Object.defineProperty(exports, "getOneofValue", ({ enumerable: true, get: function () { return oneof_1.getOneofValue; } })); -Object.defineProperty(exports, "clearOneofValue", ({ enumerable: true, get: function () { return oneof_1.clearOneofValue; } })); -Object.defineProperty(exports, "getSelectedOneofValue", ({ enumerable: true, get: function () { return oneof_1.getSelectedOneofValue; } })); -// Enum object type guard and reflection util, may be interesting to the user. -var enum_object_1 = __nccwpck_require__(12923); -Object.defineProperty(exports, "listEnumValues", ({ enumerable: true, get: function () { return enum_object_1.listEnumValues; } })); -Object.defineProperty(exports, "listEnumNames", ({ enumerable: true, get: function () { return enum_object_1.listEnumNames; } })); -Object.defineProperty(exports, "listEnumNumbers", ({ enumerable: true, get: function () { return enum_object_1.listEnumNumbers; } })); -Object.defineProperty(exports, "isEnumObject", ({ enumerable: true, get: function () { return enum_object_1.isEnumObject; } })); -// lowerCamelCase() is exported for plugin, rpc-runtime and other rpc packages -var lower_camel_case_1 = __nccwpck_require__(80821); -Object.defineProperty(exports, "lowerCamelCase", ({ enumerable: true, get: function () { return lower_camel_case_1.lowerCamelCase; } })); -// assertion functions are exported for plugin, may also be useful to user -var assert_1 = __nccwpck_require__(88501); -Object.defineProperty(exports, "assert", ({ enumerable: true, get: function () { return assert_1.assert; } })); -Object.defineProperty(exports, "assertNever", ({ enumerable: true, get: function () { return assert_1.assertNever; } })); -Object.defineProperty(exports, "assertInt32", ({ enumerable: true, get: function () { return assert_1.assertInt32; } })); -Object.defineProperty(exports, "assertUInt32", ({ enumerable: true, get: function () { return assert_1.assertUInt32; } })); -Object.defineProperty(exports, "assertFloat32", ({ enumerable: true, get: function () { return assert_1.assertFloat32; } })); - - -/***/ }), - -/***/ 71426: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.mergeJsonOptions = exports.jsonWriteOptions = exports.jsonReadOptions = void 0; -const defaultsWrite = { - emitDefaultValues: false, - enumAsInteger: false, - useProtoFieldName: false, - prettySpaces: 0, -}, defaultsRead = { - ignoreUnknownFields: false, -}; +exports.ReflectionJsonReader = void 0; +const json_typings_1 = __nccwpck_require__(66015); +const base64_1 = __nccwpck_require__(25081); +const reflection_info_1 = __nccwpck_require__(49354); +const pb_long_1 = __nccwpck_require__(9569); +const assert_1 = __nccwpck_require__(31589); +const reflection_long_convert_1 = __nccwpck_require__(98572); /** - * Make options for reading JSON data from partial options. - */ -function jsonReadOptions(options) { - return options ? Object.assign(Object.assign({}, defaultsRead), options) : defaultsRead; -} -exports.jsonReadOptions = jsonReadOptions; -/** - * Make options for writing JSON data from partial options. - */ -function jsonWriteOptions(options) { - return options ? 
Object.assign(Object.assign({}, defaultsWrite), options) : defaultsWrite; -} -exports.jsonWriteOptions = jsonWriteOptions; -/** - * Merges JSON write or read options. Later values override earlier values. Type registries are merged. - */ -function mergeJsonOptions(a, b) { - var _a, _b; - let c = Object.assign(Object.assign({}, a), b); - c.typeRegistry = [...((_a = a === null || a === void 0 ? void 0 : a.typeRegistry) !== null && _a !== void 0 ? _a : []), ...((_b = b === null || b === void 0 ? void 0 : b.typeRegistry) !== null && _b !== void 0 ? _b : [])]; - return c; -} -exports.mergeJsonOptions = mergeJsonOptions; - - -/***/ }), - -/***/ 26982: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.isJsonObject = exports.typeofJsonValue = void 0; -/** - * Get the type of a JSON value. - * Distinguishes between array, null and object. - */ -function typeofJsonValue(value) { - let t = typeof value; - if (t == "object") { - if (Array.isArray(value)) - return "array"; - if (value === null) - return "null"; - } - return t; -} -exports.typeofJsonValue = typeofJsonValue; -/** - * Is this a JSON object (instead of an array or null)? - */ -function isJsonObject(value) { - return value !== null && typeof value == "object" && !Array.isArray(value); -} -exports.isJsonObject = isJsonObject; - - -/***/ }), - -/***/ 80821: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.lowerCamelCase = void 0; -/** - * Converts snake_case to lowerCamelCase. + * Reads proto3 messages in canonical JSON format using reflection information. * - * Should behave like protoc: - * https://github.com/protocolbuffers/protobuf/blob/e8ae137c96444ea313485ed1118c5e43b2099cf1/src/google/protobuf/compiler/java/java_helpers.cc#L118 + * https://developers.google.com/protocol-buffers/docs/proto3#json */ -function lowerCamelCase(snakeCase) { - let capNext = false; - const sb = []; - for (let i = 0; i < snakeCase.length; i++) { - let next = snakeCase.charAt(i); - if (next == '_') { - capNext = true; +class ReflectionJsonReader { + constructor(info) { + this.info = info; + } + prepare() { + var _a; + if (this.fMap === undefined) { + this.fMap = {}; + const fieldsInput = (_a = this.info.fields) !== null && _a !== void 0 ? _a : []; + for (const field of fieldsInput) { + this.fMap[field.name] = field; + this.fMap[field.jsonName] = field; + this.fMap[field.localName] = field; + } } - else if (/\d/.test(next)) { - sb.push(next); - capNext = true; + } + // Cannot parse JSON for #. + assert(condition, fieldName, jsonValue) { + if (!condition) { + let what = json_typings_1.typeofJsonValue(jsonValue); + if (what == "number" || what == "boolean") + what = jsonValue.toString(); + throw new Error(`Cannot parse JSON ${what} for ${this.info.typeName}#${fieldName}`); } - else if (capNext) { - sb.push(next.toUpperCase()); - capNext = false; + } + /** + * Reads a message from canonical JSON format into the target message. + * + * Repeated fields are appended. Map entries are added, overwriting + * existing keys. + * + * If a message field is already present, it will be merged with the + * new data. 
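// (Because prepare() above registers each field under its proto name, its
//  lowerCamelCase jsonName and its localName, a field declared `foo_bar`
//  can be read from JSON as either {"foo_bar": ...} or {"fooBar": ...}.)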
+ */ + read(input, message, options) { + this.prepare(); + const oneofsHandled = []; + for (const [jsonKey, jsonValue] of Object.entries(input)) { + const field = this.fMap[jsonKey]; + if (!field) { + if (!options.ignoreUnknownFields) + throw new Error(`Found unknown field while reading ${this.info.typeName} from JSON format. JSON key: ${jsonKey}`); + continue; + } + const localName = field.localName; + // handle oneof ADT + let target; // this will be the target for the field value, whether it is member of a oneof or not + if (field.oneof) { + if (jsonValue === null && (field.kind !== 'enum' || field.T()[0] !== 'google.protobuf.NullValue')) { + continue; + } + // since json objects are unordered by specification, it is not possible to take the last of multiple oneofs + if (oneofsHandled.includes(field.oneof)) + throw new Error(`Multiple members of the oneof group "${field.oneof}" of ${this.info.typeName} are present in JSON.`); + oneofsHandled.push(field.oneof); + target = message[field.oneof] = { + oneofKind: localName + }; + } + else { + target = message; + } + // we have handled oneof above. we just have read the value into `target`. + if (field.kind == 'map') { + if (jsonValue === null) { + continue; + } + // check input + this.assert(json_typings_1.isJsonObject(jsonValue), field.name, jsonValue); + // our target to put map entries into + const fieldObj = target[localName]; + // read entries + for (const [jsonObjKey, jsonObjValue] of Object.entries(jsonValue)) { + this.assert(jsonObjValue !== null, field.name + " map value", null); + // read value + let val; + switch (field.V.kind) { + case "message": + val = field.V.T().internalJsonRead(jsonObjValue, options); + break; + case "enum": + val = this.enum(field.V.T(), jsonObjValue, field.name, options.ignoreUnknownFields); + if (val === false) + continue; + break; + case "scalar": + val = this.scalar(jsonObjValue, field.V.T, field.V.L, field.name); + break; + } + this.assert(val !== undefined, field.name + " map value", jsonObjValue); + // read key + let key = jsonObjKey; + if (field.K == reflection_info_1.ScalarType.BOOL) + key = key == "true" ? true : key == "false" ? 
false : key; + key = this.scalar(key, field.K, reflection_info_1.LongType.STRING, field.name).toString(); + fieldObj[key] = val; + } + } + else if (field.repeat) { + if (jsonValue === null) + continue; + // check input + this.assert(Array.isArray(jsonValue), field.name, jsonValue); + // our target to put array entries into + const fieldArr = target[localName]; + // read array entries + for (const jsonItem of jsonValue) { + this.assert(jsonItem !== null, field.name, null); + let val; + switch (field.kind) { + case "message": + val = field.T().internalJsonRead(jsonItem, options); + break; + case "enum": + val = this.enum(field.T(), jsonItem, field.name, options.ignoreUnknownFields); + if (val === false) + continue; + break; + case "scalar": + val = this.scalar(jsonItem, field.T, field.L, field.name); + break; + } + this.assert(val !== undefined, field.name, jsonValue); + fieldArr.push(val); + } + } + else { + switch (field.kind) { + case "message": + if (jsonValue === null && field.T().typeName != 'google.protobuf.Value') { + this.assert(field.oneof === undefined, field.name + " (oneof member)", null); + continue; + } + target[localName] = field.T().internalJsonRead(jsonValue, options, target[localName]); + break; + case "enum": + let val = this.enum(field.T(), jsonValue, field.name, options.ignoreUnknownFields); + if (val === false) + continue; + target[localName] = val; + break; + case "scalar": + target[localName] = this.scalar(jsonValue, field.T, field.L, field.name); + break; + } + } } - else if (i == 0) { - sb.push(next.toLowerCase()); + } + /** + * Returns `false` for unrecognized string representations. + * + * google.protobuf.NullValue accepts only JSON `null` (or the old `"NULL_VALUE"`). + */ + enum(type, json, fieldName, ignoreUnknownFields) { + if (type[0] == 'google.protobuf.NullValue') + assert_1.assert(json === null || json === "NULL_VALUE", `Unable to parse field ${this.info.typeName}#${fieldName}, enum ${type[0]} only accepts null.`); + if (json === null) + // we require 0 to be default value for all enums + return 0; + switch (typeof json) { + case "number": + assert_1.assert(Number.isInteger(json), `Unable to parse field ${this.info.typeName}#${fieldName}, enum can only be integral number, got ${json}.`); + return json; + case "string": + let localEnumName = json; + if (type[2] && json.substring(0, type[2].length) === type[2]) + // lookup without the shared prefix + localEnumName = json.substring(type[2].length); + let enumNumber = type[1][localEnumName]; + if (typeof enumNumber === 'undefined' && ignoreUnknownFields) { + return false; + } + assert_1.assert(typeof enumNumber == "number", `Unable to parse field ${this.info.typeName}#${fieldName}, enum ${type[0]} has no value for "${json}".`); + return enumNumber; } - else { - sb.push(next); + assert_1.assert(false, `Unable to parse field ${this.info.typeName}#${fieldName}, cannot parse enum value from ${typeof json}".`); + } + scalar(json, type, longType, fieldName) { + let e; + try { + switch (type) { + // float, double: JSON value will be a number or one of the special string values "NaN", "Infinity", and "-Infinity". + // Either numbers or strings are accepted. Exponent notation is also accepted. 
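// (Accepted inputs for the float/double branch below, per canonical JSON:
//  1.5, "1.5", "2.5e3", "NaN", "Infinity", "-Infinity"; rejected with an
//  error: "" — empty string — and " 1.5" — extra whitespace.)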
+ case reflection_info_1.ScalarType.DOUBLE: + case reflection_info_1.ScalarType.FLOAT: + if (json === null) + return .0; + if (json === "NaN") + return Number.NaN; + if (json === "Infinity") + return Number.POSITIVE_INFINITY; + if (json === "-Infinity") + return Number.NEGATIVE_INFINITY; + if (json === "") { + e = "empty string"; + break; + } + if (typeof json == "string" && json.trim().length !== json.length) { + e = "extra whitespace"; + break; + } + if (typeof json != "string" && typeof json != "number") { + break; + } + let float = Number(json); + if (Number.isNaN(float)) { + e = "not a number"; + break; + } + if (!Number.isFinite(float)) { + // infinity and -infinity are handled by string representation above, so this is an error + e = "too large or small"; + break; + } + if (type == reflection_info_1.ScalarType.FLOAT) + assert_1.assertFloat32(float); + return float; + // int32, fixed32, uint32: JSON value will be a decimal number. Either numbers or strings are accepted. + case reflection_info_1.ScalarType.INT32: + case reflection_info_1.ScalarType.FIXED32: + case reflection_info_1.ScalarType.SFIXED32: + case reflection_info_1.ScalarType.SINT32: + case reflection_info_1.ScalarType.UINT32: + if (json === null) + return 0; + let int32; + if (typeof json == "number") + int32 = json; + else if (json === "") + e = "empty string"; + else if (typeof json == "string") { + if (json.trim().length !== json.length) + e = "extra whitespace"; + else + int32 = Number(json); + } + if (int32 === undefined) + break; + if (type == reflection_info_1.ScalarType.UINT32) + assert_1.assertUInt32(int32); + else + assert_1.assertInt32(int32); + return int32; + // int64, fixed64, uint64: JSON value will be a decimal string. Either numbers or strings are accepted. + case reflection_info_1.ScalarType.INT64: + case reflection_info_1.ScalarType.SFIXED64: + case reflection_info_1.ScalarType.SINT64: + if (json === null) + return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbLong.ZERO, longType); + if (typeof json != "number" && typeof json != "string") + break; + return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbLong.from(json), longType); + case reflection_info_1.ScalarType.FIXED64: + case reflection_info_1.ScalarType.UINT64: + if (json === null) + return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbULong.ZERO, longType); + if (typeof json != "number" && typeof json != "string") + break; + return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbULong.from(json), longType); + // bool: + case reflection_info_1.ScalarType.BOOL: + if (json === null) + return false; + if (typeof json !== "boolean") + break; + return json; + // string: + case reflection_info_1.ScalarType.STRING: + if (json === null) + return ""; + if (typeof json !== "string") { + e = "not a string"; + break; + } + try { + encodeURIComponent(json); + } + catch (err) { + e = "invalid UTF8"; + break; + } + return json; + // bytes: JSON value will be the data encoded as a string using standard base64 encoding with paddings. + // Either standard or URL-safe base64 encoding with/without paddings are accepted. + case reflection_info_1.ScalarType.BYTES: + if (json === null || json === "") + return new Uint8Array(0); + if (typeof json !== 'string') + break; + return base64_1.base64decode(json); + } } + catch (error) { + e = error.message; + } + this.assert(false, fieldName + (e ?
" - " + e : ""), json); } - return sb.join(''); } -exports.lowerCamelCase = lowerCamelCase; - - -/***/ }), - -/***/ 87554: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.MESSAGE_TYPE = void 0; -/** - * The symbol used as a key on message objects to store the message type. - * - * Note that this is an experimental feature - it is here to stay, but - * implementation details may change without notice. - */ -exports.MESSAGE_TYPE = Symbol.for("protobuf-ts/message-type"); +exports.ReflectionJsonReader = ReflectionJsonReader; /***/ }), -/***/ 28923: +/***/ 86596: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.MessageType = void 0; -const message_type_contract_1 = __nccwpck_require__(87554); -const reflection_info_1 = __nccwpck_require__(48980); -const reflection_type_check_1 = __nccwpck_require__(22526); -const reflection_json_reader_1 = __nccwpck_require__(49661); -const reflection_json_writer_1 = __nccwpck_require__(94400); -const reflection_binary_reader_1 = __nccwpck_require__(46554); -const reflection_binary_writer_1 = __nccwpck_require__(43632); -const reflection_create_1 = __nccwpck_require__(86727); -const reflection_merge_partial_1 = __nccwpck_require__(57541); -const json_typings_1 = __nccwpck_require__(26982); -const json_format_contract_1 = __nccwpck_require__(71426); -const reflection_equals_1 = __nccwpck_require__(19439); -const binary_writer_1 = __nccwpck_require__(11547); -const binary_reader_1 = __nccwpck_require__(15963); -const baseDescriptors = Object.getOwnPropertyDescriptors(Object.getPrototypeOf({})); +exports.ReflectionJsonWriter = void 0; +const base64_1 = __nccwpck_require__(25081); +const pb_long_1 = __nccwpck_require__(9569); +const reflection_info_1 = __nccwpck_require__(49354); +const assert_1 = __nccwpck_require__(31589); /** - * This standard message type provides reflection-based - * operations to work with a message. + * Writes proto3 messages in canonical JSON format using reflection + * information. + * + * https://developers.google.com/protocol-buffers/docs/proto3#json */ -class MessageType { - constructor(name, fields, options) { - this.defaultCheckDepth = 16; - this.typeName = name; - this.fields = fields.map(reflection_info_1.normalizeFieldInfo); - this.options = options !== null && options !== void 0 ? options : {}; - this.messagePrototype = Object.create(null, Object.assign(Object.assign({}, baseDescriptors), { [message_type_contract_1.MESSAGE_TYPE]: { value: this } })); - this.refTypeCheck = new reflection_type_check_1.ReflectionTypeCheck(this); - this.refJsonReader = new reflection_json_reader_1.ReflectionJsonReader(this); - this.refJsonWriter = new reflection_json_writer_1.ReflectionJsonWriter(this); - this.refBinReader = new reflection_binary_reader_1.ReflectionBinaryReader(this); - this.refBinWriter = new reflection_binary_writer_1.ReflectionBinaryWriter(this); - } - create(value) { - let message = reflection_create_1.reflectionCreate(this); - if (value !== undefined) { - reflection_merge_partial_1.reflectionMergePartial(this, message, value); - } - return message; - } - /** - * Clone the message. - * - * Unknown fields are discarded. 
- */ - clone(message) { - let copy = this.create(); - reflection_merge_partial_1.reflectionMergePartial(this, copy, message); - return copy; - } - /** - * Determines whether two message of the same type have the same field values. - * Checks for deep equality, traversing repeated fields, oneof groups, maps - * and messages recursively. - * Will also return true if both messages are `undefined`. - */ - equals(a, b) { - return reflection_equals_1.reflectionEquals(this, a, b); - } - /** - * Is the given value assignable to our message type - * and contains no [excess properties](https://www.typescriptlang.org/docs/handbook/interfaces.html#excess-property-checks)? - */ - is(arg, depth = this.defaultCheckDepth) { - return this.refTypeCheck.is(arg, depth, false); - } - /** - * Is the given value assignable to our message type, - * regardless of [excess properties](https://www.typescriptlang.org/docs/handbook/interfaces.html#excess-property-checks)? - */ - isAssignable(arg, depth = this.defaultCheckDepth) { - return this.refTypeCheck.is(arg, depth, true); - } - /** - * Copy partial data into the target message. - */ - mergePartial(target, source) { - reflection_merge_partial_1.reflectionMergePartial(this, target, source); - } - /** - * Create a new message from binary format. - */ - fromBinary(data, options) { - let opt = binary_reader_1.binaryReadOptions(options); - return this.internalBinaryRead(opt.readerFactory(data), data.byteLength, opt); - } - /** - * Read a new message from a JSON value. - */ - fromJson(json, options) { - return this.internalJsonRead(json, json_format_contract_1.jsonReadOptions(options)); - } - /** - * Read a new message from a JSON string. - * This is equivalent to `T.fromJson(JSON.parse(json))`. - */ - fromJsonString(json, options) { - let value = JSON.parse(json); - return this.fromJson(value, options); - } - /** - * Write the message to canonical JSON value. - */ - toJson(message, options) { - return this.internalJsonWrite(message, json_format_contract_1.jsonWriteOptions(options)); - } - /** - * Convert the message to canonical JSON string. - * This is equivalent to `JSON.stringify(T.toJson(t))` - */ - toJsonString(message, options) { +class ReflectionJsonWriter { + constructor(info) { var _a; - let value = this.toJson(message, options); - return JSON.stringify(value, null, (_a = options === null || options === void 0 ? void 0 : options.prettySpaces) !== null && _a !== void 0 ? _a : 0); + this.fields = (_a = info.fields) !== null && _a !== void 0 ? _a : []; } /** - * Write the message to binary format. + * Converts the message to a JSON object, based on the field descriptors. */ - toBinary(message, options) { - let opt = binary_writer_1.binaryWriteOptions(options); - return this.internalBinaryWrite(message, opt.writerFactory(), opt).finish(); - } - /** - * This is an internal method. If you just want to read a message from - * JSON, use `fromJson()` or `fromJsonString()`. - * - * Reads JSON value and merges the fields into the target - * according to protobuf rules. If the target is omitted, - * a new instance is created first. - */ - internalJsonRead(json, options, target) { - if (json !== null && typeof json == "object" && !Array.isArray(json)) { - let message = target !== null && target !== void 0 ? 
target : this.create(); - this.refJsonReader.read(json, message, options); - return message; + write(message, options) { + const json = {}, source = message; + for (const field of this.fields) { + // field is not part of a oneof, simply write as is + if (!field.oneof) { + let jsonValue = this.field(field, source[field.localName], options); + if (jsonValue !== undefined) + json[options.useProtoFieldName ? field.name : field.jsonName] = jsonValue; + continue; + } + // field is part of a oneof + const group = source[field.oneof]; + if (group.oneofKind !== field.localName) + continue; // not selected, skip + const opt = field.kind == 'scalar' || field.kind == 'enum' + ? Object.assign(Object.assign({}, options), { emitDefaultValues: true }) : options; + let jsonValue = this.field(field, group[field.localName], opt); + assert_1.assert(jsonValue !== undefined); + json[options.useProtoFieldName ? field.name : field.jsonName] = jsonValue; } - throw new Error(`Unable to parse message ${this.typeName} from JSON ${json_typings_1.typeofJsonValue(json)}.`); + return json; } - /** - * This is an internal method. If you just want to write a message - * to JSON, use `toJson()` or `toJsonString(). - * - * Writes JSON value and returns it. - */ - internalJsonWrite(message, options) { - return this.refJsonWriter.write(message, options); + field(field, value, options) { + let jsonValue = undefined; + if (field.kind == 'map') { + assert_1.assert(typeof value == "object" && value !== null); + const jsonObj = {}; + switch (field.V.kind) { + case "scalar": + for (const [entryKey, entryValue] of Object.entries(value)) { + const val = this.scalar(field.V.T, entryValue, field.name, false, true); + assert_1.assert(val !== undefined); + jsonObj[entryKey.toString()] = val; // JSON standard allows only (double quoted) string as property key + } + break; + case "message": + const messageType = field.V.T(); + for (const [entryKey, entryValue] of Object.entries(value)) { + const val = this.message(messageType, entryValue, field.name, options); + assert_1.assert(val !== undefined); + jsonObj[entryKey.toString()] = val; // JSON standard allows only (double quoted) string as property key + } + break; + case "enum": + const enumInfo = field.V.T(); + for (const [entryKey, entryValue] of Object.entries(value)) { + assert_1.assert(entryValue === undefined || typeof entryValue == 'number'); + const val = this.enum(enumInfo, entryValue, field.name, false, true, options.enumAsInteger); + assert_1.assert(val !== undefined); + jsonObj[entryKey.toString()] = val; // JSON standard allows only (double quoted) string as property key + } + break; + } + if (options.emitDefaultValues || Object.keys(jsonObj).length > 0) + jsonValue = jsonObj; + } + else if (field.repeat) { + assert_1.assert(Array.isArray(value)); + const jsonArr = []; + switch (field.kind) { + case "scalar": + for (let i = 0; i < value.length; i++) { + const val = this.scalar(field.T, value[i], field.name, field.opt, true); + assert_1.assert(val !== undefined); + jsonArr.push(val); + } + break; + case "enum": + const enumInfo = field.T(); + for (let i = 0; i < value.length; i++) { + assert_1.assert(value[i] === undefined || typeof value[i] == 'number'); + const val = this.enum(enumInfo, value[i], field.name, field.opt, true, options.enumAsInteger); + assert_1.assert(val !== undefined); + jsonArr.push(val); + } + break; + case "message": + const messageType = field.T(); + for (let i = 0; i < value.length; i++) { + const val = this.message(messageType, value[i], field.name, 
options); + assert_1.assert(val !== undefined); + jsonArr.push(val); + } + break; + } + // add converted array to json output + if (options.emitDefaultValues || jsonArr.length > 0) + jsonValue = jsonArr; + } + else { + switch (field.kind) { + case "scalar": + jsonValue = this.scalar(field.T, value, field.name, field.opt, options.emitDefaultValues); + break; + case "enum": + jsonValue = this.enum(field.T(), value, field.name, field.opt, options.emitDefaultValues, options.enumAsInteger); + break; + case "message": + jsonValue = this.message(field.T(), value, field.name, options); + break; + } + } + return jsonValue; } /** - * This is an internal method. If you just want to write a message - * in binary format, use `toBinary()`. - * - * Serializes the message in binary format and appends it to the given - * writer. Returns passed writer. + * Returns `null` as the default for google.protobuf.NullValue. */ - internalBinaryWrite(message, writer, options) { - this.refBinWriter.write(message, writer, options); - return writer; + enum(type, value, fieldName, optional, emitDefaultValues, enumAsInteger) { + if (type[0] == 'google.protobuf.NullValue') + return !emitDefaultValues && !optional ? undefined : null; + if (value === undefined) { + assert_1.assert(optional); + return undefined; + } + if (value === 0 && !emitDefaultValues && !optional) + // we require 0 to be default value for all enums + return undefined; + assert_1.assert(typeof value == 'number'); + assert_1.assert(Number.isInteger(value)); + if (enumAsInteger || !type[1].hasOwnProperty(value)) + // if we don't know the enum value, just return the number + return value; + if (type[2]) + // restore the dropped prefix + return type[2] + type[1][value]; + return type[1][value]; } - /** - * This is an internal method. If you just want to read a message from - * binary data, use `fromBinary()`. - * - * Reads data from binary format and merges the fields into - * the target according to protobuf rules. If the target is - * omitted, a new instance is created first. - */ - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? target : this.create(); - this.refBinReader.read(reader, message, options, length); - return message; + message(type, value, fieldName, options) { + if (value === undefined) + return options.emitDefaultValues ? null : undefined; + return type.internalJsonWrite(value, options); + } + scalar(type, value, fieldName, optional, emitDefaultValues) { + if (value === undefined) { + assert_1.assert(optional); + return undefined; + } + const ed = emitDefaultValues || optional; + // noinspection FallThroughInSwitchStatementJS + switch (type) { + // int32, fixed32, uint32: JSON value will be a decimal number. Either numbers or strings are accepted. + case reflection_info_1.ScalarType.INT32: + case reflection_info_1.ScalarType.SFIXED32: + case reflection_info_1.ScalarType.SINT32: + if (value === 0) + return ed ? 0 : undefined; + assert_1.assertInt32(value); + return value; + case reflection_info_1.ScalarType.FIXED32: + case reflection_info_1.ScalarType.UINT32: + if (value === 0) + return ed ? 0 : undefined; + assert_1.assertUInt32(value); + return value; + // float, double: JSON value will be a number or one of the special string values "NaN", "Infinity", and "-Infinity". + // Either numbers or strings are accepted. Exponent notation is also accepted.
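+ // Illustration (editor's note, not generated code): with the signature
+ // scalar(type, value, fieldName, optional, emitDefaultValues), finite values
+ // serialize as JSON numbers and non-finite ones as the special strings:
+ //   scalar(ScalarType.DOUBLE, 0.25, "f", false, false)       -> 0.25
+ //   scalar(ScalarType.DOUBLE, Number.NaN, "f", false, false) -> "NaN"
+ //   scalar(ScalarType.DOUBLE, 0, "f", false, false)          -> undefined (omitted)
+ //   scalar(ScalarType.DOUBLE, 0, "f", false, true)           -> 0 (emitDefaultValues)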
+ case reflection_info_1.ScalarType.FLOAT: + assert_1.assertFloat32(value); + case reflection_info_1.ScalarType.DOUBLE: + if (value === 0) + return ed ? 0 : undefined; + assert_1.assert(typeof value == 'number'); + if (Number.isNaN(value)) + return 'NaN'; + if (value === Number.POSITIVE_INFINITY) + return 'Infinity'; + if (value === Number.NEGATIVE_INFINITY) + return '-Infinity'; + return value; + // string: + case reflection_info_1.ScalarType.STRING: + if (value === "") + return ed ? '' : undefined; + assert_1.assert(typeof value == 'string'); + return value; + // bool: + case reflection_info_1.ScalarType.BOOL: + if (value === false) + return ed ? false : undefined; + assert_1.assert(typeof value == 'boolean'); + return value; + // JSON value will be a decimal string. Either numbers or strings are accepted. + case reflection_info_1.ScalarType.UINT64: + case reflection_info_1.ScalarType.FIXED64: + assert_1.assert(typeof value == 'number' || typeof value == 'string' || typeof value == 'bigint'); + let ulong = pb_long_1.PbULong.from(value); + if (ulong.isZero() && !ed) + return undefined; + return ulong.toString(); + // JSON value will be a decimal string. Either numbers or strings are accepted. + case reflection_info_1.ScalarType.INT64: + case reflection_info_1.ScalarType.SFIXED64: + case reflection_info_1.ScalarType.SINT64: + assert_1.assert(typeof value == 'number' || typeof value == 'string' || typeof value == 'bigint'); + let long = pb_long_1.PbLong.from(value); + if (long.isZero() && !ed) + return undefined; + return long.toString(); + // bytes: JSON value will be the data encoded as a string using standard base64 encoding with paddings. + // Either standard or URL-safe base64 encoding with/without paddings are accepted. + case reflection_info_1.ScalarType.BYTES: + assert_1.assert(value instanceof Uint8Array); + if (!value.byteLength) + return ed ? "" : undefined; + return base64_1.base64encode(value); + } } } -exports.MessageType = MessageType; +exports.ReflectionJsonWriter = ReflectionJsonWriter; /***/ }), -/***/ 52468: -/***/ ((__unused_webpack_module, exports) => { +/***/ 98572: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getSelectedOneofValue = exports.clearOneofValue = exports.setUnknownOneofValue = exports.setOneofValue = exports.getOneofValue = exports.isOneofGroup = void 0; +exports.reflectionLongConvert = void 0; +const reflection_info_1 = __nccwpck_require__(49354); /** - * Is the given value a valid oneof group? - * - * We represent protobuf `oneof` as algebraic data types (ADT) in generated - * code. But when working with messages of unknown type, the ADT does not - * help us. - * - * This type guard checks if the given object adheres to the ADT rules, which - * are as follows: - * - * 1) Must be an object. - * - * 2) Must have a "oneofKind" discriminator property. - * - * 3) If "oneofKind" is `undefined`, no member field is selected. The object - * must not have any other properties. - * - * 4) If "oneofKind" is a `string`, the member field with this name is - * selected. - * - * 5) If a member field is selected, the object must have a second property - * with this name. The property must not be `undefined`. - * - * 6) No extra properties are allowed. The object has either one property - * (no selection) or two properties (selection). + * Utility method to convert a PbLong or PbUlong to a JavaScript + * representation during runtime. 
* + * Works with generated field information, `undefined` is equivalent + * to `STRING`. */ -function isOneofGroup(any) { - if (typeof any != 'object' || any === null || !any.hasOwnProperty('oneofKind')) { - return false; - } - switch (typeof any.oneofKind) { - case "string": - if (any[any.oneofKind] === undefined) - return false; - return Object.keys(any).length == 2; - case "undefined": - return Object.keys(any).length == 1; +function reflectionLongConvert(long, type) { + switch (type) { + case reflection_info_1.LongType.BIGINT: + return long.toBigInt(); + case reflection_info_1.LongType.NUMBER: + return long.toNumber(); default: - return false; - } -} -exports.isOneofGroup = isOneofGroup; -/** - * Returns the value of the given field in a oneof group. - */ -function getOneofValue(oneof, kind) { - return oneof[kind]; -} -exports.getOneofValue = getOneofValue; -function setOneofValue(oneof, kind, value) { - if (oneof.oneofKind !== undefined) { - delete oneof[oneof.oneofKind]; - } - oneof.oneofKind = kind; - if (value !== undefined) { - oneof[kind] = value; - } -} -exports.setOneofValue = setOneofValue; -function setUnknownOneofValue(oneof, kind, value) { - if (oneof.oneofKind !== undefined) { - delete oneof[oneof.oneofKind]; - } - oneof.oneofKind = kind; - if (value !== undefined && kind !== undefined) { - oneof[kind] = value; + // case undefined: + // case LongType.STRING: + return long.toString(); } } -exports.setUnknownOneofValue = setUnknownOneofValue; +exports.reflectionLongConvert = reflectionLongConvert; + + +/***/ }), + +/***/ 44302: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.reflectionMergePartial = void 0; /** - * Removes the selected field in a oneof group. + * Copy partial data into the target message. * - * Note that the recommended way to modify a oneof group is to set - * a new object: + * If a singular scalar or enum field is present in the source, it + * replaces the field in the target. * - * ```ts - * message.result = { oneofKind: undefined }; - * ``` - */ -function clearOneofValue(oneof) { - if (oneof.oneofKind !== undefined) { - delete oneof[oneof.oneofKind]; - } - oneof.oneofKind = undefined; -} -exports.clearOneofValue = clearOneofValue; -/** - * Returns the selected value of the given oneof group. + * If a singular message field is present in the source, it is merged + * with the target field by calling mergePartial() of the responsible + * message type. * - * Not that the recommended way to access a oneof group is to check - * the "oneofKind" property and let TypeScript narrow down the union - * type for you: + * If a repeated field is present in the source, its values replace + * all values in the target array, removing extraneous values. + * Repeated message fields are copied, not merged. * - * ```ts - * if (message.result.oneofKind === "error") { - * message.result.error; // string - * } - * ``` + * If a map field is present in the source, entries are added to the + * target map, replacing entries with the same key. Entries that only + * exist in the target remain. Entries with message values are copied, + * not merged. * - * In the rare case you just need the value, and do not care about - * which protobuf field is selected, you can use this function - * for convenience. + * Note that this function differs from protobuf merge semantics, + * which appends repeated fields. 
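+ *
+ * For example (editor's illustration; the message and its fields are
+ * hypothetical):
+ *
+ * ```ts
+ * // target: { name: "a", tags: ["x", "y"] }
+ * reflectionMergePartial(info, target, { tags: ["z"] });
+ * // target: { name: "a", tags: ["z"] } - replaced, not appended
+ * ```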
*/ -function getSelectedOneofValue(oneof) { - if (oneof.oneofKind === undefined) { - return undefined; +function reflectionMergePartial(info, target, source) { + let fieldValue, // the field value we are working with + input = source, output; // where we want our field value to go + for (let field of info.fields) { + let name = field.localName; + if (field.oneof) { + const group = input[field.oneof]; // this is the oneof's group in the source + if ((group === null || group === void 0 ? void 0 : group.oneofKind) == undefined) { // the user is free to omit + continue; // we skip this field, and all other members too + } + fieldValue = group[name]; // our value comes from the oneof group of the source + output = target[field.oneof]; // and our output is the oneof group of the target + output.oneofKind = group.oneofKind; // always update discriminator + if (fieldValue == undefined) { + delete output[name]; // remove any existing value + continue; // skip further work on field + } + } + else { + fieldValue = input[name]; // we are using the source directly + output = target; // we want our field value to go directly into the target + if (fieldValue == undefined) { + continue; // skip further work on field, existing value is used as is + } + } + if (field.repeat) + output[name].length = fieldValue.length; // resize target array to match source array + // now we just work with `fieldValue` and `output` to merge the value + switch (field.kind) { + case "scalar": + case "enum": + if (field.repeat) + for (let i = 0; i < fieldValue.length; i++) + output[name][i] = fieldValue[i]; // not a reference type + else + output[name] = fieldValue; // not a reference type + break; + case "message": + let T = field.T(); + if (field.repeat) + for (let i = 0; i < fieldValue.length; i++) + output[name][i] = T.create(fieldValue[i]); + else if (output[name] === undefined) + output[name] = T.create(fieldValue); // nothing to merge with + else + T.mergePartial(output[name], fieldValue); + break; + case "map": + // Map and repeated fields are simply overwritten, not appended or merged + switch (field.V.kind) { + case "scalar": + case "enum": + Object.assign(output[name], fieldValue); // elements are not reference types + break; + case "message": + let T = field.V.T(); + for (let k of Object.keys(fieldValue)) + output[name][k] = T.create(fieldValue[k]); + break; + } + break; + } } - return oneof[oneof.oneofKind]; } -exports.getSelectedOneofValue = getSelectedOneofValue; +exports.reflectionMergePartial = reflectionMergePartial; /***/ }), -/***/ 19823: +/***/ 40809: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.PbLong = exports.PbULong = exports.detectBi = void 0; -const goog_varint_1 = __nccwpck_require__(58378); -let BI; -function detectBi() { - const dv = new DataView(new ArrayBuffer(8)); - const ok = globalThis.BigInt !== undefined - && typeof dv.getBigInt64 === "function" - && typeof dv.getBigUint64 === "function" - && typeof dv.setBigInt64 === "function" - && typeof dv.setBigUint64 === "function"; - BI = ok ?
{ - MIN: BigInt("-9223372036854775808"), - MAX: BigInt("9223372036854775807"), - UMIN: BigInt("0"), - UMAX: BigInt("18446744073709551615"), - C: BigInt, - V: dv, - } : undefined; -} -exports.detectBi = detectBi; -detectBi(); -function assertBi(bi) { - if (!bi) - throw new Error("BigInt unavailable, see https://github.com/timostamm/protobuf-ts/blob/v1.0.8/MANUAL.md#bigint-support"); -} -// used to validate from(string) input (when bigint is unavailable) -const RE_DECIMAL_STR = /^-?[0-9]+$/; -// constants for binary math -const TWO_PWR_32_DBL = 0x100000000; -const HALF_2_PWR_32 = 0x080000000; -// base class for PbLong and PbULong provides shared code -class SharedPbLong { - /** - * Create a new instance with the given bits. - */ - constructor(lo, hi) { - this.lo = lo | 0; - this.hi = hi | 0; - } - /** - * Is this instance equal to 0? - */ - isZero() { - return this.lo == 0 && this.hi == 0; - } - /** - * Convert to a native number. - */ - toNumber() { - let result = this.hi * TWO_PWR_32_DBL + (this.lo >>> 0); - if (!Number.isSafeInteger(result)) - throw new Error("cannot convert to safe number"); - return result; - } -} +exports.reflectionScalarDefault = void 0; +const reflection_info_1 = __nccwpck_require__(49354); +const reflection_long_convert_1 = __nccwpck_require__(98572); +const pb_long_1 = __nccwpck_require__(9569); /** - * 64-bit unsigned integer as two 32-bit values. - * Converts between `string`, `number` and `bigint` representations. + * Creates the default value for a scalar type. */ -class PbULong extends SharedPbLong { - /** - * Create instance from a `string`, `number` or `bigint`. - */ - static from(value) { - if (BI) - // noinspection FallThroughInSwitchStatementJS - switch (typeof value) { - case "string": - if (value == "0") - return this.ZERO; - if (value == "") - throw new Error('string is no integer'); - value = BI.C(value); - case "number": - if (value === 0) - return this.ZERO; - value = BI.C(value); - case "bigint": - if (!value) - return this.ZERO; - if (value < BI.UMIN) - throw new Error('signed value for ulong'); - if (value > BI.UMAX) - throw new Error('ulong too large'); - BI.V.setBigUint64(0, value, true); - return new PbULong(BI.V.getInt32(0, true), BI.V.getInt32(4, true)); +function reflectionScalarDefault(type, longType = reflection_info_1.LongType.STRING) { + switch (type) { + case reflection_info_1.ScalarType.BOOL: + return false; + case reflection_info_1.ScalarType.UINT64: + case reflection_info_1.ScalarType.FIXED64: + return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbULong.ZERO, longType); + case reflection_info_1.ScalarType.INT64: + case reflection_info_1.ScalarType.SFIXED64: + case reflection_info_1.ScalarType.SINT64: + return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbLong.ZERO, longType); + case reflection_info_1.ScalarType.DOUBLE: + case reflection_info_1.ScalarType.FLOAT: + return 0.0; + case reflection_info_1.ScalarType.BYTES: + return new Uint8Array(0); + case reflection_info_1.ScalarType.STRING: + return ""; + default: + // case ScalarType.INT32: + // case ScalarType.UINT32: + // case ScalarType.SINT32: + // case ScalarType.FIXED32: + // case ScalarType.SFIXED32: + return 0; + } +} +exports.reflectionScalarDefault = reflectionScalarDefault; + + +/***/ }), + +/***/ 67737: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ReflectionTypeCheck = void 0; +const reflection_info_1 = 
__nccwpck_require__(49354); +const oneof_1 = __nccwpck_require__(76391); +// noinspection JSMethodCanBeStatic +class ReflectionTypeCheck { + constructor(info) { + var _a; + this.fields = (_a = info.fields) !== null && _a !== void 0 ? _a : []; + } + prepare() { + if (this.data) + return; + const req = [], known = [], oneofs = []; + for (let field of this.fields) { + if (field.oneof) { + if (!oneofs.includes(field.oneof)) { + oneofs.push(field.oneof); + req.push(field.oneof); + known.push(field.oneof); + } } - else - switch (typeof value) { - case "string": - if (value == "0") - return this.ZERO; - value = value.trim(); - if (!RE_DECIMAL_STR.test(value)) - throw new Error('string is no integer'); - let [minus, lo, hi] = goog_varint_1.int64fromString(value); - if (minus) - throw new Error('signed value for ulong'); - return new PbULong(lo, hi); - case "number": - if (value == 0) - return this.ZERO; - if (!Number.isSafeInteger(value)) - throw new Error('number is no integer'); - if (value < 0) - throw new Error('signed value for ulong'); - return new PbULong(value, value / TWO_PWR_32_DBL); + else { + known.push(field.localName); + switch (field.kind) { + case "scalar": + case "enum": + if (!field.opt || field.repeat) + req.push(field.localName); + break; + case "message": + if (field.repeat) + req.push(field.localName); + break; + case "map": + req.push(field.localName); + break; + } } - throw new Error('unknown value ' + typeof value); + } + this.data = { req, known, oneofs: Object.values(oneofs) }; } /** - * Convert to decimal string. + * Is the argument a valid message as specified by the + * reflection information? + * + * Checks all field types recursively. The `depth` + * specifies how deep into the structure the check will be. + * + * With a depth of 0, only the presence of fields + * is checked. + * + * With a depth of 1 or more, the field types are checked. + * + * With a depth of 2 or more, the members of map, repeated + * and message fields are checked. + * + * Message fields will be checked recursively with depth - 1. + * + * The number of map entries / repeated values being checked + * is < depth. */ - toString() { - return BI ? this.toBigInt().toString() : goog_varint_1.int64toString(this.lo, this.hi); + is(message, depth, allowExcessProperties = false) { + if (depth < 0) + return true; + if (message === null || message === undefined || typeof message != 'object') + return false; + this.prepare(); + let keys = Object.keys(message), data = this.data; + // if a required field is missing in arg, this cannot be a T + if (keys.length < data.req.length || data.req.some(n => !keys.includes(n))) + return false; + if (!allowExcessProperties) { + // if the arg contains a key we don't know, this is not a literal T + if (keys.some(k => !data.known.includes(k))) + return false; + } + // "With a depth of 0, only the presence and absence of fields is checked." + // "With a depth of 1 or more, the field types are checked."
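+ // For example (editor's note): is(msg, 0) stops after the presence checks
+ // above, is(msg, 1) additionally verifies scalar and enum field types, and
+ // is(msg, 2) starts descending into map, repeated and message members.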
+ if (depth < 1) { + return true; + } + // check oneof group + for (const name of data.oneofs) { + const group = message[name]; + if (!oneof_1.isOneofGroup(group)) + return false; + if (group.oneofKind === undefined) + continue; + const field = this.fields.find(f => f.localName === group.oneofKind); + if (!field) + return false; // we found no field, but have a kind, something is wrong + if (!this.field(group[group.oneofKind], field, allowExcessProperties, depth)) + return false; + } + // check types + for (const field of this.fields) { + if (field.oneof !== undefined) + continue; + if (!this.field(message[field.localName], field, allowExcessProperties, depth)) + return false; + } + return true; } - /** - * Convert to native bigint. - */ - toBigInt() { - assertBi(BI); - BI.V.setInt32(0, this.lo, true); - BI.V.setInt32(4, this.hi, true); - return BI.V.getBigUint64(0, true); + field(arg, field, allowExcessProperties, depth) { + let repeated = field.repeat; + switch (field.kind) { + case "scalar": + if (arg === undefined) + return field.opt; + if (repeated) + return this.scalars(arg, field.T, depth, field.L); + return this.scalar(arg, field.T, field.L); + case "enum": + if (arg === undefined) + return field.opt; + if (repeated) + return this.scalars(arg, reflection_info_1.ScalarType.INT32, depth); + return this.scalar(arg, reflection_info_1.ScalarType.INT32); + case "message": + if (arg === undefined) + return true; + if (repeated) + return this.messages(arg, field.T(), allowExcessProperties, depth); + return this.message(arg, field.T(), allowExcessProperties, depth); + case "map": + if (typeof arg != 'object' || arg === null) + return false; + if (depth < 2) + return true; + if (!this.mapKeys(arg, field.K, depth)) + return false; + switch (field.V.kind) { + case "scalar": + return this.scalars(Object.values(arg), field.V.T, depth, field.V.L); + case "enum": + return this.scalars(Object.values(arg), reflection_info_1.ScalarType.INT32, depth); + case "message": + return this.messages(Object.values(arg), field.V.T(), allowExcessProperties, depth); + } + break; + } + return true; + } + message(arg, type, allowExcessProperties, depth) { + if (allowExcessProperties) { + return type.isAssignable(arg, depth); + } + return type.is(arg, depth); + } + messages(arg, type, allowExcessProperties, depth) { + if (!Array.isArray(arg)) + return false; + if (depth < 2) + return true; + if (allowExcessProperties) { + for (let i = 0; i < arg.length && i < depth; i++) + if (!type.isAssignable(arg[i], depth - 1)) + return false; + } + else { + for (let i = 0; i < arg.length && i < depth; i++) + if (!type.is(arg[i], depth - 1)) + return false; + } + return true; + } + scalar(arg, type, longType) { + let argType = typeof arg; + switch (type) { + case reflection_info_1.ScalarType.UINT64: + case reflection_info_1.ScalarType.FIXED64: + case reflection_info_1.ScalarType.INT64: + case reflection_info_1.ScalarType.SFIXED64: + case reflection_info_1.ScalarType.SINT64: + switch (longType) { + case reflection_info_1.LongType.BIGINT: + return argType == "bigint"; + case reflection_info_1.LongType.NUMBER: + return argType == "number" && !isNaN(arg); + default: + return argType == "string"; + } + case reflection_info_1.ScalarType.BOOL: + return argType == 'boolean'; + case reflection_info_1.ScalarType.STRING: + return argType == 'string'; + case reflection_info_1.ScalarType.BYTES: + return arg instanceof Uint8Array; + case reflection_info_1.ScalarType.DOUBLE: + case reflection_info_1.ScalarType.FLOAT: + return argType == 
'number' && !isNaN(arg); + default: + // case ScalarType.UINT32: + // case ScalarType.FIXED32: + // case ScalarType.INT32: + // case ScalarType.SINT32: + // case ScalarType.SFIXED32: + return argType == 'number' && Number.isInteger(arg); + } + } + scalars(arg, type, depth, longType) { + if (!Array.isArray(arg)) + return false; + if (depth < 2) + return true; + if (Array.isArray(arg)) + for (let i = 0; i < arg.length && i < depth; i++) + if (!this.scalar(arg[i], type, longType)) + return false; + return true; + } + mapKeys(map, type, depth) { + let keys = Object.keys(map); + switch (type) { + case reflection_info_1.ScalarType.INT32: + case reflection_info_1.ScalarType.FIXED32: + case reflection_info_1.ScalarType.SFIXED32: + case reflection_info_1.ScalarType.SINT32: + case reflection_info_1.ScalarType.UINT32: + return this.scalars(keys.slice(0, depth).map(k => parseInt(k)), type, depth); + case reflection_info_1.ScalarType.BOOL: + return this.scalars(keys.slice(0, depth).map(k => k == 'true' ? true : k == 'false' ? false : k), type, depth); + default: + return this.scalars(keys, type, depth, reflection_info_1.LongType.STRING); + } } } -exports.PbULong = PbULong; +exports.ReflectionTypeCheck = ReflectionTypeCheck; + + +/***/ }), + +/***/ 71935: +/***/ ((module, exports, __nccwpck_require__) => { + +"use strict"; /** - * ulong 0 singleton. + * @author Toru Nagashima + * See LICENSE file in root directory for full license. */ -PbULong.ZERO = new PbULong(0, 0); + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +var eventTargetShim = __nccwpck_require__(68963); + /** - * 64-bit signed integer as two 32-bit values. - * Converts between `string`, `number` and `bigint` representations. + * The signal class. + * @see https://dom.spec.whatwg.org/#abortsignal */ -class PbLong extends SharedPbLong { +class AbortSignal extends eventTargetShim.EventTarget { /** - * Create instance from a `string`, `number` or `bigint`. + * AbortSignal cannot be constructed directly. */ - static from(value) { - if (BI) - // noinspection FallThroughInSwitchStatementJS - switch (typeof value) { - case "string": - if (value == "0") - return this.ZERO; - if (value == "") - throw new Error('string is no integer'); - value = BI.C(value); - case "number": - if (value === 0) - return this.ZERO; - value = BI.C(value); - case "bigint": - if (!value) - return this.ZERO; - if (value < BI.MIN) - throw new Error('signed long too small'); - if (value > BI.MAX) - throw new Error('signed long too large'); - BI.V.setBigInt64(0, value, true); - return new PbLong(BI.V.getInt32(0, true), BI.V.getInt32(4, true)); - } - else - switch (typeof value) { - case "string": - if (value == "0") - return this.ZERO; - value = value.trim(); - if (!RE_DECIMAL_STR.test(value)) - throw new Error('string is no integer'); - let [minus, lo, hi] = goog_varint_1.int64fromString(value); - if (minus) { - if (hi > HALF_2_PWR_32 || (hi == HALF_2_PWR_32 && lo != 0)) - throw new Error('signed long too small'); - } - else if (hi >= HALF_2_PWR_32) - throw new Error('signed long too large'); - let pbl = new PbLong(lo, hi); - return minus ? pbl.negate() : pbl; - case "number": - if (value == 0) - return this.ZERO; - if (!Number.isSafeInteger(value)) - throw new Error('number is no integer'); - return value > 0 - ? 
new PbLong(value, value / TWO_PWR_32_DBL) - : new PbLong(-value, -value / TWO_PWR_32_DBL).negate(); - } - throw new Error('unknown value ' + typeof value); + constructor() { + super(); + throw new TypeError("AbortSignal cannot be constructed directly"); } /** - * Do we have a minus sign? + * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise. */ - isNegative() { - return (this.hi & HALF_2_PWR_32) !== 0; + get aborted() { + const aborted = abortedFlags.get(this); + if (typeof aborted !== "boolean") { + throw new TypeError(`Expected 'this' to be an 'AbortSignal' object, but got ${this === null ? "null" : typeof this}`); + } + return aborted; + } +} +eventTargetShim.defineEventAttribute(AbortSignal.prototype, "abort"); +/** + * Create an AbortSignal object. + */ +function createAbortSignal() { + const signal = Object.create(AbortSignal.prototype); + eventTargetShim.EventTarget.call(signal); + abortedFlags.set(signal, false); + return signal; +} +/** + * Abort a given signal. + */ +function abortSignal(signal) { + if (abortedFlags.get(signal) !== false) { + return; } + abortedFlags.set(signal, true); + signal.dispatchEvent({ type: "abort" }); +} +/** + * Aborted flag for each instances. + */ +const abortedFlags = new WeakMap(); +// Properties should be enumerable. +Object.defineProperties(AbortSignal.prototype, { + aborted: { enumerable: true }, +}); +// `toString()` should return `"[object AbortSignal]"` +if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, { + configurable: true, + value: "AbortSignal", + }); +} + +/** + * The AbortController. + * @see https://dom.spec.whatwg.org/#abortcontroller + */ +class AbortController { /** - * Negate two's complement. - * Invert all the bits and add one to the result. + * Initialize this controller. */ - negate() { - let hi = ~this.hi, lo = this.lo; - if (lo) - lo = ~lo + 1; - else - hi += 1; - return new PbLong(lo, hi); + constructor() { + signals.set(this, createAbortSignal()); } /** - * Convert to decimal string. + * Returns the `AbortSignal` object associated with this object. */ - toString() { - if (BI) - return this.toBigInt().toString(); - if (this.isNegative()) { - let n = this.negate(); - return '-' + goog_varint_1.int64toString(n.lo, n.hi); - } - return goog_varint_1.int64toString(this.lo, this.hi); + get signal() { + return getSignal(this); } /** - * Convert to native bigint. + * Abort and signal to any observers that the associated activity is to be aborted. */ - toBigInt() { - assertBi(BI); - BI.V.setInt32(0, this.lo, true); - BI.V.setInt32(4, this.hi, true); - return BI.V.getBigInt64(0, true); + abort() { + abortSignal(getSignal(this)); } } -exports.PbLong = PbLong; /** - * long 0 singleton. + * Associated signals. */ -PbLong.ZERO = new PbLong(0, 0); +const signals = new WeakMap(); +/** + * Get the associated signal of a given controller. + */ +function getSignal(controller) { + const signal = signals.get(controller); + if (signal == null) { + throw new TypeError(`Expected 'this' to be an 'AbortController' object, but got ${controller === null ? "null" : typeof controller}`); + } + return signal; +} +// Properties should be enumerable. 
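+// Typical usage of the shim (editor's illustration, not generated code):
+//   const ac = new AbortController();
+//   ac.signal.addEventListener("abort", () => console.log("aborted"));
+//   ac.abort(); // ac.signal.aborted === true; the "abort" event fires once
+// (Class getters and methods are non-enumerable by default, hence the
+// explicit `enumerable: true` descriptors below.)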
+Object.defineProperties(AbortController.prototype, { + signal: { enumerable: true }, + abort: { enumerable: true }, +}); +if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(AbortController.prototype, Symbol.toStringTag, { + configurable: true, + value: "AbortController", + }); +} + +exports.AbortController = AbortController; +exports.AbortSignal = AbortSignal; +exports["default"] = AbortController; + +module.exports = AbortController +module.exports.AbortController = module.exports["default"] = AbortController +module.exports.AbortSignal = AbortSignal +//# sourceMappingURL=abort-controller.js.map /***/ }), -/***/ 9910: -/***/ ((__unused_webpack_module, exports) => { +/***/ 29638: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; -// Copyright (c) 2016, Daniel Wirtz All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above copyright -// notice, this list of conditions and the following disclaimer in the -// documentation and/or other materials provided with the distribution. -// * Neither the name of its author, nor the names of its contributors -// may be used to endorse or promote products derived from this software -// without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.utf8read = void 0; -const fromCharCodes = (chunk) => String.fromCharCode.apply(String, chunk); -/** - * @deprecated This function will no longer be exported with the next major - * release, since protobuf-ts has switch to TextDecoder API. If you need this - * function, please migrate to @protobufjs/utf8. For context, see - * https://github.com/timostamm/protobuf-ts/issues/184 - * - * Reads UTF8 bytes as a string. - * - * See [protobufjs / utf8](https://github.com/protobufjs/protobuf.js/blob/9893e35b854621cce64af4bf6be2cff4fb892796/lib/utf8/index.js#L40) - * - * Copyright (c) 2016, Daniel Wirtz - */ -function utf8read(bytes) { - if (bytes.length < 1) - return ""; - let pos = 0, // position in bytes - parts = [], chunk = [], i = 0, // char offset - t; // temporary - let len = bytes.length; - while (pos < len) { - t = bytes[pos++]; - if (t < 128) - chunk[i++] = t; - else if (t > 191 && t < 224) - chunk[i++] = (t & 31) << 6 | bytes[pos++] & 63; - else if (t > 239 && t < 365) { - t = ((t & 7) << 18 | (bytes[pos++] & 63) << 12 | (bytes[pos++] & 63) << 6 | bytes[pos++] & 63) - 0x10000; - chunk[i++] = 0xD800 + (t >> 10); - chunk[i++] = 0xDC00 + (t & 1023); - } - else - chunk[i++] = (t & 15) << 12 | (bytes[pos++] & 63) << 6 | bytes[pos++] & 63; - if (i > 8191) { - parts.push(fromCharCodes(chunk)); - i = 0; - } +exports.req = exports.json = exports.toBuffer = void 0; +const http = __importStar(__nccwpck_require__(13685)); +const https = __importStar(__nccwpck_require__(95687)); +async function toBuffer(stream) { + let length = 0; + const chunks = []; + for await (const chunk of stream) { + length += chunk.length; + chunks.push(chunk); } - if (parts.length) { - if (i) - parts.push(fromCharCodes(chunk.slice(0, i))); - return parts.join(""); + return Buffer.concat(chunks, length); +} +exports.toBuffer = toBuffer; +// eslint-disable-next-line @typescript-eslint/no-explicit-any +async function json(stream) { + const buf = await toBuffer(stream); + const str = buf.toString('utf8'); + try { + return JSON.parse(str); + } + catch (_err) { + const err = _err; + err.message += ` (input: ${str})`; + throw err; } - return fromCharCodes(chunk.slice(0, i)); } -exports.utf8read = utf8read; - +exports.json = json; +function req(url, opts = {}) { + const href = typeof url === 'string' ? url : url.href; + const req = (href.startsWith('https:') ? 
https : http).request(url, opts); + const promise = new Promise((resolve, reject) => { + req + .once('response', resolve) + .once('error', reject) + .end(); + }); + req.then = promise.then.bind(promise); + return req; +} +exports.req = req; +//# sourceMappingURL=helpers.js.map /***/ }), -/***/ 46554: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 24557: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ReflectionBinaryReader = void 0; -const binary_format_contract_1 = __nccwpck_require__(24290); -const reflection_info_1 = __nccwpck_require__(48980); -const reflection_long_convert_1 = __nccwpck_require__(75194); -const reflection_scalar_default_1 = __nccwpck_require__(28441); -/** - * Reads proto3 messages in binary format using reflection information. - * - * https://developers.google.com/protocol-buffers/docs/encoding - */ -class ReflectionBinaryReader { - constructor(info) { - this.info = info; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; } - prepare() { - var _a; - if (!this.fieldNoToField) { - const fieldsInput = (_a = this.info.fields) !== null && _a !== void 0 ? _a : []; - this.fieldNoToField = new Map(fieldsInput.map(field => [field.no, field])); + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Agent = void 0; +const net = __importStar(__nccwpck_require__(41808)); +const http = __importStar(__nccwpck_require__(13685)); +const https_1 = __nccwpck_require__(95687); +__exportStar(__nccwpck_require__(29638), exports); +const INTERNAL = Symbol('AgentBaseInternalState'); +class Agent extends http.Agent { + constructor(opts) { + super(opts); + this[INTERNAL] = {}; + } + /** + * Determine whether this is an `http` or `https` request. + */ + isSecureEndpoint(options) { + if (options) { + // First check the `secureEndpoint` property explicitly, since this + // means that a parent `Agent` is "passing through" to this instance. + // eslint-disable-next-line @typescript-eslint/no-explicit-any + if (typeof options.secureEndpoint === 'boolean') { + return options.secureEndpoint; + } + // If no explicit `secure` endpoint, check if `protocol` property is + // set. This will usually be the case since using a full string URL + // or `URL` instance should be the most common usage. 
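+ // For example (editor's note, `agent` being any instance of this class):
+ //   agent.isSecureEndpoint({ protocol: 'https:' })        -> true
+ //   agent.isSecureEndpoint(new URL('http://example.com')) -> false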
+ if (typeof options.protocol === 'string') { + return options.protocol === 'https:'; + } + } + // Finally, if no `protocol` property was set, then fall back to + // checking the stack trace of the current call stack, and try to + // detect the "https" module. + const { stack } = new Error(); + if (typeof stack !== 'string') + return false; + return stack + .split('\n') + .some((l) => l.indexOf('(https.js:') !== -1 || + l.indexOf('node:https:') !== -1); + } + // In order to support async signatures in `connect()` and Node's native + // connection pooling in `http.Agent`, the array of sockets for each origin + // has to be updated synchronously. This is so the length of the array is + // accurate when `addRequest()` is next called. We achieve this by creating a + // fake socket and adding it to `sockets[origin]` and incrementing + // `totalSocketCount`. + incrementSockets(name) { + // If `maxSockets` and `maxTotalSockets` are both Infinity then there is no + // need to create a fake socket because Node.js native connection pooling + // will never be invoked. + if (this.maxSockets === Infinity && this.maxTotalSockets === Infinity) { + return null; } - } - /** - * Reads a message from binary format into the target message. - * - * Repeated fields are appended. Map entries are added, overwriting - * existing keys. - * - * If a message field is already present, it will be merged with the - * new data. - */ - read(reader, message, options, length) { - this.prepare(); - const end = length === undefined ? reader.len : reader.pos + length; - while (reader.pos < end) { - // read the tag and find the field - const [fieldNo, wireType] = reader.tag(), field = this.fieldNoToField.get(fieldNo); - if (!field) { - let u = options.readUnknownField; - if (u == "throw") - throw new Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.info.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? binary_format_contract_1.UnknownFieldHandler.onRead : u)(this.info.typeName, message, fieldNo, wireType, d); - continue; - } - // target object for the field we are reading - let target = message, repeated = field.repeat, localName = field.localName; - // if field is member of oneof ADT, use ADT as target - if (field.oneof) { - target = target[field.oneof]; - // if other oneof member selected, set new ADT - if (target.oneofKind !== localName) - target = message[field.oneof] = { - oneofKind: localName - }; - } - // we have handled oneof above, we just have read the value into `target[localName]` - switch (field.kind) { - case "scalar": - case "enum": - let T = field.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.T; - let L = field.kind == "scalar" ? 
field.L : undefined; - if (repeated) { - let arr = target[localName]; // safe to assume presence of array, oneof cannot contain repeated values - if (wireType == binary_format_contract_1.WireType.LengthDelimited && T != reflection_info_1.ScalarType.STRING && T != reflection_info_1.ScalarType.BYTES) { - let e = reader.uint32() + reader.pos; - while (reader.pos < e) - arr.push(this.scalar(reader, T, L)); - } - else - arr.push(this.scalar(reader, T, L)); - } - else - target[localName] = this.scalar(reader, T, L); - break; - case "message": - if (repeated) { - let arr = target[localName]; // safe to assume presence of array, oneof cannot contain repeated values - let msg = field.T().internalBinaryRead(reader, reader.uint32(), options); - arr.push(msg); - } - else - target[localName] = field.T().internalBinaryRead(reader, reader.uint32(), options, target[localName]); - break; - case "map": - let [mapKey, mapVal] = this.mapEntry(field, reader, options); - // safe to assume presence of map object, oneof cannot contain repeated values - target[localName][mapKey] = mapVal; - break; - } + // All instances of `sockets` are expected TypeScript errors. The + // alternative is to add it as a private property of this class but that + // will break TypeScript subclassing. + if (!this.sockets[name]) { + // @ts-expect-error `sockets` is readonly in `@types/node` + this.sockets[name] = []; + } + const fakeSocket = new net.Socket({ writable: false }); + this.sockets[name].push(fakeSocket); + // @ts-expect-error `totalSocketCount` isn't defined in `@types/node` + this.totalSocketCount++; + return fakeSocket; + } + decrementSockets(name, socket) { + if (!this.sockets[name] || socket === null) { + return; } + const sockets = this.sockets[name]; + const index = sockets.indexOf(socket); + if (index !== -1) { + sockets.splice(index, 1); + // @ts-expect-error `totalSocketCount` isn't defined in `@types/node` + this.totalSocketCount--; + if (sockets.length === 0) { + // @ts-expect-error `sockets` is readonly in `@types/node` + delete this.sockets[name]; + } + } + } + // In order to properly update the socket pool, we need to call `getName()` on + // the core `https.Agent` if it is a secureEndpoint. + getName(options) { + const secureEndpoint = typeof options.secureEndpoint === 'boolean' + ? 
options.secureEndpoint + : this.isSecureEndpoint(options); + if (secureEndpoint) { + // @ts-expect-error `getName()` isn't defined in `@types/node` + return https_1.Agent.prototype.getName.call(this, options); + } + // @ts-expect-error `getName()` isn't defined in `@types/node` + return super.getName(options); + } + createSocket(req, options, cb) { + const connectOpts = { + ...options, + secureEndpoint: this.isSecureEndpoint(options), + }; + const name = this.getName(connectOpts); + const fakeSocket = this.incrementSockets(name); + Promise.resolve() + .then(() => this.connect(req, connectOpts)) + .then((socket) => { + this.decrementSockets(name, fakeSocket); + if (socket instanceof http.Agent) { + // @ts-expect-error `addRequest()` isn't defined in `@types/node` + return socket.addRequest(req, connectOpts); + } + this[INTERNAL].currentSocket = socket; + // @ts-expect-error `createSocket()` isn't defined in `@types/node` + super.createSocket(req, options, cb); + }, (err) => { + this.decrementSockets(name, fakeSocket); + cb(err); + }); } - /** - * Read a map field, expecting key field = 1, value field = 2 - */ - mapEntry(field, reader, options) { - let length = reader.uint32(); - let end = reader.pos + length; - let key = undefined; // javascript only allows number or string for object properties - let val = undefined; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case 1: - if (field.K == reflection_info_1.ScalarType.BOOL) - key = reader.bool().toString(); - else - // long types are read as string, number types are okay as number - key = this.scalar(reader, field.K, reflection_info_1.LongType.STRING); - break; - case 2: - switch (field.V.kind) { - case "scalar": - val = this.scalar(reader, field.V.T, field.V.L); - break; - case "enum": - val = reader.int32(); - break; - case "message": - val = field.V.T().internalBinaryRead(reader, reader.uint32(), options); - break; - } - break; - default: - throw new Error(`Unknown field ${fieldNo} (wire type ${wireType}) in map entry for ${this.info.typeName}#${field.name}`); - } + createConnection() { + const socket = this[INTERNAL].currentSocket; + this[INTERNAL].currentSocket = undefined; + if (!socket) { + throw new Error('No socket was returned in the `connect()` function'); } - if (key === undefined) { - let keyRaw = reflection_scalar_default_1.reflectionScalarDefault(field.K); - key = field.K == reflection_info_1.ScalarType.BOOL ? keyRaw.toString() : keyRaw; + return socket; + } + get defaultPort() { + return (this[INTERNAL].defaultPort ?? + (this.protocol === 'https:' ? 
443 : 80)); + } + set defaultPort(v) { + if (this[INTERNAL]) { + this[INTERNAL].defaultPort = v; } - if (val === undefined) - switch (field.V.kind) { - case "scalar": - val = reflection_scalar_default_1.reflectionScalarDefault(field.V.T, field.V.L); - break; - case "enum": - val = 0; - break; - case "message": - val = field.V.T().create(); - break; - } - return [key, val]; } - scalar(reader, type, longType) { - switch (type) { - case reflection_info_1.ScalarType.INT32: - return reader.int32(); - case reflection_info_1.ScalarType.STRING: - return reader.string(); - case reflection_info_1.ScalarType.BOOL: - return reader.bool(); - case reflection_info_1.ScalarType.DOUBLE: - return reader.double(); - case reflection_info_1.ScalarType.FLOAT: - return reader.float(); - case reflection_info_1.ScalarType.INT64: - return reflection_long_convert_1.reflectionLongConvert(reader.int64(), longType); - case reflection_info_1.ScalarType.UINT64: - return reflection_long_convert_1.reflectionLongConvert(reader.uint64(), longType); - case reflection_info_1.ScalarType.FIXED64: - return reflection_long_convert_1.reflectionLongConvert(reader.fixed64(), longType); - case reflection_info_1.ScalarType.FIXED32: - return reader.fixed32(); - case reflection_info_1.ScalarType.BYTES: - return reader.bytes(); - case reflection_info_1.ScalarType.UINT32: - return reader.uint32(); - case reflection_info_1.ScalarType.SFIXED32: - return reader.sfixed32(); - case reflection_info_1.ScalarType.SFIXED64: - return reflection_long_convert_1.reflectionLongConvert(reader.sfixed64(), longType); - case reflection_info_1.ScalarType.SINT32: - return reader.sint32(); - case reflection_info_1.ScalarType.SINT64: - return reflection_long_convert_1.reflectionLongConvert(reader.sint64(), longType); + get protocol() { + return (this[INTERNAL].protocol ?? + (this.isSecureEndpoint() ? 'https:' : 'http:')); + } + set protocol(v) { + if (this[INTERNAL]) { + this[INTERNAL].protocol = v; } } } -exports.ReflectionBinaryReader = ReflectionBinaryReader; - +exports.Agent = Agent; +//# sourceMappingURL=index.js.map /***/ }), -/***/ 43632: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; +/***/ 9683: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ReflectionBinaryWriter = void 0; -const binary_format_contract_1 = __nccwpck_require__(24290); -const reflection_info_1 = __nccwpck_require__(48980); -const assert_1 = __nccwpck_require__(88501); -const pb_long_1 = __nccwpck_require__(19823); /** - * Writes proto3 messages in binary format using reflection information. + * archiver-utils * - * https://developers.google.com/protocol-buffers/docs/encoding + * Copyright (c) 2012-2014 Chris Talkington, contributors. + * Licensed under the MIT license. + * https://github.com/archiverjs/node-archiver/blob/master/LICENSE-MIT */ -class ReflectionBinaryWriter { - constructor(info) { - this.info = info; - } - prepare() { - if (!this.fields) { - const fieldsInput = this.info.fields ? 
this.info.fields.concat() : []; - this.fields = fieldsInput.sort((a, b) => a.no - b.no); - } +var fs = __nccwpck_require__(29303); +var path = __nccwpck_require__(71017); + +var flatten = __nccwpck_require__(15055); +var difference = __nccwpck_require__(67022); +var union = __nccwpck_require__(1214); +var isPlainObject = __nccwpck_require__(64798); + +var glob = __nccwpck_require__(58477); + +var file = module.exports = {}; + +var pathSeparatorRe = /[\/\\]/g; + +// Process specified wildcard glob patterns or filenames against a +// callback, excluding and uniquing files in the result set. +var processPatterns = function(patterns, fn) { + // Filepaths to return. + var result = []; + // Iterate over flattened patterns array. + flatten(patterns).forEach(function(pattern) { + // If the first character is ! it should be omitted + var exclusion = pattern.indexOf('!') === 0; + // If the pattern is an exclusion, remove the ! + if (exclusion) { pattern = pattern.slice(1); } + // Find all matching files for this pattern. + var matches = fn(pattern); + if (exclusion) { + // If an exclusion, remove matching files. + result = difference(result, matches); + } else { + // Otherwise add matching files. + result = union(result, matches); } - /** - * Writes the message to binary format. - */ - write(message, writer, options) { - this.prepare(); - for (const field of this.fields) { - let value, // this will be our field value, whether it is member of a oneof or not - emitDefault, // whether we emit the default value (only true for oneof members) - repeated = field.repeat, localName = field.localName; - // handle oneof ADT - if (field.oneof) { - const group = message[field.oneof]; - if (group.oneofKind !== localName) - continue; // if field is not selected, skip - value = group[localName]; - emitDefault = true; - } - else { - value = message[localName]; - emitDefault = false; - } - // we have handled oneof above. we just have to honor `emitDefault`. - switch (field.kind) { - case "scalar": - case "enum": - let T = field.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.T; - if (repeated) { - assert_1.assert(Array.isArray(value)); - if (repeated == reflection_info_1.RepeatType.PACKED) - this.packed(writer, T, field.no, value); - else - for (const item of value) - this.scalar(writer, T, field.no, item, true); - } - else if (value === undefined) - assert_1.assert(field.opt); - else - this.scalar(writer, T, field.no, value, emitDefault || field.opt); - break; - case "message": - if (repeated) { - assert_1.assert(Array.isArray(value)); - for (const item of value) - this.message(writer, options, field.T(), field.no, item); - } - else { - this.message(writer, options, field.T(), field.no, value); - } - break; - case "map": - assert_1.assert(typeof value == 'object' && value !== null); - for (const [key, val] of Object.entries(value)) - this.mapEntry(writer, options, field, key, val); - break; - } + }); + return result; +}; + +// True if the file path exists. +file.exists = function() { + var filepath = path.join.apply(path, arguments); + return fs.existsSync(filepath); +}; + +// Return an array of all file paths that match the given wildcard patterns. +file.expand = function(...args) { + // If the first argument is an options object, save those options to pass + // into the File.prototype.glob.sync method. + var options = isPlainObject(args[0]) ? args.shift() : {}; + // Use the first argument if it's an Array, otherwise convert the arguments + // object to an array and use that. 
+ var patterns = Array.isArray(args[0]) ? args[0] : args; + // Return empty set if there are no patterns or filepaths. + if (patterns.length === 0) { return []; } + // Return all matching filepaths. + var matches = processPatterns(patterns, function(pattern) { + // Find all matching files for this pattern. + return glob.sync(pattern, options); + }); + // Filter result set? + if (options.filter) { + matches = matches.filter(function(filepath) { + filepath = path.join(options.cwd || '', filepath); + try { + if (typeof options.filter === 'function') { + return options.filter(filepath); + } else { + // If the file is of the right type and exists, this should work. + return fs.statSync(filepath)[options.filter](); } - let u = options.writeUnknownFields; - if (u !== false) - (u === true ? binary_format_contract_1.UnknownFieldHandler.onWrite : u)(this.info.typeName, message, writer); + } catch(e) { + // Otherwise, it's probably not the right type. + return false; + } + }); + } + return matches; +}; + +// Build a multi task "files" object dynamically. +file.expandMapping = function(patterns, destBase, options) { + options = Object.assign({ + rename: function(destBase, destPath) { + return path.join(destBase || '', destPath); } - mapEntry(writer, options, field, key, value) { - writer.tag(field.no, binary_format_contract_1.WireType.LengthDelimited); - writer.fork(); - // javascript only allows number or string for object properties - // we convert from our representation to the protobuf type - let keyValue = key; - switch (field.K) { - case reflection_info_1.ScalarType.INT32: - case reflection_info_1.ScalarType.FIXED32: - case reflection_info_1.ScalarType.UINT32: - case reflection_info_1.ScalarType.SFIXED32: - case reflection_info_1.ScalarType.SINT32: - keyValue = Number.parseInt(key); - break; - case reflection_info_1.ScalarType.BOOL: - assert_1.assert(key == 'true' || key == 'false'); - keyValue = key == 'true'; - break; - } - // write key, expecting key field number = 1 - this.scalar(writer, field.K, 1, keyValue, true); - // write value, expecting value field number = 2 - switch (field.V.kind) { - case 'scalar': - this.scalar(writer, field.V.T, 2, value, true); - break; - case 'enum': - this.scalar(writer, reflection_info_1.ScalarType.INT32, 2, value, true); - break; - case 'message': - this.message(writer, options, field.V.T(), 2, value); - break; - } - writer.join(); + }, options); + var files = []; + var fileByDest = {}; + // Find all files matching pattern, using passed-in options. + file.expand(options, patterns).forEach(function(src) { + var destPath = src; + // Flatten? + if (options.flatten) { + destPath = path.basename(destPath); } - message(writer, options, handler, fieldNo, value) { - if (value === undefined) - return; - handler.internalBinaryWrite(value, writer.tag(fieldNo, binary_format_contract_1.WireType.LengthDelimited).fork(), options); - writer.join(); + // Change the extension? + if (options.ext) { + destPath = destPath.replace(/(\.[^\/]*)?$/, options.ext); } - /** - * Write a single scalar value. - */ - scalar(writer, type, fieldNo, value, emitDefault) { - let [wireType, method, isDefault] = this.scalarInfo(type, value); - if (!isDefault || emitDefault) { - writer.tag(fieldNo, wireType); - writer[method](value); - } + // Generate destination filename. + var dest = options.rename(destBase, destPath, options); + // Prepend cwd to src path if necessary. + if (options.cwd) { src = path.join(options.cwd, src); } + // Normalize filepaths to be unix-style. 
+ dest = dest.replace(pathSeparatorRe, '/'); + src = src.replace(pathSeparatorRe, '/'); + // Map correct src path to dest path. + if (fileByDest[dest]) { + // If dest already exists, push this src onto that dest's src array. + fileByDest[dest].src.push(src); + } else { + // Otherwise create a new src-dest file mapping object. + files.push({ + src: [src], + dest: dest, + }); + // And store a reference for later use. + fileByDest[dest] = files[files.length - 1]; } - /** - * Write an array of scalar values in packed format. - */ - packed(writer, type, fieldNo, value) { - if (!value.length) - return; - assert_1.assert(type !== reflection_info_1.ScalarType.BYTES && type !== reflection_info_1.ScalarType.STRING); - // write tag - writer.tag(fieldNo, binary_format_contract_1.WireType.LengthDelimited); - // begin length-delimited - writer.fork(); - // write values without tags - let [, method,] = this.scalarInfo(type); - for (let i = 0; i < value.length; i++) - writer[method](value[i]); - // end length delimited - writer.join(); + }); + return files; +}; + +// reusing bits of grunt's multi-task source normalization +file.normalizeFilesArray = function(data) { + var files = []; + + data.forEach(function(obj) { + var prop; + if ('src' in obj || 'dest' in obj) { + files.push(obj); } - /** - * Get information for writing a scalar value. - * - * Returns tuple: - * [0]: appropriate WireType - * [1]: name of the appropriate method of IBinaryWriter - * [2]: whether the given value is a default value - * - * If argument `value` is omitted, [2] is always false. - */ - scalarInfo(type, value) { - let t = binary_format_contract_1.WireType.Varint; - let m; - let i = value === undefined; - let d = value === 0; - switch (type) { - case reflection_info_1.ScalarType.INT32: - m = "int32"; - break; - case reflection_info_1.ScalarType.STRING: - d = i || !value.length; - t = binary_format_contract_1.WireType.LengthDelimited; - m = "string"; - break; - case reflection_info_1.ScalarType.BOOL: - d = value === false; - m = "bool"; - break; - case reflection_info_1.ScalarType.UINT32: - m = "uint32"; - break; - case reflection_info_1.ScalarType.DOUBLE: - t = binary_format_contract_1.WireType.Bit64; - m = "double"; - break; - case reflection_info_1.ScalarType.FLOAT: - t = binary_format_contract_1.WireType.Bit32; - m = "float"; - break; - case reflection_info_1.ScalarType.INT64: - d = i || pb_long_1.PbLong.from(value).isZero(); - m = "int64"; - break; - case reflection_info_1.ScalarType.UINT64: - d = i || pb_long_1.PbULong.from(value).isZero(); - m = "uint64"; - break; - case reflection_info_1.ScalarType.FIXED64: - d = i || pb_long_1.PbULong.from(value).isZero(); - t = binary_format_contract_1.WireType.Bit64; - m = "fixed64"; - break; - case reflection_info_1.ScalarType.BYTES: - d = i || !value.byteLength; - t = binary_format_contract_1.WireType.LengthDelimited; - m = "bytes"; - break; - case reflection_info_1.ScalarType.FIXED32: - t = binary_format_contract_1.WireType.Bit32; - m = "fixed32"; - break; - case reflection_info_1.ScalarType.SFIXED32: - t = binary_format_contract_1.WireType.Bit32; - m = "sfixed32"; - break; - case reflection_info_1.ScalarType.SFIXED64: - d = i || pb_long_1.PbLong.from(value).isZero(); - t = binary_format_contract_1.WireType.Bit64; - m = "sfixed64"; - break; - case reflection_info_1.ScalarType.SINT32: - m = "sint32"; - break; - case reflection_info_1.ScalarType.SINT64: - d = i || pb_long_1.PbLong.from(value).isZero(); - m = "sint64"; - break; + }); + + if (files.length === 0) { + return []; + } + + 
files.forEach(function(obj) {
+    if (!('src' in obj) || !obj.src) { return; }
+    // Normalize .src properties to flattened array.
+    if (Array.isArray(obj.src)) {
+      obj.src = flatten(obj.src);
+    } else {
+      obj.src = [obj.src];
+    }
+  });
+  // Map each file object to its expanded form; `_` (lodash) is not required
+  // in this module, so plain array methods and the local `flatten` are used.
+  files = flatten(files.map(function(obj) {
+    // Build options object, removing unwanted properties.
+    var expandOptions = Object.assign({}, obj);
+    delete expandOptions.src;
+    delete expandOptions.dest;
+
+    // Expand file mappings.
+    if (obj.expand) {
+      return file.expandMapping(obj.src, obj.dest, expandOptions).map(function(mapObj) {
+        // Copy obj properties to result.
+        var result = Object.assign({}, obj);
+        // Make a clone of the orig obj available.
+        result.orig = Object.assign({}, obj);
+        // Set .src and .dest, processing both as templates.
+        result.src = mapObj.src;
+        result.dest = mapObj.dest;
+        // Remove unwanted properties.
+        ['expand', 'cwd', 'flatten', 'rename', 'ext'].forEach(function(prop) {
+          delete result[prop];
+        });
+        return result;
+      });
+    }
+
+    // Copy obj properties to result, adding an .orig property.
+    var result = Object.assign({}, obj);
+    // Make a clone of the orig obj available.
+    result.orig = Object.assign({}, obj);
+
+    if ('src' in result) {
+      // Expose an expand-on-demand getter method as .src.
+      Object.defineProperty(result, 'src', {
+        enumerable: true,
+        get: function fn() {
+          var src;
+          if (!('result' in fn)) {
+            src = obj.src;
+            // If src is an array, flatten it. Otherwise, make it into an array.
+            src = Array.isArray(src) ? flatten(src) : [src];
+            // Expand src files, memoizing result.
+            fn.result = file.expand(expandOptions, src);
+          }
+          return fn.result;
         }
-        return [t, m, i || d];
+      });
+    }
+
+    if ('dest' in result) {
+      result.dest = obj.dest;
     }
-}
-exports.ReflectionBinaryWriter = ReflectionBinaryWriter;
+    return result;
+  }));

-/***/ }),
+  return files;
+};

-/***/ 92609:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

-"use strict";
+
+/***/ }),
+
+/***/ 76358:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.containsMessageType = void 0;
-const message_type_contract_1 = __nccwpck_require__(87554);
 /**
- * Check if the provided object is a proto message.
+ * archiver-utils
  *
- * Note that this is an experimental feature - it is here to stay, but
- * implementation details may change without notice.
+ * Copyright (c) 2015 Chris Talkington.
+ * Licensed under the MIT license.
+ * https://github.com/archiverjs/archiver-utils/blob/master/LICENSE */ -function containsMessageType(msg) { - return msg[message_type_contract_1.MESSAGE_TYPE] != null; -} -exports.containsMessageType = containsMessageType; +var fs = __nccwpck_require__(29303); +var path = __nccwpck_require__(71017); +var isStream = __nccwpck_require__(16425); +var lazystream = __nccwpck_require__(92058); +var normalizePath = __nccwpck_require__(54882); +var defaults = __nccwpck_require__(41871); +var Stream = (__nccwpck_require__(12781).Stream); +var PassThrough = (__nccwpck_require__(32555).PassThrough); -/***/ }), +var utils = module.exports = {}; +utils.file = __nccwpck_require__(9683); -/***/ 86727: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +utils.collectStream = function(source, callback) { + var collection = []; + var size = 0; -"use strict"; + source.on('error', callback); -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.reflectionCreate = void 0; -const reflection_scalar_default_1 = __nccwpck_require__(28441); -const message_type_contract_1 = __nccwpck_require__(87554); -/** - * Creates an instance of the generic message, using the field - * information. - */ -function reflectionCreate(type) { - /** - * This ternary can be removed in the next major version. - * The `Object.create()` code path utilizes a new `messagePrototype` - * property on the `IMessageType` which has this same `MESSAGE_TYPE` - * non-enumerable property on it. Doing it this way means that we only - * pay the cost of `Object.defineProperty()` once per `IMessageType` - * class of once per "instance". The falsy code path is only provided - * for backwards compatibility in cases where the runtime library is - * updated without also updating the generated code. - */ - const msg = type.messagePrototype - ? 
Object.create(type.messagePrototype) - : Object.defineProperty({}, message_type_contract_1.MESSAGE_TYPE, { value: type }); - for (let field of type.fields) { - let name = field.localName; - if (field.opt) - continue; - if (field.oneof) - msg[field.oneof] = { oneofKind: undefined }; - else if (field.repeat) - msg[name] = []; - else - switch (field.kind) { - case "scalar": - msg[name] = reflection_scalar_default_1.reflectionScalarDefault(field.T, field.L); - break; - case "enum": - // we require 0 to be default value for all enums - msg[name] = 0; - break; - case "map": - msg[name] = {}; - break; - } + source.on('data', function(chunk) { + collection.push(chunk); + size += chunk.length; + }); + + source.on('end', function() { + var buf = Buffer.alloc(size); + var offset = 0; + + collection.forEach(function(data) { + data.copy(buf, offset); + offset += data.length; + }); + + callback(null, buf); + }); +}; + +utils.dateify = function(dateish) { + dateish = dateish || new Date(); + + if (dateish instanceof Date) { + dateish = dateish; + } else if (typeof dateish === 'string') { + dateish = new Date(dateish); + } else { + dateish = new Date(); + } + + return dateish; +}; + +// this is slightly different from lodash version +utils.defaults = function(object, source, guard) { + var args = arguments; + args[0] = args[0] || {}; + + return defaults(...args); +}; + +utils.isStream = function(source) { + return isStream(source); +}; + +utils.lazyReadStream = function(filepath) { + return new lazystream.Readable(function() { + return fs.createReadStream(filepath); + }); +}; + +utils.normalizeInputSource = function(source) { + if (source === null) { + return Buffer.alloc(0); + } else if (typeof source === 'string') { + return Buffer.from(source); + } else if (utils.isStream(source)) { + // Always pipe through a PassThrough stream to guarantee pausing the stream if it's already flowing, + // since it will only be processed in a (distant) future iteration of the event loop, and will lose + // data if already flowing now. + return source.pipe(new PassThrough()); + } + + return source; +}; + +utils.sanitizePath = function(filepath) { + return normalizePath(filepath, false).replace(/^\w+:/, '').replace(/^(\.\.\/|\/)+/, ''); +}; + +utils.trailingSlashIt = function(str) { + return str.slice(-1) !== '/' ? str + '/' : str; +}; + +utils.unixifyPath = function(filepath) { + return normalizePath(filepath, false).replace(/^\w+:/, ''); +}; + +utils.walkdir = function(dirpath, base, callback) { + var results = []; + + if (typeof base === 'function') { + callback = base; + base = dirpath; + } + + fs.readdir(dirpath, function(err, list) { + var i = 0; + var file; + var filepath; + + if (err) { + return callback(err); } - return msg; -} -exports.reflectionCreate = reflectionCreate; + (function next() { + file = list[i++]; -/***/ }), + if (!file) { + return callback(null, results); + } -/***/ 19439: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + filepath = path.join(dirpath, file); -"use strict"; + fs.stat(filepath, function(err, stats) { + results.push({ + path: filepath, + relative: path.relative(base, filepath).replace(/\\/g, '/'), + stats: stats + }); -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.reflectionEquals = void 0; -const reflection_info_1 = __nccwpck_require__(48980); -/** - * Determines whether two message of the same type have the same field values. - * Checks for deep equality, traversing repeated fields, oneof groups, maps - * and messages recursively. 
- * Will also return true if both messages are `undefined`. - */ -function reflectionEquals(info, a, b) { - if (a === b) - return true; - if (!a || !b) - return false; - for (let field of info.fields) { - let localName = field.localName; - let val_a = field.oneof ? a[field.oneof][localName] : a[localName]; - let val_b = field.oneof ? b[field.oneof][localName] : b[localName]; - switch (field.kind) { - case "enum": - case "scalar": - let t = field.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.T; - if (!(field.repeat - ? repeatedPrimitiveEq(t, val_a, val_b) - : primitiveEq(t, val_a, val_b))) - return false; - break; - case "map": - if (!(field.V.kind == "message" - ? repeatedMsgEq(field.V.T(), objectValues(val_a), objectValues(val_b)) - : repeatedPrimitiveEq(field.V.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.V.T, objectValues(val_a), objectValues(val_b)))) - return false; - break; - case "message": - let T = field.T(); - if (!(field.repeat - ? repeatedMsgEq(T, val_a, val_b) - : T.equals(val_a, val_b))) - return false; - break; + if (stats && stats.isDirectory()) { + utils.walkdir(filepath, base, function(err, res) { + if(err){ + return callback(err); + } + + res.forEach(function(dirEntry) { + results.push(dirEntry); + }); + + next(); + }); + } else { + next(); } - } - return true; -} -exports.reflectionEquals = reflectionEquals; -const objectValues = Object.values; -function primitiveEq(type, a, b) { - if (a === b) - return true; - if (type !== reflection_info_1.ScalarType.BYTES) - return false; - let ba = a; - let bb = b; - if (ba.length !== bb.length) - return false; - for (let i = 0; i < ba.length; i++) - if (ba[i] != bb[i]) - return false; - return true; -} -function repeatedPrimitiveEq(type, a, b) { - if (a.length !== b.length) - return false; - for (let i = 0; i < a.length; i++) - if (!primitiveEq(type, a[i], b[i])) - return false; - return true; -} -function repeatedMsgEq(type, a, b) { - if (a.length !== b.length) - return false; - for (let i = 0; i < a.length; i++) - if (!type.equals(a[i], b[i])) - return false; - return true; -} + }); + })(); + }); +}; /***/ }), -/***/ 48980: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 7531: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +/** + * Archiver Vending + * + * @ignore + * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE} + * @copyright (c) 2012-2014 Chris Talkington, contributors. + */ +var Archiver = __nccwpck_require__(67555); + +var formats = {}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.readMessageOption = exports.readFieldOption = exports.readFieldOptions = exports.normalizeFieldInfo = exports.RepeatType = exports.LongType = exports.ScalarType = void 0; -const lower_camel_case_1 = __nccwpck_require__(80821); /** - * Scalar value types. This is a subset of field types declared by protobuf - * enum google.protobuf.FieldDescriptorProto.Type The types GROUP and MESSAGE - * are omitted, but the numerical values are identical. + * Dispenses a new Archiver instance. + * + * @constructor + * @param {String} format The archive format to use. + * @param {Object} options See [Archiver]{@link Archiver} + * @return {Archiver} */ -var ScalarType; -(function (ScalarType) { - // 0 is reserved for errors. - // Order is weird for historical reasons. - ScalarType[ScalarType["DOUBLE"] = 1] = "DOUBLE"; - ScalarType[ScalarType["FLOAT"] = 2] = "FLOAT"; - // Not ZigZag encoded. 
Negative numbers take 10 bytes. Use TYPE_SINT64 if - // negative values are likely. - ScalarType[ScalarType["INT64"] = 3] = "INT64"; - ScalarType[ScalarType["UINT64"] = 4] = "UINT64"; - // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if - // negative values are likely. - ScalarType[ScalarType["INT32"] = 5] = "INT32"; - ScalarType[ScalarType["FIXED64"] = 6] = "FIXED64"; - ScalarType[ScalarType["FIXED32"] = 7] = "FIXED32"; - ScalarType[ScalarType["BOOL"] = 8] = "BOOL"; - ScalarType[ScalarType["STRING"] = 9] = "STRING"; - // Tag-delimited aggregate. - // Group type is deprecated and not supported in proto3. However, Proto3 - // implementations should still be able to parse the group wire format and - // treat group fields as unknown fields. - // TYPE_GROUP = 10, - // TYPE_MESSAGE = 11, // Length-delimited aggregate. - // New in version 2. - ScalarType[ScalarType["BYTES"] = 12] = "BYTES"; - ScalarType[ScalarType["UINT32"] = 13] = "UINT32"; - // TYPE_ENUM = 14, - ScalarType[ScalarType["SFIXED32"] = 15] = "SFIXED32"; - ScalarType[ScalarType["SFIXED64"] = 16] = "SFIXED64"; - ScalarType[ScalarType["SINT32"] = 17] = "SINT32"; - ScalarType[ScalarType["SINT64"] = 18] = "SINT64"; -})(ScalarType = exports.ScalarType || (exports.ScalarType = {})); +var vending = function(format, options) { + return vending.create(format, options); +}; + /** - * JavaScript representation of 64 bit integral types. Equivalent to the - * field option "jstype". + * Creates a new Archiver instance. * - * By default, protobuf-ts represents 64 bit types as `bigint`. + * @param {String} format The archive format to use. + * @param {Object} options See [Archiver]{@link Archiver} + * @return {Archiver} + */ +vending.create = function(format, options) { + if (formats[format]) { + var instance = new Archiver(format, options); + instance.setFormat(format); + instance.setModule(new formats[format](options)); + + return instance; + } else { + throw new Error('create(' + format + '): format not registered'); + } +}; + +/** + * Registers a format for use with archiver. * - * You can change the default behaviour by enabling the plugin parameter - * `long_type_string`, which will represent 64 bit types as `string`. + * @param {String} format The name of the format. + * @param {Function} module The function for archiver to interact with. + * @return void + */ +vending.registerFormat = function(format, module) { + if (formats[format]) { + throw new Error('register(' + format + '): format already registered'); + } + + if (typeof module !== 'function') { + throw new Error('register(' + format + '): format module invalid'); + } + + if (typeof module.prototype.append !== 'function' || typeof module.prototype.finalize !== 'function') { + throw new Error('register(' + format + '): format module missing methods'); + } + + formats[format] = module; +}; + +/** + * Check if the format is already registered. + * + * @param {String} format the name of the format. 
+ * @return boolean + */ +vending.isRegisteredFormat = function (format) { + if (formats[format]) { + return true; + } + + return false; +}; + +vending.registerFormat('zip', __nccwpck_require__(43642)); +vending.registerFormat('tar', __nccwpck_require__(83269)); +vending.registerFormat('json', __nccwpck_require__(61314)); + +module.exports = vending; + +/***/ }), + +/***/ 67555: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/** + * Archiver Core * - * Alternatively, you can change the behaviour for individual fields - * with the field option "jstype": + * @ignore + * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE} + * @copyright (c) 2012-2014 Chris Talkington, contributors. + */ +var fs = __nccwpck_require__(57147); +var glob = __nccwpck_require__(98347); +var async = __nccwpck_require__(43667); +var path = __nccwpck_require__(71017); +var util = __nccwpck_require__(76358); + +var inherits = (__nccwpck_require__(73837).inherits); +var ArchiverError = __nccwpck_require__(79291); +var Transform = (__nccwpck_require__(32555).Transform); + +var win32 = process.platform === 'win32'; + +/** + * @constructor + * @param {String} format The archive format to use. + * @param {(CoreOptions|TransformOptions)} options See also {@link ZipOptions} and {@link TarOptions}. + */ +var Archiver = function(format, options) { + if (!(this instanceof Archiver)) { + return new Archiver(format, options); + } + + if (typeof format !== 'string') { + options = format; + format = 'zip'; + } + + options = this.options = util.defaults(options, { + highWaterMark: 1024 * 1024, + statConcurrency: 4 + }); + + Transform.call(this, options); + + this._format = false; + this._module = false; + this._pending = 0; + this._pointer = 0; + + this._entriesCount = 0; + this._entriesProcessedCount = 0; + this._fsEntriesTotalBytes = 0; + this._fsEntriesProcessedBytes = 0; + + this._queue = async.queue(this._onQueueTask.bind(this), 1); + this._queue.drain(this._onQueueDrain.bind(this)); + + this._statQueue = async.queue(this._onStatQueueTask.bind(this), options.statConcurrency); + this._statQueue.drain(this._onQueueDrain.bind(this)); + + this._state = { + aborted: false, + finalize: false, + finalizing: false, + finalized: false, + modulePiped: false + }; + + this._streams = []; +}; + +inherits(Archiver, Transform); + +/** + * Internal logic for `abort`. * - * ```protobuf - * uint64 my_field = 1 [jstype = JS_STRING]; - * uint64 other_field = 2 [jstype = JS_NUMBER]; - * ``` + * @private + * @return void */ -var LongType; -(function (LongType) { - /** - * Use JavaScript `bigint`. - * - * Field option `[jstype = JS_NORMAL]`. - */ - LongType[LongType["BIGINT"] = 0] = "BIGINT"; - /** - * Use JavaScript `string`. - * - * Field option `[jstype = JS_STRING]`. - */ - LongType[LongType["STRING"] = 1] = "STRING"; - /** - * Use JavaScript `number`. - * - * Large values will loose precision. - * - * Field option `[jstype = JS_NUMBER]`. - */ - LongType[LongType["NUMBER"] = 2] = "NUMBER"; -})(LongType = exports.LongType || (exports.LongType = {})); +Archiver.prototype._abort = function() { + this._state.aborted = true; + this._queue.kill(); + this._statQueue.kill(); + + if (this._queue.idle()) { + this._shutdown(); + } +}; + /** - * Protobuf 2.1.0 introduced packed repeated fields. - * Setting the field option `[packed = true]` enables packing. + * Internal helper for appending files. * - * In proto3, all repeated fields are packed by default. 
- * Setting the field option `[packed = false]` disables packing. + * @private + * @param {String} filepath The source filepath. + * @param {EntryData} data The entry data. + * @return void + */ +Archiver.prototype._append = function(filepath, data) { + data = data || {}; + + var task = { + source: null, + filepath: filepath + }; + + if (!data.name) { + data.name = filepath; + } + + data.sourcePath = filepath; + task.data = data; + this._entriesCount++; + + if (data.stats && data.stats instanceof fs.Stats) { + task = this._updateQueueTaskWithStats(task, data.stats); + if (task) { + if (data.stats.size) { + this._fsEntriesTotalBytes += data.stats.size; + } + + this._queue.push(task); + } + } else { + this._statQueue.push(task); + } +}; + +/** + * Internal logic for `finalize`. * - * Packed repeated fields are encoded with a single tag, - * then a length-delimiter, then the element values. + * @private + * @return void + */ +Archiver.prototype._finalize = function() { + if (this._state.finalizing || this._state.finalized || this._state.aborted) { + return; + } + + this._state.finalizing = true; + + this._moduleFinalize(); + + this._state.finalizing = false; + this._state.finalized = true; +}; + +/** + * Checks the various state variables to determine if we can `finalize`. * - * Unpacked repeated fields are encoded with a tag and - * value for each element. + * @private + * @return {Boolean} + */ +Archiver.prototype._maybeFinalize = function() { + if (this._state.finalizing || this._state.finalized || this._state.aborted) { + return false; + } + + if (this._state.finalize && this._pending === 0 && this._queue.idle() && this._statQueue.idle()) { + this._finalize(); + return true; + } + + return false; +}; + +/** + * Appends an entry to the module. * - * `bytes` and `string` cannot be packed. + * @private + * @fires Archiver#entry + * @param {(Buffer|Stream)} source + * @param {EntryData} data + * @param {Function} callback + * @return void */ -var RepeatType; -(function (RepeatType) { - /** - * The field is not repeated. - */ - RepeatType[RepeatType["NO"] = 0] = "NO"; +Archiver.prototype._moduleAppend = function(source, data, callback) { + if (this._state.aborted) { + callback(); + return; + } + + this._module.append(source, data, function(err) { + this._task = null; + + if (this._state.aborted) { + this._shutdown(); + return; + } + + if (err) { + this.emit('error', err); + setImmediate(callback); + return; + } + /** - * The field is repeated and should be packed. - * Invalid for `bytes` and `string`, they cannot be packed. + * Fires when the entry's input has been processed and appended to the archive. + * + * @event Archiver#entry + * @type {EntryData} */ - RepeatType[RepeatType["PACKED"] = 1] = "PACKED"; + this.emit('entry', data); + this._entriesProcessedCount++; + + if (data.stats && data.stats.size) { + this._fsEntriesProcessedBytes += data.stats.size; + } + /** - * The field is repeated but should not be packed. - * The only valid repeat type for repeated `bytes` and `string`. + * @event Archiver#progress + * @type {ProgressData} */ - RepeatType[RepeatType["UNPACKED"] = 2] = "UNPACKED"; -})(RepeatType = exports.RepeatType || (exports.RepeatType = {})); + this.emit('progress', { + entries: { + total: this._entriesCount, + processed: this._entriesProcessedCount + }, + fs: { + totalBytes: this._fsEntriesTotalBytes, + processedBytes: this._fsEntriesProcessedBytes + } + }); + + setImmediate(callback); + }.bind(this)); +}; + /** - * Turns PartialFieldInfo into FieldInfo. 
+ * Finalizes the module. + * + * @private + * @return void */ -function normalizeFieldInfo(field) { - var _a, _b, _c, _d; - field.localName = (_a = field.localName) !== null && _a !== void 0 ? _a : lower_camel_case_1.lowerCamelCase(field.name); - field.jsonName = (_b = field.jsonName) !== null && _b !== void 0 ? _b : lower_camel_case_1.lowerCamelCase(field.name); - field.repeat = (_c = field.repeat) !== null && _c !== void 0 ? _c : RepeatType.NO; - field.opt = (_d = field.opt) !== null && _d !== void 0 ? _d : (field.repeat ? false : field.oneof ? false : field.kind == "message"); - return field; -} -exports.normalizeFieldInfo = normalizeFieldInfo; +Archiver.prototype._moduleFinalize = function() { + if (typeof this._module.finalize === 'function') { + this._module.finalize(); + } else if (typeof this._module.end === 'function') { + this._module.end(); + } else { + this.emit('error', new ArchiverError('NOENDMETHOD')); + } +}; + /** - * Read custom field options from a generated message type. + * Pipes the module to our internal stream with error bubbling. * - * @deprecated use readFieldOption() + * @private + * @return void */ -function readFieldOptions(messageType, fieldName, extensionName, extensionType) { - var _a; - const options = (_a = messageType.fields.find((m, i) => m.localName == fieldName || i == fieldName)) === null || _a === void 0 ? void 0 : _a.options; - return options && options[extensionName] ? extensionType.fromJson(options[extensionName]) : undefined; -} -exports.readFieldOptions = readFieldOptions; -function readFieldOption(messageType, fieldName, extensionName, extensionType) { - var _a; - const options = (_a = messageType.fields.find((m, i) => m.localName == fieldName || i == fieldName)) === null || _a === void 0 ? void 0 : _a.options; - if (!options) { - return undefined; - } - const optionVal = options[extensionName]; - if (optionVal === undefined) { - return optionVal; - } - return extensionType ? extensionType.fromJson(optionVal) : optionVal; -} -exports.readFieldOption = readFieldOption; -function readMessageOption(messageType, extensionName, extensionType) { - const options = messageType.options; - const optionVal = options[extensionName]; - if (optionVal === undefined) { - return optionVal; - } - return extensionType ? extensionType.fromJson(optionVal) : optionVal; -} -exports.readMessageOption = readMessageOption; - +Archiver.prototype._modulePipe = function() { + this._module.on('error', this._onModuleError.bind(this)); + this._module.pipe(this); + this._state.modulePiped = true; +}; -/***/ }), +/** + * Determines if the current module supports a defined feature. + * + * @private + * @param {String} key + * @return {Boolean} + */ +Archiver.prototype._moduleSupports = function(key) { + if (!this._module.supports || !this._module.supports[key]) { + return false; + } -/***/ 49661: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + return this._module.supports[key]; +}; -"use strict"; +/** + * Unpipes the module from our internal stream. 
+ * + * @private + * @return void + */ +Archiver.prototype._moduleUnpipe = function() { + this._module.unpipe(this); + this._state.modulePiped = false; +}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ReflectionJsonReader = void 0; -const json_typings_1 = __nccwpck_require__(26982); -const base64_1 = __nccwpck_require__(90943); -const reflection_info_1 = __nccwpck_require__(48980); -const pb_long_1 = __nccwpck_require__(19823); -const assert_1 = __nccwpck_require__(88501); -const reflection_long_convert_1 = __nccwpck_require__(75194); /** - * Reads proto3 messages in canonical JSON format using reflection information. + * Normalizes entry data with fallbacks for key properties. * - * https://developers.google.com/protocol-buffers/docs/proto3#json + * @private + * @param {Object} data + * @param {fs.Stats} stats + * @return {Object} */ -class ReflectionJsonReader { - constructor(info) { - this.info = info; +Archiver.prototype._normalizeEntryData = function(data, stats) { + data = util.defaults(data, { + type: 'file', + name: null, + date: null, + mode: null, + prefix: null, + sourcePath: null, + stats: false + }); + + if (stats && data.stats === false) { + data.stats = stats; + } + + var isDir = data.type === 'directory'; + + if (data.name) { + if (typeof data.prefix === 'string' && '' !== data.prefix) { + data.name = data.prefix + '/' + data.name; + data.prefix = null; } - prepare() { - var _a; - if (this.fMap === undefined) { - this.fMap = {}; - const fieldsInput = (_a = this.info.fields) !== null && _a !== void 0 ? _a : []; - for (const field of fieldsInput) { - this.fMap[field.name] = field; - this.fMap[field.jsonName] = field; - this.fMap[field.localName] = field; - } - } + + data.name = util.sanitizePath(data.name); + + if (data.type !== 'symlink' && data.name.slice(-1) === '/') { + isDir = true; + data.type = 'directory'; + } else if (isDir) { + data.name += '/'; } - // Cannot parse JSON for #. - assert(condition, fieldName, jsonValue) { - if (!condition) { - let what = json_typings_1.typeofJsonValue(jsonValue); - if (what == "number" || what == "boolean") - what = jsonValue.toString(); - throw new Error(`Cannot parse JSON ${what} for ${this.info.typeName}#${fieldName}`); - } + } + + // 511 === 0777; 493 === 0755; 438 === 0666; 420 === 0644 + if (typeof data.mode === 'number') { + if (win32) { + data.mode &= 511; + } else { + data.mode &= 4095 } - /** - * Reads a message from canonical JSON format into the target message. - * - * Repeated fields are appended. Map entries are added, overwriting - * existing keys. - * - * If a message field is already present, it will be merged with the - * new data. - */ - read(input, message, options) { - this.prepare(); - const oneofsHandled = []; - for (const [jsonKey, jsonValue] of Object.entries(input)) { - const field = this.fMap[jsonKey]; - if (!field) { - if (!options.ignoreUnknownFields) - throw new Error(`Found unknown field while reading ${this.info.typeName} from JSON format. 
JSON key: ${jsonKey}`); - continue; - } - const localName = field.localName; - // handle oneof ADT - let target; // this will be the target for the field value, whether it is member of a oneof or not - if (field.oneof) { - if (jsonValue === null && (field.kind !== 'enum' || field.T()[0] !== 'google.protobuf.NullValue')) { - continue; - } - // since json objects are unordered by specification, it is not possible to take the last of multiple oneofs - if (oneofsHandled.includes(field.oneof)) - throw new Error(`Multiple members of the oneof group "${field.oneof}" of ${this.info.typeName} are present in JSON.`); - oneofsHandled.push(field.oneof); - target = message[field.oneof] = { - oneofKind: localName - }; - } - else { - target = message; - } - // we have handled oneof above. we just have read the value into `target`. - if (field.kind == 'map') { - if (jsonValue === null) { - continue; - } - // check input - this.assert(json_typings_1.isJsonObject(jsonValue), field.name, jsonValue); - // our target to put map entries into - const fieldObj = target[localName]; - // read entries - for (const [jsonObjKey, jsonObjValue] of Object.entries(jsonValue)) { - this.assert(jsonObjValue !== null, field.name + " map value", null); - // read value - let val; - switch (field.V.kind) { - case "message": - val = field.V.T().internalJsonRead(jsonObjValue, options); - break; - case "enum": - val = this.enum(field.V.T(), jsonObjValue, field.name, options.ignoreUnknownFields); - if (val === false) - continue; - break; - case "scalar": - val = this.scalar(jsonObjValue, field.V.T, field.V.L, field.name); - break; - } - this.assert(val !== undefined, field.name + " map value", jsonObjValue); - // read key - let key = jsonObjKey; - if (field.K == reflection_info_1.ScalarType.BOOL) - key = key == "true" ? true : key == "false" ? false : key; - key = this.scalar(key, field.K, reflection_info_1.LongType.STRING, field.name).toString(); - fieldObj[key] = val; - } - } - else if (field.repeat) { - if (jsonValue === null) - continue; - // check input - this.assert(Array.isArray(jsonValue), field.name, jsonValue); - // our target to put array entries into - const fieldArr = target[localName]; - // read array entries - for (const jsonItem of jsonValue) { - this.assert(jsonItem !== null, field.name, null); - let val; - switch (field.kind) { - case "message": - val = field.T().internalJsonRead(jsonItem, options); - break; - case "enum": - val = this.enum(field.T(), jsonItem, field.name, options.ignoreUnknownFields); - if (val === false) - continue; - break; - case "scalar": - val = this.scalar(jsonItem, field.T, field.L, field.name); - break; - } - this.assert(val !== undefined, field.name, jsonValue); - fieldArr.push(val); - } - } - else { - switch (field.kind) { - case "message": - if (jsonValue === null && field.T().typeName != 'google.protobuf.Value') { - this.assert(field.oneof === undefined, field.name + " (oneof member)", null); - continue; - } - target[localName] = field.T().internalJsonRead(jsonValue, options, target[localName]); - break; - case "enum": - let val = this.enum(field.T(), jsonValue, field.name, options.ignoreUnknownFields); - if (val === false) - continue; - target[localName] = val; - break; - case "scalar": - target[localName] = this.scalar(jsonValue, field.T, field.L, field.name); - break; - } - } - } - } - /** - * Returns `false` for unrecognized string representations. - * - * google.protobuf.NullValue accepts only JSON `null` (or the old `"NULL_VALUE"`). 
- */ - enum(type, json, fieldName, ignoreUnknownFields) { - if (type[0] == 'google.protobuf.NullValue') - assert_1.assert(json === null || json === "NULL_VALUE", `Unable to parse field ${this.info.typeName}#${fieldName}, enum ${type[0]} only accepts null.`); - if (json === null) - // we require 0 to be default value for all enums - return 0; - switch (typeof json) { - case "number": - assert_1.assert(Number.isInteger(json), `Unable to parse field ${this.info.typeName}#${fieldName}, enum can only be integral number, got ${json}.`); - return json; - case "string": - let localEnumName = json; - if (type[2] && json.substring(0, type[2].length) === type[2]) - // lookup without the shared prefix - localEnumName = json.substring(type[2].length); - let enumNumber = type[1][localEnumName]; - if (typeof enumNumber === 'undefined' && ignoreUnknownFields) { - return false; - } - assert_1.assert(typeof enumNumber == "number", `Unable to parse field ${this.info.typeName}#${fieldName}, enum ${type[0]} has no value for "${json}".`); - return enumNumber; - } - assert_1.assert(false, `Unable to parse field ${this.info.typeName}#${fieldName}, cannot parse enum value from ${typeof json}".`); + } else if (data.stats && data.mode === null) { + if (win32) { + data.mode = data.stats.mode & 511; + } else { + data.mode = data.stats.mode & 4095; } - scalar(json, type, longType, fieldName) { - let e; - try { - switch (type) { - // float, double: JSON value will be a number or one of the special string values "NaN", "Infinity", and "-Infinity". - // Either numbers or strings are accepted. Exponent notation is also accepted. - case reflection_info_1.ScalarType.DOUBLE: - case reflection_info_1.ScalarType.FLOAT: - if (json === null) - return .0; - if (json === "NaN") - return Number.NaN; - if (json === "Infinity") - return Number.POSITIVE_INFINITY; - if (json === "-Infinity") - return Number.NEGATIVE_INFINITY; - if (json === "") { - e = "empty string"; - break; - } - if (typeof json == "string" && json.trim().length !== json.length) { - e = "extra whitespace"; - break; - } - if (typeof json != "string" && typeof json != "number") { - break; - } - let float = Number(json); - if (Number.isNaN(float)) { - e = "not a number"; - break; - } - if (!Number.isFinite(float)) { - // infinity and -infinity are handled by string representation above, so this is an error - e = "too large or small"; - break; - } - if (type == reflection_info_1.ScalarType.FLOAT) - assert_1.assertFloat32(float); - return float; - // int32, fixed32, uint32: JSON value will be a decimal number. Either numbers or strings are accepted. - case reflection_info_1.ScalarType.INT32: - case reflection_info_1.ScalarType.FIXED32: - case reflection_info_1.ScalarType.SFIXED32: - case reflection_info_1.ScalarType.SINT32: - case reflection_info_1.ScalarType.UINT32: - if (json === null) - return 0; - let int32; - if (typeof json == "number") - int32 = json; - else if (json === "") - e = "empty string"; - else if (typeof json == "string") { - if (json.trim().length !== json.length) - e = "extra whitespace"; - else - int32 = Number(json); - } - if (int32 === undefined) - break; - if (type == reflection_info_1.ScalarType.UINT32) - assert_1.assertUInt32(int32); - else - assert_1.assertInt32(int32); - return int32; - // int64, fixed64, uint64: JSON value will be a decimal string. Either numbers or strings are accepted. 
- case reflection_info_1.ScalarType.INT64: - case reflection_info_1.ScalarType.SFIXED64: - case reflection_info_1.ScalarType.SINT64: - if (json === null) - return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbLong.ZERO, longType); - if (typeof json != "number" && typeof json != "string") - break; - return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbLong.from(json), longType); - case reflection_info_1.ScalarType.FIXED64: - case reflection_info_1.ScalarType.UINT64: - if (json === null) - return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbULong.ZERO, longType); - if (typeof json != "number" && typeof json != "string") - break; - return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbULong.from(json), longType); - // bool: - case reflection_info_1.ScalarType.BOOL: - if (json === null) - return false; - if (typeof json !== "boolean") - break; - return json; - // string: - case reflection_info_1.ScalarType.STRING: - if (json === null) - return ""; - if (typeof json !== "string") { - e = "extra whitespace"; - break; - } - try { - encodeURIComponent(json); - } - catch (e) { - e = "invalid UTF8"; - break; - } - return json; - // bytes: JSON value will be the data encoded as a string using standard base64 encoding with paddings. - // Either standard or URL-safe base64 encoding with/without paddings are accepted. - case reflection_info_1.ScalarType.BYTES: - if (json === null || json === "") - return new Uint8Array(0); - if (typeof json !== 'string') - break; - return base64_1.base64decode(json); - } - } - catch (error) { - e = error.message; - } - this.assert(false, fieldName + (e ? " - " + e : ""), json); + + // stat isn't reliable on windows; force 0755 for dir + if (win32 && isDir) { + data.mode = 493; } -} -exports.ReflectionJsonReader = ReflectionJsonReader; + } else if (data.mode === null) { + data.mode = isDir ? 493 : 420; + } + if (data.stats && data.date === null) { + data.date = data.stats.mtime; + } else { + data.date = util.dateify(data.date); + } -/***/ }), + return data; +}; -/***/ 94400: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/** + * Error listener that re-emits error on to our internal stream. + * + * @private + * @param {Error} err + * @return void + */ +Archiver.prototype._onModuleError = function(err) { + /** + * @event Archiver#error + * @type {ErrorData} + */ + this.emit('error', err); +}; -"use strict"; +/** + * Checks the various state variables after queue has drained to determine if + * we need to `finalize`. + * + * @private + * @return void + */ +Archiver.prototype._onQueueDrain = function() { + if (this._state.finalizing || this._state.finalized || this._state.aborted) { + return; + } + + if (this._state.finalize && this._pending === 0 && this._queue.idle() && this._statQueue.idle()) { + this._finalize(); + } +}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ReflectionJsonWriter = void 0; -const base64_1 = __nccwpck_require__(90943); -const pb_long_1 = __nccwpck_require__(19823); -const reflection_info_1 = __nccwpck_require__(48980); -const assert_1 = __nccwpck_require__(88501); /** - * Writes proto3 messages in canonical JSON format using reflection - * information. + * Appends each queue task to the module. 
* - * https://developers.google.com/protocol-buffers/docs/proto3#json + * @private + * @param {Object} task + * @param {Function} callback + * @return void */ -class ReflectionJsonWriter { - constructor(info) { - var _a; - this.fields = (_a = info.fields) !== null && _a !== void 0 ? _a : []; - } - /** - * Converts the message to a JSON object, based on the field descriptors. - */ - write(message, options) { - const json = {}, source = message; - for (const field of this.fields) { - // field is not part of a oneof, simply write as is - if (!field.oneof) { - let jsonValue = this.field(field, source[field.localName], options); - if (jsonValue !== undefined) - json[options.useProtoFieldName ? field.name : field.jsonName] = jsonValue; - continue; - } - // field is part of a oneof - const group = source[field.oneof]; - if (group.oneofKind !== field.localName) - continue; // not selected, skip - const opt = field.kind == 'scalar' || field.kind == 'enum' - ? Object.assign(Object.assign({}, options), { emitDefaultValues: true }) : options; - let jsonValue = this.field(field, group[field.localName], opt); - assert_1.assert(jsonValue !== undefined); - json[options.useProtoFieldName ? field.name : field.jsonName] = jsonValue; - } - return json; +Archiver.prototype._onQueueTask = function(task, callback) { + var fullCallback = () => { + if(task.data.callback) { + task.data.callback(); } - field(field, value, options) { - let jsonValue = undefined; - if (field.kind == 'map') { - assert_1.assert(typeof value == "object" && value !== null); - const jsonObj = {}; - switch (field.V.kind) { - case "scalar": - for (const [entryKey, entryValue] of Object.entries(value)) { - const val = this.scalar(field.V.T, entryValue, field.name, false, true); - assert_1.assert(val !== undefined); - jsonObj[entryKey.toString()] = val; // JSON standard allows only (double quoted) string as property key - } - break; - case "message": - const messageType = field.V.T(); - for (const [entryKey, entryValue] of Object.entries(value)) { - const val = this.message(messageType, entryValue, field.name, options); - assert_1.assert(val !== undefined); - jsonObj[entryKey.toString()] = val; // JSON standard allows only (double quoted) string as property key - } - break; - case "enum": - const enumInfo = field.V.T(); - for (const [entryKey, entryValue] of Object.entries(value)) { - assert_1.assert(entryValue === undefined || typeof entryValue == 'number'); - const val = this.enum(enumInfo, entryValue, field.name, false, true, options.enumAsInteger); - assert_1.assert(val !== undefined); - jsonObj[entryKey.toString()] = val; // JSON standard allows only (double quoted) string as property key - } - break; - } - if (options.emitDefaultValues || Object.keys(jsonObj).length > 0) - jsonValue = jsonObj; - } - else if (field.repeat) { - assert_1.assert(Array.isArray(value)); - const jsonArr = []; - switch (field.kind) { - case "scalar": - for (let i = 0; i < value.length; i++) { - const val = this.scalar(field.T, value[i], field.name, field.opt, true); - assert_1.assert(val !== undefined); - jsonArr.push(val); - } - break; - case "enum": - const enumInfo = field.T(); - for (let i = 0; i < value.length; i++) { - assert_1.assert(value[i] === undefined || typeof value[i] == 'number'); - const val = this.enum(enumInfo, value[i], field.name, field.opt, true, options.enumAsInteger); - assert_1.assert(val !== undefined); - jsonArr.push(val); - } - break; - case "message": - const messageType = field.T(); - for (let i = 0; i < value.length; i++) { - const 
val = this.message(messageType, value[i], field.name, options); - assert_1.assert(val !== undefined); - jsonArr.push(val); - } - break; - } - // add converted array to json output - if (options.emitDefaultValues || jsonArr.length > 0 || options.emitDefaultValues) - jsonValue = jsonArr; - } - else { - switch (field.kind) { - case "scalar": - jsonValue = this.scalar(field.T, value, field.name, field.opt, options.emitDefaultValues); - break; - case "enum": - jsonValue = this.enum(field.T(), value, field.name, field.opt, options.emitDefaultValues, options.enumAsInteger); - break; - case "message": - jsonValue = this.message(field.T(), value, field.name, options); - break; - } - } - return jsonValue; + callback(); + } + + if (this._state.finalizing || this._state.finalized || this._state.aborted) { + fullCallback(); + return; + } + + this._task = task; + this._moduleAppend(task.source, task.data, fullCallback); +}; + +/** + * Performs a file stat and reinjects the task back into the queue. + * + * @private + * @param {Object} task + * @param {Function} callback + * @return void + */ +Archiver.prototype._onStatQueueTask = function(task, callback) { + if (this._state.finalizing || this._state.finalized || this._state.aborted) { + callback(); + return; + } + + fs.lstat(task.filepath, function(err, stats) { + if (this._state.aborted) { + setImmediate(callback); + return; } - /** - * Returns `null` as the default for google.protobuf.NullValue. - */ - enum(type, value, fieldName, optional, emitDefaultValues, enumAsInteger) { - if (type[0] == 'google.protobuf.NullValue') - return !emitDefaultValues && !optional ? undefined : null; - if (value === undefined) { - assert_1.assert(optional); - return undefined; - } - if (value === 0 && !emitDefaultValues && !optional) - // we require 0 to be default value for all enums - return undefined; - assert_1.assert(typeof value == 'number'); - assert_1.assert(Number.isInteger(value)); - if (enumAsInteger || !type[1].hasOwnProperty(value)) - // if we don't now the enum value, just return the number - return value; - if (type[2]) - // restore the dropped prefix - return type[2] + type[1][value]; - return type[1][value]; + + if (err) { + this._entriesCount--; + + /** + * @event Archiver#warning + * @type {ErrorData} + */ + this.emit('warning', err); + setImmediate(callback); + return; } - message(type, value, fieldName, options) { - if (value === undefined) - return options.emitDefaultValues ? null : undefined; - return type.internalJsonWrite(value, options); + + task = this._updateQueueTaskWithStats(task, stats); + + if (task) { + if (stats.size) { + this._fsEntriesTotalBytes += stats.size; + } + + this._queue.push(task); } - scalar(type, value, fieldName, optional, emitDefaultValues) { - if (value === undefined) { - assert_1.assert(optional); - return undefined; - } - const ed = emitDefaultValues || optional; - // noinspection FallThroughInSwitchStatementJS - switch (type) { - // int32, fixed32, uint32: JSON value will be a decimal number. Either numbers or strings are accepted. - case reflection_info_1.ScalarType.INT32: - case reflection_info_1.ScalarType.SFIXED32: - case reflection_info_1.ScalarType.SINT32: - if (value === 0) - return ed ? 0 : undefined; - assert_1.assertInt32(value); - return value; - case reflection_info_1.ScalarType.FIXED32: - case reflection_info_1.ScalarType.UINT32: - if (value === 0) - return ed ? 
0 : undefined; - assert_1.assertUInt32(value); - return value; - // float, double: JSON value will be a number or one of the special string values "NaN", "Infinity", and "-Infinity". - // Either numbers or strings are accepted. Exponent notation is also accepted. - case reflection_info_1.ScalarType.FLOAT: - assert_1.assertFloat32(value); - case reflection_info_1.ScalarType.DOUBLE: - if (value === 0) - return ed ? 0 : undefined; - assert_1.assert(typeof value == 'number'); - if (Number.isNaN(value)) - return 'NaN'; - if (value === Number.POSITIVE_INFINITY) - return 'Infinity'; - if (value === Number.NEGATIVE_INFINITY) - return '-Infinity'; - return value; - // string: - case reflection_info_1.ScalarType.STRING: - if (value === "") - return ed ? '' : undefined; - assert_1.assert(typeof value == 'string'); - return value; - // bool: - case reflection_info_1.ScalarType.BOOL: - if (value === false) - return ed ? false : undefined; - assert_1.assert(typeof value == 'boolean'); - return value; - // JSON value will be a decimal string. Either numbers or strings are accepted. - case reflection_info_1.ScalarType.UINT64: - case reflection_info_1.ScalarType.FIXED64: - assert_1.assert(typeof value == 'number' || typeof value == 'string' || typeof value == 'bigint'); - let ulong = pb_long_1.PbULong.from(value); - if (ulong.isZero() && !ed) - return undefined; - return ulong.toString(); - // JSON value will be a decimal string. Either numbers or strings are accepted. - case reflection_info_1.ScalarType.INT64: - case reflection_info_1.ScalarType.SFIXED64: - case reflection_info_1.ScalarType.SINT64: - assert_1.assert(typeof value == 'number' || typeof value == 'string' || typeof value == 'bigint'); - let long = pb_long_1.PbLong.from(value); - if (long.isZero() && !ed) - return undefined; - return long.toString(); - // bytes: JSON value will be the data encoded as a string using standard base64 encoding with paddings. - // Either standard or URL-safe base64 encoding with/without paddings are accepted. - case reflection_info_1.ScalarType.BYTES: - assert_1.assert(value instanceof Uint8Array); - if (!value.byteLength) - return ed ? "" : undefined; - return base64_1.base64encode(value); - } - } -} -exports.ReflectionJsonWriter = ReflectionJsonWriter; + setImmediate(callback); + }.bind(this)); +}; -/***/ }), +/** + * Unpipes the module and ends our internal stream. + * + * @private + * @return void + */ +Archiver.prototype._shutdown = function() { + this._moduleUnpipe(); + this.end(); +}; -/***/ 75194: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/** + * Tracks the bytes emitted by our internal stream. + * + * @private + * @param {Buffer} chunk + * @param {String} encoding + * @param {Function} callback + * @return void + */ +Archiver.prototype._transform = function(chunk, encoding, callback) { + if (chunk) { + this._pointer += chunk.length; + } -"use strict"; + callback(null, chunk); +}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.reflectionLongConvert = void 0; -const reflection_info_1 = __nccwpck_require__(48980); /** - * Utility method to convert a PbLong or PbUlong to a JavaScript - * representation during runtime. + * Updates and normalizes a queue task using stats data. * - * Works with generated field information, `undefined` is equivalent - * to `STRING`. 
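// ----------------------------------------------------------------------------
// Illustrative aside: the stat queue above uses fs.lstat (not fs.stat) so a
// symlink is classified as a symlink instead of being followed to its target.
// A minimal standalone sketch of that stats-to-entry-type dispatch; the helper
// name `classifyEntry` is hypothetical and not part of archiver's API.
const fs = require('fs');
const path = require('path');

function classifyEntry(filepath, callback) {
  fs.lstat(filepath, function(err, stats) {
    if (err) return callback(err);
    if (stats.isFile()) return callback(null, { type: 'file', size: stats.size });
    if (stats.isDirectory()) return callback(null, { type: 'directory' });
    if (stats.isSymbolicLink()) {
      // same readlink + path.relative steps as the code that follows
      var linkPath = fs.readlinkSync(filepath);
      var dirName = path.dirname(filepath);
      return callback(null, {
        type: 'symlink',
        linkname: path.relative(dirName, path.resolve(dirName, linkPath))
      });
    }
    callback(new Error('entry not supported'));
  });
}

classifyEntry(__filename, function(err, entry) {
  if (!err) console.log(entry); // { type: 'file', size: ... }
});
// ----------------------------------------------------------------------------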
+ * @private + * @param {Object} task + * @param {fs.Stats} stats + * @return {Object} */ -function reflectionLongConvert(long, type) { - switch (type) { - case reflection_info_1.LongType.BIGINT: - return long.toBigInt(); - case reflection_info_1.LongType.NUMBER: - return long.toNumber(); - default: - // case undefined: - // case LongType.STRING: - return long.toString(); +Archiver.prototype._updateQueueTaskWithStats = function(task, stats) { + if (stats.isFile()) { + task.data.type = 'file'; + task.data.sourceType = 'stream'; + task.source = util.lazyReadStream(task.filepath); + } else if (stats.isDirectory() && this._moduleSupports('directory')) { + task.data.name = util.trailingSlashIt(task.data.name); + task.data.type = 'directory'; + task.data.sourcePath = util.trailingSlashIt(task.filepath); + task.data.sourceType = 'buffer'; + task.source = Buffer.concat([]); + } else if (stats.isSymbolicLink() && this._moduleSupports('symlink')) { + var linkPath = fs.readlinkSync(task.filepath); + var dirName = path.dirname(task.filepath); + task.data.type = 'symlink'; + task.data.linkname = path.relative(dirName, path.resolve(dirName, linkPath)); + task.data.sourceType = 'buffer'; + task.source = Buffer.concat([]); + } else { + if (stats.isDirectory()) { + this.emit('warning', new ArchiverError('DIRECTORYNOTSUPPORTED', task.data)); + } else if (stats.isSymbolicLink()) { + this.emit('warning', new ArchiverError('SYMLINKNOTSUPPORTED', task.data)); + } else { + this.emit('warning', new ArchiverError('ENTRYNOTSUPPORTED', task.data)); } -} -exports.reflectionLongConvert = reflectionLongConvert; + return null; + } -/***/ }), - -/***/ 57541: -/***/ ((__unused_webpack_module, exports) => { + task.data = this._normalizeEntryData(task.data, stats); -"use strict"; + return task; +}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.reflectionMergePartial = void 0; /** - * Copy partial data into the target message. + * Aborts the archiving process, taking a best-effort approach, by: * - * If a singular scalar or enum field is present in the source, it - * replaces the field in the target. + * - removing any pending queue tasks + * - allowing any active queue workers to finish + * - detaching internal module pipes + * - ending both sides of the Transform stream * - * If a singular message field is present in the source, it is merged - * with the target field by calling mergePartial() of the responsible - * message type. + * It will NOT drain any remaining sources. * - * If a repeated field is present in the source, its values replace - * all values in the target array, removing extraneous values. - * Repeated message fields are copied, not merged. + * @return {this} + */ +Archiver.prototype.abort = function() { + if (this._state.aborted || this._state.finalized) { + return this; + } + + this._abort(); + + return this; +}; + +/** + * Appends an input source (text string, buffer, or stream) to the instance. * - * If a map field is present in the source, entries are added to the - * target map, replacing entries with the same key. Entries that only - * exist in the target remain. Entries with message values are copied, - * not merged. + * When the instance has received, processed, and emitted the input, the `entry` + * event is fired. * - * Note that this function differs from protobuf merge semantics, - * which appends repeated fields. + * @fires Archiver#entry + * @param {(Buffer|Stream|String)} source The input source. 
+ * @param {EntryData} data See also {@link ZipEntryData} and {@link TarEntryData}. + * @return {this} */ -function reflectionMergePartial(info, target, source) { - let fieldValue, // the field value we are working with - input = source, output; // where we want our field value to go - for (let field of info.fields) { - let name = field.localName; - if (field.oneof) { - const group = input[field.oneof]; // this is the oneof`s group in the source - if ((group === null || group === void 0 ? void 0 : group.oneofKind) == undefined) { // the user is free to omit - continue; // we skip this field, and all other members too - } - fieldValue = group[name]; // our value comes from the the oneof group of the source - output = target[field.oneof]; // and our output is the oneof group of the target - output.oneofKind = group.oneofKind; // always update discriminator - if (fieldValue == undefined) { - delete output[name]; // remove any existing value - continue; // skip further work on field - } - } - else { - fieldValue = input[name]; // we are using the source directly - output = target; // we want our field value to go directly into the target - if (fieldValue == undefined) { - continue; // skip further work on field, existing value is used as is - } - } - if (field.repeat) - output[name].length = fieldValue.length; // resize target array to match source array - // now we just work with `fieldValue` and `output` to merge the value - switch (field.kind) { - case "scalar": - case "enum": - if (field.repeat) - for (let i = 0; i < fieldValue.length; i++) - output[name][i] = fieldValue[i]; // not a reference type - else - output[name] = fieldValue; // not a reference type - break; - case "message": - let T = field.T(); - if (field.repeat) - for (let i = 0; i < fieldValue.length; i++) - output[name][i] = T.create(fieldValue[i]); - else if (output[name] === undefined) - output[name] = T.create(fieldValue); // nothing to merge with - else - T.mergePartial(output[name], fieldValue); - break; - case "map": - // Map and repeated fields are simply overwritten, not appended or merged - switch (field.V.kind) { - case "scalar": - case "enum": - Object.assign(output[name], fieldValue); // elements are not reference types - break; - case "message": - let T = field.V.T(); - for (let k of Object.keys(fieldValue)) - output[name][k] = T.create(fieldValue[k]); - break; - } - break; - } - } -} -exports.reflectionMergePartial = reflectionMergePartial; +Archiver.prototype.append = function(source, data) { + if (this._state.finalize || this._state.aborted) { + this.emit('error', new ArchiverError('QUEUECLOSED')); + return this; + } + data = this._normalizeEntryData(data); -/***/ }), + if (typeof data.name !== 'string' || data.name.length === 0) { + this.emit('error', new ArchiverError('ENTRYNAMEREQUIRED')); + return this; + } -/***/ 28441: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + if (data.type === 'directory' && !this._moduleSupports('directory')) { + this.emit('error', new ArchiverError('DIRECTORYNOTSUPPORTED', { name: data.name })); + return this; + } -"use strict"; + source = util.normalizeInputSource(source); + + if (Buffer.isBuffer(source)) { + data.sourceType = 'buffer'; + } else if (util.isStream(source)) { + data.sourceType = 'stream'; + } else { + this.emit('error', new ArchiverError('INPUTSTEAMBUFFERREQUIRED', { name: data.name })); + return this; + } + + this._entriesCount++; + this._queue.push({ + data: data, + source: source + }); + + return this; +}; -Object.defineProperty(exports, 
"__esModule", ({ value: true })); -exports.reflectionScalarDefault = void 0; -const reflection_info_1 = __nccwpck_require__(48980); -const reflection_long_convert_1 = __nccwpck_require__(75194); -const pb_long_1 = __nccwpck_require__(19823); /** - * Creates the default value for a scalar type. + * Appends a directory and its files, recursively, given its dirpath. + * + * @param {String} dirpath The source directory path. + * @param {String} destpath The destination path within the archive. + * @param {(EntryData|Function)} data See also [ZipEntryData]{@link ZipEntryData} and + * [TarEntryData]{@link TarEntryData}. + * @return {this} */ -function reflectionScalarDefault(type, longType = reflection_info_1.LongType.STRING) { - switch (type) { - case reflection_info_1.ScalarType.BOOL: - return false; - case reflection_info_1.ScalarType.UINT64: - case reflection_info_1.ScalarType.FIXED64: - return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbULong.ZERO, longType); - case reflection_info_1.ScalarType.INT64: - case reflection_info_1.ScalarType.SFIXED64: - case reflection_info_1.ScalarType.SINT64: - return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbLong.ZERO, longType); - case reflection_info_1.ScalarType.DOUBLE: - case reflection_info_1.ScalarType.FLOAT: - return 0.0; - case reflection_info_1.ScalarType.BYTES: - return new Uint8Array(0); - case reflection_info_1.ScalarType.STRING: - return ""; - default: - // case ScalarType.INT32: - // case ScalarType.UINT32: - // case ScalarType.SINT32: - // case ScalarType.FIXED32: - // case ScalarType.SFIXED32: - return 0; - } -} -exports.reflectionScalarDefault = reflectionScalarDefault; +Archiver.prototype.directory = function(dirpath, destpath, data) { + if (this._state.finalize || this._state.aborted) { + this.emit('error', new ArchiverError('QUEUECLOSED')); + return this; + } + if (typeof dirpath !== 'string' || dirpath.length === 0) { + this.emit('error', new ArchiverError('DIRECTORYDIRPATHREQUIRED')); + return this; + } -/***/ }), + this._pending++; -/***/ 22526: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + if (destpath === false) { + destpath = ''; + } else if (typeof destpath !== 'string'){ + destpath = dirpath; + } -"use strict"; + var dataFunction = false; + if (typeof data === 'function') { + dataFunction = data; + data = {}; + } else if (typeof data !== 'object') { + data = {}; + } -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.ReflectionTypeCheck = void 0; -const reflection_info_1 = __nccwpck_require__(48980); -const oneof_1 = __nccwpck_require__(52468); -// noinspection JSMethodCanBeStatic -class ReflectionTypeCheck { - constructor(info) { - var _a; - this.fields = (_a = info.fields) !== null && _a !== void 0 ? 
_a : []; - } - prepare() { - if (this.data) - return; - const req = [], known = [], oneofs = []; - for (let field of this.fields) { - if (field.oneof) { - if (!oneofs.includes(field.oneof)) { - oneofs.push(field.oneof); - req.push(field.oneof); - known.push(field.oneof); - } - } - else { - known.push(field.localName); - switch (field.kind) { - case "scalar": - case "enum": - if (!field.opt || field.repeat) - req.push(field.localName); - break; - case "message": - if (field.repeat) - req.push(field.localName); - break; - case "map": - req.push(field.localName); - break; - } - } + var globOptions = { + stat: true, + dot: true + }; + + function onGlobEnd() { + this._pending--; + this._maybeFinalize(); + } + + function onGlobError(err) { + this.emit('error', err); + } + + function onGlobMatch(match){ + globber.pause(); + + var ignoreMatch = false; + var entryData = Object.assign({}, data); + entryData.name = match.relative; + entryData.prefix = destpath; + entryData.stats = match.stat; + entryData.callback = globber.resume.bind(globber); + + try { + if (dataFunction) { + entryData = dataFunction(entryData); + + if (entryData === false) { + ignoreMatch = true; + } else if (typeof entryData !== 'object') { + throw new ArchiverError('DIRECTORYFUNCTIONINVALIDDATA', { dirpath: dirpath }); } - this.data = { req, known, oneofs: Object.values(oneofs) }; + } + } catch(e) { + this.emit('error', e); + return; } - /** - * Is the argument a valid message as specified by the - * reflection information? - * - * Checks all field types recursively. The `depth` - * specifies how deep into the structure the check will be. - * - * With a depth of 0, only the presence of fields - * is checked. - * - * With a depth of 1 or more, the field types are checked. - * - * With a depth of 2 or more, the members of map, repeated - * and message fields are checked. - * - * Message fields will be checked recursively with depth - 1. - * - * The number of map entries / repeated values being checked - * is < depth. - */ - is(message, depth, allowExcessProperties = false) { - if (depth < 0) - return true; - if (message === null || message === undefined || typeof message != 'object') - return false; - this.prepare(); - let keys = Object.keys(message), data = this.data; - // if a required field is missing in arg, this cannot be a T - if (keys.length < data.req.length || data.req.some(n => !keys.includes(n))) - return false; - if (!allowExcessProperties) { - // if the arg contains a key we dont know, this is not a literal T - if (keys.some(k => !data.known.includes(k))) - return false; - } - // "With a depth of 0, only the presence and absence of fields is checked." - // "With a depth of 1 or more, the field types are checked." 
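// ----------------------------------------------------------------------------
// Illustrative aside: a minimal usage sketch of the directory() data-function
// contract implemented in onGlobMatch above: returning false skips a match,
// and returning anything other than an object raises DIRECTORYFUNCTIONINVALIDDATA.
// Assumes the public `archiver` entry point and a local ./src directory.
const archiver = require('archiver');
const fs = require('fs');

const archive = archiver('zip');
archive.pipe(fs.createWriteStream('out.zip'));

archive.directory('src/', 'src', function(entryData) {
  // drop editor backup files; append everything else unchanged
  if (entryData.name.endsWith('~')) {
    return false;
  }
  return entryData;
});

archive.finalize();
// ----------------------------------------------------------------------------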
- if (depth < 1) { - return true; - } - // check oneof group - for (const name of data.oneofs) { - const group = message[name]; - if (!oneof_1.isOneofGroup(group)) - return false; - if (group.oneofKind === undefined) - continue; - const field = this.fields.find(f => f.localName === group.oneofKind); - if (!field) - return false; // we found no field, but have a kind, something is wrong - if (!this.field(group[group.oneofKind], field, allowExcessProperties, depth)) - return false; - } - // check types - for (const field of this.fields) { - if (field.oneof !== undefined) - continue; - if (!this.field(message[field.localName], field, allowExcessProperties, depth)) - return false; - } - return true; + + if (ignoreMatch) { + globber.resume(); + return; } - field(arg, field, allowExcessProperties, depth) { - let repeated = field.repeat; - switch (field.kind) { - case "scalar": - if (arg === undefined) - return field.opt; - if (repeated) - return this.scalars(arg, field.T, depth, field.L); - return this.scalar(arg, field.T, field.L); - case "enum": - if (arg === undefined) - return field.opt; - if (repeated) - return this.scalars(arg, reflection_info_1.ScalarType.INT32, depth); - return this.scalar(arg, reflection_info_1.ScalarType.INT32); - case "message": - if (arg === undefined) - return true; - if (repeated) - return this.messages(arg, field.T(), allowExcessProperties, depth); - return this.message(arg, field.T(), allowExcessProperties, depth); - case "map": - if (typeof arg != 'object' || arg === null) - return false; - if (depth < 2) - return true; - if (!this.mapKeys(arg, field.K, depth)) - return false; - switch (field.V.kind) { - case "scalar": - return this.scalars(Object.values(arg), field.V.T, depth, field.V.L); - case "enum": - return this.scalars(Object.values(arg), reflection_info_1.ScalarType.INT32, depth); - case "message": - return this.messages(Object.values(arg), field.V.T(), allowExcessProperties, depth); - } - break; - } - return true; - } - message(arg, type, allowExcessProperties, depth) { - if (allowExcessProperties) { - return type.isAssignable(arg, depth); - } - return type.is(arg, depth); - } - messages(arg, type, allowExcessProperties, depth) { - if (!Array.isArray(arg)) - return false; - if (depth < 2) - return true; - if (allowExcessProperties) { - for (let i = 0; i < arg.length && i < depth; i++) - if (!type.isAssignable(arg[i], depth - 1)) - return false; - } - else { - for (let i = 0; i < arg.length && i < depth; i++) - if (!type.is(arg[i], depth - 1)) - return false; - } - return true; - } - scalar(arg, type, longType) { - let argType = typeof arg; - switch (type) { - case reflection_info_1.ScalarType.UINT64: - case reflection_info_1.ScalarType.FIXED64: - case reflection_info_1.ScalarType.INT64: - case reflection_info_1.ScalarType.SFIXED64: - case reflection_info_1.ScalarType.SINT64: - switch (longType) { - case reflection_info_1.LongType.BIGINT: - return argType == "bigint"; - case reflection_info_1.LongType.NUMBER: - return argType == "number" && !isNaN(arg); - default: - return argType == "string"; - } - case reflection_info_1.ScalarType.BOOL: - return argType == 'boolean'; - case reflection_info_1.ScalarType.STRING: - return argType == 'string'; - case reflection_info_1.ScalarType.BYTES: - return arg instanceof Uint8Array; - case reflection_info_1.ScalarType.DOUBLE: - case reflection_info_1.ScalarType.FLOAT: - return argType == 'number' && !isNaN(arg); - default: - // case ScalarType.UINT32: - // case ScalarType.FIXED32: - // case ScalarType.INT32: - // case 
ScalarType.SINT32: - // case ScalarType.SFIXED32: - return argType == 'number' && Number.isInteger(arg); - } - } - scalars(arg, type, depth, longType) { - if (!Array.isArray(arg)) - return false; - if (depth < 2) - return true; - if (Array.isArray(arg)) - for (let i = 0; i < arg.length && i < depth; i++) - if (!this.scalar(arg[i], type, longType)) - return false; - return true; - } - mapKeys(map, type, depth) { - let keys = Object.keys(map); - switch (type) { - case reflection_info_1.ScalarType.INT32: - case reflection_info_1.ScalarType.FIXED32: - case reflection_info_1.ScalarType.SFIXED32: - case reflection_info_1.ScalarType.SINT32: - case reflection_info_1.ScalarType.UINT32: - return this.scalars(keys.slice(0, depth).map(k => parseInt(k)), type, depth); - case reflection_info_1.ScalarType.BOOL: - return this.scalars(keys.slice(0, depth).map(k => k == 'true' ? true : k == 'false' ? false : k), type, depth); - default: - return this.scalars(keys, type, depth, reflection_info_1.LongType.STRING); - } - } -} -exports.ReflectionTypeCheck = ReflectionTypeCheck; + this._append(match.absolute, entryData); + } -/***/ }), + var globber = glob(dirpath, globOptions); + globber.on('error', onGlobError.bind(this)); + globber.on('match', onGlobMatch.bind(this)); + globber.on('end', onGlobEnd.bind(this)); -/***/ 43747: -/***/ ((module, exports, __nccwpck_require__) => { + return this; +}; -"use strict"; /** - * @author Toru Nagashima - * See LICENSE file in root directory for full license. + * Appends a file given its filepath using a + * [lazystream]{@link https://github.com/jpommerening/node-lazystream} wrapper to + * prevent issues with open file limits. + * + * When the instance has received, processed, and emitted the file, the `entry` + * event is fired. + * + * @param {String} filepath The source filepath. + * @param {EntryData} data See also [ZipEntryData]{@link ZipEntryData} and + * [TarEntryData]{@link TarEntryData}. + * @return {this} */ +Archiver.prototype.file = function(filepath, data) { + if (this._state.finalize || this._state.aborted) { + this.emit('error', new ArchiverError('QUEUECLOSED')); + return this; + } + if (typeof filepath !== 'string' || filepath.length === 0) { + this.emit('error', new ArchiverError('FILEFILEPATHREQUIRED')); + return this; + } -Object.defineProperty(exports, "__esModule", ({ value: true })); + this._append(filepath, data); -var eventTargetShim = __nccwpck_require__(79752); + return this; +}; /** - * The signal class. - * @see https://dom.spec.whatwg.org/#abortsignal - */ -class AbortSignal extends eventTargetShim.EventTarget { - /** - * AbortSignal cannot be constructed directly. - */ - constructor() { - super(); - throw new TypeError("AbortSignal cannot be constructed directly"); - } - /** - * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise. - */ - get aborted() { - const aborted = abortedFlags.get(this); - if (typeof aborted !== "boolean") { - throw new TypeError(`Expected 'this' to be an 'AbortSignal' object, but got ${this === null ? "null" : typeof this}`); - } - return aborted; - } -} -eventTargetShim.defineEventAttribute(AbortSignal.prototype, "abort"); -/** - * Create an AbortSignal object. - */ -function createAbortSignal() { - const signal = Object.create(AbortSignal.prototype); - eventTargetShim.EventTarget.call(signal); - abortedFlags.set(signal, false); - return signal; -} -/** - * Abort a given signal. 
- */ -function abortSignal(signal) { - if (abortedFlags.get(signal) !== false) { - return; - } - abortedFlags.set(signal, true); - signal.dispatchEvent({ type: "abort" }); -} -/** - * Aborted flag for each instances. + * Appends multiple files that match a glob pattern. + * + * @param {String} pattern The [glob pattern]{@link https://github.com/isaacs/minimatch} to match. + * @param {Object} options See [node-readdir-glob]{@link https://github.com/yqnn/node-readdir-glob#options}. + * @param {EntryData} data See also [ZipEntryData]{@link ZipEntryData} and + * [TarEntryData]{@link TarEntryData}. + * @return {this} */ -const abortedFlags = new WeakMap(); -// Properties should be enumerable. -Object.defineProperties(AbortSignal.prototype, { - aborted: { enumerable: true }, -}); -// `toString()` should return `"[object AbortSignal]"` -if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { - Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, { - configurable: true, - value: "AbortSignal", - }); -} +Archiver.prototype.glob = function(pattern, options, data) { + this._pending++; -/** - * The AbortController. - * @see https://dom.spec.whatwg.org/#abortcontroller - */ -class AbortController { - /** - * Initialize this controller. - */ - constructor() { - signals.set(this, createAbortSignal()); - } - /** - * Returns the `AbortSignal` object associated with this object. - */ - get signal() { - return getSignal(this); - } - /** - * Abort and signal to any observers that the associated activity is to be aborted. - */ - abort() { - abortSignal(getSignal(this)); - } -} -/** - * Associated signals. - */ -const signals = new WeakMap(); -/** - * Get the associated signal of a given controller. - */ -function getSignal(controller) { - const signal = signals.get(controller); - if (signal == null) { - throw new TypeError(`Expected 'this' to be an 'AbortController' object, but got ${controller === null ? "null" : typeof controller}`); - } - return signal; -} -// Properties should be enumerable. 
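// ----------------------------------------------------------------------------
// Illustrative aside: a usage sketch for the glob() method above. Options are
// forwarded to node-readdir-glob (with stat and pattern filled in by glob()
// itself), and each match is appended through the same internal queue path as
// file(). The pattern and paths here are examples only.
const archiver = require('archiver');
const fs = require('fs');

const archive = archiver('tar', { gzip: true });
archive.pipe(fs.createWriteStream('logs.tar.gz'));

// cwd is read from the options object; matches are resolved relative to it
archive.glob('**/*.log', { cwd: 'var/log' });

archive.finalize();
// ----------------------------------------------------------------------------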
-Object.defineProperties(AbortController.prototype, { - signal: { enumerable: true }, - abort: { enumerable: true }, -}); -if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { - Object.defineProperty(AbortController.prototype, Symbol.toStringTag, { - configurable: true, - value: "AbortController", - }); -} + options = util.defaults(options, { + stat: true, + pattern: pattern + }); -exports.AbortController = AbortController; -exports.AbortSignal = AbortSignal; -exports["default"] = AbortController; + function onGlobEnd() { + this._pending--; + this._maybeFinalize(); + } -module.exports = AbortController -module.exports.AbortController = module.exports["default"] = AbortController -module.exports.AbortSignal = AbortSignal -//# sourceMappingURL=abort-controller.js.map + function onGlobError(err) { + this.emit('error', err); + } + function onGlobMatch(match){ + globber.pause(); + var entryData = Object.assign({}, data); + entryData.callback = globber.resume.bind(globber); + entryData.stats = match.stat; + entryData.name = match.relative; -/***/ }), + this._append(match.absolute, entryData); + } -/***/ 14637: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + var globber = glob(options.cwd || '.', options); + globber.on('error', onGlobError.bind(this)); + globber.on('match', onGlobMatch.bind(this)); + globber.on('end', onGlobEnd.bind(this)); + + return this; +}; /** - * archiver-utils + * Finalizes the instance and prevents further appending to the archive + * structure (queue will continue til drained). * - * Copyright (c) 2012-2014 Chris Talkington, contributors. - * Licensed under the MIT license. - * https://github.com/archiverjs/node-archiver/blob/master/LICENSE-MIT + * The `end`, `close` or `finish` events on the destination stream may fire + * right after calling this method so you should set listeners beforehand to + * properly detect stream completion. + * + * @return {Promise} */ -var fs = __nccwpck_require__(94412); -var path = __nccwpck_require__(71017); +Archiver.prototype.finalize = function() { + if (this._state.aborted) { + var abortedError = new ArchiverError('ABORTED'); + this.emit('error', abortedError); + return Promise.reject(abortedError); + } -var flatten = __nccwpck_require__(95350); -var difference = __nccwpck_require__(46585); -var union = __nccwpck_require__(42513); -var isPlainObject = __nccwpck_require__(94280); + if (this._state.finalize) { + var finalizingError = new ArchiverError('FINALIZING'); + this.emit('error', finalizingError); + return Promise.reject(finalizingError); + } -var glob = __nccwpck_require__(14882); + this._state.finalize = true; -var file = module.exports = {}; + if (this._pending === 0 && this._queue.idle() && this._statQueue.idle()) { + this._finalize(); + } -var pathSeparatorRe = /[\/\\]/g; + var self = this; -// Process specified wildcard glob patterns or filenames against a -// callback, excluding and uniquing files in the result set. -var processPatterns = function(patterns, fn) { - // Filepaths to return. - var result = []; - // Iterate over flattened patterns array. - flatten(patterns).forEach(function(pattern) { - // If the first character is ! it should be omitted - var exclusion = pattern.indexOf('!') === 0; - // If the pattern is an exclusion, remove the ! - if (exclusion) { pattern = pattern.slice(1); } - // Find all matching files for this pattern. - var matches = fn(pattern); - if (exclusion) { - // If an exclusion, remove matching files. 
- result = difference(result, matches); - } else { - // Otherwise add matching files. - result = union(result, matches); - } - }); - return result; -}; + return new Promise(function(resolve, reject) { + var errored; -// True if the file path exists. -file.exists = function() { - var filepath = path.join.apply(path, arguments); - return fs.existsSync(filepath); + self._module.on('end', function() { + if (!errored) { + resolve(); + } + }) + + self._module.on('error', function(err) { + errored = true; + reject(err); + }) + }) }; -// Return an array of all file paths that match the given wildcard patterns. -file.expand = function(...args) { - // If the first argument is an options object, save those options to pass - // into the File.prototype.glob.sync method. - var options = isPlainObject(args[0]) ? args.shift() : {}; - // Use the first argument if it's an Array, otherwise convert the arguments - // object to an array and use that. - var patterns = Array.isArray(args[0]) ? args[0] : args; - // Return empty set if there are no patterns or filepaths. - if (patterns.length === 0) { return []; } - // Return all matching filepaths. - var matches = processPatterns(patterns, function(pattern) { - // Find all matching files for this pattern. - return glob.sync(pattern, options); - }); - // Filter result set? - if (options.filter) { - matches = matches.filter(function(filepath) { - filepath = path.join(options.cwd || '', filepath); - try { - if (typeof options.filter === 'function') { - return options.filter(filepath); - } else { - // If the file is of the right type and exists, this should work. - return fs.statSync(filepath)[options.filter](); - } - } catch(e) { - // Otherwise, it's probably not the right type. - return false; - } - }); +/** + * Sets the module format name used for archiving. + * + * @param {String} format The name of the format. + * @return {this} + */ +Archiver.prototype.setFormat = function(format) { + if (this._format) { + this.emit('error', new ArchiverError('FORMATSET')); + return this; } - return matches; -}; -// Build a multi task "files" object dynamically. -file.expandMapping = function(patterns, destBase, options) { - options = Object.assign({ - rename: function(destBase, destPath) { - return path.join(destBase || '', destPath); - } - }, options); - var files = []; - var fileByDest = {}; - // Find all files matching pattern, using passed-in options. - file.expand(options, patterns).forEach(function(src) { - var destPath = src; - // Flatten? - if (options.flatten) { - destPath = path.basename(destPath); - } - // Change the extension? - if (options.ext) { - destPath = destPath.replace(/(\.[^\/]*)?$/, options.ext); - } - // Generate destination filename. - var dest = options.rename(destBase, destPath, options); - // Prepend cwd to src path if necessary. - if (options.cwd) { src = path.join(options.cwd, src); } - // Normalize filepaths to be unix-style. - dest = dest.replace(pathSeparatorRe, '/'); - src = src.replace(pathSeparatorRe, '/'); - // Map correct src path to dest path. - if (fileByDest[dest]) { - // If dest already exists, push this src onto that dest's src array. - fileByDest[dest].src.push(src); - } else { - // Otherwise create a new src-dest file mapping object. - files.push({ - src: [src], - dest: dest, - }); - // And store a reference for later use. 
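// ----------------------------------------------------------------------------
// Illustrative aside: finalize() above returns a Promise wired to the module's
// 'end'/'error' events, but as its JSDoc warns, destination-stream listeners
// should still be attached before calling it. A minimal end-to-end sketch:
const archiver = require('archiver');
const fs = require('fs');

const output = fs.createWriteStream('bundle.zip');
const archive = archiver('zip');

// attach completion/error handlers BEFORE finalize()
output.on('close', function() {
  console.log(archive.pointer() + ' total bytes written');
});
archive.on('error', function(err) { throw err; });

archive.pipe(output);
archive.append('hello world', { name: 'hello.txt' });

archive.finalize().then(function() {
  console.log('queue drained, module ended');
});
// ----------------------------------------------------------------------------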
- fileByDest[dest] = files[files.length - 1]; - } - }); - return files; -}; + this._format = format; -// reusing bits of grunt's multi-task source normalization -file.normalizeFilesArray = function(data) { - var files = []; + return this; +}; - data.forEach(function(obj) { - var prop; - if ('src' in obj || 'dest' in obj) { - files.push(obj); - } - }); +/** + * Sets the module used for archiving. + * + * @param {Function} module The function for archiver to interact with. + * @return {this} + */ +Archiver.prototype.setModule = function(module) { + if (this._state.aborted) { + this.emit('error', new ArchiverError('ABORTED')); + return this; + } - if (files.length === 0) { - return []; + if (this._state.module) { + this.emit('error', new ArchiverError('MODULESET')); + return this; } - files = _(files).chain().forEach(function(obj) { - if (!('src' in obj) || !obj.src) { return; } - // Normalize .src properties to flattened array. - if (Array.isArray(obj.src)) { - obj.src = flatten(obj.src); - } else { - obj.src = [obj.src]; - } - }).map(function(obj) { - // Build options object, removing unwanted properties. - var expandOptions = Object.assign({}, obj); - delete expandOptions.src; - delete expandOptions.dest; + this._module = module; + this._modulePipe(); - // Expand file mappings. - if (obj.expand) { - return file.expandMapping(obj.src, obj.dest, expandOptions).map(function(mapObj) { - // Copy obj properties to result. - var result = Object.assign({}, obj); - // Make a clone of the orig obj available. - result.orig = Object.assign({}, obj); - // Set .src and .dest, processing both as templates. - result.src = mapObj.src; - result.dest = mapObj.dest; - // Remove unwanted properties. - ['expand', 'cwd', 'flatten', 'rename', 'ext'].forEach(function(prop) { - delete result[prop]; - }); - return result; - }); - } - - // Copy obj properties to result, adding an .orig property. - var result = Object.assign({}, obj); - // Make a clone of the orig obj available. - result.orig = Object.assign({}, obj); - - if ('src' in result) { - // Expose an expand-on-demand getter method as .src. - Object.defineProperty(result, 'src', { - enumerable: true, - get: function fn() { - var src; - if (!('result' in fn)) { - src = obj.src; - // If src is an array, flatten it. Otherwise, make it into an array. - src = Array.isArray(src) ? flatten(src) : [src]; - // Expand src files, memoizing result. - fn.result = file.expand(expandOptions, src); - } - return fn.result; - } - }); - } - - if ('dest' in result) { - result.dest = obj.dest; - } - - return result; - }).flatten().value(); - - return files; + return this; }; - -/***/ }), - -/***/ 6582: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - /** - * archiver-utils + * Appends a symlink to the instance. * - * Copyright (c) 2015 Chris Talkington. - * Licensed under the MIT license. - * https://github.com/archiverjs/archiver-utils/blob/master/LICENSE + * This does NOT interact with filesystem and is used for programmatically creating symlinks. + * + * @param {String} filepath The symlink path (within archive). + * @param {String} target The target path (within archive). + * @param {Number} mode Sets the entry permissions. 
+ * @return {this} */ -var fs = __nccwpck_require__(94412); -var path = __nccwpck_require__(71017); -var isStream = __nccwpck_require__(78239); -var lazystream = __nccwpck_require__(64148); -var normalizePath = __nccwpck_require__(51587); -var defaults = __nccwpck_require__(30484); - -var Stream = (__nccwpck_require__(12781).Stream); -var PassThrough = (__nccwpck_require__(92567).PassThrough); - -var utils = module.exports = {}; -utils.file = __nccwpck_require__(14637); - -utils.collectStream = function(source, callback) { - var collection = []; - var size = 0; - - source.on('error', callback); - - source.on('data', function(chunk) { - collection.push(chunk); - size += chunk.length; - }); +Archiver.prototype.symlink = function(filepath, target, mode) { + if (this._state.finalize || this._state.aborted) { + this.emit('error', new ArchiverError('QUEUECLOSED')); + return this; + } - source.on('end', function() { - var buf = Buffer.alloc(size); - var offset = 0; + if (typeof filepath !== 'string' || filepath.length === 0) { + this.emit('error', new ArchiverError('SYMLINKFILEPATHREQUIRED')); + return this; + } - collection.forEach(function(data) { - data.copy(buf, offset); - offset += data.length; - }); + if (typeof target !== 'string' || target.length === 0) { + this.emit('error', new ArchiverError('SYMLINKTARGETREQUIRED', { filepath: filepath })); + return this; + } - callback(null, buf); - }); -}; + if (!this._moduleSupports('symlink')) { + this.emit('error', new ArchiverError('SYMLINKNOTSUPPORTED', { filepath: filepath })); + return this; + } -utils.dateify = function(dateish) { - dateish = dateish || new Date(); + var data = {}; + data.type = 'symlink'; + data.name = filepath.replace(/\\/g, '/'); + data.linkname = target.replace(/\\/g, '/'); + data.sourceType = 'buffer'; - if (dateish instanceof Date) { - dateish = dateish; - } else if (typeof dateish === 'string') { - dateish = new Date(dateish); - } else { - dateish = new Date(); + if (typeof mode === "number") { + data.mode = mode; } - return dateish; -}; - -// this is slightly different from lodash version -utils.defaults = function(object, source, guard) { - var args = arguments; - args[0] = args[0] || {}; + this._entriesCount++; + this._queue.push({ + data: data, + source: Buffer.concat([]) + }); - return defaults(...args); + return this; }; -utils.isStream = function(source) { - return isStream(source); +/** + * Returns the current length (in bytes) that has been emitted. + * + * @return {Number} + */ +Archiver.prototype.pointer = function() { + return this._pointer; }; -utils.lazyReadStream = function(filepath) { - return new lazystream.Readable(function() { - return fs.createReadStream(filepath); - }); +/** + * Middleware-like helper that has yet to be fully implemented. + * + * @private + * @param {Function} plugin + * @return {this} + */ +Archiver.prototype.use = function(plugin) { + this._streams.push(plugin); + return this; }; -utils.normalizeInputSource = function(source) { - if (source === null) { - return Buffer.alloc(0); - } else if (typeof source === 'string') { - return Buffer.from(source); - } else if (utils.isStream(source)) { - // Always pipe through a PassThrough stream to guarantee pausing the stream if it's already flowing, - // since it will only be processed in a (distant) future iteration of the event loop, and will lose - // data if already flowing now. 
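// ----------------------------------------------------------------------------
// Illustrative aside: symlink() above never touches the filesystem; it
// fabricates a link entry inside the archive. A minimal sketch, assuming a
// format module that reports symlink support (as the tar and zip modules do):
const archiver = require('archiver');
const fs = require('fs');

const archive = archiver('tar');
archive.pipe(fs.createWriteStream('linked.tar'));

archive.append('real contents\n', { name: 'data/real.txt' });
// adds "data/alias.txt" pointing at "real.txt" inside the archive only
archive.symlink('data/alias.txt', 'real.txt', 0o644);

archive.finalize();
// ----------------------------------------------------------------------------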
- return source.pipe(new PassThrough()); - } - - return source; -}; +module.exports = Archiver; -utils.sanitizePath = function(filepath) { - return normalizePath(filepath, false).replace(/^\w+:/, '').replace(/^(\.\.\/|\/)+/, ''); -}; +/** + * @typedef {Object} CoreOptions + * @global + * @property {Number} [statConcurrency=4] Sets the number of workers used to + * process the internal fs stat queue. + */ -utils.trailingSlashIt = function(str) { - return str.slice(-1) !== '/' ? str + '/' : str; -}; +/** + * @typedef {Object} TransformOptions + * @property {Boolean} [allowHalfOpen=true] If set to false, then the stream + * will automatically end the readable side when the writable side ends and vice + * versa. + * @property {Boolean} [readableObjectMode=false] Sets objectMode for readable + * side of the stream. Has no effect if objectMode is true. + * @property {Boolean} [writableObjectMode=false] Sets objectMode for writable + * side of the stream. Has no effect if objectMode is true. + * @property {Boolean} [decodeStrings=true] Whether or not to decode strings + * into Buffers before passing them to _write(). `Writable` + * @property {String} [encoding=NULL] If specified, then buffers will be decoded + * to strings using the specified encoding. `Readable` + * @property {Number} [highWaterMark=16kb] The maximum number of bytes to store + * in the internal buffer before ceasing to read from the underlying resource. + * `Readable` `Writable` + * @property {Boolean} [objectMode=false] Whether this stream should behave as a + * stream of objects. Meaning that stream.read(n) returns a single value instead + * of a Buffer of size n. `Readable` `Writable` + */ -utils.unixifyPath = function(filepath) { - return normalizePath(filepath, false).replace(/^\w+:/, ''); -}; +/** + * @typedef {Object} EntryData + * @property {String} name Sets the entry name including internal path. + * @property {(String|Date)} [date=NOW()] Sets the entry date. + * @property {Number} [mode=D:0755/F:0644] Sets the entry permissions. + * @property {String} [prefix] Sets a path prefix for the entry name. Useful + * when working with methods like `directory` or `glob`. + * @property {fs.Stats} [stats] Sets the fs stat data for this entry allowing + * for reduction of fs stat calls when stat data is already known. + */ -utils.walkdir = function(dirpath, base, callback) { - var results = []; +/** + * @typedef {Object} ErrorData + * @property {String} message The message of the error. + * @property {String} code The error code assigned to this error. + * @property {String} data Additional data provided for reporting or debugging (where available). + */ - if (typeof base === 'function') { - callback = base; - base = dirpath; - } +/** + * @typedef {Object} ProgressData + * @property {Object} entries + * @property {Number} entries.total Number of entries that have been appended. + * @property {Number} entries.processed Number of entries that have been processed. + * @property {Object} fs + * @property {Number} fs.totalBytes Number of bytes that have been appended. Calculated asynchronously and might not be accurate: it growth while entries are added. (based on fs.Stats) + * @property {Number} fs.processedBytes Number of bytes that have been processed. 
(based on fs.Stats) + */ - fs.readdir(dirpath, function(err, list) { - var i = 0; - var file; - var filepath; - if (err) { - return callback(err); - } +/***/ }), - (function next() { - file = list[i++]; +/***/ 79291: +/***/ ((module, exports, __nccwpck_require__) => { - if (!file) { - return callback(null, results); - } +/** + * Archiver Core + * + * @ignore + * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE} + * @copyright (c) 2012-2014 Chris Talkington, contributors. + */ - filepath = path.join(dirpath, file); +var util = __nccwpck_require__(73837); - fs.stat(filepath, function(err, stats) { - results.push({ - path: filepath, - relative: path.relative(base, filepath).replace(/\\/g, '/'), - stats: stats - }); +const ERROR_CODES = { + 'ABORTED': 'archive was aborted', + 'DIRECTORYDIRPATHREQUIRED': 'diretory dirpath argument must be a non-empty string value', + 'DIRECTORYFUNCTIONINVALIDDATA': 'invalid data returned by directory custom data function', + 'ENTRYNAMEREQUIRED': 'entry name must be a non-empty string value', + 'FILEFILEPATHREQUIRED': 'file filepath argument must be a non-empty string value', + 'FINALIZING': 'archive already finalizing', + 'QUEUECLOSED': 'queue closed', + 'NOENDMETHOD': 'no suitable finalize/end method defined by module', + 'DIRECTORYNOTSUPPORTED': 'support for directory entries not defined by module', + 'FORMATSET': 'archive format already set', + 'INPUTSTEAMBUFFERREQUIRED': 'input source must be valid Stream or Buffer instance', + 'MODULESET': 'module already set', + 'SYMLINKNOTSUPPORTED': 'support for symlink entries not defined by module', + 'SYMLINKFILEPATHREQUIRED': 'symlink filepath argument must be a non-empty string value', + 'SYMLINKTARGETREQUIRED': 'symlink target argument must be a non-empty string value', + 'ENTRYNOTSUPPORTED': 'entry not supported' +}; - if (stats && stats.isDirectory()) { - utils.walkdir(filepath, base, function(err, res) { - if(err){ - return callback(err); - } +function ArchiverError(code, data) { + Error.captureStackTrace(this, this.constructor); + //this.name = this.constructor.name; + this.message = ERROR_CODES[code] || code; + this.code = code; + this.data = data; +} - res.forEach(function(dirEntry) { - results.push(dirEntry); - }); - - next(); - }); - } else { - next(); - } - }); - })(); - }); -}; +util.inherits(ArchiverError, Error); +exports = module.exports = ArchiverError; /***/ }), -/***/ 50126: +/***/ 61314: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { /** - * Archiver Vending + * JSON Format Plugin * - * @ignore + * @module plugins/json * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE} * @copyright (c) 2012-2014 Chris Talkington, contributors. */ -var Archiver = __nccwpck_require__(32436); +var inherits = (__nccwpck_require__(73837).inherits); +var Transform = (__nccwpck_require__(32555).Transform); -var formats = {}; +var crc32 = __nccwpck_require__(21278); +var util = __nccwpck_require__(76358); /** - * Dispenses a new Archiver instance. - * * @constructor - * @param {String} format The archive format to use. 
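// ----------------------------------------------------------------------------
// Illustrative aside: ArchiverError above carries a stable `code` (plus
// optional `data`), so callers can branch on codes rather than message text.
// A minimal sketch; appending after finalize() trips QUEUECLOSED:
const archiver = require('archiver');

const archive = archiver('zip');
archive.on('error', function(err) {
  console.error(err.code, '-', err.message, err.data || '');
});

archive.finalize();
archive.append('too late', { name: 'late.txt' }); // emits QUEUECLOSED
// ----------------------------------------------------------------------------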
- * @param {Object} options See [Archiver]{@link Archiver} - * @return {Archiver} + * @param {(JsonOptions|TransformOptions)} options */ -var vending = function(format, options) { - return vending.create(format, options); +var Json = function(options) { + if (!(this instanceof Json)) { + return new Json(options); + } + + options = this.options = util.defaults(options, {}); + + Transform.call(this, options); + + this.supports = { + directory: true, + symlink: true + }; + + this.files = []; }; +inherits(Json, Transform); + /** - * Creates a new Archiver instance. + * [_transform description] * - * @param {String} format The archive format to use. - * @param {Object} options See [Archiver]{@link Archiver} - * @return {Archiver} + * @private + * @param {Buffer} chunk + * @param {String} encoding + * @param {Function} callback + * @return void */ -vending.create = function(format, options) { - if (formats[format]) { - var instance = new Archiver(format, options); - instance.setFormat(format); - instance.setModule(new formats[format](options)); +Json.prototype._transform = function(chunk, encoding, callback) { + callback(null, chunk); +}; - return instance; - } else { - throw new Error('create(' + format + '): format not registered'); - } +/** + * [_writeStringified description] + * + * @private + * @return void + */ +Json.prototype._writeStringified = function() { + var fileString = JSON.stringify(this.files); + this.write(fileString); }; /** - * Registers a format for use with archiver. + * [append description] * - * @param {String} format The name of the format. - * @param {Function} module The function for archiver to interact with. + * @param {(Buffer|Stream)} source + * @param {EntryData} data + * @param {Function} callback * @return void */ -vending.registerFormat = function(format, module) { - if (formats[format]) { - throw new Error('register(' + format + '): format already registered'); - } +Json.prototype.append = function(source, data, callback) { + var self = this; - if (typeof module !== 'function') { - throw new Error('register(' + format + '): format module invalid'); - } + data.crc32 = 0; - if (typeof module.prototype.append !== 'function' || typeof module.prototype.finalize !== 'function') { - throw new Error('register(' + format + '): format module missing methods'); + function onend(err, sourceBuffer) { + if (err) { + callback(err); + return; + } + + data.size = sourceBuffer.length || 0; + data.crc32 = crc32.unsigned(sourceBuffer); + + self.files.push(data); + + callback(null, data); } - formats[format] = module; + if (data.sourceType === 'buffer') { + onend(null, source); + } else if (data.sourceType === 'stream') { + util.collectStream(source, onend); + } }; /** - * Check if the format is already registered. - * - * @param {String} format the name of the format. 
- * @return boolean + * [finalize description] + * + * @return void */ -vending.isRegisteredFormat = function (format) { - if (formats[format]) { - return true; - } - - return false; +Json.prototype.finalize = function() { + this._writeStringified(); + this.end(); }; -vending.registerFormat('zip', __nccwpck_require__(95653)); -vending.registerFormat('tar', __nccwpck_require__(2814)); -vending.registerFormat('json', __nccwpck_require__(28814)); +module.exports = Json; + +/** + * @typedef {Object} JsonOptions + * @global + */ -module.exports = vending; /***/ }), -/***/ 32436: +/***/ 83269: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { /** - * Archiver Core + * TAR Format Plugin * - * @ignore + * @module plugins/tar * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE} * @copyright (c) 2012-2014 Chris Talkington, contributors. */ -var fs = __nccwpck_require__(57147); -var glob = __nccwpck_require__(64156); -var async = __nccwpck_require__(91788); -var path = __nccwpck_require__(71017); -var util = __nccwpck_require__(6582); - -var inherits = (__nccwpck_require__(73837).inherits); -var ArchiverError = __nccwpck_require__(96479); -var Transform = (__nccwpck_require__(92567).Transform); +var zlib = __nccwpck_require__(59796); -var win32 = process.platform === 'win32'; +var engine = __nccwpck_require__(26962); +var util = __nccwpck_require__(76358); /** * @constructor - * @param {String} format The archive format to use. - * @param {(CoreOptions|TransformOptions)} options See also {@link ZipOptions} and {@link TarOptions}. + * @param {TarOptions} options */ -var Archiver = function(format, options) { - if (!(this instanceof Archiver)) { - return new Archiver(format, options); - } - - if (typeof format !== 'string') { - options = format; - format = 'zip'; +var Tar = function(options) { + if (!(this instanceof Tar)) { + return new Tar(options); } options = this.options = util.defaults(options, { - highWaterMark: 1024 * 1024, - statConcurrency: 4 + gzip: false }); - Transform.call(this, options); - - this._format = false; - this._module = false; - this._pending = 0; - this._pointer = 0; - - this._entriesCount = 0; - this._entriesProcessedCount = 0; - this._fsEntriesTotalBytes = 0; - this._fsEntriesProcessedBytes = 0; - - this._queue = async.queue(this._onQueueTask.bind(this), 1); - this._queue.drain(this._onQueueDrain.bind(this)); - - this._statQueue = async.queue(this._onStatQueueTask.bind(this), options.statConcurrency); - this._statQueue.drain(this._onQueueDrain.bind(this)); + if (typeof options.gzipOptions !== 'object') { + options.gzipOptions = {}; + } - this._state = { - aborted: false, - finalize: false, - finalizing: false, - finalized: false, - modulePiped: false + this.supports = { + directory: true, + symlink: true }; - this._streams = []; -}; + this.engine = engine.pack(options); + this.compressor = false; -inherits(Archiver, Transform); + if (options.gzip) { + this.compressor = zlib.createGzip(options.gzipOptions); + this.compressor.on('error', this._onCompressorError.bind(this)); + } +}; /** - * Internal logic for `abort`. + * [_onCompressorError description] * * @private + * @param {Error} err * @return void */ -Archiver.prototype._abort = function() { - this._state.aborted = true; - this._queue.kill(); - this._statQueue.kill(); - - if (this._queue.idle()) { - this._shutdown(); - } +Tar.prototype._onCompressorError = function(err) { + this.engine.emit('error', err); }; /** - * Internal helper for appending files. 
+ * [append description] * - * @private - * @param {String} filepath The source filepath. - * @param {EntryData} data The entry data. + * @param {(Buffer|Stream)} source + * @param {TarEntryData} data + * @param {Function} callback * @return void */ -Archiver.prototype._append = function(filepath, data) { - data = data || {}; +Tar.prototype.append = function(source, data, callback) { + var self = this; - var task = { - source: null, - filepath: filepath - }; + data.mtime = data.date; - if (!data.name) { - data.name = filepath; + function append(err, sourceBuffer) { + if (err) { + callback(err); + return; + } + + self.engine.entry(data, sourceBuffer, function(err) { + callback(err, data); + }); } - data.sourcePath = filepath; - task.data = data; - this._entriesCount++; + if (data.sourceType === 'buffer') { + append(null, source); + } else if (data.sourceType === 'stream' && data.stats) { + data.size = data.stats.size; - if (data.stats && data.stats instanceof fs.Stats) { - task = this._updateQueueTaskWithStats(task, data.stats); - if (task) { - if (data.stats.size) { - this._fsEntriesTotalBytes += data.stats.size; - } + var entry = self.engine.entry(data, function(err) { + callback(err, data); + }); - this._queue.push(task); - } - } else { - this._statQueue.push(task); + source.pipe(entry); + } else if (data.sourceType === 'stream') { + util.collectStream(source, append); } }; /** - * Internal logic for `finalize`. + * [finalize description] * - * @private * @return void */ -Archiver.prototype._finalize = function() { - if (this._state.finalizing || this._state.finalized || this._state.aborted) { - return; - } - - this._state.finalizing = true; - - this._moduleFinalize(); - - this._state.finalizing = false; - this._state.finalized = true; +Tar.prototype.finalize = function() { + this.engine.finalize(); }; /** - * Checks the various state variables to determine if we can `finalize`. + * [on description] * - * @private - * @return {Boolean} + * @return this.engine */ -Archiver.prototype._maybeFinalize = function() { - if (this._state.finalizing || this._state.finalized || this._state.aborted) { - return false; - } - - if (this._state.finalize && this._pending === 0 && this._queue.idle() && this._statQueue.idle()) { - this._finalize(); - return true; - } - - return false; +Tar.prototype.on = function() { + return this.engine.on.apply(this.engine, arguments); }; /** - * Appends an entry to the module. + * [pipe description] * - * @private - * @fires Archiver#entry - * @param {(Buffer|Stream)} source - * @param {EntryData} data - * @param {Function} callback - * @return void + * @param {String} destination + * @param {Object} options + * @return this.engine */ -Archiver.prototype._moduleAppend = function(source, data, callback) { - if (this._state.aborted) { - callback(); - return; +Tar.prototype.pipe = function(destination, options) { + if (this.compressor) { + return this.engine.pipe.apply(this.engine, [this.compressor]).pipe(destination, options); + } else { + return this.engine.pipe.apply(this.engine, arguments); } - - this._module.append(source, data, function(err) { - this._task = null; - - if (this._state.aborted) { - this._shutdown(); - return; - } - - if (err) { - this.emit('error', err); - setImmediate(callback); - return; - } - - /** - * Fires when the entry's input has been processed and appended to the archive. 
- * - * @event Archiver#entry - * @type {EntryData} - */ - this.emit('entry', data); - this._entriesProcessedCount++; - - if (data.stats && data.stats.size) { - this._fsEntriesProcessedBytes += data.stats.size; - } - - /** - * @event Archiver#progress - * @type {ProgressData} - */ - this.emit('progress', { - entries: { - total: this._entriesCount, - processed: this._entriesProcessedCount - }, - fs: { - totalBytes: this._fsEntriesTotalBytes, - processedBytes: this._fsEntriesProcessedBytes - } - }); - - setImmediate(callback); - }.bind(this)); }; /** - * Finalizes the module. + * [unpipe description] * - * @private - * @return void + * @return this.engine */ -Archiver.prototype._moduleFinalize = function() { - if (typeof this._module.finalize === 'function') { - this._module.finalize(); - } else if (typeof this._module.end === 'function') { - this._module.end(); +Tar.prototype.unpipe = function() { + if (this.compressor) { + return this.compressor.unpipe.apply(this.compressor, arguments); } else { - this.emit('error', new ArchiverError('NOENDMETHOD')); + return this.engine.unpipe.apply(this.engine, arguments); } }; -/** - * Pipes the module to our internal stream with error bubbling. - * - * @private - * @return void - */ -Archiver.prototype._modulePipe = function() { - this._module.on('error', this._onModuleError.bind(this)); - this._module.pipe(this); - this._state.modulePiped = true; -}; +module.exports = Tar; /** - * Determines if the current module supports a defined feature. - * - * @private - * @param {String} key - * @return {Boolean} + * @typedef {Object} TarOptions + * @global + * @property {Boolean} [gzip=false] Compress the tar archive using gzip. + * @property {Object} [gzipOptions] Passed to [zlib]{@link https://nodejs.org/api/zlib.html#zlib_class_options} + * to control compression. + * @property {*} [*] See [tar-stream]{@link https://github.com/mafintosh/tar-stream} documentation for additional properties. */ -Archiver.prototype._moduleSupports = function(key) { - if (!this._module.supports || !this._module.supports[key]) { - return false; - } - - return this._module.supports[key]; -}; /** - * Unpipes the module from our internal stream. - * - * @private - * @return void + * @typedef {Object} TarEntryData + * @global + * @property {String} name Sets the entry name including internal path. + * @property {(String|Date)} [date=NOW()] Sets the entry date. + * @property {Number} [mode=D:0755/F:0644] Sets the entry permissions. + * @property {String} [prefix] Sets a path prefix for the entry name. Useful + * when working with methods like `directory` or `glob`. + * @property {fs.Stats} [stats] Sets the fs stat data for this entry allowing + * for reduction of fs stat calls when stat data is already known. */ -Archiver.prototype._moduleUnpipe = function() { - this._module.unpipe(this); - this._state.modulePiped = false; -}; /** - * Normalizes entry data with fallbacks for key properties. 
- * - * @private - * @param {Object} data - * @param {fs.Stats} stats - * @return {Object} + * TarStream Module + * @external TarStream + * @see {@link https://github.com/mafintosh/tar-stream} */ -Archiver.prototype._normalizeEntryData = function(data, stats) { - data = util.defaults(data, { - type: 'file', - name: null, - date: null, - mode: null, - prefix: null, - sourcePath: null, - stats: false - }); - if (stats && data.stats === false) { - data.stats = stats; - } - var isDir = data.type === 'directory'; +/***/ }), - if (data.name) { - if (typeof data.prefix === 'string' && '' !== data.prefix) { - data.name = data.prefix + '/' + data.name; - data.prefix = null; - } +/***/ 43642: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - data.name = util.sanitizePath(data.name); +/** + * ZIP Format Plugin + * + * @module plugins/zip + * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE} + * @copyright (c) 2012-2014 Chris Talkington, contributors. + */ +var engine = __nccwpck_require__(58314); +var util = __nccwpck_require__(76358); - if (data.type !== 'symlink' && data.name.slice(-1) === '/') { - isDir = true; - data.type = 'directory'; - } else if (isDir) { - data.name += '/'; - } +/** + * @constructor + * @param {ZipOptions} [options] + * @param {String} [options.comment] Sets the zip archive comment. + * @param {Boolean} [options.forceLocalTime=false] Forces the archive to contain local file times instead of UTC. + * @param {Boolean} [options.forceZip64=false] Forces the archive to contain ZIP64 headers. + * @param {Boolean} [options.namePrependSlash=false] Prepends a forward slash to archive file paths. + * @param {Boolean} [options.store=false] Sets the compression method to STORE. + * @param {Object} [options.zlib] Passed to [zlib]{@link https://nodejs.org/api/zlib.html#zlib_class_options} + */ +var Zip = function(options) { + if (!(this instanceof Zip)) { + return new Zip(options); } - // 511 === 0777; 493 === 0755; 438 === 0666; 420 === 0644 - if (typeof data.mode === 'number') { - if (win32) { - data.mode &= 511; - } else { - data.mode &= 4095 - } - } else if (data.stats && data.mode === null) { - if (win32) { - data.mode = data.stats.mode & 511; - } else { - data.mode = data.stats.mode & 4095; - } - - // stat isn't reliable on windows; force 0755 for dir - if (win32 && isDir) { - data.mode = 493; - } - } else if (data.mode === null) { - data.mode = isDir ? 493 : 420; - } + options = this.options = util.defaults(options, { + comment: '', + forceUTC: false, + namePrependSlash: false, + store: false + }); - if (data.stats && data.date === null) { - data.date = data.stats.mtime; - } else { - data.date = util.dateify(data.date); - } + this.supports = { + directory: true, + symlink: true + }; - return data; + this.engine = new engine(options); }; /** - * Error listener that re-emits error on to our internal stream. - * - * @private - * @param {Error} err + * @param {(Buffer|Stream)} source + * @param {ZipEntryData} data + * @param {String} data.name Sets the entry name including internal path. + * @param {(String|Date)} [data.date=NOW()] Sets the entry date. + * @param {Number} [data.mode=D:0755/F:0644] Sets the entry permissions. + * @param {String} [data.prefix] Sets a path prefix for the entry name. Useful + * when working with methods like `directory` or `glob`. + * @param {fs.Stats} [data.stats] Sets the fs stat data for this entry allowing + * for reduction of fs stat calls when stat data is already known. 
+ * @param {Boolean} [data.store=ZipOptions.store] Sets the compression method to STORE.
+ * @param {Function} callback
 * @return void
 */
-Archiver.prototype._onModuleError = function(err) {
-  /**
-   * @event Archiver#error
-   * @type {ErrorData}
-   */
-  this.emit('error', err);
+Zip.prototype.append = function(source, data, callback) {
+  this.engine.entry(source, data, callback);
};

/**
- * Checks the various state variables after queue has drained to determine if
- * we need to `finalize`.
- *
- * @private
 * @return void
 */
-Archiver.prototype._onQueueDrain = function() {
-  if (this._state.finalizing || this._state.finalized || this._state.aborted) {
-    return;
-  }
-
-  if (this._state.finalize && this._pending === 0 && this._queue.idle() && this._statQueue.idle()) {
-    this._finalize();
-  }
+Zip.prototype.finalize = function() {
+  this.engine.finalize();
};

/**
- * Appends each queue task to the module.
- *
- * @private
- * @param {Object} task
- * @param {Function} callback
- * @return void
+ * @return this.engine
 */
-Archiver.prototype._onQueueTask = function(task, callback) {
-  var fullCallback = () => {
-    if(task.data.callback) {
-      task.data.callback();
-    }
-    callback();
-  }
-
-  if (this._state.finalizing || this._state.finalized || this._state.aborted) {
-    fullCallback();
-    return;
-  }
-
-  this._task = task;
-  this._moduleAppend(task.source, task.data, fullCallback);
+Zip.prototype.on = function() {
+  return this.engine.on.apply(this.engine, arguments);
};

/**
- * Performs a file stat and reinjects the task back into the queue.
- *
- * @private
- * @param {Object} task
- * @param {Function} callback
- * @return void
+ * @return this.engine
 */
-Archiver.prototype._onStatQueueTask = function(task, callback) {
-  if (this._state.finalizing || this._state.finalized || this._state.aborted) {
-    callback();
-    return;
-  }
-
-  fs.lstat(task.filepath, function(err, stats) {
-    if (this._state.aborted) {
-      setImmediate(callback);
-      return;
-    }
-
-    if (err) {
-      this._entriesCount--;
-
-      /**
-       * @event Archiver#warning
-       * @type {ErrorData}
-       */
-      this.emit('warning', err);
-      setImmediate(callback);
-      return;
-    }
-
-    task = this._updateQueueTaskWithStats(task, stats);
-
-    if (task) {
-      if (stats.size) {
-        this._fsEntriesTotalBytes += stats.size;
-      }
-
-      this._queue.push(task);
-    }
-
-    setImmediate(callback);
-  }.bind(this));
+Zip.prototype.pipe = function() {
+  return this.engine.pipe.apply(this.engine, arguments);
};

/**
- * Unpipes the module and ends our internal stream.
- *
- * @private
- * @return void
+ * @return this.engine
 */
-Archiver.prototype._shutdown = function() {
-  this._moduleUnpipe();
-  this.end();
+Zip.prototype.unpipe = function() {
+  return this.engine.unpipe.apply(this.engine, arguments);
};

+module.exports = Zip;
+
/**
- * Tracks the bytes emitted by our internal stream.
- *
- * @private
- * @param {Buffer} chunk
- * @param {String} encoding
- * @param {Function} callback
- * @return void
+ * @typedef {Object} ZipOptions
+ * @global
+ * @property {String} [comment] Sets the zip archive comment.
+ * @property {Boolean} [forceLocalTime=false] Forces the archive to contain local file times instead of UTC.
+ * @property {Boolean} [forceZip64=false] Forces the archive to contain ZIP64 headers.
+ * @property {Boolean} [namePrependSlash=false] Prepends a forward slash to archive file paths.
+ * @property {Boolean} [store=false] Sets the compression method to STORE.
+ * @property {Object} [zlib] Passed to [zlib]{@link https://nodejs.org/api/zlib.html#zlib_class_options} + * to control compression. + * @property {*} [*] See [zip-stream]{@link https://archiverjs.com/zip-stream/ZipStream.html} documentation for current list of properties. */ -Archiver.prototype._transform = function(chunk, encoding, callback) { - if (chunk) { - this._pointer += chunk.length; - } - callback(null, chunk); -}; +/** + * @typedef {Object} ZipEntryData + * @global + * @property {String} name Sets the entry name including internal path. + * @property {(String|Date)} [date=NOW()] Sets the entry date. + * @property {Number} [mode=D:0755/F:0644] Sets the entry permissions. + * @property {Boolean} [namePrependSlash=ZipOptions.namePrependSlash] Prepends a forward slash to archive file paths. + * @property {String} [prefix] Sets a path prefix for the entry name. Useful + * when working with methods like `directory` or `glob`. + * @property {fs.Stats} [stats] Sets the fs stat data for this entry allowing + * for reduction of fs stat calls when stat data is already known. + * @property {Boolean} [store=ZipOptions.store] Sets the compression method to STORE. + */ /** - * Updates and normalizes a queue task using stats data. - * - * @private - * @param {Object} task - * @param {fs.Stats} stats - * @return {Object} + * ZipStream Module + * @external ZipStream + * @see {@link https://www.archiverjs.com/zip-stream/ZipStream.html} */ -Archiver.prototype._updateQueueTaskWithStats = function(task, stats) { - if (stats.isFile()) { - task.data.type = 'file'; - task.data.sourceType = 'stream'; - task.source = util.lazyReadStream(task.filepath); - } else if (stats.isDirectory() && this._moduleSupports('directory')) { - task.data.name = util.trailingSlashIt(task.data.name); - task.data.type = 'directory'; - task.data.sourcePath = util.trailingSlashIt(task.filepath); - task.data.sourceType = 'buffer'; - task.source = Buffer.concat([]); - } else if (stats.isSymbolicLink() && this._moduleSupports('symlink')) { - var linkPath = fs.readlinkSync(task.filepath); - var dirName = path.dirname(task.filepath); - task.data.type = 'symlink'; - task.data.linkname = path.relative(dirName, path.resolve(dirName, linkPath)); - task.data.sourceType = 'buffer'; - task.source = Buffer.concat([]); - } else { - if (stats.isDirectory()) { - this.emit('warning', new ArchiverError('DIRECTORYNOTSUPPORTED', task.data)); - } else if (stats.isSymbolicLink()) { - this.emit('warning', new ArchiverError('SYMLINKNOTSUPPORTED', task.data)); - } else { - this.emit('warning', new ArchiverError('ENTRYNOTSUPPORTED', task.data)); - } - return null; - } - task.data = this._normalizeEntryData(task.data, stats); +/***/ }), - return task; -}; +/***/ 43667: +/***/ (function(__unused_webpack_module, exports) { -/** - * Aborts the archiving process, taking a best-effort approach, by: - * - * - removing any pending queue tasks - * - allowing any active queue workers to finish - * - detaching internal module pipes - * - ending both sides of the Transform stream - * - * It will NOT drain any remaining sources. - * - * @return {this} - */ -Archiver.prototype.abort = function() { - if (this._state.aborted || this._state.finalized) { - return this; - } - - this._abort(); - - return this; -}; - -/** - * Appends an input source (text string, buffer, or stream) to the instance. - * - * When the instance has received, processed, and emitted the input, the `entry` - * event is fired. 
- * - * @fires Archiver#entry - * @param {(Buffer|Stream|String)} source The input source. - * @param {EntryData} data See also {@link ZipEntryData} and {@link TarEntryData}. - * @return {this} - */ -Archiver.prototype.append = function(source, data) { - if (this._state.finalize || this._state.aborted) { - this.emit('error', new ArchiverError('QUEUECLOSED')); - return this; - } +(function (global, factory) { + true ? factory(exports) : + 0; +})(this, (function (exports) { 'use strict'; - data = this._normalizeEntryData(data); + /** + * Creates a continuation function with some arguments already applied. + * + * Useful as a shorthand when combined with other control flow functions. Any + * arguments passed to the returned function are added to the arguments + * originally passed to apply. + * + * @name apply + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {Function} fn - The function you want to eventually apply all + * arguments to. Invokes with (arguments...). + * @param {...*} arguments... - Any number of arguments to automatically apply + * when the continuation is called. + * @returns {Function} the partially-applied function + * @example + * + * // using apply + * async.parallel([ + * async.apply(fs.writeFile, 'testfile1', 'test1'), + * async.apply(fs.writeFile, 'testfile2', 'test2') + * ]); + * + * + * // the same process without using apply + * async.parallel([ + * function(callback) { + * fs.writeFile('testfile1', 'test1', callback); + * }, + * function(callback) { + * fs.writeFile('testfile2', 'test2', callback); + * } + * ]); + * + * // It's possible to pass any number of additional arguments when calling the + * // continuation: + * + * node> var fn = async.apply(sys.puts, 'one'); + * node> fn('two', 'three'); + * one + * two + * three + */ + function apply(fn, ...args) { + return (...callArgs) => fn(...args,...callArgs); + } - if (typeof data.name !== 'string' || data.name.length === 0) { - this.emit('error', new ArchiverError('ENTRYNAMEREQUIRED')); - return this; - } + function initialParams (fn) { + return function (...args/*, callback*/) { + var callback = args.pop(); + return fn.call(this, args, callback); + }; + } - if (data.type === 'directory' && !this._moduleSupports('directory')) { - this.emit('error', new ArchiverError('DIRECTORYNOTSUPPORTED', { name: data.name })); - return this; - } + /* istanbul ignore file */ - source = util.normalizeInputSource(source); + var hasQueueMicrotask = typeof queueMicrotask === 'function' && queueMicrotask; + var hasSetImmediate = typeof setImmediate === 'function' && setImmediate; + var hasNextTick = typeof process === 'object' && typeof process.nextTick === 'function'; - if (Buffer.isBuffer(source)) { - data.sourceType = 'buffer'; - } else if (util.isStream(source)) { - data.sourceType = 'stream'; - } else { - this.emit('error', new ArchiverError('INPUTSTEAMBUFFERREQUIRED', { name: data.name })); - return this; - } + function fallback(fn) { + setTimeout(fn, 0); + } - this._entriesCount++; - this._queue.push({ - data: data, - source: source - }); + function wrap(defer) { + return (fn, ...args) => defer(() => fn(...args)); + } - return this; -}; + var _defer$1; -/** - * Appends a directory and its files, recursively, given its dirpath. - * - * @param {String} dirpath The source directory path. - * @param {String} destpath The destination path within the archive. - * @param {(EntryData|Function)} data See also [ZipEntryData]{@link ZipEntryData} and - * [TarEntryData]{@link TarEntryData}. 
- * @return {this}
- */
-Archiver.prototype.directory = function(dirpath, destpath, data) {
-  if (this._state.finalize || this._state.aborted) {
-    this.emit('error', new ArchiverError('QUEUECLOSED'));
-    return this;
-  }

+    if (hasQueueMicrotask) {
+        _defer$1 = queueMicrotask;
+    } else if (hasSetImmediate) {
+        _defer$1 = setImmediate;
+    } else if (hasNextTick) {
+        _defer$1 = process.nextTick;
+    } else {
+        _defer$1 = fallback;
+    }

-  if (typeof dirpath !== 'string' || dirpath.length === 0) {
-    this.emit('error', new ArchiverError('DIRECTORYDIRPATHREQUIRED'));
-    return this;
-  }

+    var setImmediate$1 = wrap(_defer$1);

-  this._pending++;

+    /**
+     * Take a sync function and make it async, passing its return value to a
+     * callback. This is useful for plugging sync functions into a waterfall,
+     * series, or other async functions. Any arguments passed to the generated
+     * function will be passed to the wrapped function (except for the final
+     * callback argument). Errors thrown will be passed to the callback.
+     *
+     * If the function passed to `asyncify` returns a Promise, that promise's
+     * resolved/rejected state will be used to call the callback, rather than simply
+     * the synchronous return value.
+     *
+     * This also means you can asyncify ES2017 `async` functions.
+     *
+     * @name asyncify
+     * @static
+     * @memberOf module:Utils
+     * @method
+     * @alias wrapSync
+     * @category Util
+     * @param {Function} func - The synchronous function, or Promise-returning
+     * function to convert to an {@link AsyncFunction}.
+     * @returns {AsyncFunction} An asynchronous wrapper of the `func`. To be
+     * invoked with `(args..., callback)`.
+     * @example
+     *
+     * // passing a regular synchronous function
+     * async.waterfall([
+     *     async.apply(fs.readFile, filename, "utf8"),
+     *     async.asyncify(JSON.parse),
+     *     function (data, next) {
+     *         // data is the result of parsing the text.
+     *         // If there was a parsing error, it would have been caught.
+     *     }
+     * ], callback);
+     *
+     * // passing a function returning a promise
+     * async.waterfall([
+     *     async.apply(fs.readFile, filename, "utf8"),
+     *     async.asyncify(function (contents) {
+     *         return db.model.create(contents);
+     *     }),
+     *     function (model, next) {
+     *         // `model` is the instantiated model object.
+     *         // If there was an error, this function would be skipped.
+ * } + * ], callback); + * + * // es2017 example, though `asyncify` is not needed if your JS environment + * // supports async functions out of the box + * var q = async.queue(async.asyncify(async function(file) { + * var intermediateStep = await processFile(file); + * return await somePromise(intermediateStep) + * })); + * + * q.push(files); + */ + function asyncify(func) { + if (isAsync(func)) { + return function (...args/*, callback*/) { + const callback = args.pop(); + const promise = func.apply(this, args); + return handlePromise(promise, callback) + } + } - if (destpath === false) { - destpath = ''; - } else if (typeof destpath !== 'string'){ - destpath = dirpath; - } + return initialParams(function (args, callback) { + var result; + try { + result = func.apply(this, args); + } catch (e) { + return callback(e); + } + // if result is Promise object + if (result && typeof result.then === 'function') { + return handlePromise(result, callback) + } else { + callback(null, result); + } + }); + } - var dataFunction = false; - if (typeof data === 'function') { - dataFunction = data; - data = {}; - } else if (typeof data !== 'object') { - data = {}; - } + function handlePromise(promise, callback) { + return promise.then(value => { + invokeCallback(callback, null, value); + }, err => { + invokeCallback(callback, err && (err instanceof Error || err.message) ? err : new Error(err)); + }); + } - var globOptions = { - stat: true, - dot: true - }; + function invokeCallback(callback, error, value) { + try { + callback(error, value); + } catch (err) { + setImmediate$1(e => { throw e }, err); + } + } - function onGlobEnd() { - this._pending--; - this._maybeFinalize(); - } + function isAsync(fn) { + return fn[Symbol.toStringTag] === 'AsyncFunction'; + } - function onGlobError(err) { - this.emit('error', err); - } + function isAsyncGenerator(fn) { + return fn[Symbol.toStringTag] === 'AsyncGenerator'; + } - function onGlobMatch(match){ - globber.pause(); + function isAsyncIterable(obj) { + return typeof obj[Symbol.asyncIterator] === 'function'; + } - var ignoreMatch = false; - var entryData = Object.assign({}, data); - entryData.name = match.relative; - entryData.prefix = destpath; - entryData.stats = match.stat; - entryData.callback = globber.resume.bind(globber); + function wrapAsync(asyncFn) { + if (typeof asyncFn !== 'function') throw new Error('expected a function') + return isAsync(asyncFn) ? asyncify(asyncFn) : asyncFn; + } - try { - if (dataFunction) { - entryData = dataFunction(entryData); + // conditionally promisify a function. + // only return a promise if a callback is omitted + function awaitify (asyncFn, arity) { + if (!arity) arity = asyncFn.length; + if (!arity) throw new Error('arity is undefined') + function awaitable (...args) { + if (typeof args[arity - 1] === 'function') { + return asyncFn.apply(this, args) + } - if (entryData === false) { - ignoreMatch = true; - } else if (typeof entryData !== 'object') { - throw new ArchiverError('DIRECTORYFUNCTIONINVALIDDATA', { dirpath: dirpath }); + return new Promise((resolve, reject) => { + args[arity - 1] = (err, ...cbArgs) => { + if (err) return reject(err) + resolve(cbArgs.length > 1 ? 
cbArgs : cbArgs[0]); + }; + asyncFn.apply(this, args); + }) } - } - } catch(e) { - this.emit('error', e); - return; - } - if (ignoreMatch) { - globber.resume(); - return; + return awaitable } - this._append(match.absolute, entryData); - } + function applyEach$1 (eachfn) { + return function applyEach(fns, ...callArgs) { + const go = awaitify(function (callback) { + var that = this; + return eachfn(fns, (fn, cb) => { + wrapAsync(fn).apply(that, callArgs.concat(cb)); + }, callback); + }); + return go; + }; + } - var globber = glob(dirpath, globOptions); - globber.on('error', onGlobError.bind(this)); - globber.on('match', onGlobMatch.bind(this)); - globber.on('end', onGlobEnd.bind(this)); + function _asyncMap(eachfn, arr, iteratee, callback) { + arr = arr || []; + var results = []; + var counter = 0; + var _iteratee = wrapAsync(iteratee); - return this; -}; + return eachfn(arr, (value, _, iterCb) => { + var index = counter++; + _iteratee(value, (err, v) => { + results[index] = v; + iterCb(err); + }); + }, err => { + callback(err, results); + }); + } -/** - * Appends a file given its filepath using a - * [lazystream]{@link https://github.com/jpommerening/node-lazystream} wrapper to - * prevent issues with open file limits. - * - * When the instance has received, processed, and emitted the file, the `entry` - * event is fired. - * - * @param {String} filepath The source filepath. - * @param {EntryData} data See also [ZipEntryData]{@link ZipEntryData} and - * [TarEntryData]{@link TarEntryData}. - * @return {this} - */ -Archiver.prototype.file = function(filepath, data) { - if (this._state.finalize || this._state.aborted) { - this.emit('error', new ArchiverError('QUEUECLOSED')); - return this; - } + function isArrayLike(value) { + return value && + typeof value.length === 'number' && + value.length >= 0 && + value.length % 1 === 0; + } - if (typeof filepath !== 'string' || filepath.length === 0) { - this.emit('error', new ArchiverError('FILEFILEPATHREQUIRED')); - return this; - } + // A temporary value used to identify if the loop should be broken. + // See #1064, #1293 + const breakLoop = {}; + var breakLoop$1 = breakLoop; - this._append(filepath, data); + function once(fn) { + function wrapper (...args) { + if (fn === null) return; + var callFn = fn; + fn = null; + callFn.apply(this, args); + } + Object.assign(wrapper, fn); + return wrapper + } - return this; -}; + function getIterator (coll) { + return coll[Symbol.iterator] && coll[Symbol.iterator](); + } -/** - * Appends multiple files that match a glob pattern. - * - * @param {String} pattern The [glob pattern]{@link https://github.com/isaacs/minimatch} to match. - * @param {Object} options See [node-readdir-glob]{@link https://github.com/yqnn/node-readdir-glob#options}. - * @param {EntryData} data See also [ZipEntryData]{@link ZipEntryData} and - * [TarEntryData]{@link TarEntryData}. - * @return {this} - */ -Archiver.prototype.glob = function(pattern, options, data) { - this._pending++; + function createArrayIterator(coll) { + var i = -1; + var len = coll.length; + return function next() { + return ++i < len ? 
{value: coll[i], key: i} : null; + } + } - options = util.defaults(options, { - stat: true, - pattern: pattern - }); + function createES2015Iterator(iterator) { + var i = -1; + return function next() { + var item = iterator.next(); + if (item.done) + return null; + i++; + return {value: item.value, key: i}; + } + } - function onGlobEnd() { - this._pending--; - this._maybeFinalize(); - } + function createObjectIterator(obj) { + var okeys = obj ? Object.keys(obj) : []; + var i = -1; + var len = okeys.length; + return function next() { + var key = okeys[++i]; + if (key === '__proto__') { + return next(); + } + return i < len ? {value: obj[key], key} : null; + }; + } - function onGlobError(err) { - this.emit('error', err); - } + function createIterator(coll) { + if (isArrayLike(coll)) { + return createArrayIterator(coll); + } - function onGlobMatch(match){ - globber.pause(); - var entryData = Object.assign({}, data); - entryData.callback = globber.resume.bind(globber); - entryData.stats = match.stat; - entryData.name = match.relative; + var iterator = getIterator(coll); + return iterator ? createES2015Iterator(iterator) : createObjectIterator(coll); + } - this._append(match.absolute, entryData); - } + function onlyOnce(fn) { + return function (...args) { + if (fn === null) throw new Error("Callback was already called."); + var callFn = fn; + fn = null; + callFn.apply(this, args); + }; + } - var globber = glob(options.cwd || '.', options); - globber.on('error', onGlobError.bind(this)); - globber.on('match', onGlobMatch.bind(this)); - globber.on('end', onGlobEnd.bind(this)); + // for async generators + function asyncEachOfLimit(generator, limit, iteratee, callback) { + let done = false; + let canceled = false; + let awaiting = false; + let running = 0; + let idx = 0; - return this; -}; + function replenish() { + //console.log('replenish') + if (running >= limit || awaiting || done) return + //console.log('replenish awaiting') + awaiting = true; + generator.next().then(({value, done: iterDone}) => { + //console.log('got value', value) + if (canceled || done) return + awaiting = false; + if (iterDone) { + done = true; + if (running <= 0) { + //console.log('done nextCb') + callback(null); + } + return; + } + running++; + iteratee(value, idx, iterateeCallback); + idx++; + replenish(); + }).catch(handleError); + } -/** - * Finalizes the instance and prevents further appending to the archive - * structure (queue will continue til drained). - * - * The `end`, `close` or `finish` events on the destination stream may fire - * right after calling this method so you should set listeners beforehand to - * properly detect stream completion. 
- * - * @return {Promise} - */ -Archiver.prototype.finalize = function() { - if (this._state.aborted) { - var abortedError = new ArchiverError('ABORTED'); - this.emit('error', abortedError); - return Promise.reject(abortedError); - } + function iterateeCallback(err, result) { + //console.log('iterateeCallback') + running -= 1; + if (canceled) return + if (err) return handleError(err) - if (this._state.finalize) { - var finalizingError = new ArchiverError('FINALIZING'); - this.emit('error', finalizingError); - return Promise.reject(finalizingError); - } + if (err === false) { + done = true; + canceled = true; + return + } - this._state.finalize = true; + if (result === breakLoop$1 || (done && running <= 0)) { + done = true; + //console.log('done iterCb') + return callback(null); + } + replenish(); + } - if (this._pending === 0 && this._queue.idle() && this._statQueue.idle()) { - this._finalize(); - } + function handleError(err) { + if (canceled) return + awaiting = false; + done = true; + callback(err); + } - var self = this; + replenish(); + } - return new Promise(function(resolve, reject) { - var errored; + var eachOfLimit$2 = (limit) => { + return (obj, iteratee, callback) => { + callback = once(callback); + if (limit <= 0) { + throw new RangeError('concurrency limit cannot be less than 1') + } + if (!obj) { + return callback(null); + } + if (isAsyncGenerator(obj)) { + return asyncEachOfLimit(obj, limit, iteratee, callback) + } + if (isAsyncIterable(obj)) { + return asyncEachOfLimit(obj[Symbol.asyncIterator](), limit, iteratee, callback) + } + var nextElem = createIterator(obj); + var done = false; + var canceled = false; + var running = 0; + var looping = false; - self._module.on('end', function() { - if (!errored) { - resolve(); - } - }) + function iterateeCallback(err, value) { + if (canceled) return + running -= 1; + if (err) { + done = true; + callback(err); + } + else if (err === false) { + done = true; + canceled = true; + } + else if (value === breakLoop$1 || (done && running <= 0)) { + done = true; + return callback(null); + } + else if (!looping) { + replenish(); + } + } - self._module.on('error', function(err) { - errored = true; - reject(err); - }) - }) -}; + function replenish () { + looping = true; + while (running < limit && !done) { + var elem = nextElem(); + if (elem === null) { + done = true; + if (running <= 0) { + callback(null); + } + return; + } + running += 1; + iteratee(elem.value, elem.key, onlyOnce(iterateeCallback)); + } + looping = false; + } -/** - * Sets the module format name used for archiving. - * - * @param {String} format The name of the format. - * @return {this} - */ -Archiver.prototype.setFormat = function(format) { - if (this._format) { - this.emit('error', new ArchiverError('FORMATSET')); - return this; - } + replenish(); + }; + }; - this._format = format; + /** + * The same as [`eachOf`]{@link module:Collections.eachOf} but runs a maximum of `limit` async operations at a + * time. + * + * @name eachOfLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.eachOf]{@link module:Collections.eachOf} + * @alias forEachOfLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each + * item in `coll`. The `key` is the item's key, or index in the case of an + * array. + * Invoked with (item, key, callback). 
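+     * @example
+     *
+     * // a minimal sketch, assuming a `urls` array and a hypothetical
+     * // `download(url, callback)` helper: process at most three items
+     * // at a time
+     * async.eachOfLimit(urls, 3, (url, index, callback) => {
+     *     download(url, callback);
+     * }, (err) => {
+     *     if (err) console.error(err);
+     * });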
+ * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ + function eachOfLimit(coll, limit, iteratee, callback) { + return eachOfLimit$2(limit)(coll, wrapAsync(iteratee), callback); + } - return this; -}; + var eachOfLimit$1 = awaitify(eachOfLimit, 4); -/** - * Sets the module used for archiving. - * - * @param {Function} module The function for archiver to interact with. - * @return {this} - */ -Archiver.prototype.setModule = function(module) { - if (this._state.aborted) { - this.emit('error', new ArchiverError('ABORTED')); - return this; - } + // eachOf implementation optimized for array-likes + function eachOfArrayLike(coll, iteratee, callback) { + callback = once(callback); + var index = 0, + completed = 0, + {length} = coll, + canceled = false; + if (length === 0) { + callback(null); + } - if (this._state.module) { - this.emit('error', new ArchiverError('MODULESET')); - return this; - } + function iteratorCallback(err, value) { + if (err === false) { + canceled = true; + } + if (canceled === true) return + if (err) { + callback(err); + } else if ((++completed === length) || value === breakLoop$1) { + callback(null); + } + } - this._module = module; - this._modulePipe(); + for (; index < length; index++) { + iteratee(coll[index], index, onlyOnce(iteratorCallback)); + } + } - return this; -}; + // a generic version of eachOf which can handle array, object, and iterator cases. + function eachOfGeneric (coll, iteratee, callback) { + return eachOfLimit$1(coll, Infinity, iteratee, callback); + } -/** - * Appends a symlink to the instance. - * - * This does NOT interact with filesystem and is used for programmatically creating symlinks. - * - * @param {String} filepath The symlink path (within archive). - * @param {String} target The target path (within archive). - * @param {Number} mode Sets the entry permissions. - * @return {this} - */ -Archiver.prototype.symlink = function(filepath, target, mode) { - if (this._state.finalize || this._state.aborted) { - this.emit('error', new ArchiverError('QUEUECLOSED')); - return this; - } + /** + * Like [`each`]{@link module:Collections.each}, except that it passes the key (or index) as the second argument + * to the iteratee. + * + * @name eachOf + * @static + * @memberOf module:Collections + * @method + * @alias forEachOf + * @category Collection + * @see [async.each]{@link module:Collections.each} + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each + * item in `coll`. + * The `key` is the item's key, or index in the case of an array. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). 
+     * @returns {Promise} a promise, if a callback is omitted
+     * @example
+     *
+     * // dev.json is a file containing a valid json object config for dev environment
+     * // test.json is a file containing a valid json object config for test environment
+     * // prod.json is a file containing a valid json object config for prod environment
+     * // invalid.json is a file with a malformed json object
+     *
+     * let configs = {}; //global variable
+     * let validConfigFileMap = {dev: 'dev.json', test: 'test.json', prod: 'prod.json'};
+     * let invalidConfigFileMap = {dev: 'dev.json', test: 'test.json', invalid: 'invalid.json'};
+     *
+     * // asynchronous function that reads a json file and parses the contents as json object
+     * function parseFile(file, key, callback) {
+     *     fs.readFile(file, "utf8", function(err, data) {
+     *         if (err) return callback(err);
+     *         try {
+     *             configs[key] = JSON.parse(data);
+     *         } catch (e) {
+     *             return callback(e);
+     *         }
+     *         callback();
+     *     });
+     * }
+     *
+     * // Using callbacks
+     * async.forEachOf(validConfigFileMap, parseFile, function (err) {
+     *     if (err) {
+     *         console.error(err);
+     *     } else {
+     *         console.log(configs);
+     *         // configs is now a map of JSON data, e.g.
+     *         // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json}
+     *     }
+     * });
+     *
+     * // Error handling
+     * async.forEachOf(invalidConfigFileMap, parseFile, function (err) {
+     *     if (err) {
+     *         console.error(err);
+     *         // JSON parse error exception
+     *     } else {
+     *         console.log(configs);
+     *     }
+     * });
+     *
+     * // Using Promises
+     * async.forEachOf(validConfigFileMap, parseFile)
+     * .then( () => {
+     *     console.log(configs);
+     *     // configs is now a map of JSON data, e.g.
+     *     // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json}
+     * }).catch( err => {
+     *     console.error(err);
+     * });
+     *
+     * // Error handling
+     * async.forEachOf(invalidConfigFileMap, parseFile)
+     * .then( () => {
+     *     console.log(configs);
+     * }).catch( err => {
+     *     console.error(err);
+     *     // JSON parse error exception
+     * });
+     *
+     * // Using async/await
+     * async () => {
+     *     try {
+     *         let result = await async.forEachOf(validConfigFileMap, parseFile);
+     *         console.log(configs);
+     *         // configs is now a map of JSON data, e.g.
+     *         // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json}
+     *     }
+     *     catch (err) {
+     *         console.log(err);
+     *     }
+     * }
+     *
+     * // Error handling
+     * async () => {
+     *     try {
+     *         let result = await async.forEachOf(invalidConfigFileMap, parseFile);
+     *         console.log(configs);
+     *     }
+     *     catch (err) {
+     *         console.log(err);
+     *         // JSON parse error exception
+     *     }
+     * }
+     *
+     */
+    function eachOf(coll, iteratee, callback) {
+        var eachOfImplementation = isArrayLike(coll) ? eachOfArrayLike : eachOfGeneric;
+        return eachOfImplementation(coll, wrapAsync(iteratee), callback);
+    }

-  if (typeof filepath !== 'string' || filepath.length === 0) {
-    this.emit('error', new ArchiverError('SYMLINKFILEPATHREQUIRED'));
-    return this;
-  }
+    var eachOf$1 = awaitify(eachOf, 3);

-  if (typeof target !== 'string' || target.length === 0) {
-    this.emit('error', new ArchiverError('SYMLINKTARGETREQUIRED', { filepath: filepath }));
-    return this;
-  }
+    /**
+     * Produces a new collection of values by mapping each value in `coll` through
+     * the `iteratee` function. The `iteratee` is called with an item from `coll`
+     * and a callback for when it has finished processing. Each of these callbacks
+     * takes 2 arguments: an `error`, and the transformed item from `coll`.
If + * `iteratee` passes an error to its callback, the main `callback` (for the + * `map` function) is immediately called with the error. + * + * Note, that since this function applies the `iteratee` to each item in + * parallel, there is no guarantee that the `iteratee` functions will complete + * in order. However, the results array will be in the same order as the + * original `coll`. + * + * If `map` is passed an Object, the results will be an Array. The results + * will roughly be in the order of the original Objects' keys (but this can + * vary across JavaScript engines). + * + * @name map + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with the transformed item. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Results is an Array of the + * transformed items from the `coll`. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * // file4.txt does not exist + * + * const fileList = ['file1.txt','file2.txt','file3.txt']; + * const withMissingFileList = ['file1.txt','file2.txt','file4.txt']; + * + * // asynchronous function that returns the file size in bytes + * function getFileSizeInBytes(file, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(err); + * } + * callback(null, stat.size); + * }); + * } + * + * // Using callbacks + * async.map(fileList, getFileSizeInBytes, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now an array of the file size in bytes for each file, e.g. + * // [ 1000, 2000, 3000] + * } + * }); + * + * // Error Handling + * async.map(withMissingFileList, getFileSizeInBytes, function(err, results) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } else { + * console.log(results); + * } + * }); + * + * // Using Promises + * async.map(fileList, getFileSizeInBytes) + * .then( results => { + * console.log(results); + * // results is now an array of the file size in bytes for each file, e.g. + * // [ 1000, 2000, 3000] + * }).catch( err => { + * console.log(err); + * }); + * + * // Error Handling + * async.map(withMissingFileList, getFileSizeInBytes) + * .then( results => { + * console.log(results); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.map(fileList, getFileSizeInBytes); + * console.log(results); + * // results is now an array of the file size in bytes for each file, e.g. 
+ * // [ 1000, 2000, 3000] + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let results = await async.map(withMissingFileList, getFileSizeInBytes); + * console.log(results); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ + function map (coll, iteratee, callback) { + return _asyncMap(eachOf$1, coll, iteratee, callback) + } + var map$1 = awaitify(map, 3); - if (!this._moduleSupports('symlink')) { - this.emit('error', new ArchiverError('SYMLINKNOTSUPPORTED', { filepath: filepath })); - return this; - } + /** + * Applies the provided arguments to each function in the array, calling + * `callback` after all functions have completed. If you only provide the first + * argument, `fns`, then it will return a function which lets you pass in the + * arguments as if it were a single function call. If more arguments are + * provided, `callback` is required while `args` is still optional. The results + * for each of the applied async functions are passed to the final callback + * as an array. + * + * @name applyEach + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} fns - A collection of {@link AsyncFunction}s + * to all call with the same arguments + * @param {...*} [args] - any number of separate arguments to pass to the + * function. + * @param {Function} [callback] - the final argument should be the callback, + * called when all functions have completed processing. + * @returns {AsyncFunction} - Returns a function that takes no args other than + * an optional callback, that is the result of applying the `args` to each + * of the functions. + * @example + * + * const appliedFn = async.applyEach([enableSearch, updateSchema], 'bucket') + * + * appliedFn((err, results) => { + * // results[0] is the results for `enableSearch` + * // results[1] is the results for `updateSchema` + * }); + * + * // partial application example: + * async.each( + * buckets, + * async (bucket) => async.applyEach([enableSearch, updateSchema], bucket)(), + * callback + * ); + */ + var applyEach = applyEach$1(map$1); - var data = {}; - data.type = 'symlink'; - data.name = filepath.replace(/\\/g, '/'); - data.linkname = target.replace(/\\/g, '/'); - data.sourceType = 'buffer'; - - if (typeof mode === "number") { - data.mode = mode; - } - - this._entriesCount++; - this._queue.push({ - data: data, - source: Buffer.concat([]) - }); - - return this; -}; + /** + * The same as [`eachOf`]{@link module:Collections.eachOf} but runs only a single async operation at a time. + * + * @name eachOfSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.eachOf]{@link module:Collections.eachOf} + * @alias forEachOfSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ + function eachOfSeries(coll, iteratee, callback) { + return eachOfLimit$1(coll, 1, iteratee, callback) + } + var eachOfSeries$1 = awaitify(eachOfSeries, 3); -/** - * Returns the current length (in bytes) that has been emitted. 
- *
- * @return {Number}
- */
-Archiver.prototype.pointer = function() {
-  return this._pointer;
-};
+    /**
+     * The same as [`map`]{@link module:Collections.map} but runs only a single async operation at a time.
+     *
+     * @name mapSeries
+     * @static
+     * @memberOf module:Collections
+     * @method
+     * @see [async.map]{@link module:Collections.map}
+     * @category Collection
+     * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
+     * @param {AsyncFunction} iteratee - An async function to apply to each item in
+     * `coll`.
+     * The iteratee should complete with the transformed item.
+     * Invoked with (item, callback).
+     * @param {Function} [callback] - A callback which is called when all `iteratee`
+     * functions have finished, or an error occurs. Results is an array of the
+     * transformed items from the `coll`. Invoked with (err, results).
+     * @returns {Promise} a promise, if no callback is passed
+     */
+    function mapSeries (coll, iteratee, callback) {
+        return _asyncMap(eachOfSeries$1, coll, iteratee, callback)
+    }
+    var mapSeries$1 = awaitify(mapSeries, 3);

-/**
- * Middleware-like helper that has yet to be fully implemented.
- *
- * @private
- * @param {Function} plugin
- * @return {this}
- */
-Archiver.prototype.use = function(plugin) {
-  this._streams.push(plugin);
-  return this;
-};
+    /**
+     * The same as [`applyEach`]{@link module:ControlFlow.applyEach} but runs only a single async operation at a time.
+     *
+     * @name applyEachSeries
+     * @static
+     * @memberOf module:ControlFlow
+     * @method
+     * @see [async.applyEach]{@link module:ControlFlow.applyEach}
+     * @category Control Flow
+     * @param {Array|Iterable|AsyncIterable|Object} fns - A collection of {@link AsyncFunction}s to all
+     * call with the same arguments
+     * @param {...*} [args] - any number of separate arguments to pass to the
+     * function.
+     * @param {Function} [callback] - the final argument should be the callback,
+     * called when all functions have completed processing.
+     * @returns {AsyncFunction} - A function, that when called, is the result of
+     * applying the `args` to the list of functions. It takes no args, other than
+     * a callback.
+     */
+    var applyEachSeries = applyEach$1(mapSeries$1);

-module.exports = Archiver;
+    const PROMISE_SYMBOL = Symbol('promiseCallback');

-/**
- * @typedef {Object} CoreOptions
- * @global
- * @property {Number} [statConcurrency=4] Sets the number of workers used to
- * process the internal fs stat queue.
- */
+    function promiseCallback () {
+        let resolve, reject;
+        function callback (err, ...args) {
+            if (err) return reject(err)
+            resolve(args.length > 1 ? args : args[0]);
+        }

-/**
- * @typedef {Object} TransformOptions
- * @property {Boolean} [allowHalfOpen=true] If set to false, then the stream
- * will automatically end the readable side when the writable side ends and vice
- * versa.
- * @property {Boolean} [readableObjectMode=false] Sets objectMode for readable
- * side of the stream. Has no effect if objectMode is true.
- * @property {Boolean} [writableObjectMode=false] Sets objectMode for writable
- * side of the stream. Has no effect if objectMode is true.
- * @property {Boolean} [decodeStrings=true] Whether or not to decode strings
- * into Buffers before passing them to _write(). `Writable`
- * @property {String} [encoding=NULL] If specified, then buffers will be decoded
- * to strings using the specified encoding.
`Readable` - * @property {Number} [highWaterMark=16kb] The maximum number of bytes to store - * in the internal buffer before ceasing to read from the underlying resource. - * `Readable` `Writable` - * @property {Boolean} [objectMode=false] Whether this stream should behave as a - * stream of objects. Meaning that stream.read(n) returns a single value instead - * of a Buffer of size n. `Readable` `Writable` - */ + callback[PROMISE_SYMBOL] = new Promise((res, rej) => { + resolve = res, + reject = rej; + }); -/** - * @typedef {Object} EntryData - * @property {String} name Sets the entry name including internal path. - * @property {(String|Date)} [date=NOW()] Sets the entry date. - * @property {Number} [mode=D:0755/F:0644] Sets the entry permissions. - * @property {String} [prefix] Sets a path prefix for the entry name. Useful - * when working with methods like `directory` or `glob`. - * @property {fs.Stats} [stats] Sets the fs stat data for this entry allowing - * for reduction of fs stat calls when stat data is already known. - */ + return callback + } -/** - * @typedef {Object} ErrorData - * @property {String} message The message of the error. - * @property {String} code The error code assigned to this error. - * @property {String} data Additional data provided for reporting or debugging (where available). - */ + /** + * Determines the best order for running the {@link AsyncFunction}s in `tasks`, based on + * their requirements. Each function can optionally depend on other functions + * being completed first, and each function is run as soon as its requirements + * are satisfied. + * + * If any of the {@link AsyncFunction}s pass an error to their callback, the `auto` sequence + * will stop. Further tasks will not execute (so any other functions depending + * on it will not run), and the main `callback` is immediately called with the + * error. + * + * {@link AsyncFunction}s also receive an object containing the results of functions which + * have completed so far as the first argument, if they have dependencies. If a + * task function has no dependencies, it will only be passed a callback. + * + * @name auto + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Object} tasks - An object. Each of its properties is either a + * function or an array of requirements, with the {@link AsyncFunction} itself the last item + * in the array. The object's key of a property serves as the name of the task + * defined by that property, i.e. can be used when specifying requirements for + * other tasks. The function receives one or two arguments: + * * a `results` object, containing the results of the previously executed + * functions, only passed if the task has any dependencies, + * * a `callback(err, result)` function, which must be called when finished, + * passing an `error` (which can be `null`) and the result of the function's + * execution. + * @param {number} [concurrency=Infinity] - An optional `integer` for + * determining the maximum number of tasks that can be run in parallel. By + * default, as many as possible. + * @param {Function} [callback] - An optional callback which is called when all + * the tasks have been completed. It receives the `err` argument if any `tasks` + * pass an error to their callback. Results are always returned; however, if an + * error occurs, no further `tasks` will be performed, and the results object + * will only contain partial results. Invoked with (err, results). 
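+     * @example
+     *
+     * // a minimal sketch of the optional `concurrency` argument, reusing the
+     * // task graph from the examples that follow: passing 2 between `tasks`
+     * // and the callback caps auto at two tasks in flight at once
+     * async.auto({
+     *     get_data: function(callback) { callback(null, 'data'); },
+     *     make_folder: function(callback) { callback(null, 'folder'); },
+     *     write_file: ['get_data', 'make_folder', function(results, callback) {
+     *         callback(null, 'filename');
+     *     }]
+     * }, 2, function(err, results) {
+     *     // at most two of the tasks above were ever running concurrently
+     * });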
+     * @returns {Promise} a promise, if a callback is not passed
+     * @example
+     *
+     * //Using Callbacks
+     * async.auto({
+     *     get_data: function(callback) {
+     *         // async code to get some data
+     *         callback(null, 'data', 'converted to array');
+     *     },
+     *     make_folder: function(callback) {
+     *         // async code to create a directory to store a file in
+     *         // this is run at the same time as getting the data
+     *         callback(null, 'folder');
+     *     },
+     *     write_file: ['get_data', 'make_folder', function(results, callback) {
+     *         // once there is some data and the directory exists,
+     *         // write the data to a file in the directory
+     *         callback(null, 'filename');
+     *     }],
+     *     email_link: ['write_file', function(results, callback) {
+     *         // once the file is written let's email a link to it...
+     *         callback(null, {'file':results.write_file, 'email':'user@example.com'});
+     *     }]
+     * }, function(err, results) {
+     *     if (err) {
+     *         console.log('err = ', err);
+     *     }
+     *     console.log('results = ', results);
+     *     // results = {
+     *     //     get_data: ['data', 'converted to array']
+     *     //     make_folder: 'folder',
+     *     //     write_file: 'filename'
+     *     //     email_link: { file: 'filename', email: 'user@example.com' }
+     *     // }
+     * });
+     *
+     * //Using Promises
+     * async.auto({
+     *     get_data: function(callback) {
+     *         console.log('in get_data');
+     *         // async code to get some data
+     *         callback(null, 'data', 'converted to array');
+     *     },
+     *     make_folder: function(callback) {
+     *         console.log('in make_folder');
+     *         // async code to create a directory to store a file in
+     *         // this is run at the same time as getting the data
+     *         callback(null, 'folder');
+     *     },
+     *     write_file: ['get_data', 'make_folder', function(results, callback) {
+     *         // once there is some data and the directory exists,
+     *         // write the data to a file in the directory
+     *         callback(null, 'filename');
+     *     }],
+     *     email_link: ['write_file', function(results, callback) {
+     *         // once the file is written let's email a link to it...
+     *         callback(null, {'file':results.write_file, 'email':'user@example.com'});
+     *     }]
+     * }).then(results => {
+     *     console.log('results = ', results);
+     *     // results = {
+     *     //     get_data: ['data', 'converted to array']
+     *     //     make_folder: 'folder',
+     *     //     write_file: 'filename'
+     *     //     email_link: { file: 'filename', email: 'user@example.com' }
+     *     // }
+     * }).catch(err => {
+     *     console.log('err = ', err);
+     * });
+     *
+     * //Using async/await
+     * async () => {
+     *     try {
+     *         let results = await async.auto({
+     *             get_data: function(callback) {
+     *                 // async code to get some data
+     *                 callback(null, 'data', 'converted to array');
+     *             },
+     *             make_folder: function(callback) {
+     *                 // async code to create a directory to store a file in
+     *                 // this is run at the same time as getting the data
+     *                 callback(null, 'folder');
+     *             },
+     *             write_file: ['get_data', 'make_folder', function(results, callback) {
+     *                 // once there is some data and the directory exists,
+     *                 // write the data to a file in the directory
+     *                 callback(null, 'filename');
+     *             }],
+     *             email_link: ['write_file', function(results, callback) {
+     *                 // once the file is written let's email a link to it...
+ * callback(null, {'file':results.write_file, 'email':'user@example.com'}); + * }] + * }); + * console.log('results = ', results); + * // results = { + * // get_data: ['data', 'converted to array'] + * // make_folder; 'folder', + * // write_file: 'filename' + * // email_link: { file: 'filename', email: 'user@example.com' } + * // } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ + function auto(tasks, concurrency, callback) { + if (typeof concurrency !== 'number') { + // concurrency is optional, shift the args. + callback = concurrency; + concurrency = null; + } + callback = once(callback || promiseCallback()); + var numTasks = Object.keys(tasks).length; + if (!numTasks) { + return callback(null); + } + if (!concurrency) { + concurrency = numTasks; + } -/** - * @typedef {Object} ProgressData - * @property {Object} entries - * @property {Number} entries.total Number of entries that have been appended. - * @property {Number} entries.processed Number of entries that have been processed. - * @property {Object} fs - * @property {Number} fs.totalBytes Number of bytes that have been appended. Calculated asynchronously and might not be accurate: it growth while entries are added. (based on fs.Stats) - * @property {Number} fs.processedBytes Number of bytes that have been processed. (based on fs.Stats) - */ + var results = {}; + var runningTasks = 0; + var canceled = false; + var hasError = false; + var listeners = Object.create(null); -/***/ }), + var readyTasks = []; -/***/ 96479: -/***/ ((module, exports, __nccwpck_require__) => { + // for cycle detection: + var readyToCheck = []; // tasks that have been identified as reachable + // without the possibility of returning to an ancestor task + var uncheckedDependencies = {}; -/** - * Archiver Core - * - * @ignore - * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE} - * @copyright (c) 2012-2014 Chris Talkington, contributors. 
- */ + Object.keys(tasks).forEach(key => { + var task = tasks[key]; + if (!Array.isArray(task)) { + // no dependencies + enqueueTask(key, [task]); + readyToCheck.push(key); + return; + } -var util = __nccwpck_require__(73837); + var dependencies = task.slice(0, task.length - 1); + var remainingDependencies = dependencies.length; + if (remainingDependencies === 0) { + enqueueTask(key, task); + readyToCheck.push(key); + return; + } + uncheckedDependencies[key] = remainingDependencies; -const ERROR_CODES = { - 'ABORTED': 'archive was aborted', - 'DIRECTORYDIRPATHREQUIRED': 'diretory dirpath argument must be a non-empty string value', - 'DIRECTORYFUNCTIONINVALIDDATA': 'invalid data returned by directory custom data function', - 'ENTRYNAMEREQUIRED': 'entry name must be a non-empty string value', - 'FILEFILEPATHREQUIRED': 'file filepath argument must be a non-empty string value', - 'FINALIZING': 'archive already finalizing', - 'QUEUECLOSED': 'queue closed', - 'NOENDMETHOD': 'no suitable finalize/end method defined by module', - 'DIRECTORYNOTSUPPORTED': 'support for directory entries not defined by module', - 'FORMATSET': 'archive format already set', - 'INPUTSTEAMBUFFERREQUIRED': 'input source must be valid Stream or Buffer instance', - 'MODULESET': 'module already set', - 'SYMLINKNOTSUPPORTED': 'support for symlink entries not defined by module', - 'SYMLINKFILEPATHREQUIRED': 'symlink filepath argument must be a non-empty string value', - 'SYMLINKTARGETREQUIRED': 'symlink target argument must be a non-empty string value', - 'ENTRYNOTSUPPORTED': 'entry not supported' -}; + dependencies.forEach(dependencyName => { + if (!tasks[dependencyName]) { + throw new Error('async.auto task `' + key + + '` has a non-existent dependency `' + + dependencyName + '` in ' + + dependencies.join(', ')); + } + addListener(dependencyName, () => { + remainingDependencies--; + if (remainingDependencies === 0) { + enqueueTask(key, task); + } + }); + }); + }); -function ArchiverError(code, data) { - Error.captureStackTrace(this, this.constructor); - //this.name = this.constructor.name; - this.message = ERROR_CODES[code] || code; - this.code = code; - this.data = data; -} + checkForDeadlocks(); + processQueue(); -util.inherits(ArchiverError, Error); + function enqueueTask(key, task) { + readyTasks.push(() => runTask(key, task)); + } -exports = module.exports = ArchiverError; + function processQueue() { + if (canceled) return + if (readyTasks.length === 0 && runningTasks === 0) { + return callback(null, results); + } + while(readyTasks.length && runningTasks < concurrency) { + var run = readyTasks.shift(); + run(); + } -/***/ }), + } -/***/ 28814: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + function addListener(taskName, fn) { + var taskListeners = listeners[taskName]; + if (!taskListeners) { + taskListeners = listeners[taskName] = []; + } -/** - * JSON Format Plugin - * - * @module plugins/json - * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE} - * @copyright (c) 2012-2014 Chris Talkington, contributors. 
- */ -var inherits = (__nccwpck_require__(73837).inherits); -var Transform = (__nccwpck_require__(92567).Transform); + taskListeners.push(fn); + } -var crc32 = __nccwpck_require__(44985); -var util = __nccwpck_require__(6582); + function taskComplete(taskName) { + var taskListeners = listeners[taskName] || []; + taskListeners.forEach(fn => fn()); + processQueue(); + } -/** - * @constructor - * @param {(JsonOptions|TransformOptions)} options - */ -var Json = function(options) { - if (!(this instanceof Json)) { - return new Json(options); - } - options = this.options = util.defaults(options, {}); + function runTask(key, task) { + if (hasError) return; - Transform.call(this, options); + var taskCallback = onlyOnce((err, ...result) => { + runningTasks--; + if (err === false) { + canceled = true; + return + } + if (result.length < 2) { + [result] = result; + } + if (err) { + var safeResults = {}; + Object.keys(results).forEach(rkey => { + safeResults[rkey] = results[rkey]; + }); + safeResults[key] = result; + hasError = true; + listeners = Object.create(null); + if (canceled) return + callback(err, safeResults); + } else { + results[key] = result; + taskComplete(key); + } + }); - this.supports = { - directory: true, - symlink: true - }; + runningTasks++; + var taskFn = wrapAsync(task[task.length - 1]); + if (task.length > 1) { + taskFn(results, taskCallback); + } else { + taskFn(taskCallback); + } + } - this.files = []; -}; + function checkForDeadlocks() { + // Kahn's algorithm + // https://en.wikipedia.org/wiki/Topological_sorting#Kahn.27s_algorithm + // http://connalle.blogspot.com/2013/10/topological-sortingkahn-algorithm.html + var currentTask; + var counter = 0; + while (readyToCheck.length) { + currentTask = readyToCheck.pop(); + counter++; + getDependents(currentTask).forEach(dependent => { + if (--uncheckedDependencies[dependent] === 0) { + readyToCheck.push(dependent); + } + }); + } -inherits(Json, Transform); + if (counter !== numTasks) { + throw new Error( + 'async.auto cannot execute tasks due to a recursive dependency' + ); + } + } -/** - * [_transform description] - * - * @private - * @param {Buffer} chunk - * @param {String} encoding - * @param {Function} callback - * @return void - */ -Json.prototype._transform = function(chunk, encoding, callback) { - callback(null, chunk); -}; + function getDependents(taskName) { + var result = []; + Object.keys(tasks).forEach(key => { + const task = tasks[key]; + if (Array.isArray(task) && task.indexOf(taskName) >= 0) { + result.push(key); + } + }); + return result; + } -/** - * [_writeStringified description] - * - * @private - * @return void - */ -Json.prototype._writeStringified = function() { - var fileString = JSON.stringify(this.files); - this.write(fileString); -}; + return callback[PROMISE_SYMBOL] + } -/** - * [append description] - * - * @param {(Buffer|Stream)} source - * @param {EntryData} data - * @param {Function} callback - * @return void - */ -Json.prototype.append = function(source, data, callback) { - var self = this; + var FN_ARGS = /^(?:async\s+)?(?:function)?\s*\w*\s*\(\s*([^)]+)\s*\)(?:\s*{)/; + var ARROW_FN_ARGS = /^(?:async\s+)?\(?\s*([^)=]+)\s*\)?(?:\s*=>)/; + var FN_ARG_SPLIT = /,/; + var FN_ARG = /(=.+)?(\s*)$/; - data.crc32 = 0; + function stripComments(string) { + let stripped = ''; + let index = 0; + let endBlockComment = string.indexOf('*/'); + while (index < string.length) { + if (string[index] === '/' && string[index+1] === '/') { + // inline comment + let endIndex = string.indexOf('\n', index); + index = 
(endIndex === -1) ? string.length : endIndex; + } else if ((endBlockComment !== -1) && (string[index] === '/') && (string[index+1] === '*')) { + // block comment + let endIndex = string.indexOf('*/', index); + if (endIndex !== -1) { + index = endIndex + 2; + endBlockComment = string.indexOf('*/', index); + } else { + stripped += string[index]; + index++; + } + } else { + stripped += string[index]; + index++; + } + } + return stripped; + } - function onend(err, sourceBuffer) { - if (err) { - callback(err); - return; + function parseParams(func) { + const src = stripComments(func.toString()); + let match = src.match(FN_ARGS); + if (!match) { + match = src.match(ARROW_FN_ARGS); + } + if (!match) throw new Error('could not parse args in autoInject\nSource:\n' + src) + let [, args] = match; + return args + .replace(/\s/g, '') + .split(FN_ARG_SPLIT) + .map((arg) => arg.replace(FN_ARG, '').trim()); } - data.size = sourceBuffer.length || 0; - data.crc32 = crc32.unsigned(sourceBuffer); + /** + * A dependency-injected version of the [async.auto]{@link module:ControlFlow.auto} function. Dependent + * tasks are specified as parameters to the function, after the usual callback + * parameter, with the parameter names matching the names of the tasks it + * depends on. This can provide even more readable task graphs which can be + * easier to maintain. + * + * If a final callback is specified, the task results are similarly injected, + * specified as named parameters after the initial error parameter. + * + * The autoInject function is purely syntactic sugar and its semantics are + * otherwise equivalent to [async.auto]{@link module:ControlFlow.auto}. + * + * @name autoInject + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.auto]{@link module:ControlFlow.auto} + * @category Control Flow + * @param {Object} tasks - An object, each of whose properties is an {@link AsyncFunction} of + * the form 'func([dependencies...], callback). The object's key of a property + * serves as the name of the task defined by that property, i.e. can be used + * when specifying requirements for other tasks. + * * The `callback` parameter is a `callback(err, result)` which must be called + * when finished, passing an `error` (which can be `null`) and the result of + * the function's execution. The remaining parameters name other tasks on + * which the task is dependent, and the results from those tasks are the + * arguments of those parameters. + * @param {Function} [callback] - An optional callback which is called when all + * the tasks have been completed. It receives the `err` argument if any `tasks` + * pass an error to their callback, and a `results` object with any completed + * task results, similar to `auto`. + * @returns {Promise} a promise, if no callback is passed + * @example + * + * // The example from `auto` can be rewritten as follows: + * async.autoInject({ + * get_data: function(callback) { + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: function(get_data, make_folder, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }, + * email_link: function(write_file, callback) { + * // once the file is written let's email a link to it... 
+ * // write_file contains the filename returned by write_file. + * callback(null, {'file':write_file, 'email':'user@example.com'}); + * } + * }, function(err, results) { + * console.log('err = ', err); + * console.log('email_link = ', results.email_link); + * }); + * + * // If you are using a JS minifier that mangles parameter names, `autoInject` + * // will not work with plain functions, since the parameter names will be + * // collapsed to a single letter identifier. To work around this, you can + * // explicitly specify the names of the parameters your task function needs + * // in an array, similar to Angular.js dependency injection. + * + * // This still has an advantage over plain `auto`, since the results a task + * // depends on are still spread into arguments. + * async.autoInject({ + * //... + * write_file: ['get_data', 'make_folder', function(get_data, make_folder, callback) { + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(write_file, callback) { + * callback(null, {'file':write_file, 'email':'user@example.com'}); + * }] + * //... + * }, function(err, results) { + * console.log('err = ', err); + * console.log('email_link = ', results.email_link); + * }); + */ + function autoInject(tasks, callback) { + var newTasks = {}; - self.files.push(data); + Object.keys(tasks).forEach(key => { + var taskFn = tasks[key]; + var params; + var fnIsAsync = isAsync(taskFn); + var hasNoDeps = + (!fnIsAsync && taskFn.length === 1) || + (fnIsAsync && taskFn.length === 0); - callback(null, data); - } + if (Array.isArray(taskFn)) { + params = [...taskFn]; + taskFn = params.pop(); - if (data.sourceType === 'buffer') { - onend(null, source); - } else if (data.sourceType === 'stream') { - util.collectStream(source, onend); - } -}; + newTasks[key] = params.concat(params.length > 0 ? newTask : taskFn); + } else if (hasNoDeps) { + // no dependencies, use the function as-is + newTasks[key] = taskFn; + } else { + params = parseParams(taskFn); + if ((taskFn.length === 0 && !fnIsAsync) && params.length === 0) { + throw new Error("autoInject task functions require explicit parameters."); + } -/** - * [finalize description] - * - * @return void - */ -Json.prototype.finalize = function() { - this._writeStringified(); - this.end(); -}; + // remove callback param + if (!fnIsAsync) params.pop(); -module.exports = Json; + newTasks[key] = params.concat(newTask); + } -/** - * @typedef {Object} JsonOptions - * @global - */ + function newTask(results, taskCb) { + var newArgs = params.map(name => results[name]); + newArgs.push(taskCb); + wrapAsync(taskFn)(...newArgs); + } + }); + return auto(newTasks, callback); + } -/***/ }), + // Simple doubly linked list (https://en.wikipedia.org/wiki/Doubly_linked_list) implementation + // used for queues. This implementation assumes that the node provided by the user can be modified + // to adjust the next and last properties. We implement only the minimal functionality + // for queue support. + class DLL { + constructor() { + this.head = this.tail = null; + this.length = 0; + } -/***/ 2814: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + removeLink(node) { + if (node.prev) node.prev.next = node.next; + else this.head = node.next; + if (node.next) node.next.prev = node.prev; + else this.tail = node.prev; -/** - * TAR Format Plugin - * - * @module plugins/tar - * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE} - * @copyright (c) 2012-2014 Chris Talkington, contributors. 
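// A short sketch of the injection performed by newTask() above: parameter
// names are parsed out of each task function and matched against task names,
// so `total` receives the resolved values of `x` and `y` positionally.
// Assumes the published `async` package.
const async = require('async');

async.autoInject({
    x: callback => callback(null, 2),
    y: callback => callback(null, 3),
    total: (x, y, callback) => callback(null, x + y)
}, (err, results) => {
    console.log(results.total); // 5
});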
- */ -var zlib = __nccwpck_require__(59796); + node.prev = node.next = null; + this.length -= 1; + return node; + } -var engine = __nccwpck_require__(8906); -var util = __nccwpck_require__(6582); + empty () { + while(this.head) this.shift(); + return this; + } -/** - * @constructor - * @param {TarOptions} options - */ -var Tar = function(options) { - if (!(this instanceof Tar)) { - return new Tar(options); - } + insertAfter(node, newNode) { + newNode.prev = node; + newNode.next = node.next; + if (node.next) node.next.prev = newNode; + else this.tail = newNode; + node.next = newNode; + this.length += 1; + } - options = this.options = util.defaults(options, { - gzip: false - }); + insertBefore(node, newNode) { + newNode.prev = node.prev; + newNode.next = node; + if (node.prev) node.prev.next = newNode; + else this.head = newNode; + node.prev = newNode; + this.length += 1; + } - if (typeof options.gzipOptions !== 'object') { - options.gzipOptions = {}; - } + unshift(node) { + if (this.head) this.insertBefore(this.head, node); + else setInitial(this, node); + } - this.supports = { - directory: true, - symlink: true - }; + push(node) { + if (this.tail) this.insertAfter(this.tail, node); + else setInitial(this, node); + } - this.engine = engine.pack(options); - this.compressor = false; + shift() { + return this.head && this.removeLink(this.head); + } - if (options.gzip) { - this.compressor = zlib.createGzip(options.gzipOptions); - this.compressor.on('error', this._onCompressorError.bind(this)); - } -}; + pop() { + return this.tail && this.removeLink(this.tail); + } -/** - * [_onCompressorError description] - * - * @private - * @param {Error} err - * @return void - */ -Tar.prototype._onCompressorError = function(err) { - this.engine.emit('error', err); -}; + toArray() { + return [...this] + } -/** - * [append description] - * - * @param {(Buffer|Stream)} source - * @param {TarEntryData} data - * @param {Function} callback - * @return void - */ -Tar.prototype.append = function(source, data, callback) { - var self = this; + *[Symbol.iterator] () { + var cur = this.head; + while (cur) { + yield cur.data; + cur = cur.next; + } + } - data.mtime = data.date; + remove (testFn) { + var curr = this.head; + while(curr) { + var {next} = curr; + if (testFn(curr)) { + this.removeLink(curr); + } + curr = next; + } + return this; + } + } - function append(err, sourceBuffer) { - if (err) { - callback(err); - return; + function setInitial(dll, node) { + dll.length = 1; + dll.head = dll.tail = node; } - self.engine.entry(data, sourceBuffer, function(err) { - callback(err, data); - }); - } + function queue$1(worker, concurrency, payload) { + if (concurrency == null) { + concurrency = 1; + } + else if(concurrency === 0) { + throw new RangeError('Concurrency must not be zero'); + } - if (data.sourceType === 'buffer') { - append(null, source); - } else if (data.sourceType === 'stream' && data.stats) { - data.size = data.stats.size; + var _worker = wrapAsync(worker); + var numRunning = 0; + var workersList = []; + const events = { + error: [], + drain: [], + saturated: [], + unsaturated: [], + empty: [] + }; - var entry = self.engine.entry(data, function(err) { - callback(err, data); - }); + function on (event, handler) { + events[event].push(handler); + } - source.pipe(entry); - } else if (data.sourceType === 'stream') { - util.collectStream(source, append); - } -}; + function once (event, handler) { + const handleAndRemove = (...args) => { + off(event, handleAndRemove); + handler(...args); + }; + 
events[event].push(handleAndRemove); + } -/** - * [finalize description] - * - * @return void - */ -Tar.prototype.finalize = function() { - this.engine.finalize(); -}; + function off (event, handler) { + if (!event) return Object.keys(events).forEach(ev => events[ev] = []) + if (!handler) return events[event] = [] + events[event] = events[event].filter(ev => ev !== handler); + } -/** - * [on description] - * - * @return this.engine - */ -Tar.prototype.on = function() { - return this.engine.on.apply(this.engine, arguments); -}; + function trigger (event, ...args) { + events[event].forEach(handler => handler(...args)); + } -/** - * [pipe description] - * - * @param {String} destination - * @param {Object} options - * @return this.engine - */ -Tar.prototype.pipe = function(destination, options) { - if (this.compressor) { - return this.engine.pipe.apply(this.engine, [this.compressor]).pipe(destination, options); - } else { - return this.engine.pipe.apply(this.engine, arguments); - } -}; + var processingScheduled = false; + function _insert(data, insertAtFront, rejectOnError, callback) { + if (callback != null && typeof callback !== 'function') { + throw new Error('task callback must be a function'); + } + q.started = true; -/** - * [unpipe description] - * - * @return this.engine - */ -Tar.prototype.unpipe = function() { - if (this.compressor) { - return this.compressor.unpipe.apply(this.compressor, arguments); - } else { - return this.engine.unpipe.apply(this.engine, arguments); - } -}; + var res, rej; + function promiseCallback (err, ...args) { + // we don't care about the error, let the global error handler + // deal with it + if (err) return rejectOnError ? rej(err) : res() + if (args.length <= 1) return res(args[0]) + res(args); + } -module.exports = Tar; + var item = q._createTaskItem( + data, + rejectOnError ? promiseCallback : + (callback || promiseCallback) + ); -/** - * @typedef {Object} TarOptions - * @global - * @property {Boolean} [gzip=false] Compress the tar archive using gzip. - * @property {Object} [gzipOptions] Passed to [zlib]{@link https://nodejs.org/api/zlib.html#zlib_class_options} - * to control compression. - * @property {*} [*] See [tar-stream]{@link https://github.com/mafintosh/tar-stream} documentation for additional properties. - */ + if (insertAtFront) { + q._tasks.unshift(item); + } else { + q._tasks.push(item); + } -/** - * @typedef {Object} TarEntryData - * @global - * @property {String} name Sets the entry name including internal path. - * @property {(String|Date)} [date=NOW()] Sets the entry date. - * @property {Number} [mode=D:0755/F:0644] Sets the entry permissions. - * @property {String} [prefix] Sets a path prefix for the entry name. Useful - * when working with methods like `directory` or `glob`. - * @property {fs.Stats} [stats] Sets the fs stat data for this entry allowing - * for reduction of fs stat calls when stat data is already known. - */ + if (!processingScheduled) { + processingScheduled = true; + setImmediate$1(() => { + processingScheduled = false; + q.process(); + }); + } -/** - * TarStream Module - * @external TarStream - * @see {@link https://github.com/mafintosh/tar-stream} - */ - - -/***/ }), - -/***/ 95653: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -/** - * ZIP Format Plugin - * - * @module plugins/zip - * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE} - * @copyright (c) 2012-2014 Chris Talkington, contributors. 
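// Sketch of the promise plumbing in _insert() above, assuming the published
// `async` package: passing a callback suppresses the promise, push() without
// a callback resolves even on error (errors go to the queue's 'error' event
// instead), and pushAsync() rejects its promise on error.
const async = require('async');

const q = async.queue((task, callback) => {
    setTimeout(() => callback(task.fail ? new Error('boom') : null), 10);
}, 2);

q.push({ name: 'a' }, err => console.log('a done:', err)); // callback form

q.push({ name: 'b' }).then(() => console.log('b done')); // promise form

q.pushAsync({ fail: true }).catch(err => console.log(err.message)); // boom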
- */ -var engine = __nccwpck_require__(626); -var util = __nccwpck_require__(6582); - -/** - * @constructor - * @param {ZipOptions} [options] - * @param {String} [options.comment] Sets the zip archive comment. - * @param {Boolean} [options.forceLocalTime=false] Forces the archive to contain local file times instead of UTC. - * @param {Boolean} [options.forceZip64=false] Forces the archive to contain ZIP64 headers. - * @param {Boolean} [options.namePrependSlash=false] Prepends a forward slash to archive file paths. - * @param {Boolean} [options.store=false] Sets the compression method to STORE. - * @param {Object} [options.zlib] Passed to [zlib]{@link https://nodejs.org/api/zlib.html#zlib_class_options} - */ -var Zip = function(options) { - if (!(this instanceof Zip)) { - return new Zip(options); - } - - options = this.options = util.defaults(options, { - comment: '', - forceUTC: false, - namePrependSlash: false, - store: false - }); + if (rejectOnError || !callback) { + return new Promise((resolve, reject) => { + res = resolve; + rej = reject; + }) + } + } - this.supports = { - directory: true, - symlink: true - }; + function _createCB(tasks) { + return function (err, ...args) { + numRunning -= 1; - this.engine = new engine(options); -}; + for (var i = 0, l = tasks.length; i < l; i++) { + var task = tasks[i]; -/** - * @param {(Buffer|Stream)} source - * @param {ZipEntryData} data - * @param {String} data.name Sets the entry name including internal path. - * @param {(String|Date)} [data.date=NOW()] Sets the entry date. - * @param {Number} [data.mode=D:0755/F:0644] Sets the entry permissions. - * @param {String} [data.prefix] Sets a path prefix for the entry name. Useful - * when working with methods like `directory` or `glob`. - * @param {fs.Stats} [data.stats] Sets the fs stat data for this entry allowing - * for reduction of fs stat calls when stat data is already known. - * @param {Boolean} [data.store=ZipOptions.store] Sets the compression method to STORE. - * @param {Function} callback - * @return void - */ -Zip.prototype.append = function(source, data, callback) { - this.engine.entry(source, data, callback); -}; + var index = workersList.indexOf(task); + if (index === 0) { + workersList.shift(); + } else if (index > 0) { + workersList.splice(index, 1); + } -/** - * @return void - */ -Zip.prototype.finalize = function() { - this.engine.finalize(); -}; + task.callback(err, ...args); -/** - * @return this.engine - */ -Zip.prototype.on = function() { - return this.engine.on.apply(this.engine, arguments); -}; + if (err != null) { + trigger('error', err, task.data); + } + } -/** - * @return this.engine - */ -Zip.prototype.pipe = function() { - return this.engine.pipe.apply(this.engine, arguments); -}; + if (numRunning <= (q.concurrency - q.buffer) ) { + trigger('unsaturated'); + } -/** - * @return this.engine - */ -Zip.prototype.unpipe = function() { - return this.engine.unpipe.apply(this.engine, arguments); -}; + if (q.idle()) { + trigger('drain'); + } + q.process(); + }; + } -module.exports = Zip; + function _maybeDrain(data) { + if (data.length === 0 && q.idle()) { + // call drain immediately if there are no tasks + setImmediate$1(() => trigger('drain')); + return true + } + return false + } -/** - * @typedef {Object} ZipOptions - * @global - * @property {String} [comment] Sets the zip archive comment. - * @property {Boolean} [forceLocalTime=false] Forces the archive to contain local file times instead of UTC. 
- * @property {Boolean} [forceZip64=false] Forces the archive to contain ZIP64 headers. - * @prpperty {Boolean} [namePrependSlash=false] Prepends a forward slash to archive file paths. - * @property {Boolean} [store=false] Sets the compression method to STORE. - * @property {Object} [zlib] Passed to [zlib]{@link https://nodejs.org/api/zlib.html#zlib_class_options} - * to control compression. - * @property {*} [*] See [zip-stream]{@link https://archiverjs.com/zip-stream/ZipStream.html} documentation for current list of properties. - */ + const eventMethod = (name) => (handler) => { + if (!handler) { + return new Promise((resolve, reject) => { + once(name, (err, data) => { + if (err) return reject(err) + resolve(data); + }); + }) + } + off(name); + on(name, handler); -/** - * @typedef {Object} ZipEntryData - * @global - * @property {String} name Sets the entry name including internal path. - * @property {(String|Date)} [date=NOW()] Sets the entry date. - * @property {Number} [mode=D:0755/F:0644] Sets the entry permissions. - * @property {Boolean} [namePrependSlash=ZipOptions.namePrependSlash] Prepends a forward slash to archive file paths. - * @property {String} [prefix] Sets a path prefix for the entry name. Useful - * when working with methods like `directory` or `glob`. - * @property {fs.Stats} [stats] Sets the fs stat data for this entry allowing - * for reduction of fs stat calls when stat data is already known. - * @property {Boolean} [store=ZipOptions.store] Sets the compression method to STORE. - */ + }; -/** - * ZipStream Module - * @external ZipStream - * @see {@link https://www.archiverjs.com/zip-stream/ZipStream.html} - */ + var isProcessing = false; + var q = { + _tasks: new DLL(), + _createTaskItem (data, callback) { + return { + data, + callback + }; + }, + *[Symbol.iterator] () { + yield* q._tasks[Symbol.iterator](); + }, + concurrency, + payload, + buffer: concurrency / 4, + started: false, + paused: false, + push (data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return + return data.map(datum => _insert(datum, false, false, callback)) + } + return _insert(data, false, false, callback); + }, + pushAsync (data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return + return data.map(datum => _insert(datum, false, true, callback)) + } + return _insert(data, false, true, callback); + }, + kill () { + off(); + q._tasks.empty(); + }, + unshift (data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return + return data.map(datum => _insert(datum, true, false, callback)) + } + return _insert(data, true, false, callback); + }, + unshiftAsync (data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return + return data.map(datum => _insert(datum, true, true, callback)) + } + return _insert(data, true, true, callback); + }, + remove (testFn) { + q._tasks.remove(testFn); + }, + process () { + // Avoid trying to start too many processing operations. This can occur + // when callbacks resolve synchronously (#1267). 
+ if (isProcessing) { + return; + } + isProcessing = true; + while(!q.paused && numRunning < q.concurrency && q._tasks.length){ + var tasks = [], data = []; + var l = q._tasks.length; + if (q.payload) l = Math.min(l, q.payload); + for (var i = 0; i < l; i++) { + var node = q._tasks.shift(); + tasks.push(node); + workersList.push(node); + data.push(node.data); + } + numRunning += 1; -/***/ }), + if (q._tasks.length === 0) { + trigger('empty'); + } -/***/ 91788: -/***/ (function(__unused_webpack_module, exports) { + if (numRunning === q.concurrency) { + trigger('saturated'); + } -(function (global, factory) { - true ? factory(exports) : - 0; -})(this, (function (exports) { 'use strict'; + var cb = onlyOnce(_createCB(tasks)); + _worker(data, cb); + } + isProcessing = false; + }, + length () { + return q._tasks.length; + }, + running () { + return numRunning; + }, + workersList () { + return workersList; + }, + idle() { + return q._tasks.length + numRunning === 0; + }, + pause () { + q.paused = true; + }, + resume () { + if (q.paused === false) { return; } + q.paused = false; + setImmediate$1(q.process); + } + }; + // define these as fixed properties, so people get useful errors when updating + Object.defineProperties(q, { + saturated: { + writable: false, + value: eventMethod('saturated') + }, + unsaturated: { + writable: false, + value: eventMethod('unsaturated') + }, + empty: { + writable: false, + value: eventMethod('empty') + }, + drain: { + writable: false, + value: eventMethod('drain') + }, + error: { + writable: false, + value: eventMethod('error') + }, + }); + return q; + } /** - * Creates a continuation function with some arguments already applied. + * Creates a `cargo` object with the specified payload. Tasks added to the + * cargo will be processed altogether (up to the `payload` limit). If the + * `worker` is in progress, the task is queued until it becomes available. Once + * the `worker` has completed some tasks, each callback of those tasks is + * called. Check out [these](https://camo.githubusercontent.com/6bbd36f4cf5b35a0f11a96dcd2e97711ffc2fb37/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130382f62626330636662302d356632392d313165322d393734662d3333393763363464633835382e676966) [animations](https://camo.githubusercontent.com/f4810e00e1c5f5f8addbe3e9f49064fd5d102699/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130312f38346339323036362d356632392d313165322d383134662d3964336430323431336266642e676966) + * for how `cargo` and `queue` work. * - * Useful as a shorthand when combined with other control flow functions. Any - * arguments passed to the returned function are added to the arguments - * originally passed to apply. + * While [`queue`]{@link module:ControlFlow.queue} passes only one task to one of a group of workers + * at a time, cargo passes an array of tasks to a single worker, repeating + * when the worker is finished. * - * @name apply + * @name cargo * @static - * @memberOf module:Utils + * @memberOf module:ControlFlow * @method - * @category Util - * @param {Function} fn - The function you want to eventually apply all - * arguments to. Invokes with (arguments...). - * @param {...*} arguments... - Any number of arguments to automatically apply - * when the continuation is called. 
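// Sketch of the fixed event properties defined above via eventMethod():
// called with a handler they subscribe it; called with no arguments they
// return a one-shot promise. Assumes the published `async` package.
const async = require('async');

const q = async.queue((task, callback) => setImmediate(callback), 1);

q.saturated(() => console.log('all workers busy')); // handler form

q.push([1, 2, 3]);

q.drain().then(() => console.log('queue drained')); // promise form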
- * @returns {Function} the partially-applied function + * @see [async.queue]{@link module:ControlFlow.queue} + * @category Control Flow + * @param {AsyncFunction} worker - An asynchronous function for processing an array + * of queued tasks. Invoked with `(tasks, callback)`. + * @param {number} [payload=Infinity] - An optional `integer` for determining + * how many tasks should be processed per round; if omitted, the default is + * unlimited. + * @returns {module:ControlFlow.QueueObject} A cargo object to manage the tasks. Callbacks can + * attached as certain properties to listen for specific events during the + * lifecycle of the cargo and inner queue. * @example * - * // using apply - * async.parallel([ - * async.apply(fs.writeFile, 'testfile1', 'test1'), - * async.apply(fs.writeFile, 'testfile2', 'test2') - * ]); - * - * - * // the same process without using apply - * async.parallel([ - * function(callback) { - * fs.writeFile('testfile1', 'test1', callback); - * }, - * function(callback) { - * fs.writeFile('testfile2', 'test2', callback); + * // create a cargo object with payload 2 + * var cargo = async.cargo(function(tasks, callback) { + * for (var i=0; i var fn = async.apply(sys.puts, 'one'); - * node> fn('two', 'three'); - * one - * two - * three + * // add some items + * cargo.push({name: 'foo'}, function(err) { + * console.log('finished processing foo'); + * }); + * cargo.push({name: 'bar'}, function(err) { + * console.log('finished processing bar'); + * }); + * await cargo.push({name: 'baz'}); + * console.log('finished processing baz'); */ - function apply(fn, ...args) { - return (...callArgs) => fn(...args,...callArgs); - } - - function initialParams (fn) { - return function (...args/*, callback*/) { - var callback = args.pop(); - return fn.call(this, args, callback); - }; - } - - /* istanbul ignore file */ - - var hasQueueMicrotask = typeof queueMicrotask === 'function' && queueMicrotask; - var hasSetImmediate = typeof setImmediate === 'function' && setImmediate; - var hasNextTick = typeof process === 'object' && typeof process.nextTick === 'function'; - - function fallback(fn) { - setTimeout(fn, 0); - } - - function wrap(defer) { - return (fn, ...args) => defer(() => fn(...args)); - } - - var _defer$1; - - if (hasQueueMicrotask) { - _defer$1 = queueMicrotask; - } else if (hasSetImmediate) { - _defer$1 = setImmediate; - } else if (hasNextTick) { - _defer$1 = process.nextTick; - } else { - _defer$1 = fallback; + function cargo$1(worker, payload) { + return queue$1(worker, 1, payload); } - var setImmediate$1 = wrap(_defer$1); - /** - * Take a sync function and make it async, passing its return value to a - * callback. This is useful for plugging sync functions into a waterfall, - * series, or other async functions. Any arguments passed to the generated - * function will be passed to the wrapped function (except for the final - * callback argument). Errors thrown will be passed to the callback. - * - * If the function passed to `asyncify` returns a Promise, that promises's - * resolved/rejected state will be used to call the callback, rather than simply - * the synchronous return value. + * Creates a `cargoQueue` object with the specified payload. Tasks added to the + * cargoQueue will be processed together (up to the `payload` limit) in `concurrency` parallel workers. + * If the all `workers` are in progress, the task is queued until one becomes available. Once + * a `worker` has completed some tasks, each callback of those tasks is + * called. 
Check out [these](https://camo.githubusercontent.com/6bbd36f4cf5b35a0f11a96dcd2e97711ffc2fb37/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130382f62626330636662302d356632392d313165322d393734662d3333393763363464633835382e676966) [animations](https://camo.githubusercontent.com/f4810e00e1c5f5f8addbe3e9f49064fd5d102699/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130312f38346339323036362d356632392d313165322d383134662d3964336430323431336266642e676966) + * for how `cargo` and `queue` work. * - * This also means you can asyncify ES2017 `async` functions. + * While [`queue`]{@link module:ControlFlow.queue} passes only one task to one of a group of workers + * at a time, and [`cargo`]{@link module:ControlFlow.cargo} passes an array of tasks to a single worker, + * the cargoQueue passes an array of tasks to multiple parallel workers. * - * @name asyncify + * @name cargoQueue * @static - * @memberOf module:Utils + * @memberOf module:ControlFlow * @method - * @alias wrapSync - * @category Util - * @param {Function} func - The synchronous function, or Promise-returning - * function to convert to an {@link AsyncFunction}. - * @returns {AsyncFunction} An asynchronous wrapper of the `func`. To be - * invoked with `(args..., callback)`. + * @see [async.queue]{@link module:ControlFlow.queue} + * @see [async.cargo]{@link module:ControlFLow.cargo} + * @category Control Flow + * @param {AsyncFunction} worker - An asynchronous function for processing an array + * of queued tasks. Invoked with `(tasks, callback)`. + * @param {number} [concurrency=1] - An `integer` for determining how many + * `worker` functions should be run in parallel. If omitted, the concurrency + * defaults to `1`. If the concurrency is `0`, an error is thrown. + * @param {number} [payload=Infinity] - An optional `integer` for determining + * how many tasks should be processed per round; if omitted, the default is + * unlimited. + * @returns {module:ControlFlow.QueueObject} A cargoQueue object to manage the tasks. Callbacks can + * attached as certain properties to listen for specific events during the + * lifecycle of the cargoQueue and inner queue. * @example * - * // passing a regular synchronous function - * async.waterfall([ - * async.apply(fs.readFile, filename, "utf8"), - * async.asyncify(JSON.parse), - * function (data, next) { - * // data is the result of parsing the text. - * // If there was a parsing error, it would have been caught. - * } - * ], callback); - * - * // passing a function returning a promise - * async.waterfall([ - * async.apply(fs.readFile, filename, "utf8"), - * async.asyncify(function (contents) { - * return db.model.create(contents); - * }), - * function (model, next) { - * // `model` is the instantiated model object. - * // If there was an error, this function would be skipped. + * // create a cargoQueue object with payload 2 and concurrency 2 + * var cargoQueue = async.cargoQueue(function(tasks, callback) { + * for (var i=0; i { - invokeCallback(callback, null, value); - }, err => { - invokeCallback(callback, err && (err instanceof Error || err.message) ? 
err : new Error(err)); - }); - } - - function invokeCallback(callback, error, value) { - try { - callback(error, value); - } catch (err) { - setImmediate$1(e => { throw e }, err); - } - } - - function isAsync(fn) { - return fn[Symbol.toStringTag] === 'AsyncFunction'; - } - - function isAsyncGenerator(fn) { - return fn[Symbol.toStringTag] === 'AsyncGenerator'; - } - - function isAsyncIterable(obj) { - return typeof obj[Symbol.asyncIterator] === 'function'; - } - - function wrapAsync(asyncFn) { - if (typeof asyncFn !== 'function') throw new Error('expected a function') - return isAsync(asyncFn) ? asyncify(asyncFn) : asyncFn; - } - - // conditionally promisify a function. - // only return a promise if a callback is omitted - function awaitify (asyncFn, arity) { - if (!arity) arity = asyncFn.length; - if (!arity) throw new Error('arity is undefined') - function awaitable (...args) { - if (typeof args[arity - 1] === 'function') { - return asyncFn.apply(this, args) - } - - return new Promise((resolve, reject) => { - args[arity - 1] = (err, ...cbArgs) => { - if (err) return reject(err) - resolve(cbArgs.length > 1 ? cbArgs : cbArgs[0]); - }; - asyncFn.apply(this, args); - }) - } - - return awaitable - } - - function applyEach$1 (eachfn) { - return function applyEach(fns, ...callArgs) { - const go = awaitify(function (callback) { - var that = this; - return eachfn(fns, (fn, cb) => { - wrapAsync(fn).apply(that, callArgs.concat(cb)); - }, callback); - }); - return go; - }; - } - - function _asyncMap(eachfn, arr, iteratee, callback) { - arr = arr || []; - var results = []; - var counter = 0; - var _iteratee = wrapAsync(iteratee); - - return eachfn(arr, (value, _, iterCb) => { - var index = counter++; - _iteratee(value, (err, v) => { - results[index] = v; - iterCb(err); - }); - }, err => { - callback(err, results); - }); - } - - function isArrayLike(value) { - return value && - typeof value.length === 'number' && - value.length >= 0 && - value.length % 1 === 0; - } - - // A temporary value used to identify if the loop should be broken. - // See #1064, #1293 - const breakLoop = {}; - var breakLoop$1 = breakLoop; - - function once(fn) { - function wrapper (...args) { - if (fn === null) return; - var callFn = fn; - fn = null; - callFn.apply(this, args); - } - Object.assign(wrapper, fn); - return wrapper - } - - function getIterator (coll) { - return coll[Symbol.iterator] && coll[Symbol.iterator](); - } - - function createArrayIterator(coll) { - var i = -1; - var len = coll.length; - return function next() { - return ++i < len ? {value: coll[i], key: i} : null; - } - } - - function createES2015Iterator(iterator) { - var i = -1; - return function next() { - var item = iterator.next(); - if (item.done) - return null; - i++; - return {value: item.value, key: i}; - } - } - - function createObjectIterator(obj) { - var okeys = obj ? Object.keys(obj) : []; - var i = -1; - var len = okeys.length; - return function next() { - var key = okeys[++i]; - if (key === '__proto__') { - return next(); - } - return i < len ? {value: obj[key], key} : null; - }; - } - - function createIterator(coll) { - if (isArrayLike(coll)) { - return createArrayIterator(coll); - } - - var iterator = getIterator(coll); - return iterator ? 
createES2015Iterator(iterator) : createObjectIterator(coll); - } - - function onlyOnce(fn) { - return function (...args) { - if (fn === null) throw new Error("Callback was already called."); - var callFn = fn; - fn = null; - callFn.apply(this, args); - }; - } - - // for async generators - function asyncEachOfLimit(generator, limit, iteratee, callback) { - let done = false; - let canceled = false; - let awaiting = false; - let running = 0; - let idx = 0; - - function replenish() { - //console.log('replenish') - if (running >= limit || awaiting || done) return - //console.log('replenish awaiting') - awaiting = true; - generator.next().then(({value, done: iterDone}) => { - //console.log('got value', value) - if (canceled || done) return - awaiting = false; - if (iterDone) { - done = true; - if (running <= 0) { - //console.log('done nextCb') - callback(null); - } - return; - } - running++; - iteratee(value, idx, iterateeCallback); - idx++; - replenish(); - }).catch(handleError); - } - - function iterateeCallback(err, result) { - //console.log('iterateeCallback') - running -= 1; - if (canceled) return - if (err) return handleError(err) - - if (err === false) { - done = true; - canceled = true; - return - } - - if (result === breakLoop$1 || (done && running <= 0)) { - done = true; - //console.log('done iterCb') - return callback(null); - } - replenish(); - } - - function handleError(err) { - if (canceled) return - awaiting = false; - done = true; - callback(err); - } - - replenish(); + function cargo(worker, concurrency, payload) { + return queue$1(worker, concurrency, payload); } - var eachOfLimit$2 = (limit) => { - return (obj, iteratee, callback) => { - callback = once(callback); - if (limit <= 0) { - throw new RangeError('concurrency limit cannot be less than 1') - } - if (!obj) { - return callback(null); - } - if (isAsyncGenerator(obj)) { - return asyncEachOfLimit(obj, limit, iteratee, callback) - } - if (isAsyncIterable(obj)) { - return asyncEachOfLimit(obj[Symbol.asyncIterator](), limit, iteratee, callback) - } - var nextElem = createIterator(obj); - var done = false; - var canceled = false; - var running = 0; - var looping = false; - - function iterateeCallback(err, value) { - if (canceled) return - running -= 1; - if (err) { - done = true; - callback(err); - } - else if (err === false) { - done = true; - canceled = true; - } - else if (value === breakLoop$1 || (done && running <= 0)) { - done = true; - return callback(null); - } - else if (!looping) { - replenish(); - } - } - - function replenish () { - looping = true; - while (running < limit && !done) { - var elem = nextElem(); - if (elem === null) { - done = true; - if (running <= 0) { - callback(null); - } - return; - } - running += 1; - iteratee(elem.value, elem.key, onlyOnce(iterateeCallback)); - } - looping = false; - } - - replenish(); - }; - }; - /** - * The same as [`eachOf`]{@link module:Collections.eachOf} but runs a maximum of `limit` async operations at a - * time. + * Reduces `coll` into a single value using an async `iteratee` to return each + * successive step. `memo` is the initial state of the reduction. This function + * only operates in series. * - * @name eachOfLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.eachOf]{@link module:Collections.eachOf} - * @alias forEachOfLimit - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. 
- * @param {AsyncFunction} iteratee - An async function to apply to each - * item in `coll`. The `key` is the item's key, or index in the case of an - * array. - * Invoked with (item, key, callback). - * @param {Function} [callback] - A callback which is called when all - * `iteratee` functions have finished, or an error occurs. Invoked with (err). - * @returns {Promise} a promise, if a callback is omitted - */ - function eachOfLimit(coll, limit, iteratee, callback) { - return eachOfLimit$2(limit)(coll, wrapAsync(iteratee), callback); - } - - var eachOfLimit$1 = awaitify(eachOfLimit, 4); - - // eachOf implementation optimized for array-likes - function eachOfArrayLike(coll, iteratee, callback) { - callback = once(callback); - var index = 0, - completed = 0, - {length} = coll, - canceled = false; - if (length === 0) { - callback(null); - } - - function iteratorCallback(err, value) { - if (err === false) { - canceled = true; - } - if (canceled === true) return - if (err) { - callback(err); - } else if ((++completed === length) || value === breakLoop$1) { - callback(null); - } - } - - for (; index < length; index++) { - iteratee(coll[index], index, onlyOnce(iteratorCallback)); - } - } - - // a generic version of eachOf which can handle array, object, and iterator cases. - function eachOfGeneric (coll, iteratee, callback) { - return eachOfLimit$1(coll, Infinity, iteratee, callback); - } - - /** - * Like [`each`]{@link module:Collections.each}, except that it passes the key (or index) as the second argument - * to the iteratee. + * For performance reasons, it may make sense to split a call to this function + * into a parallel map, and then use the normal `Array.prototype.reduce` on the + * results. This function is for situations where each step in the reduction + * needs to be async; if you can get the data before reducing it, then it's + * probably a good idea to do so. * - * @name eachOf + * @name reduce * @static * @memberOf module:Collections * @method - * @alias forEachOf + * @alias inject + * @alias foldl * @category Collection - * @see [async.each]{@link module:Collections.each} * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - A function to apply to each - * item in `coll`. - * The `key` is the item's key, or index in the case of an array. - * Invoked with (item, key, callback). - * @param {Function} [callback] - A callback which is called when all - * `iteratee` functions have finished, or an error occurs. Invoked with (err). - * @returns {Promise} a promise, if a callback is omitted + * @param {*} memo - The initial state of the reduction. + * @param {AsyncFunction} iteratee - A function applied to each item in the + * array to produce the next step in the reduction. + * The `iteratee` should complete with the next state of the reduction. + * If the iteratee completes with an error, the reduction is stopped and the + * main `callback` is immediately called with the error. + * Invoked with (memo, item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result is the reduced value. Invoked with + * (err, result). 
+ * @returns {Promise} a promise, if no callback is passed * @example * - * // dev.json is a file containing a valid json object config for dev environment - * // dev.json is a file containing a valid json object config for test environment - * // prod.json is a file containing a valid json object config for prod environment - * // invalid.json is a file with a malformed json object + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * // file4.txt does not exist * - * let configs = {}; //global variable - * let validConfigFileMap = {dev: 'dev.json', test: 'test.json', prod: 'prod.json'}; - * let invalidConfigFileMap = {dev: 'dev.json', test: 'test.json', invalid: 'invalid.json'}; + * const fileList = ['file1.txt','file2.txt','file3.txt']; + * const withMissingFileList = ['file1.txt','file2.txt','file3.txt', 'file4.txt']; * - * // asynchronous function that reads a json file and parses the contents as json object - * function parseFile(file, key, callback) { - * fs.readFile(file, "utf8", function(err, data) { - * if (err) return calback(err); - * try { - * configs[key] = JSON.parse(data); - * } catch (e) { - * return callback(e); + * // asynchronous function that computes the file size in bytes + * // file size is added to the memoized value, then returned + * function getFileSizeInBytes(memo, file, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(err); * } - * callback(); + * callback(null, memo + stat.size); * }); * } * * // Using callbacks - * async.forEachOf(validConfigFileMap, parseFile, function (err) { + * async.reduce(fileList, 0, getFileSizeInBytes, function(err, result) { * if (err) { - * console.error(err); + * console.log(err); * } else { - * console.log(configs); - * // configs is now a map of JSON data, e.g. - * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files * } * }); * - * //Error handing - * async.forEachOf(invalidConfigFileMap, parseFile, function (err) { + * // Error Handling + * async.reduce(withMissingFileList, 0, getFileSizeInBytes, function(err, result) { * if (err) { - * console.error(err); - * // JSON parse error exception + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] * } else { - * console.log(configs); + * console.log(result); * } * }); * * // Using Promises - * async.forEachOf(validConfigFileMap, parseFile) - * .then( () => { - * console.log(configs); - * // configs is now a map of JSON data, e.g. 
- * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * async.reduce(fileList, 0, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files * }).catch( err => { - * console.error(err); + * console.log(err); * }); * - * //Error handing - * async.forEachOf(invalidConfigFileMap, parseFile) - * .then( () => { - * console.log(configs); + * // Error Handling + * async.reduce(withMissingFileList, 0, getFileSizeInBytes) + * .then( result => { + * console.log(result); * }).catch( err => { - * console.error(err); - * // JSON parse error exception + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] * }); * * // Using async/await * async () => { * try { - * let result = await async.forEachOf(validConfigFileMap, parseFile); - * console.log(configs); - * // configs is now a map of JSON data, e.g. - * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * let result = await async.reduce(fileList, 0, getFileSizeInBytes); + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files * } * catch (err) { * console.log(err); * } * } * - * //Error handing + * // Error Handling * async () => { * try { - * let result = await async.forEachOf(invalidConfigFileMap, parseFile); - * console.log(configs); + * let result = await async.reduce(withMissingFileList, 0, getFileSizeInBytes); + * console.log(result); * } * catch (err) { * console.log(err); - * // JSON parse error exception + * // [ Error: ENOENT: no such file or directory ] * } * } * */ - function eachOf(coll, iteratee, callback) { - var eachOfImplementation = isArrayLike(coll) ? eachOfArrayLike : eachOfGeneric; - return eachOfImplementation(coll, wrapAsync(iteratee), callback); + function reduce(coll, memo, iteratee, callback) { + callback = once(callback); + var _iteratee = wrapAsync(iteratee); + return eachOfSeries$1(coll, (x, i, iterCb) => { + _iteratee(memo, x, (err, v) => { + memo = v; + iterCb(err); + }); + }, err => callback(err, memo)); } - - var eachOf$1 = awaitify(eachOf, 3); + var reduce$1 = awaitify(reduce, 4); /** - * Produces a new collection of values by mapping each value in `coll` through - * the `iteratee` function. The `iteratee` is called with an item from `coll` - * and a callback for when it has finished processing. Each of these callbacks - * takes 2 arguments: an `error`, and the transformed item from `coll`. If - * `iteratee` passes an error to its callback, the main `callback` (for the - * `map` function) is immediately called with the error. + * Version of the compose function that is more natural to read. Each function + * consumes the return value of the previous function. It is the equivalent of + * [compose]{@link module:ControlFlow.compose} with the arguments reversed. * - * Note, that since this function applies the `iteratee` to each item in - * parallel, there is no guarantee that the `iteratee` functions will complete - * in order. However, the results array will be in the same order as the - * original `coll`. + * Each function is executed with the `this` binding of the composed function. * - * If `map` is passed an Object, the results will be an Array. The results - * will roughly be in the order of the original Objects' keys (but this can - * vary across JavaScript engines). 
+ * @name seq + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.compose]{@link module:ControlFlow.compose} + * @category Control Flow + * @param {...AsyncFunction} functions - the asynchronous functions to compose + * @returns {Function} a function that composes the `functions` in order + * @example * - * @name map + * // Requires lodash (or underscore), express3 and dresende's orm2. + * // Part of an app, that fetches cats of the logged user. + * // This example uses `seq` function to avoid overnesting and error + * // handling clutter. + * app.get('/cats', function(request, response) { + * var User = request.models.User; + * async.seq( + * User.get.bind(User), // 'User.get' has signature (id, callback(err, data)) + * function(user, fn) { + * user.getCats(fn); // 'getCats' has signature (callback(err, data)) + * } + * )(req.session.user_id, function (err, cats) { + * if (err) { + * console.error(err); + * response.json({ status: 'error', message: err.message }); + * } else { + * response.json({ status: 'ok', message: 'Cats found', data: cats }); + * } + * }); + * }); + */ + function seq(...functions) { + var _functions = functions.map(wrapAsync); + return function (...args) { + var that = this; + + var cb = args[args.length - 1]; + if (typeof cb == 'function') { + args.pop(); + } else { + cb = promiseCallback(); + } + + reduce$1(_functions, args, (newargs, fn, iterCb) => { + fn.apply(that, newargs.concat((err, ...nextargs) => { + iterCb(err, nextargs); + })); + }, + (err, results) => cb(err, ...results)); + + return cb[PROMISE_SYMBOL] + }; + } + + /** + * Creates a function which is a composition of the passed asynchronous + * functions. Each function consumes the return value of the function that + * follows. Composing functions `f()`, `g()`, and `h()` would produce the result + * of `f(g(h()))`, only this version uses callbacks to obtain the return values. + * + * If the last argument to the composed function is not a function, a promise + * is returned when you call it. + * + * Each function is executed with the `this` binding of the composed function. + * + * @name compose + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {...AsyncFunction} functions - the asynchronous functions to compose + * @returns {Function} an asynchronous function that is the composed + * asynchronous `functions` + * @example + * + * function add1(n, callback) { + * setTimeout(function () { + * callback(null, n + 1); + * }, 10); + * } + * + * function mul3(n, callback) { + * setTimeout(function () { + * callback(null, n * 3); + * }, 10); + * } + * + * var add1mul3 = async.compose(mul3, add1); + * add1mul3(4, function (err, result) { + * // result now equals 15 + * }); + */ + function compose(...args) { + return seq(...args.reverse()); + } + + /** + * The same as [`map`]{@link module:Collections.map} but runs a maximum of `limit` async operations at a time. + * + * @name mapLimit * @static * @memberOf module:Collections * @method + * @see [async.map]{@link module:Collections.map} * @category Collection * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. * @param {AsyncFunction} iteratee - An async function to apply to each item in * `coll`. * The iteratee should complete with the transformed item. * Invoked with (item, callback). 
* @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. Results is an Array of the + * functions have finished, or an error occurs. Results is an array of the * transformed items from the `coll`. Invoked with (err, results). * @returns {Promise} a promise, if no callback is passed - * @example + */ + function mapLimit (coll, limit, iteratee, callback) { + return _asyncMap(eachOfLimit$2(limit), coll, iteratee, callback) + } + var mapLimit$1 = awaitify(mapLimit, 4); + + /** + * The same as [`concat`]{@link module:Collections.concat} but runs a maximum of `limit` async operations at a time. * - * // file1.txt is a file that is 1000 bytes in size - * // file2.txt is a file that is 2000 bytes in size - * // file3.txt is a file that is 3000 bytes in size - * // file4.txt does not exist + * @name concatLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.concat]{@link module:Collections.concat} + * @category Collection + * @alias flatMapLimit + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`, + * which should use an array as its result. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). + * @returns A Promise, if no callback is passed + */ + function concatLimit(coll, limit, iteratee, callback) { + var _iteratee = wrapAsync(iteratee); + return mapLimit$1(coll, limit, (val, iterCb) => { + _iteratee(val, (err, ...args) => { + if (err) return iterCb(err); + return iterCb(err, args); + }); + }, (err, mapResults) => { + var result = []; + for (var i = 0; i < mapResults.length; i++) { + if (mapResults[i]) { + result = result.concat(...mapResults[i]); + } + } + + return callback(err, result); + }); + } + var concatLimit$1 = awaitify(concatLimit, 4); + + /** + * Applies `iteratee` to each item in `coll`, concatenating the results. Returns + * the concatenated list. The `iteratee`s are called in parallel, and the + * results are concatenated as they return. The results array will be returned in + * the original order of `coll` passed to the `iteratee` function. * - * const fileList = ['file1.txt','file2.txt','file3.txt']; - * const withMissingFileList = ['file1.txt','file2.txt','file4.txt']; + * @name concat + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @alias flatMap + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`, + * which should use an array as its result. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). 
+ * @returns A Promise, if no callback is passed + * @example * - * // asynchronous function that returns the file size in bytes - * function getFileSizeInBytes(file, callback) { - * fs.stat(file, function(err, stat) { - * if (err) { - * return callback(err); - * } - * callback(null, stat.size); - * }); - * } + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * let directoryList = ['dir1','dir2','dir3']; + * let withMissingDirectoryList = ['dir1','dir2','dir3', 'dir4']; * * // Using callbacks - * async.map(fileList, getFileSizeInBytes, function(err, results) { - * if (err) { - * console.log(err); - * } else { - * console.log(results); - * // results is now an array of the file size in bytes for each file, e.g. - * // [ 1000, 2000, 3000] - * } + * async.concat(directoryList, fs.readdir, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] + * } * }); * * // Error Handling - * async.map(withMissingFileList, getFileSizeInBytes, function(err, results) { - * if (err) { - * console.log(err); - * // [ Error: ENOENT: no such file or directory ] - * } else { - * console.log(results); - * } + * async.concat(withMissingDirectoryList, fs.readdir, function(err, results) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4 does not exist + * } else { + * console.log(results); + * } * }); * * // Using Promises - * async.map(fileList, getFileSizeInBytes) - * .then( results => { + * async.concat(directoryList, fs.readdir) + * .then(results => { * console.log(results); - * // results is now an array of the file size in bytes for each file, e.g. - * // [ 1000, 2000, 3000] - * }).catch( err => { - * console.log(err); + * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] + * }).catch(err => { + * console.log(err); * }); * * // Error Handling - * async.map(withMissingFileList, getFileSizeInBytes) - * .then( results => { + * async.concat(withMissingDirectoryList, fs.readdir) + * .then(results => { * console.log(results); - * }).catch( err => { + * }).catch(err => { * console.log(err); * // [ Error: ENOENT: no such file or directory ] + * // since dir4 does not exist * }); * * // Using async/await * async () => { * try { - * let results = await async.map(fileList, getFileSizeInBytes); + * let results = await async.concat(directoryList, fs.readdir); * console.log(results); - * // results is now an array of the file size in bytes for each file, e.g. 
- * // [ 1000, 2000, 3000]
- * }
- * catch (err) {
+ * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ]
+ * } catch (err) {
 * console.log(err);
 * }
 * }
@@ -49890,1135 +42717,1206 @@ module.exports = Zip;
 * // Error Handling
 * async () => {
 * try {
- * let results = await async.map(withMissingFileList, getFileSizeInBytes);
+ * let results = await async.concat(withMissingDirectoryList, fs.readdir);
 * console.log(results);
- * }
- * catch (err) {
+ * } catch (err) {
 * console.log(err);
 * // [ Error: ENOENT: no such file or directory ]
+ * // since dir4 does not exist
 * }
 * }
 *
 */
- function map (coll, iteratee, callback) {
- return _asyncMap(eachOf$1, coll, iteratee, callback)
+ function concat(coll, iteratee, callback) {
+ return concatLimit$1(coll, Infinity, iteratee, callback)
 }
- var map$1 = awaitify(map, 3);
+ var concat$1 = awaitify(concat, 3);

 /**
- * Applies the provided arguments to each function in the array, calling
- * `callback` after all functions have completed. If you only provide the first
- * argument, `fns`, then it will return a function which lets you pass in the
- * arguments as if it were a single function call. If more arguments are
- * provided, `callback` is required while `args` is still optional. The results
- * for each of the applied async functions are passed to the final callback
- * as an array.
+ * The same as [`concat`]{@link module:Collections.concat} but runs only a single async operation at a time.
 *
- * @name applyEach
+ * @name concatSeries
 * @static
- * @memberOf module:ControlFlow
+ * @memberOf module:Collections
 * @method
- * @category Control Flow
- * @param {Array|Iterable|AsyncIterable|Object} fns - A collection of {@link AsyncFunction}s
- * to all call with the same arguments
- * @param {...*} [args] - any number of separate arguments to pass to the
- * function.
- * @param {Function} [callback] - the final argument should be the callback,
- * called when all functions have completed processing.
- * @returns {AsyncFunction} - Returns a function that takes no args other than
- * an optional callback, that is the result of applying the `args` to each
- * of the functions.
+ * @see [async.concat]{@link module:Collections.concat}
+ * @category Collection
+ * @alias flatMapSeries
+ * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
+ * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`.
+ * The iteratee should complete with an array of results.
+ * Invoked with (item, callback).
+ * @param {Function} [callback] - A callback which is called after all the
+ * `iteratee` functions have finished, or an error occurs. Results is an array
+ * containing the concatenated results of the `iteratee` function. Invoked with
+ * (err, results).
+ * @returns A Promise, if no callback is passed
+ */
+ function concatSeries(coll, iteratee, callback) {
+ return concatLimit$1(coll, 1, iteratee, callback)
+ }
+ var concatSeries$1 = awaitify(concatSeries, 3);
+
+ /**
+ * Returns a function that when called, calls-back with the values provided.
+ * Useful as the first function in a [`waterfall`]{@link module:ControlFlow.waterfall}, or for plugging values in to
+ * [`auto`]{@link module:ControlFlow.auto}.
+ *
+ * @name constant
+ * @static
+ * @memberOf module:Utils
+ * @method
+ * @category Util
+ * @param {...*} arguments... - Any number of arguments to automatically invoke
+ * callback with.
+ * @returns {AsyncFunction} Returns a function that when invoked, automatically + * invokes the callback with the previous given arguments. * @example * - * const appliedFn = async.applyEach([enableSearch, updateSchema], 'bucket') + * async.waterfall([ + * async.constant(42), + * function (value, next) { + * // value === 42 + * }, + * //... + * ], callback); * - * appliedFn((err, results) => { - * // results[0] is the results for `enableSearch` - * // results[1] is the results for `updateSchema` - * }); + * async.waterfall([ + * async.constant(filename, "utf8"), + * fs.readFile, + * function (fileData, next) { + * //... + * } + * //... + * ], callback); * - * // partial application example: - * async.each( - * buckets, - * async (bucket) => async.applyEach([enableSearch, updateSchema], bucket)(), - * callback - * ); + * async.auto({ + * hostname: async.constant("https://server.net/"), + * port: findFreePort, + * launchServer: ["hostname", "port", function (options, cb) { + * startServer(options, cb); + * }], + * //... + * }, callback); */ - var applyEach = applyEach$1(map$1); + function constant$1(...args) { + return function (...ignoredArgs/*, callback*/) { + var callback = ignoredArgs.pop(); + return callback(null, ...args); + }; + } + + function _createTester(check, getResult) { + return (eachfn, arr, _iteratee, cb) => { + var testPassed = false; + var testResult; + const iteratee = wrapAsync(_iteratee); + eachfn(arr, (value, _, callback) => { + iteratee(value, (err, result) => { + if (err || err === false) return callback(err); + + if (check(result) && !testResult) { + testPassed = true; + testResult = getResult(true, value); + return callback(null, breakLoop$1); + } + callback(); + }); + }, err => { + if (err) return cb(err); + cb(null, testPassed ? testResult : getResult(false)); + }); + }; + } /** - * The same as [`eachOf`]{@link module:Collections.eachOf} but runs only a single async operation at a time. + * Returns the first value in `coll` that passes an async truth test. The + * `iteratee` is applied in parallel, meaning the first iteratee to return + * `true` will fire the detect `callback` with that result. That means the + * result might not be the first item in the original `coll` (in terms of order) + * that passes the test. + + * If order within the original `coll` is important, then look at + * [`detectSeries`]{@link module:Collections.detectSeries}. * - * @name eachOfSeries + * @name detect * @static * @memberOf module:Collections * @method - * @see [async.eachOf]{@link module:Collections.eachOf} - * @alias forEachOfSeries - * @category Collection + * @alias find + * @category Collections * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * Invoked with (item, key, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. Invoked with (err). + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). 
* @returns {Promise} a promise, if a callback is omitted + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // dir1/file1.txt + * // result now equals the first file in the list that exists + * } + *); + * + * // Using Promises + * async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists) + * .then(result => { + * console.log(result); + * // dir1/file1.txt + * // result now equals the first file in the list that exists + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists); + * console.log(result); + * // dir1/file1.txt + * // result now equals the file in the list that exists + * } + * catch (err) { + * console.log(err); + * } + * } + * */ - function eachOfSeries(coll, iteratee, callback) { - return eachOfLimit$1(coll, 1, iteratee, callback) + function detect(coll, iteratee, callback) { + return _createTester(bool => bool, (res, item) => item)(eachOf$1, coll, iteratee, callback) } - var eachOfSeries$1 = awaitify(eachOfSeries, 3); + var detect$1 = awaitify(detect, 3); /** - * The same as [`map`]{@link module:Collections.map} but runs only a single async operation at a time. + * The same as [`detect`]{@link module:Collections.detect} but runs a maximum of `limit` async operations at a + * time. * - * @name mapSeries + * @name detectLimit * @static * @memberOf module:Collections * @method - * @see [async.map]{@link module:Collections.map} - * @category Collection + * @see [async.detect]{@link module:Collections.detect} + * @alias findLimit + * @category Collections * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The iteratee should complete with the transformed item. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. Results is an array of the - * transformed items from the `coll`. Invoked with (err, results). - * @returns {Promise} a promise, if no callback is passed + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). 
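+ * (a minimal sketch, reusing the `fileExists` helper from the `detect`
+ * example above: `async.detectLimit(files, 2, fileExists, done)` checks at
+ * most two files at a time and calls back with the first file found to
+ * exist, not necessarily the first one in `files`)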
+ * @returns {Promise} a promise, if a callback is omitted */ - function mapSeries (coll, iteratee, callback) { - return _asyncMap(eachOfSeries$1, coll, iteratee, callback) + function detectLimit(coll, limit, iteratee, callback) { + return _createTester(bool => bool, (res, item) => item)(eachOfLimit$2(limit), coll, iteratee, callback) } - var mapSeries$1 = awaitify(mapSeries, 3); + var detectLimit$1 = awaitify(detectLimit, 4); /** - * The same as [`applyEach`]{@link module:ControlFlow.applyEach} but runs only a single async operation at a time. + * The same as [`detect`]{@link module:Collections.detect} but runs only a single async operation at a time. * - * @name applyEachSeries + * @name detectSeries * @static - * @memberOf module:ControlFlow + * @memberOf module:Collections * @method - * @see [async.applyEach]{@link module:ControlFlow.applyEach} - * @category Control Flow - * @param {Array|Iterable|AsyncIterable|Object} fns - A collection of {@link AsyncFunction}s to all - * call with the same arguments - * @param {...*} [args] - any number of separate arguments to pass to the - * function. - * @param {Function} [callback] - the final argument should be the callback, - * called when all functions have completed processing. - * @returns {AsyncFunction} - A function, that when called, is the result of - * appling the `args` to the list of functions. It takes no args, other than - * a callback. - */ - var applyEachSeries = applyEach$1(mapSeries$1); - - const PROMISE_SYMBOL = Symbol('promiseCallback'); + * @see [async.detect]{@link module:Collections.detect} + * @alias findSeries + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). + * @returns {Promise} a promise, if a callback is omitted + */ + function detectSeries(coll, iteratee, callback) { + return _createTester(bool => bool, (res, item) => item)(eachOfLimit$2(1), coll, iteratee, callback) + } - function promiseCallback () { - let resolve, reject; - function callback (err, ...args) { - if (err) return reject(err) - resolve(args.length > 1 ? args : args[0]); + var detectSeries$1 = awaitify(detectSeries, 3); + + function consoleFunc(name) { + return (fn, ...args) => wrapAsync(fn)(...args, (err, ...resultArgs) => { + /* istanbul ignore else */ + if (typeof console === 'object') { + /* istanbul ignore else */ + if (err) { + /* istanbul ignore else */ + if (console.error) { + console.error(err); + } + } else if (console[name]) { /* istanbul ignore else */ + resultArgs.forEach(x => console[name](x)); + } + } + }) + } + + /** + * Logs the result of an [`async` function]{@link AsyncFunction} to the + * `console` using `console.dir` to display the properties of the resulting object. + * Only works in Node.js or in browsers that support `console.dir` and + * `console.error` (such as FF and Chrome). + * If multiple arguments are returned from the async function, + * `console.dir` is called on each argument in order. 
+ * + * @name dir + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} function - The function you want to eventually apply + * all arguments to. + * @param {...*} arguments... - Any number of arguments to apply to the function. + * @example + * + * // in a module + * var hello = function(name, callback) { + * setTimeout(function() { + * callback(null, {hello: name}); + * }, 1000); + * }; + * + * // in the node repl + * node> async.dir(hello, 'world'); + * {hello: 'world'} + */ + var dir = consoleFunc('dir'); + + /** + * The post-check version of [`whilst`]{@link module:ControlFlow.whilst}. To reflect the difference in + * the order of operations, the arguments `test` and `iteratee` are switched. + * + * `doWhilst` is to `whilst` as `do while` is to `while` in plain JavaScript. + * + * @name doWhilst + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.whilst]{@link module:ControlFlow.whilst} + * @category Control Flow + * @param {AsyncFunction} iteratee - A function which is called each time `test` + * passes. Invoked with (callback). + * @param {AsyncFunction} test - asynchronous truth test to perform after each + * execution of `iteratee`. Invoked with (...args, callback), where `...args` are the + * non-error args from the previous callback of `iteratee`. + * @param {Function} [callback] - A callback which is called after the test + * function has failed and repeated execution of `iteratee` has stopped. + * `callback` will be passed an error and any arguments passed to the final + * `iteratee`'s callback. Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + */ + function doWhilst(iteratee, test, callback) { + callback = onlyOnce(callback); + var _fn = wrapAsync(iteratee); + var _test = wrapAsync(test); + var results; + + function next(err, ...args) { + if (err) return callback(err); + if (err === false) return; + results = args; + _test(...args, check); } - callback[PROMISE_SYMBOL] = new Promise((res, rej) => { - resolve = res, - reject = rej; - }); + function check(err, truth) { + if (err) return callback(err); + if (err === false) return; + if (!truth) return callback(null, ...results); + _fn(next); + } - return callback + return check(null, true); } + var doWhilst$1 = awaitify(doWhilst, 3); + /** - * Determines the best order for running the {@link AsyncFunction}s in `tasks`, based on - * their requirements. Each function can optionally depend on other functions - * being completed first, and each function is run as soon as its requirements - * are satisfied. + * Like ['doWhilst']{@link module:ControlFlow.doWhilst}, except the `test` is inverted. Note the + * argument ordering differs from `until`. * - * If any of the {@link AsyncFunction}s pass an error to their callback, the `auto` sequence - * will stop. Further tasks will not execute (so any other functions depending - * on it will not run), and the main `callback` is immediately called with the + * @name doUntil + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.doWhilst]{@link module:ControlFlow.doWhilst} + * @category Control Flow + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` fails. Invoked with (callback). + * @param {AsyncFunction} test - asynchronous truth test to perform after each + * execution of `iteratee`. 
Invoked with (...args, callback), where `...args` are the + * non-error args from the previous callback of `iteratee` + * @param {Function} [callback] - A callback which is called after the test + * function has passed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + */ + function doUntil(iteratee, test, callback) { + const _test = wrapAsync(test); + return doWhilst$1(iteratee, (...args) => { + const cb = args.pop(); + _test(...args, (err, truth) => cb (err, !truth)); + }, callback); + } + + function _withoutIndex(iteratee) { + return (value, index, callback) => iteratee(value, callback); + } + + /** + * Applies the function `iteratee` to each item in `coll`, in parallel. + * The `iteratee` is called with an item from the list, and a callback for when + * it has finished. If the `iteratee` passes an error to its `callback`, the + * main `callback` (for the `each` function) is immediately called with the * error. * - * {@link AsyncFunction}s also receive an object containing the results of functions which - * have completed so far as the first argument, if they have dependencies. If a - * task function has no dependencies, it will only be passed a callback. + * Note, that since this function applies `iteratee` to each item in parallel, + * there is no guarantee that the iteratee functions will complete in order. * - * @name auto + * @name each * @static - * @memberOf module:ControlFlow + * @memberOf module:Collections * @method - * @category Control Flow - * @param {Object} tasks - An object. Each of its properties is either a - * function or an array of requirements, with the {@link AsyncFunction} itself the last item - * in the array. The object's key of a property serves as the name of the task - * defined by that property, i.e. can be used when specifying requirements for - * other tasks. The function receives one or two arguments: - * * a `results` object, containing the results of the previously executed - * functions, only passed if the task has any dependencies, - * * a `callback(err, result)` function, which must be called when finished, - * passing an `error` (which can be `null`) and the result of the function's - * execution. - * @param {number} [concurrency=Infinity] - An optional `integer` for - * determining the maximum number of tasks that can be run in parallel. By - * default, as many as possible. - * @param {Function} [callback] - An optional callback which is called when all - * the tasks have been completed. It receives the `err` argument if any `tasks` - * pass an error to their callback. Results are always returned; however, if an - * error occurs, no further `tasks` will be performed, and the results object - * will only contain partial results. Invoked with (err, results). - * @returns {Promise} a promise, if a callback is not passed + * @alias forEach + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to + * each item in `coll`. Invoked with (item, callback). + * The array index is not passed to the iteratee. + * If you need the index, use `eachOf`. + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). 
+ * @returns {Promise} a promise, if a callback is omitted
 * @example
 *
- * //Using Callbacks
- * async.auto({
- * get_data: function(callback) {
- * // async code to get some data
- * callback(null, 'data', 'converted to array');
- * },
- * make_folder: function(callback) {
- * // async code to create a directory to store a file in
- * // this is run at the same time as getting the data
- * callback(null, 'folder');
- * },
- * write_file: ['get_data', 'make_folder', function(results, callback) {
- * // once there is some data and the directory exists,
- * // write the data to a file in the directory
- * callback(null, 'filename');
- * }],
- * email_link: ['write_file', function(results, callback) {
- * // once the file is written let's email a link to it...
- * callback(null, {'file':results.write_file, 'email':'user@example.com'});
- * }]
- * }, function(err, results) {
- * if (err) {
- * console.log('err = ', err);
+ * // dir1 is a directory that contains file1.txt, file2.txt
+ * // dir2 is a directory that contains file3.txt, file4.txt
+ * // dir3 is a directory that contains file5.txt
+ * // dir4 does not exist
+ *
+ * const fileList = [ 'dir1/file2.txt', 'dir2/file3.txt', 'dir3/file5.txt'];
+ * const withMissingFileList = ['dir1/file1.txt', 'dir4/file2.txt'];
+ *
+ * // asynchronous function that deletes a file
+ * const deleteFile = function(file, callback) {
+ * fs.unlink(file, callback);
+ * };
+ *
+ * // Using callbacks
+ * async.each(fileList, deleteFile, function(err) {
+ * if( err ) {
+ * console.log(err);
+ * } else {
+ * console.log('All files have been deleted successfully');
 * }
- * console.log('results = ', results);
- * // results = {
- * // get_data: ['data', 'converted to array']
- * // make_folder; 'folder',
- * // write_file: 'filename'
- * // email_link: { file: 'filename', email: 'user@example.com' }
- * // }
 * });
 *
- * //Using Promises
- * async.auto({
- * get_data: function(callback) {
- * console.log('in get_data');
- * // async code to get some data
- * callback(null, 'data', 'converted to array');
- * },
- * make_folder: function(callback) {
- * console.log('in make_folder');
- * // async code to create a directory to store a file in
- * // this is run at the same time as getting the data
- * callback(null, 'folder');
- * },
- * write_file: ['get_data', 'make_folder', function(results, callback) {
- * // once there is some data and the directory exists,
- * // write the data to a file in the directory
- * callback(null, 'filename');
- * }],
- * email_link: ['write_file', function(results, callback) {
- * // once the file is written let's email a link to it...
- * callback(null, {'file':results.write_file, 'email':'user@example.com'});
- * }]
- * }).then(results => {
- * console.log('results = ', results);
- * // results = {
- * // get_data: ['data', 'converted to array']
- * // make_folder; 'folder',
- * // write_file: 'filename'
- * // email_link: { file: 'filename', email: 'user@example.com' }
- * // }
- * }).catch(err => {
- * console.log('err = ', err);
+ * // Error Handling
+ * async.each(withMissingFileList, deleteFile, function(err){
+ * console.log(err);
+ * // [ Error: ENOENT: no such file or directory ]
+ * // since dir4/file2.txt does not exist
+ * // dir1/file1.txt could have been deleted
 * });
 *
- * //Using async/await
+ * // Using Promises
+ * async.each(fileList, deleteFile)
+ * .then( () => {
+ * console.log('All files have been deleted successfully');
+ * }).catch( err => {
+ * console.log(err);
+ * });
+ *
+ * // Error Handling
+ * async.each(withMissingFileList, deleteFile)
+ * .then( () => {
+ * console.log('All files have been deleted successfully');
+ * }).catch( err => {
+ * console.log(err);
+ * // [ Error: ENOENT: no such file or directory ]
+ * // since dir4/file2.txt does not exist
+ * // dir1/file1.txt could have been deleted
+ * });
+ *
+ * // Using async/await
 * async () => {
 * try {
- * let results = await async.auto({
- * get_data: function(callback) {
- * // async code to get some data
- * callback(null, 'data', 'converted to array');
- * },
- * make_folder: function(callback) {
- * // async code to create a directory to store a file in
- * // this is run at the same time as getting the data
- * callback(null, 'folder');
- * },
- * write_file: ['get_data', 'make_folder', function(results, callback) {
- * // once there is some data and the directory exists,
- * // write the data to a file in the directory
- * callback(null, 'filename');
- * }],
- * email_link: ['write_file', function(results, callback) {
- * // once the file is written let's email a link to it...
- * callback(null, {'file':results.write_file, 'email':'user@example.com'});
- * }]
- * });
- * console.log('results = ', results);
- * // results = {
- * // get_data: ['data', 'converted to array']
- * // make_folder; 'folder',
- * // write_file: 'filename'
- * // email_link: { file: 'filename', email: 'user@example.com' }
- * // }
+ * await async.each(fileList, deleteFile);
+ * }
+ * catch (err) {
+ * console.log(err);
+ * }
+ * }
+ *
+ * // Error Handling
+ * async () => {
+ * try {
+ * await async.each(withMissingFileList, deleteFile);
 * }
 * catch (err) {
 * console.log(err);
+ * // [ Error: ENOENT: no such file or directory ]
+ * // since dir4/file2.txt does not exist
+ * // dir1/file1.txt could have been deleted
 * }
 * }
 *
 */
- function auto(tasks, concurrency, callback) {
- if (typeof concurrency !== 'number') {
- // concurrency is optional, shift the args.
- callback = concurrency; - concurrency = null; - } - callback = once(callback || promiseCallback()); - var numTasks = Object.keys(tasks).length; - if (!numTasks) { - return callback(null); - } - if (!concurrency) { - concurrency = numTasks; - } - - var results = {}; - var runningTasks = 0; - var canceled = false; - var hasError = false; - - var listeners = Object.create(null); + function eachLimit$2(coll, iteratee, callback) { + return eachOf$1(coll, _withoutIndex(wrapAsync(iteratee)), callback); + } - var readyTasks = []; + var each = awaitify(eachLimit$2, 3); - // for cycle detection: - var readyToCheck = []; // tasks that have been identified as reachable - // without the possibility of returning to an ancestor task - var uncheckedDependencies = {}; + /** + * The same as [`each`]{@link module:Collections.each} but runs a maximum of `limit` async operations at a time. + * + * @name eachLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.each]{@link module:Collections.each} + * @alias forEachLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The array index is not passed to the iteratee. + * If you need the index, use `eachOfLimit`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ + function eachLimit(coll, limit, iteratee, callback) { + return eachOfLimit$2(limit)(coll, _withoutIndex(wrapAsync(iteratee)), callback); + } + var eachLimit$1 = awaitify(eachLimit, 4); - Object.keys(tasks).forEach(key => { - var task = tasks[key]; - if (!Array.isArray(task)) { - // no dependencies - enqueueTask(key, [task]); - readyToCheck.push(key); - return; - } + /** + * The same as [`each`]{@link module:Collections.each} but runs only a single async operation at a time. + * + * Note, that unlike [`each`]{@link module:Collections.each}, this function applies iteratee to each item + * in series and therefore the iteratee functions will complete in order. - var dependencies = task.slice(0, task.length - 1); - var remainingDependencies = dependencies.length; - if (remainingDependencies === 0) { - enqueueTask(key, task); - readyToCheck.push(key); - return; - } - uncheckedDependencies[key] = remainingDependencies; + * @name eachSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.each]{@link module:Collections.each} + * @alias forEachSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each + * item in `coll`. + * The array index is not passed to the iteratee. + * If you need the index, use `eachOfSeries`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). 
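+ * (a minimal sketch, reusing the `deleteFile` helper from the `each`
+ * example above: `async.eachSeries(fileList, deleteFile, done)` deletes the
+ * files one at a time, in order, stopping at the first error)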
+ * @returns {Promise} a promise, if a callback is omitted + */ + function eachSeries(coll, iteratee, callback) { + return eachLimit$1(coll, 1, iteratee, callback) + } + var eachSeries$1 = awaitify(eachSeries, 3); - dependencies.forEach(dependencyName => { - if (!tasks[dependencyName]) { - throw new Error('async.auto task `' + key + - '` has a non-existent dependency `' + - dependencyName + '` in ' + - dependencies.join(', ')); + /** + * Wrap an async function and ensure it calls its callback on a later tick of + * the event loop. If the function already calls its callback on a next tick, + * no extra deferral is added. This is useful for preventing stack overflows + * (`RangeError: Maximum call stack size exceeded`) and generally keeping + * [Zalgo](http://blog.izs.me/post/59142742143/designing-apis-for-asynchrony) + * contained. ES2017 `async` functions are returned as-is -- they are immune + * to Zalgo's corrupting influences, as they always resolve on a later tick. + * + * @name ensureAsync + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} fn - an async function, one that expects a node-style + * callback as its last argument. + * @returns {AsyncFunction} Returns a wrapped function with the exact same call + * signature as the function passed in. + * @example + * + * function sometimesAsync(arg, callback) { + * if (cache[arg]) { + * return callback(null, cache[arg]); // this would be synchronous!! + * } else { + * doSomeIO(arg, callback); // this IO would be asynchronous + * } + * } + * + * // this has a risk of stack overflows if many results are cached in a row + * async.mapSeries(args, sometimesAsync, done); + * + * // this will defer sometimesAsync's callback if necessary, + * // preventing stack overflows + * async.mapSeries(args, async.ensureAsync(sometimesAsync), done); + */ + function ensureAsync(fn) { + if (isAsync(fn)) return fn; + return function (...args/*, callback*/) { + var callback = args.pop(); + var sync = true; + args.push((...innerArgs) => { + if (sync) { + setImmediate$1(() => callback(...innerArgs)); + } else { + callback(...innerArgs); } - addListener(dependencyName, () => { - remainingDependencies--; - if (remainingDependencies === 0) { - enqueueTask(key, task); - } - }); }); - }); + fn.apply(this, args); + sync = false; + }; + } - checkForDeadlocks(); - processQueue(); - - function enqueueTask(key, task) { - readyTasks.push(() => runTask(key, task)); - } - - function processQueue() { - if (canceled) return - if (readyTasks.length === 0 && runningTasks === 0) { - return callback(null, results); - } - while(readyTasks.length && runningTasks < concurrency) { - var run = readyTasks.shift(); - run(); - } - - } - - function addListener(taskName, fn) { - var taskListeners = listeners[taskName]; - if (!taskListeners) { - taskListeners = listeners[taskName] = []; - } - - taskListeners.push(fn); - } - - function taskComplete(taskName) { - var taskListeners = listeners[taskName] || []; - taskListeners.forEach(fn => fn()); - processQueue(); - } + /** + * Returns `true` if every element in `coll` satisfies an async test. If any + * iteratee call returns `false`, the main `callback` is immediately called. + * + * @name every + * @static + * @memberOf module:Collections + * @method + * @alias all + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in parallel. 
+ * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * const fileList = ['dir1/file1.txt','dir2/file3.txt','dir3/file5.txt']; + * const withMissingFileList = ['file1.txt','file2.txt','file4.txt']; + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.every(fileList, fileExists, function(err, result) { + * console.log(result); + * // true + * // result is true since every file exists + * }); + * + * async.every(withMissingFileList, fileExists, function(err, result) { + * console.log(result); + * // false + * // result is false since NOT every file exists + * }); + * + * // Using Promises + * async.every(fileList, fileExists) + * .then( result => { + * console.log(result); + * // true + * // result is true since every file exists + * }).catch( err => { + * console.log(err); + * }); + * + * async.every(withMissingFileList, fileExists) + * .then( result => { + * console.log(result); + * // false + * // result is false since NOT every file exists + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.every(fileList, fileExists); + * console.log(result); + * // true + * // result is true since every file exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + * async () => { + * try { + * let result = await async.every(withMissingFileList, fileExists); + * console.log(result); + * // false + * // result is false since NOT every file exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ + function every(coll, iteratee, callback) { + return _createTester(bool => !bool, res => !res)(eachOf$1, coll, iteratee, callback) + } + var every$1 = awaitify(every, 3); + /** + * The same as [`every`]{@link module:Collections.every} but runs a maximum of `limit` async operations at a time. + * + * @name everyLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.every]{@link module:Collections.every} + * @alias allLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in parallel. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). 
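+ * (a minimal sketch, reusing the `fileExists` helper from the `every`
+ * example above: `async.everyLimit(fileList, 2, fileExists, done)` runs at
+ * most two existence checks at a time)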
+ * @returns {Promise} a promise, if no callback provided + */ + function everyLimit(coll, limit, iteratee, callback) { + return _createTester(bool => !bool, res => !res)(eachOfLimit$2(limit), coll, iteratee, callback) + } + var everyLimit$1 = awaitify(everyLimit, 4); - function runTask(key, task) { - if (hasError) return; + /** + * The same as [`every`]{@link module:Collections.every} but runs only a single async operation at a time. + * + * @name everySeries + * @static + * @memberOf module:Collections + * @method + * @see [async.every]{@link module:Collections.every} + * @alias allSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in series. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ + function everySeries(coll, iteratee, callback) { + return _createTester(bool => !bool, res => !res)(eachOfSeries$1, coll, iteratee, callback) + } + var everySeries$1 = awaitify(everySeries, 3); - var taskCallback = onlyOnce((err, ...result) => { - runningTasks--; - if (err === false) { - canceled = true; - return - } - if (result.length < 2) { - [result] = result; - } - if (err) { - var safeResults = {}; - Object.keys(results).forEach(rkey => { - safeResults[rkey] = results[rkey]; - }); - safeResults[key] = result; - hasError = true; - listeners = Object.create(null); - if (canceled) return - callback(err, safeResults); - } else { - results[key] = result; - taskComplete(key); - } + function filterArray(eachfn, arr, iteratee, callback) { + var truthValues = new Array(arr.length); + eachfn(arr, (x, index, iterCb) => { + iteratee(x, (err, v) => { + truthValues[index] = !!v; + iterCb(err); }); - - runningTasks++; - var taskFn = wrapAsync(task[task.length - 1]); - if (task.length > 1) { - taskFn(results, taskCallback); - } else { - taskFn(taskCallback); - } - } - - function checkForDeadlocks() { - // Kahn's algorithm - // https://en.wikipedia.org/wiki/Topological_sorting#Kahn.27s_algorithm - // http://connalle.blogspot.com/2013/10/topological-sortingkahn-algorithm.html - var currentTask; - var counter = 0; - while (readyToCheck.length) { - currentTask = readyToCheck.pop(); - counter++; - getDependents(currentTask).forEach(dependent => { - if (--uncheckedDependencies[dependent] === 0) { - readyToCheck.push(dependent); - } - }); - } - - if (counter !== numTasks) { - throw new Error( - 'async.auto cannot execute tasks due to a recursive dependency' - ); + }, err => { + if (err) return callback(err); + var results = []; + for (var i = 0; i < arr.length; i++) { + if (truthValues[i]) results.push(arr[i]); } - } + callback(null, results); + }); + } - function getDependents(taskName) { - var result = []; - Object.keys(tasks).forEach(key => { - const task = tasks[key]; - if (Array.isArray(task) && task.indexOf(taskName) >= 0) { - result.push(key); + function filterGeneric(eachfn, coll, iteratee, callback) { + var results = []; + eachfn(coll, (x, index, iterCb) => { + iteratee(x, (err, v) => { + if (err) return iterCb(err); + if (v) { + results.push({index, value: x}); } + iterCb(err); }); - return result; - } - - 
return callback[PROMISE_SYMBOL] + }, err => { + if (err) return callback(err); + callback(null, results + .sort((a, b) => a.index - b.index) + .map(v => v.value)); + }); } - var FN_ARGS = /^(?:async\s+)?(?:function)?\s*\w*\s*\(\s*([^)]+)\s*\)(?:\s*{)/; - var ARROW_FN_ARGS = /^(?:async\s+)?\(?\s*([^)=]+)\s*\)?(?:\s*=>)/; - var FN_ARG_SPLIT = /,/; - var FN_ARG = /(=.+)?(\s*)$/; - - function stripComments(string) { - let stripped = ''; - let index = 0; - let endBlockComment = string.indexOf('*/'); - while (index < string.length) { - if (string[index] === '/' && string[index+1] === '/') { - // inline comment - let endIndex = string.indexOf('\n', index); - index = (endIndex === -1) ? string.length : endIndex; - } else if ((endBlockComment !== -1) && (string[index] === '/') && (string[index+1] === '*')) { - // block comment - let endIndex = string.indexOf('*/', index); - if (endIndex !== -1) { - index = endIndex + 2; - endBlockComment = string.indexOf('*/', index); - } else { - stripped += string[index]; - index++; - } - } else { - stripped += string[index]; - index++; - } - } - return stripped; + function _filter(eachfn, coll, iteratee, callback) { + var filter = isArrayLike(coll) ? filterArray : filterGeneric; + return filter(eachfn, coll, wrapAsync(iteratee), callback); } - function parseParams(func) { - const src = stripComments(func.toString()); - let match = src.match(FN_ARGS); - if (!match) { - match = src.match(ARROW_FN_ARGS); - } - if (!match) throw new Error('could not parse args in autoInject\nSource:\n' + src) - let [, args] = match; - return args - .replace(/\s/g, '') - .split(FN_ARG_SPLIT) - .map((arg) => arg.replace(FN_ARG, '').trim()); + /** + * Returns a new array of all the values in `coll` which pass an async truth + * test. This operation is performed in parallel, but the results array will be + * in the same order as the original. + * + * @name filter + * @static + * @memberOf module:Collections + * @method + * @alias select + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). 
+ * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * + * const files = ['dir1/file1.txt','dir2/file3.txt','dir3/file6.txt']; + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.filter(files, fileExists, function(err, results) { + * if(err) { + * console.log(err); + * } else { + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * } + * }); + * + * // Using Promises + * async.filter(files, fileExists) + * .then(results => { + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.filter(files, fileExists); + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ + function filter (coll, iteratee, callback) { + return _filter(eachOf$1, coll, iteratee, callback) } + var filter$1 = awaitify(filter, 3); /** - * A dependency-injected version of the [async.auto]{@link module:ControlFlow.auto} function. Dependent - * tasks are specified as parameters to the function, after the usual callback - * parameter, with the parameter names matching the names of the tasks it - * depends on. This can provide even more readable task graphs which can be - * easier to maintain. + * The same as [`filter`]{@link module:Collections.filter} but runs a maximum of `limit` async operations at a + * time. * - * If a final callback is specified, the task results are similarly injected, - * specified as named parameters after the initial error parameter. + * @name filterLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @alias selectLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). + * @returns {Promise} a promise, if no callback provided + */ + function filterLimit (coll, limit, iteratee, callback) { + return _filter(eachOfLimit$2(limit), coll, iteratee, callback) + } + var filterLimit$1 = awaitify(filterLimit, 4); + + /** + * The same as [`filter`]{@link module:Collections.filter} but runs only a single async operation at a time. * - * The autoInject function is purely syntactic sugar and its semantics are - * otherwise equivalent to [async.auto]{@link module:ControlFlow.auto}. 
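+ * (a minimal sketch, reusing the `fileExists` helper from the `filter`
+ * example above: `async.filterSeries(files, fileExists, done)` checks the
+ * files one at a time and calls back with only the ones that exist)
+ *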
+ * @name filterSeries
+ * @static
+ * @memberOf module:Collections
+ * @method
+ * @see [async.filter]{@link module:Collections.filter}
+ * @alias selectSeries
+ * @category Collection
+ * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
+ * @param {Function} iteratee - A truth test to apply to each item in `coll`.
+ * The `iteratee` is passed a `callback(err, truthValue)`, which must be called
+ * with a boolean argument once it has completed. Invoked with (item, callback).
+ * @param {Function} [callback] - A callback which is called after all the
+ * `iteratee` functions have finished. Invoked with (err, results)
+ * @returns {Promise} a promise, if no callback provided
+ */
+ function filterSeries (coll, iteratee, callback) {
+ return _filter(eachOfSeries$1, coll, iteratee, callback)
+ }
+ var filterSeries$1 = awaitify(filterSeries, 3);
+
+ /**
+ * Calls the asynchronous function `fn` with a callback parameter that allows it
+ * to call itself again, in series, indefinitely.
+
+ * If an error is passed to the callback then `errback` is called with the
+ * error, and execution stops, otherwise it will never be called.
 *
- * @name autoInject
+ * @name forever
 * @static
 * @memberOf module:ControlFlow
 * @method
- * @see [async.auto]{@link module:ControlFlow.auto}
 * @category Control Flow
- * @param {Object} tasks - An object, each of whose properties is an {@link AsyncFunction} of
- * the form 'func([dependencies...], callback). The object's key of a property
- * serves as the name of the task defined by that property, i.e. can be used
- * when specifying requirements for other tasks.
- * * The `callback` parameter is a `callback(err, result)` which must be called
- * when finished, passing an `error` (which can be `null`) and the result of
- * the function's execution. The remaining parameters name other tasks on
- * which the task is dependent, and the results from those tasks are the
- * arguments of those parameters.
- * @param {Function} [callback] - An optional callback which is called when all
- * the tasks have been completed. It receives the `err` argument if any `tasks`
- * pass an error to their callback, and a `results` object with any completed
- * task results, similar to `auto`.
- * @returns {Promise} a promise, if no callback is passed
+ * @param {AsyncFunction} fn - an async function to call repeatedly.
+ * Invoked with (next).
+ * @param {Function} [errback] - when `fn` passes an error to its callback,
+ * this function will be called, and execution stops. Invoked with (err).
+ * @returns {Promise} a promise that rejects if an error occurs and an errback
+ * is not passed
 * @example
 *
- * // The example from `auto` can be rewritten as follows:
- * async.autoInject({
- * get_data: function(callback) {
- * // async code to get some data
- * callback(null, 'data', 'converted to array');
- * },
- * make_folder: function(callback) {
- * // async code to create a directory to store a file in
- * // this is run at the same time as getting the data
- * callback(null, 'folder');
- * },
- * write_file: function(get_data, make_folder, callback) {
- * // once there is some data and the directory exists,
- * // write the data to a file in the directory
- * callback(null, 'filename');
+ * async.forever(
+ * function(next) {
+ * // next is suitable for passing to things that need a callback(err [, whatever]);
+ * // it will result in this function being called again.
* }, - * email_link: function(write_file, callback) { - * // once the file is written let's email a link to it... - * // write_file contains the filename returned by write_file. - * callback(null, {'file':write_file, 'email':'user@example.com'}); + * function(err) { + * // if next is called with a value in its first parameter, it will appear + * // in here as 'err', and execution will stop. * } - * }, function(err, results) { - * console.log('err = ', err); - * console.log('email_link = ', results.email_link); - * }); - * - * // If you are using a JS minifier that mangles parameter names, `autoInject` - * // will not work with plain functions, since the parameter names will be - * // collapsed to a single letter identifier. To work around this, you can - * // explicitly specify the names of the parameters your task function needs - * // in an array, similar to Angular.js dependency injection. - * - * // This still has an advantage over plain `auto`, since the results a task - * // depends on are still spread into arguments. - * async.autoInject({ - * //... - * write_file: ['get_data', 'make_folder', function(get_data, make_folder, callback) { - * callback(null, 'filename'); - * }], - * email_link: ['write_file', function(write_file, callback) { - * callback(null, {'file':write_file, 'email':'user@example.com'}); - * }] - * //... - * }, function(err, results) { - * console.log('err = ', err); - * console.log('email_link = ', results.email_link); - * }); + * ); */ - function autoInject(tasks, callback) { - var newTasks = {}; - - Object.keys(tasks).forEach(key => { - var taskFn = tasks[key]; - var params; - var fnIsAsync = isAsync(taskFn); - var hasNoDeps = - (!fnIsAsync && taskFn.length === 1) || - (fnIsAsync && taskFn.length === 0); - - if (Array.isArray(taskFn)) { - params = [...taskFn]; - taskFn = params.pop(); - - newTasks[key] = params.concat(params.length > 0 ? newTask : taskFn); - } else if (hasNoDeps) { - // no dependencies, use the function as-is - newTasks[key] = taskFn; - } else { - params = parseParams(taskFn); - if ((taskFn.length === 0 && !fnIsAsync) && params.length === 0) { - throw new Error("autoInject task functions require explicit parameters."); - } - - // remove callback param - if (!fnIsAsync) params.pop(); - - newTasks[key] = params.concat(newTask); - } - - function newTask(results, taskCb) { - var newArgs = params.map(name => results[name]); - newArgs.push(taskCb); - wrapAsync(taskFn)(...newArgs); - } - }); - - return auto(newTasks, callback); - } - - // Simple doubly linked list (https://en.wikipedia.org/wiki/Doubly_linked_list) implementation - // used for queues. This implementation assumes that the node provided by the user can be modified - // to adjust the next and last properties. We implement only the minimal functionality - // for queue support. 
- class DLL { - constructor() { - this.head = this.tail = null; - this.length = 0; - } - - removeLink(node) { - if (node.prev) node.prev.next = node.next; - else this.head = node.next; - if (node.next) node.next.prev = node.prev; - else this.tail = node.prev; - - node.prev = node.next = null; - this.length -= 1; - return node; - } - - empty () { - while(this.head) this.shift(); - return this; - } - - insertAfter(node, newNode) { - newNode.prev = node; - newNode.next = node.next; - if (node.next) node.next.prev = newNode; - else this.tail = newNode; - node.next = newNode; - this.length += 1; - } - - insertBefore(node, newNode) { - newNode.prev = node.prev; - newNode.next = node; - if (node.prev) node.prev.next = newNode; - else this.head = newNode; - node.prev = newNode; - this.length += 1; - } - - unshift(node) { - if (this.head) this.insertBefore(this.head, node); - else setInitial(this, node); - } - - push(node) { - if (this.tail) this.insertAfter(this.tail, node); - else setInitial(this, node); - } - - shift() { - return this.head && this.removeLink(this.head); - } - - pop() { - return this.tail && this.removeLink(this.tail); - } - - toArray() { - return [...this] - } - - *[Symbol.iterator] () { - var cur = this.head; - while (cur) { - yield cur.data; - cur = cur.next; - } - } + function forever(fn, errback) { + var done = onlyOnce(errback); + var task = wrapAsync(ensureAsync(fn)); - remove (testFn) { - var curr = this.head; - while(curr) { - var {next} = curr; - if (testFn(curr)) { - this.removeLink(curr); - } - curr = next; - } - return this; + function next(err) { + if (err) return done(err); + if (err === false) return; + task(next); } + return next(); } + var forever$1 = awaitify(forever, 2); - function setInitial(dll, node) { - dll.length = 1; - dll.head = dll.tail = node; - } - - function queue$1(worker, concurrency, payload) { - if (concurrency == null) { - concurrency = 1; - } - else if(concurrency === 0) { - throw new RangeError('Concurrency must not be zero'); - } - - var _worker = wrapAsync(worker); - var numRunning = 0; - var workersList = []; - const events = { - error: [], - drain: [], - saturated: [], - unsaturated: [], - empty: [] - }; - - function on (event, handler) { - events[event].push(handler); - } - - function once (event, handler) { - const handleAndRemove = (...args) => { - off(event, handleAndRemove); - handler(...args); - }; - events[event].push(handleAndRemove); - } - - function off (event, handler) { - if (!event) return Object.keys(events).forEach(ev => events[ev] = []) - if (!handler) return events[event] = [] - events[event] = events[event].filter(ev => ev !== handler); - } - - function trigger (event, ...args) { - events[event].forEach(handler => handler(...args)); - } - - var processingScheduled = false; - function _insert(data, insertAtFront, rejectOnError, callback) { - if (callback != null && typeof callback !== 'function') { - throw new Error('task callback must be a function'); - } - q.started = true; - - var res, rej; - function promiseCallback (err, ...args) { - // we don't care about the error, let the global error handler - // deal with it - if (err) return rejectOnError ? rej(err) : res() - if (args.length <= 1) return res(args[0]) - res(args); - } - - var item = q._createTaskItem( - data, - rejectOnError ? 
promiseCallback : - (callback || promiseCallback) - ); - - if (insertAtFront) { - q._tasks.unshift(item); - } else { - q._tasks.push(item); - } - - if (!processingScheduled) { - processingScheduled = true; - setImmediate$1(() => { - processingScheduled = false; - q.process(); - }); - } - - if (rejectOnError || !callback) { - return new Promise((resolve, reject) => { - res = resolve; - rej = reject; - }) - } - } - - function _createCB(tasks) { - return function (err, ...args) { - numRunning -= 1; - - for (var i = 0, l = tasks.length; i < l; i++) { - var task = tasks[i]; - - var index = workersList.indexOf(task); - if (index === 0) { - workersList.shift(); - } else if (index > 0) { - workersList.splice(index, 1); - } + /** + * The same as [`groupBy`]{@link module:Collections.groupBy} but runs a maximum of `limit` async operations at a time. + * + * @name groupByLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.groupBy]{@link module:Collections.groupBy} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a `key` to group the value under. + * Invoked with (value, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Result is an `Object` whose + * properties are arrays of values which returned the corresponding key. + * @returns {Promise} a promise, if no callback is passed + */ + function groupByLimit(coll, limit, iteratee, callback) { + var _iteratee = wrapAsync(iteratee); + return mapLimit$1(coll, limit, (val, iterCb) => { + _iteratee(val, (err, key) => { + if (err) return iterCb(err); + return iterCb(err, {key, val}); + }); + }, (err, mapResults) => { + var result = {}; + // from MDN, handle object having a `hasOwnProperty` prop + var {hasOwnProperty} = Object.prototype; - task.callback(err, ...args); + for (var i = 0; i < mapResults.length; i++) { + if (mapResults[i]) { + var {key} = mapResults[i]; + var {val} = mapResults[i]; - if (err != null) { - trigger('error', err, task.data); + if (hasOwnProperty.call(result, key)) { + result[key].push(val); + } else { + result[key] = [val]; } } - - if (numRunning <= (q.concurrency - q.buffer) ) { - trigger('unsaturated'); - } - - if (q.idle()) { - trigger('drain'); - } - q.process(); - }; - } - - function _maybeDrain(data) { - if (data.length === 0 && q.idle()) { - // call drain immediately if there are no tasks - setImmediate$1(() => trigger('drain')); - return true - } - return false - } - - const eventMethod = (name) => (handler) => { - if (!handler) { - return new Promise((resolve, reject) => { - once(name, (err, data) => { - if (err) return reject(err) - resolve(data); - }); - }) } - off(name); - on(name, handler); - - }; - - var isProcessing = false; - var q = { - _tasks: new DLL(), - _createTaskItem (data, callback) { - return { - data, - callback - }; - }, - *[Symbol.iterator] () { - yield* q._tasks[Symbol.iterator](); - }, - concurrency, - payload, - buffer: concurrency / 4, - started: false, - paused: false, - push (data, callback) { - if (Array.isArray(data)) { - if (_maybeDrain(data)) return - return data.map(datum => _insert(datum, false, false, callback)) - } - return _insert(data, false, false, callback); - }, - pushAsync (data, callback) { - if
(Array.isArray(data)) { - if (_maybeDrain(data)) return - return data.map(datum => _insert(datum, false, true, callback)) - } - return _insert(data, false, true, callback); - }, - kill () { - off(); - q._tasks.empty(); - }, - unshift (data, callback) { - if (Array.isArray(data)) { - if (_maybeDrain(data)) return - return data.map(datum => _insert(datum, true, false, callback)) - } - return _insert(data, true, false, callback); - }, - unshiftAsync (data, callback) { - if (Array.isArray(data)) { - if (_maybeDrain(data)) return - return data.map(datum => _insert(datum, true, true, callback)) - } - return _insert(data, true, true, callback); - }, - remove (testFn) { - q._tasks.remove(testFn); - }, - process () { - // Avoid trying to start too many processing operations. This can occur - // when callbacks resolve synchronously (#1267). - if (isProcessing) { - return; - } - isProcessing = true; - while(!q.paused && numRunning < q.concurrency && q._tasks.length){ - var tasks = [], data = []; - var l = q._tasks.length; - if (q.payload) l = Math.min(l, q.payload); - for (var i = 0; i < l; i++) { - var node = q._tasks.shift(); - tasks.push(node); - workersList.push(node); - data.push(node.data); - } - - numRunning += 1; - - if (q._tasks.length === 0) { - trigger('empty'); - } - if (numRunning === q.concurrency) { - trigger('saturated'); - } - - var cb = onlyOnce(_createCB(tasks)); - _worker(data, cb); - } - isProcessing = false; - }, - length () { - return q._tasks.length; - }, - running () { - return numRunning; - }, - workersList () { - return workersList; - }, - idle() { - return q._tasks.length + numRunning === 0; - }, - pause () { - q.paused = true; - }, - resume () { - if (q.paused === false) { return; } - q.paused = false; - setImmediate$1(q.process); - } - }; - // define these as fixed properties, so people get useful errors when updating - Object.defineProperties(q, { - saturated: { - writable: false, - value: eventMethod('saturated') - }, - unsaturated: { - writable: false, - value: eventMethod('unsaturated') - }, - empty: { - writable: false, - value: eventMethod('empty') - }, - drain: { - writable: false, - value: eventMethod('drain') - }, - error: { - writable: false, - value: eventMethod('error') - }, + return callback(err, result); }); - return q; } + var groupByLimit$1 = awaitify(groupByLimit, 4); + /** - * Creates a `cargo` object with the specified payload. Tasks added to the - * cargo will be processed altogether (up to the `payload` limit). If the - * `worker` is in progress, the task is queued until it becomes available. Once - * the `worker` has completed some tasks, each callback of those tasks is - * called. Check out [these](https://camo.githubusercontent.com/6bbd36f4cf5b35a0f11a96dcd2e97711ffc2fb37/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130382f62626330636662302d356632392d313165322d393734662d3333393763363464633835382e676966) [animations](https://camo.githubusercontent.com/f4810e00e1c5f5f8addbe3e9f49064fd5d102699/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130312f38346339323036362d356632392d313165322d383134662d3964336430323431336266642e676966) - * for how `cargo` and `queue` work. + * Returns a new object, where each value corresponds to an array of items, from + * `coll`, that returned the corresponding key. That is, the keys of the object + * correspond to the values passed to the `iteratee` callback. 
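// A short sketch of `groupByLimit` as implemented above, assuming the bundled build matches the upstream `async` API; the numeric input is made up:
const async = require('async');
async.groupByLimit([1, 2, 3, 4, 5], 2, (n, cb) => {
    // classify each number, with at most two classifications in flight at a time
    setImmediate(cb, null, n % 2 === 0 ? 'even' : 'odd');
}, (err, result) => {
    console.log(result); // { odd: [ 1, 3, 5 ], even: [ 2, 4 ] }
});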
* - * While [`queue`]{@link module:ControlFlow.queue} passes only one task to one of a group of workers - * at a time, cargo passes an array of tasks to a single worker, repeating - * when the worker is finished. + * Note: Since this function applies the `iteratee` to each item in parallel, + * there is no guarantee that the `iteratee` functions will complete in order. + * However, the values for each key in the `result` will be in the same order as + * the original `coll`. For Objects, the values will roughly be in the order of + * the original Objects' keys (but this can vary across JavaScript engines). * - * @name cargo + * @name groupBy * @static - * @memberOf module:ControlFlow + * @memberOf module:Collections * @method - * @see [async.queue]{@link module:ControlFlow.queue} - * @category Control Flow - * @param {AsyncFunction} worker - An asynchronous function for processing an array - * of queued tasks. Invoked with `(tasks, callback)`. - * @param {number} [payload=Infinity] - An optional `integer` for determining - * how many tasks should be processed per round; if omitted, the default is - * unlimited. - * @returns {module:ControlFlow.QueueObject} A cargo object to manage the tasks. Callbacks can - * attached as certain properties to listen for specific events during the - * lifecycle of the cargo and inner queue. + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a `key` to group the value under. + * Invoked with (value, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Result is an `Object` whose + * properties are arrays of values which returned the corresponding key. + * @returns {Promise} a promise, if no callback is passed * @example * - * // create a cargo object with payload 2 - * var cargo = async.cargo(function(tasks, callback) { - * for (var i=0; i<tasks.length; i++) { - * console.log('hello ' + tasks[i].name); - * } - * callback(); - * }, 2); + * + * // Using Promises + * async.groupBy(files, detectFile) + * .then( result => { + * console.log(result); + * // { + * // file: [ 'dir1/file1.txt' ], + * // none: [ 'dir4' ], + * // directory: [ 'dir2'] + * // } + * // result is object containing the files grouped by type + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.groupBy(files, detectFile); + * console.log(result); + * // { + * // file: [ 'dir1/file1.txt' ], + * // none: [ 'dir4' ], + * // directory: [ 'dir2'] + * // } + * // result is object containing the files grouped by type + * } + * catch (err) { + * console.log(err); + * } + * } + * */ - function cargo$1(worker, payload) { - return queue$1(worker, 1, payload); + function groupBy (coll, iteratee, callback) { + return groupByLimit$1(coll, Infinity, iteratee, callback) } /** - * Creates a `cargoQueue` object with the specified payload. Tasks added to the - * cargoQueue will be processed together (up to the `payload` limit) in `concurrency` parallel workers. - * If the all `workers` are in progress, the task is queued until one becomes available. Once - * a `worker` has completed some tasks, each callback of those tasks is - * called.
Check out [these](https://camo.githubusercontent.com/6bbd36f4cf5b35a0f11a96dcd2e97711ffc2fb37/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130382f62626330636662302d356632392d313165322d393734662d3333393763363464633835382e676966) [animations](https://camo.githubusercontent.com/f4810e00e1c5f5f8addbe3e9f49064fd5d102699/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130312f38346339323036362d356632392d313165322d383134662d3964336430323431336266642e676966) - * for how `cargo` and `queue` work. + * The same as [`groupBy`]{@link module:Collections.groupBy} but runs only a single async operation at a time. * - * While [`queue`]{@link module:ControlFlow.queue} passes only one task to one of a group of workers - * at a time, and [`cargo`]{@link module:ControlFlow.cargo} passes an array of tasks to a single worker, - * the cargoQueue passes an array of tasks to multiple parallel workers. + * @name groupBySeries + * @static + * @memberOf module:Collections + * @method + * @see [async.groupBy]{@link module:Collections.groupBy} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a `key` to group the value under. + * Invoked with (value, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Result is an `Object` whose + * properties are arrays of values which returned the corresponding key. + * @returns {Promise} a promise, if no callback is passed + */ + function groupBySeries (coll, iteratee, callback) { + return groupByLimit$1(coll, 1, iteratee, callback) + } + + /** + * Logs the result of an `async` function to the `console`. Only works in + * Node.js or in browsers that support `console.log` and `console.error` (such + * as FF and Chrome). If multiple arguments are returned from the async + * function, `console.log` is called on each argument in order. * - * @name cargoQueue + * @name log * @static - * @memberOf module:ControlFlow + * @memberOf module:Utils * @method - * @see [async.queue]{@link module:ControlFlow.queue} - * @see [async.cargo]{@link module:ControlFLow.cargo} - * @category Control Flow - * @param {AsyncFunction} worker - An asynchronous function for processing an array - * of queued tasks. Invoked with `(tasks, callback)`. - * @param {number} [concurrency=1] - An `integer` for determining how many - * `worker` functions should be run in parallel. If omitted, the concurrency - * defaults to `1`. If the concurrency is `0`, an error is thrown. - * @param {number} [payload=Infinity] - An optional `integer` for determining - * how many tasks should be processed per round; if omitted, the default is - * unlimited. - * @returns {module:ControlFlow.QueueObject} A cargoQueue object to manage the tasks. Callbacks can - * attached as certain properties to listen for specific events during the - * lifecycle of the cargoQueue and inner queue. + * @category Util + * @param {AsyncFunction} function - The function you want to eventually apply + * all arguments to. + * @param {...*} arguments... - Any number of arguments to apply to the function. 
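// A one-at-a-time variant: a sketch of `groupBySeries` from above under the same assumptions (hypothetical input):
const async = require('async');
async.groupBySeries([1, 2, 3], (n, cb) => setImmediate(cb, null, n % 2 === 0 ? 'even' : 'odd'),
    (err, result) => console.log(result)); // { odd: [ 1, 3 ], even: [ 2 ] }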
* @example * - * // create a cargoQueue object with payload 2 and concurrency 2 - * var cargoQueue = async.cargoQueue(function(tasks, callback) { - * for (var i=0; i<tasks.length; i++) { - * console.log('hello ' + tasks[i].name); - * } - * callback(); - * }, 2, 2); + * // in a module + * var hello = function(name, callback) { + * setTimeout(function() { + * callback(null, 'hello ' + name); + * }, 1000); + * }; + * + * // in the node repl + * node> async.log(hello, 'world'); + * 'hello world' */ - function cargo(worker, concurrency, payload) { - return queue$1(worker, concurrency, payload); + var log = consoleFunc('log'); + + /** + * The same as [`mapValues`]{@link module:Collections.mapValues} but runs a maximum of `limit` async operations at a + * time. + * + * @name mapValuesLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.mapValues]{@link module:Collections.mapValues} + * @category Collection + * @param {Object} obj - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A function to apply to each value and key + * in `coll`. + * The iteratee should complete with the transformed value as its result. + * Invoked with (value, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. `result` is a new object consisting + * of each key from `obj`, with each transformed value on the right-hand side. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback is passed + */ + function mapValuesLimit(obj, limit, iteratee, callback) { + callback = once(callback); + var newObj = {}; + var _iteratee = wrapAsync(iteratee); + return eachOfLimit$2(limit)(obj, (val, key, next) => { + _iteratee(val, key, (err, result) => { + if (err) return next(err); + newObj[key] = result; + next(err); + }); + }, err => callback(err, newObj)); } + var mapValuesLimit$1 = awaitify(mapValuesLimit, 4); + /** - * Reduces `coll` into a single value using an async `iteratee` to return each - * successive step. `memo` is the initial state of the reduction. This function - * only operates in series. + * A relative of [`map`]{@link module:Collections.map}, designed for use with objects. * - * For performance reasons, it may make sense to split a call to this function - * into a parallel map, and then use the normal `Array.prototype.reduce` on the - * results. This function is for situations where each step in the reduction - * needs to be async; if you can get the data before reducing it, then it's - * probably a good idea to do so. + * Produces a new Object by mapping each value of `obj` through the `iteratee` + * function. The `iteratee` is called with each `value` and `key` from `obj` and a + * callback for when it has finished processing. Each of these callbacks takes + * two arguments: an `error`, and the transformed item from `obj`. If `iteratee` + * passes an error to its callback, the main `callback` (for the `mapValues` + * function) is immediately called with the error. * - * @name reduce + * Note: the order of the keys in the result is not guaranteed. The keys will + * be roughly in the order they complete (but this is very engine-specific). + * + * @name mapValues * @static * @memberOf module:Collections * @method - * @alias inject - * @alias foldl * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {*} memo - The initial state of the reduction. - * @param {AsyncFunction} iteratee - A function applied to each item in the - * array to produce the next step in the reduction. - * The `iteratee` should complete with the next state of the reduction.
- * If the iteratee completes with an error, the reduction is stopped and the - * main `callback` is immediately called with the error. - * Invoked with (memo, item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Result is the reduced value. Invoked with - * (err, result). + * @param {Object} obj - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each value and key + * in `coll`. + * The iteratee should complete with the transformed value as its result. + * Invoked with (value, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. `result` is a new object consisting + * of each key from `obj`, with each transformed value on the right-hand side. + * Invoked with (err, result). * @returns {Promise} a promise, if no callback is passed * @example * @@ -51027,33 +43925,45 @@ module.exports = Zip; * // file3.txt is a file that is 3000 bytes in size * // file4.txt does not exist * - * const fileList = ['file1.txt','file2.txt','file3.txt']; - * const withMissingFileList = ['file1.txt','file2.txt','file3.txt', 'file4.txt']; + * const fileMap = { + * f1: 'file1.txt', + * f2: 'file2.txt', + * f3: 'file3.txt' + * }; * - * // asynchronous function that computes the file size in bytes - * // file size is added to the memoized value, then returned - * function getFileSizeInBytes(memo, file, callback) { + * const withMissingFileMap = { + * f1: 'file1.txt', + * f2: 'file2.txt', + * f3: 'file4.txt' + * }; + * + * // asynchronous function that returns the file size in bytes + * function getFileSizeInBytes(file, key, callback) { * fs.stat(file, function(err, stat) { * if (err) { * return callback(err); * } - * callback(null, memo + stat.size); + * callback(null, stat.size); * }); * } * * // Using callbacks - * async.reduce(fileList, 0, getFileSizeInBytes, function(err, result) { + * async.mapValues(fileMap, getFileSizeInBytes, function(err, result) { * if (err) { * console.log(err); * } else { * console.log(result); - * // 6000 - * // which is the sum of the file sizes of the three files + * // result is now a map of file size in bytes for each file, e.g. + * // { + * // f1: 1000, + * // f2: 2000, + * // f3: 3000 + * // } * } * }); * - * // Error Handling - * async.reduce(withMissingFileList, 0, getFileSizeInBytes, function(err, result) { + * // Error handling + * async.mapValues(withMissingFileMap, getFileSizeInBytes, function(err, result) { * if (err) { * console.log(err); * // [ Error: ENOENT: no such file or directory ] @@ -51063,20 +43973,24 @@ module.exports = Zip; * }); * * // Using Promises - * async.reduce(fileList, 0, getFileSizeInBytes) + * async.mapValues(fileMap, getFileSizeInBytes) * .then( result => { * console.log(result); - * // 6000 - * // which is the sum of the file sizes of the three files - * }).catch( err => { + * // result is now a map of file size in bytes for each file, e.g. 
+ * // { + * // f1: 1000, + * // f2: 2000, + * // f3: 3000 + * // } + * }).catch (err => { * console.log(err); * }); * * // Error Handling - * async.reduce(withMissingFileList, 0, getFileSizeInBytes) + * async.mapValues(withMissingFileMap, getFileSizeInBytes) * .then( result => { * console.log(result); - * }).catch( err => { + * }).catch (err => { * console.log(err); * // [ Error: ENOENT: no such file or directory ] * }); @@ -51084,10 +43998,14 @@ module.exports = Zip; * // Using async/await * async () => { * try { - * let result = await async.reduce(fileList, 0, getFileSizeInBytes); + * let result = await async.mapValues(fileMap, getFileSizeInBytes); * console.log(result); - * // 6000 - * // which is the sum of the file sizes of the three files + * // result is now a map of file size in bytes for each file, e.g. + * // { + * // f1: 1000, + * // f2: 2000, + * // f3: 3000 + * // } * } * catch (err) { * console.log(err); @@ -51097,7 +44015,7 @@ module.exports = Zip; * // Error Handling * async () => { * try { - * let result = await async.reduce(withMissingFileList, 0, getFileSizeInBytes); + * let result = await async.mapValues(withMissingFileMap, getFileSizeInBytes); * console.log(result); * } * catch (err) { @@ -51107,1068 +44025,929 @@ module.exports = Zip; * } * */ - function reduce(coll, memo, iteratee, callback) { - callback = once(callback); - var _iteratee = wrapAsync(iteratee); - return eachOfSeries$1(coll, (x, i, iterCb) => { - _iteratee(memo, x, (err, v) => { - memo = v; - iterCb(err); - }); - }, err => callback(err, memo)); + function mapValues(obj, iteratee, callback) { + return mapValuesLimit$1(obj, Infinity, iteratee, callback) } - var reduce$1 = awaitify(reduce, 4); /** - * Version of the compose function that is more natural to read. Each function - * consumes the return value of the previous function. It is the equivalent of - * [compose]{@link module:ControlFlow.compose} with the arguments reversed. - * - * Each function is executed with the `this` binding of the composed function. + * The same as [`mapValues`]{@link module:Collections.mapValues} but runs only a single async operation at a time. * - * @name seq + * @name mapValuesSeries * @static - * @memberOf module:ControlFlow + * @memberOf module:Collections * @method - * @see [async.compose]{@link module:ControlFlow.compose} - * @category Control Flow - * @param {...AsyncFunction} functions - the asynchronous functions to compose - * @returns {Function} a function that composes the `functions` in order - * @example - * - * // Requires lodash (or underscore), express3 and dresende's orm2. - * // Part of an app, that fetches cats of the logged user. - * // This example uses `seq` function to avoid overnesting and error - * // handling clutter. - * app.get('/cats', function(request, response) { - * var User = request.models.User; - * async.seq( - * User.get.bind(User), // 'User.get' has signature (id, callback(err, data)) - * function(user, fn) { - * user.getCats(fn); // 'getCats' has signature (callback(err, data)) - * } - * )(req.session.user_id, function (err, cats) { - * if (err) { - * console.error(err); - * response.json({ status: 'error', message: err.message }); - * } else { - * response.json({ status: 'ok', message: 'Cats found', data: cats }); - * } - * }); - * }); + * @see [async.mapValues]{@link module:Collections.mapValues} + * @category Collection + * @param {Object} obj - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each value and key + * in `coll`. 
+ * The iteratee should complete with the transformed value as its result. + * Invoked with (value, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. `result` is a new object consisting + * of each key from `obj`, with each transformed value on the right-hand side. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback is passed */ - function seq(...functions) { - var _functions = functions.map(wrapAsync); - return function (...args) { - var that = this; - - var cb = args[args.length - 1]; - if (typeof cb == 'function') { - args.pop(); - } else { - cb = promiseCallback(); - } - - reduce$1(_functions, args, (newargs, fn, iterCb) => { - fn.apply(that, newargs.concat((err, ...nextargs) => { - iterCb(err, nextargs); - })); - }, - (err, results) => cb(err, ...results)); - - return cb[PROMISE_SYMBOL] - }; + function mapValuesSeries(obj, iteratee, callback) { + return mapValuesLimit$1(obj, 1, iteratee, callback) } /** - * Creates a function which is a composition of the passed asynchronous - * functions. Each function consumes the return value of the function that - * follows. Composing functions `f()`, `g()`, and `h()` would produce the result - * of `f(g(h()))`, only this version uses callbacks to obtain the return values. + * Caches the results of an async function. When creating a hash to store + * function results against, the callback is omitted from the hash and an + * optional hash function can be used. * - * If the last argument to the composed function is not a function, a promise - * is returned when you call it. + * **Note: if the async function errs, the result will not be cached and + * subsequent calls will call the wrapped function.** * - * Each function is executed with the `this` binding of the composed function. + * If no hash function is specified, the first argument is used as a hash key, + * which may work reasonably if it is a string or a data type that converts to a + * distinct string. Note that objects and arrays will not behave reasonably. + * Neither will cases where the other arguments are significant. In such cases, + * specify your own hash function. * - * @name compose + * The cache of results is exposed as the `memo` property of the function + * returned by `memoize`. + * + * @name memoize * @static - * @memberOf module:ControlFlow + * @memberOf module:Utils * @method - * @category Control Flow - * @param {...AsyncFunction} functions - the asynchronous functions to compose - * @returns {Function} an asynchronous function that is the composed - * asynchronous `functions` + * @category Util + * @param {AsyncFunction} fn - The async function to proxy and cache results from. + * @param {Function} hasher - An optional function for generating a custom hash + * for storing results. It has all the arguments applied to it apart from the + * callback, and must be synchronous. 
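// A sketch of `memoize` with the custom hasher described above, assuming the upstream `async` API; `fetchRecord` is hypothetical. Hashing both arguments avoids collisions when the first argument alone is not a distinct key:
const async = require('async');
function fetchRecord(table, id, callback) {
    setTimeout(() => callback(null, {table, id}), 100); // stand-in for a slow lookup
}
const cachedFetch = async.memoize(fetchRecord, (table, id) => `${table}:${id}`);
cachedFetch('users', 42, (err, rec) => {
    // a repeat call with the same arguments is answered from cachedFetch.memo
    cachedFetch('users', 42, (err2, again) => console.log(rec === again)); // true
});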
+ * @returns {AsyncFunction} a memoized version of `fn` * @example * - * function add1(n, callback) { - * setTimeout(function () { - * callback(null, n + 1); - * }, 10); - * } - * - * function mul3(n, callback) { - * setTimeout(function () { - * callback(null, n * 3); - * }, 10); - * } + * var slow_fn = function(name, callback) { + * // do something + * callback(null, result); + * }; + * var fn = async.memoize(slow_fn); * - * var add1mul3 = async.compose(mul3, add1); - * add1mul3(4, function (err, result) { - * // result now equals 15 + * // fn can now be used as if it were slow_fn + * fn('some name', function() { + * // callback * }); */ - function compose(...args) { - return seq(...args.reverse()); + function memoize(fn, hasher = v => v) { + var memo = Object.create(null); + var queues = Object.create(null); + var _fn = wrapAsync(fn); + var memoized = initialParams((args, callback) => { + var key = hasher(...args); + if (key in memo) { + setImmediate$1(() => callback(null, ...memo[key])); + } else if (key in queues) { + queues[key].push(callback); + } else { + queues[key] = [callback]; + _fn(...args, (err, ...resultArgs) => { + // #1465 don't memoize if an error occurred + if (!err) { + memo[key] = resultArgs; + } + var q = queues[key]; + delete queues[key]; + for (var i = 0, l = q.length; i < l; i++) { + q[i](err, ...resultArgs); + } + }); + } + }); + memoized.memo = memo; + memoized.unmemoized = fn; + return memoized; } + /* istanbul ignore file */ + /** - * The same as [`map`]{@link module:Collections.map} but runs a maximum of `limit` async operations at a time. + * Calls `callback` on a later loop around the event loop. In Node.js this just + * calls `process.nextTick`. In the browser it will use `setImmediate` if + * available, otherwise `setTimeout(callback, 0)`, which means other higher + * priority events may precede the execution of `callback`. * - * @name mapLimit + * This is used internally for browser-compatibility purposes. + * + * @name nextTick * @static - * @memberOf module:Collections + * @memberOf module:Utils * @method - * @see [async.map]{@link module:Collections.map} - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The iteratee should complete with the transformed item. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. Results is an array of the - * transformed items from the `coll`. Invoked with (err, results). - * @returns {Promise} a promise, if no callback is passed + * @see [async.setImmediate]{@link module:Utils.setImmediate} + * @category Util + * @param {Function} callback - The function to call on a later loop around + * the event loop. Invoked with (args...). + * @param {...*} args... - any number of additional arguments to pass to the + * callback on the next tick. 
+ * @example + * + * var call_order = []; + * async.nextTick(function() { + * call_order.push('two'); + * // call_order now equals ['one','two'] + * }); + * call_order.push('one'); + * + * async.setImmediate(function (a, b, c) { + * // a, b, and c equal 1, 2, and 3 + * }, 1, 2, 3); */ - function mapLimit (coll, limit, iteratee, callback) { - return _asyncMap(eachOfLimit$2(limit), coll, iteratee, callback) - } - var mapLimit$1 = awaitify(mapLimit, 4); + var _defer; - /** - * The same as [`concat`]{@link module:Collections.concat} but runs a maximum of `limit` async operations at a time. - * - * @name concatLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.concat]{@link module:Collections.concat} - * @category Collection - * @alias flatMapLimit - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`, - * which should use an array as its result. Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished, or an error occurs. Results is an array - * containing the concatenated results of the `iteratee` function. Invoked with - * (err, results). - * @returns A Promise, if no callback is passed - */ - function concatLimit(coll, limit, iteratee, callback) { - var _iteratee = wrapAsync(iteratee); - return mapLimit$1(coll, limit, (val, iterCb) => { - _iteratee(val, (err, ...args) => { - if (err) return iterCb(err); - return iterCb(err, args); - }); - }, (err, mapResults) => { - var result = []; - for (var i = 0; i < mapResults.length; i++) { - if (mapResults[i]) { - result = result.concat(...mapResults[i]); - } - } - - return callback(err, result); - }); + if (hasNextTick) { + _defer = process.nextTick; + } else if (hasSetImmediate) { + _defer = setImmediate; + } else { + _defer = fallback; } - var concatLimit$1 = awaitify(concatLimit, 4); + + var nextTick = wrap(_defer); + + var _parallel = awaitify((eachfn, tasks, callback) => { + var results = isArrayLike(tasks) ? [] : {}; + + eachfn(tasks, (task, key, taskCb) => { + wrapAsync(task)((err, ...result) => { + if (result.length < 2) { + [result] = result; + } + results[key] = result; + taskCb(err); + }); + }, err => callback(err, results)); + }, 3); /** - * Applies `iteratee` to each item in `coll`, concatenating the results. Returns - * the concatenated list. The `iteratee`s are called in parallel, and the - * results are concatenated as they return. The results array will be returned in - * the original order of `coll` passed to the `iteratee` function. + * Run the `tasks` collection of functions in parallel, without waiting until + * the previous function has completed. If any of the functions pass an error to + * its callback, the main `callback` is immediately called with the value of the + * error. Once the `tasks` have completed, the results are passed to the final + * `callback` as an array. * - * @name concat + * **Note:** `parallel` is about kicking-off I/O tasks in parallel, not about + * parallel execution of code. If your tasks do not use any timers or perform + * any I/O, they will actually be executed in series. Any synchronous setup + * sections for each task will happen one after the other. JavaScript remains + * single-threaded. 
+ * + * **Hint:** Use [`reflect`]{@link module:Utils.reflect} to continue the + * execution of other tasks when a task fails. + * + * It is also possible to use an object instead of an array. Each property will + * be run as a function and the results will be passed to the final `callback` + * as an object instead of an array. This can be a more readable way of handling + * results from {@link async.parallel}. + * + * @name parallel * @static - * @memberOf module:Collections + * @memberOf module:ControlFlow * @method - * @category Collection - * @alias flatMap - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`, - * which should use an array as its result. Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished, or an error occurs. Results is an array - * containing the concatenated results of the `iteratee` function. Invoked with - * (err, results). - * @returns A Promise, if no callback is passed - * @example - * - * // dir1 is a directory that contains file1.txt, file2.txt - * // dir2 is a directory that contains file3.txt, file4.txt - * // dir3 is a directory that contains file5.txt - * // dir4 does not exist + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection of + * [async functions]{@link AsyncFunction} to run. + * Each async function can complete with any number of optional `result` values. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed successfully. This function gets a results array + * (or object) containing all the result arguments passed to the task callbacks. + * Invoked with (err, results). + * @returns {Promise} a promise, if a callback is not passed * - * let directoryList = ['dir1','dir2','dir3']; - * let withMissingDirectoryList = ['dir1','dir2','dir3', 'dir4']; + * @example * - * // Using callbacks - * async.concat(directoryList, fs.readdir, function(err, results) { - * if (err) { - * console.log(err); - * } else { - * console.log(results); - * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] - * } + * //Using Callbacks + * async.parallel([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ], function(err, results) { + * console.log(results); + * // results is equal to ['one','two'] even though + * // the second function had a shorter timeout. 
* }); * - * // Error Handling - * async.concat(withMissingDirectoryList, fs.readdir, function(err, results) { - * if (err) { - * console.log(err); - * // [ Error: ENOENT: no such file or directory ] - * // since dir4 does not exist - * } else { - * console.log(results); - * } + * // an example using an object instead of an array + * async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * callback(null, 2); + * }, 100); + * } + * }, function(err, results) { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } * }); * - * // Using Promises - * async.concat(directoryList, fs.readdir) - * .then(results => { + * //Using Promises + * async.parallel([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]).then(results => { * console.log(results); - * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] + * // results is equal to ['one','two'] even though + * // the second function had a shorter timeout. * }).catch(err => { - * console.log(err); + * console.log(err); * }); * - * // Error Handling - * async.concat(withMissingDirectoryList, fs.readdir) - * .then(results => { + * // an example using an object instead of an array + * async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * callback(null, 2); + * }, 100); + * } + * }).then(results => { * console.log(results); + * // results is equal to: { one: 1, two: 2 } * }).catch(err => { * console.log(err); - * // [ Error: ENOENT: no such file or directory ] - * // since dir4 does not exist * }); * - * // Using async/await + * //Using async/await * async () => { * try { - * let results = await async.concat(directoryList, fs.readdir); + * let results = await async.parallel([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]); * console.log(results); - * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] - * } catch (err) { + * // results is equal to ['one','two'] even though + * // the second function had a shorter timeout. 
+ * } + * catch (err) { * console.log(err); * } * } * - * // Error Handling + * // an example using an object instead of an array * async () => { * try { - * let results = await async.concat(withMissingDirectoryList, fs.readdir); + * let results = await async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * callback(null, 2); + * }, 100); + * } + * }); * console.log(results); - * } catch (err) { + * // results is equal to: { one: 1, two: 2 } + * } + * catch (err) { * console.log(err); - * // [ Error: ENOENT: no such file or directory ] - * // since dir4 does not exist * } * } * */ - function concat(coll, iteratee, callback) { - return concatLimit$1(coll, Infinity, iteratee, callback) + function parallel(tasks, callback) { + return _parallel(eachOf$1, tasks, callback); } - var concat$1 = awaitify(concat, 3); /** - * The same as [`concat`]{@link module:Collections.concat} but runs only a single async operation at a time. + * The same as [`parallel`]{@link module:ControlFlow.parallel} but runs a maximum of `limit` async operations at a + * time. * - * @name concatSeries + * @name parallelLimit * @static - * @memberOf module:Collections + * @memberOf module:ControlFlow * @method - * @see [async.concat]{@link module:Collections.concat} - * @category Collection - * @alias flatMapSeries - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`. - * The iteratee should complete with an array an array of results. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished, or an error occurs. Results is an array - * containing the concatenated results of the `iteratee` function. Invoked with - * (err, results). - * @returns A Promise, if no callback is passed + * @see [async.parallel]{@link module:ControlFlow.parallel} + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection of + * [async functions]{@link AsyncFunction} to run. + * Each async function can complete with any number of optional `result` values. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed successfully. This function gets a results array + * (or object) containing all the result arguments passed to the task callbacks. + * Invoked with (err, results). + * @returns {Promise} a promise, if a callback is not passed */ - function concatSeries(coll, iteratee, callback) { - return concatLimit$1(coll, 1, iteratee, callback) + function parallelLimit(tasks, limit, callback) { + return _parallel(eachOfLimit$2(limit), tasks, callback); } - var concatSeries$1 = awaitify(concatSeries, 3); /** - * Returns a function that when called, calls-back with the values provided. - * Useful as the first function in a [`waterfall`]{@link module:ControlFlow.waterfall}, or for plugging values in to - * [`auto`]{@link module:ControlFlow.auto}. + * A queue of tasks for the worker function to complete. + * @typedef {Iterable} QueueObject + * @memberOf module:ControlFlow + * @property {Function} length - a function returning the number of items + * waiting to be processed. Invoke with `queue.length()`. 
+ * @property {boolean} started - a boolean indicating whether or not any + * items have been pushed and processed by the queue. + * @property {Function} running - a function returning the number of items + * currently being processed. Invoke with `queue.running()`. + * @property {Function} workersList - a function returning the array of items + * currently being processed. Invoke with `queue.workersList()`. + * @property {Function} idle - a function returning false if there are items + * waiting or being processed, or true if not. Invoke with `queue.idle()`. + * @property {number} concurrency - an integer for determining how many `worker` + * functions should be run in parallel. This property can be changed after a + * `queue` is created to alter the concurrency on-the-fly. + * @property {number} payload - an integer that specifies how many items are + * passed to the worker function at a time. Only applies if this is a + * [cargo]{@link module:ControlFlow.cargo} object. + * @property {AsyncFunction} push - add a new task to the `queue`. Calls `callback` + * once the `worker` has finished processing the task. Instead of a single task, + * a `tasks` array can be submitted. The respective callback is used for every + * task in the list. Invoke with `queue.push(task, [callback])`. + * @property {AsyncFunction} unshift - add a new task to the front of the `queue`. + * Invoke with `queue.unshift(task, [callback])`. + * @property {AsyncFunction} pushAsync - the same as `q.push`, except this returns + * a promise that rejects if an error occurs. + * @property {AsyncFunction} unshiftAsync - the same as `q.unshift`, except this returns + * a promise that rejects if an error occurs. + * @property {Function} remove - remove items from the queue that match a test + * function. The test function will be passed an object with a `data` property, + * and a `priority` property, if this is a + * [priorityQueue]{@link module:ControlFlow.priorityQueue} object. + * Invoked with `queue.remove(testFn)`, where `testFn` is of the form + * `function ({data, priority}) {}` and returns a Boolean. + * @property {Function} saturated - a function that sets a callback that is + * called when the number of running workers hits the `concurrency` limit, and + * further tasks will be queued. If the callback is omitted, `q.saturated()` + * returns a promise for the next occurrence. + * @property {Function} unsaturated - a function that sets a callback that is + * called when the number of running workers is less than the `concurrency` & + * `buffer` limits, and further tasks will not be queued. If the callback is + * omitted, `q.unsaturated()` returns a promise for the next occurrence. + * @property {number} buffer - A minimum threshold buffer in order to say that + * the `queue` is `unsaturated`. + * @property {Function} empty - a function that sets a callback that is called + * when the last item from the `queue` is given to a `worker`. If the callback + * is omitted, `q.empty()` returns a promise for the next occurrence. + * @property {Function} drain - a function that sets a callback that is called + * when the last item from the `queue` has returned from the `worker`. If the + * callback is omitted, `q.drain()` returns a promise for the next occurrence. + * @property {Function} error - a function that sets a callback that is called + * when a task errors. Has the signature `function(error, task)`. If the + * callback is omitted, `error()` returns a promise that rejects on the next + * error.
+ * @property {boolean} paused - a boolean for determining whether the queue is + * in a paused state. + * @property {Function} pause - a function that pauses the processing of tasks + * until `resume()` is called. Invoke with `queue.pause()`. + * @property {Function} resume - a function that resumes the processing of + * queued tasks when the queue is paused. Invoke with `queue.resume()`. + * @property {Function} kill - a function that removes the `drain` callback and + * empties remaining tasks from the queue forcing it to go idle. No more tasks + * should be pushed to the queue after calling this function. Invoke with `queue.kill()`. * - * @name constant - * @static - * @memberOf module:Utils - * @method - * @category Util - * @param {...*} arguments... - Any number of arguments to automatically invoke - * callback with. - * @returns {AsyncFunction} Returns a function that when invoked, automatically - * invokes the callback with the previous given arguments. * @example + * const q = async.queue(worker, 2) + * q.push(item1) + * q.push(item2) + * q.push(item3) + * // queues are iterable, spread into an array to inspect + * const items = [...q] // [item1, item2, item3] + * // or use for of + * for (let item of q) { + * console.log(item) + * } * - * async.waterfall([ - * async.constant(42), - * function (value, next) { - * // value === 42 - * }, - * //... - * ], callback); - * - * async.waterfall([ - * async.constant(filename, "utf8"), - * fs.readFile, - * function (fileData, next) { - * //... - * } - * //... - * ], callback); - * - * async.auto({ - * hostname: async.constant("https://server.net/"), - * port: findFreePort, - * launchServer: ["hostname", "port", function (options, cb) { - * startServer(options, cb); - * }], - * //... - * }, callback); + * q.drain(() => { + * console.log('all done') + * }) + * // or + * await q.drain() */ - function constant$1(...args) { - return function (...ignoredArgs/*, callback*/) { - var callback = ignoredArgs.pop(); - return callback(null, ...args); - }; - } - - function _createTester(check, getResult) { - return (eachfn, arr, _iteratee, cb) => { - var testPassed = false; - var testResult; - const iteratee = wrapAsync(_iteratee); - eachfn(arr, (value, _, callback) => { - iteratee(value, (err, result) => { - if (err || err === false) return callback(err); - - if (check(result) && !testResult) { - testPassed = true; - testResult = getResult(true, value); - return callback(null, breakLoop$1); - } - callback(); - }); - }, err => { - if (err) return cb(err); - cb(null, testPassed ? testResult : getResult(false)); - }); - }; - } /** - * Returns the first value in `coll` that passes an async truth test. The - * `iteratee` is applied in parallel, meaning the first iteratee to return - * `true` will fire the detect `callback` with that result. That means the - * result might not be the first item in the original `coll` (in terms of order) - * that passes the test. - - * If order within the original `coll` is important, then look at - * [`detectSeries`]{@link module:Collections.detectSeries}. + * Creates a `queue` object with the specified `concurrency`. Tasks added to the + * `queue` are processed in parallel (up to the `concurrency` limit). If all + * `worker`s are in progress, the task is queued until one becomes available. + * Once a `worker` completes a `task`, that `task`'s callback is called. 
* - * @name detect + * @name queue * @static - * @memberOf module:Collections + * @memberOf module:ControlFlow * @method - * @alias find - * @category Collections - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. - * The iteratee must complete with a boolean value as its result. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called as soon as any - * iteratee returns `true`, or after all the `iteratee` functions have finished. - * Result will be the first item in the array that passes the truth test - * (iteratee) or the value `undefined` if none passed. Invoked with - * (err, result). - * @returns {Promise} a promise, if a callback is omitted + * @category Control Flow + * @param {AsyncFunction} worker - An async function for processing a queued task. + * If you want to handle errors from an individual task, pass a callback to + * `q.push()`. Invoked with (task, callback). + * @param {number} [concurrency=1] - An `integer` for determining how many + * `worker` functions should be run in parallel. If omitted, the concurrency + * defaults to `1`. If the concurrency is `0`, an error is thrown. + * @returns {module:ControlFlow.QueueObject} A queue object to manage the tasks. Callbacks can be + * attached as certain properties to listen for specific events during the + * lifecycle of the queue. * @example * - * // dir1 is a directory that contains file1.txt, file2.txt - * // dir2 is a directory that contains file3.txt, file4.txt - * // dir3 is a directory that contains file5.txt + * // create a queue object with concurrency 2 + * var q = async.queue(function(task, callback) { + * console.log('hello ' + task.name); + * callback(); + * }, 2); * - * // asynchronous function that checks if a file exists - * function fileExists(file, callback) { - * fs.access(file, fs.constants.F_OK, (err) => { - * callback(null, !err); - * }); - * } + * // assign a callback + * q.drain(function() { + * console.log('all items have been processed'); + * }); + * // or await the end + * await q.drain() * - * async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists, - * function(err, result) { - * console.log(result); - * // dir1/file1.txt - * // result now equals the first file in the list that exists - * } - *); + * // assign an error callback + * q.error(function(err, task) { + * console.error('task experienced an error'); + * }); * - * // Using Promises - * async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists) - * .then(result => { - * console.log(result); - * // dir1/file1.txt - * // result now equals the first file in the list that exists - * }).catch(err => { - * console.log(err); + * // add some items to the queue + * q.push({name: 'foo'}, function(err) { + * console.log('finished processing foo'); * }); + * // callback is optional + * q.push({name: 'bar'}); * - * // Using async/await - * async () => { - * try { - * let result = await async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists); - * console.log(result); - * // dir1/file1.txt - * // result now equals the file in the list that exists - * } - * catch (err) { - * console.log(err); - * } - * } + * // add some items to the queue (batch-wise) + * q.push([{name: 'baz'},{name: 'bay'},{name: 'bax'}], function(err) { + * console.log('finished processing item'); + * }); * + * // add some items to the front of the queue + * q.unshift({name: 'bar'}, function (err) 
{ + * console.log('finished processing bar'); + * }); */ - function detect(coll, iteratee, callback) { - return _createTester(bool => bool, (res, item) => item)(eachOf$1, coll, iteratee, callback) + function queue (worker, concurrency) { + var _worker = wrapAsync(worker); + return queue$1((items, cb) => { + _worker(items[0], cb); + }, concurrency, 1); } - var detect$1 = awaitify(detect, 3); - /** - * The same as [`detect`]{@link module:Collections.detect} but runs a maximum of `limit` async operations at a - * time. - * - * @name detectLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.detect]{@link module:Collections.detect} - * @alias findLimit - * @category Collections - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. - * The iteratee must complete with a boolean value as its result. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called as soon as any - * iteratee returns `true`, or after all the `iteratee` functions have finished. - * Result will be the first item in the array that passes the truth test - * (iteratee) or the value `undefined` if none passed. Invoked with - * (err, result). - * @returns {Promise} a promise, if a callback is omitted - */ - function detectLimit(coll, limit, iteratee, callback) { - return _createTester(bool => bool, (res, item) => item)(eachOfLimit$2(limit), coll, iteratee, callback) - } - var detectLimit$1 = awaitify(detectLimit, 4); + // Binary min-heap implementation used for priority queue. + // Implementation is stable, i.e. push time is considered for equal priorities + class Heap { + constructor() { + this.heap = []; + this.pushCount = Number.MIN_SAFE_INTEGER; + } - /** - * The same as [`detect`]{@link module:Collections.detect} but runs only a single async operation at a time. - * - * @name detectSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.detect]{@link module:Collections.detect} - * @alias findSeries - * @category Collections - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. - * The iteratee must complete with a boolean value as its result. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called as soon as any - * iteratee returns `true`, or after all the `iteratee` functions have finished. - * Result will be the first item in the array that passes the truth test - * (iteratee) or the value `undefined` if none passed. Invoked with - * (err, result). 
- * @returns {Promise} a promise, if a callback is omitted - */ - function detectSeries(coll, iteratee, callback) { - return _createTester(bool => bool, (res, item) => item)(eachOfLimit$2(1), coll, iteratee, callback) - } + get length() { + return this.heap.length; + } - var detectSeries$1 = awaitify(detectSeries, 3); + empty () { + this.heap = []; + return this; + } - function consoleFunc(name) { - return (fn, ...args) => wrapAsync(fn)(...args, (err, ...resultArgs) => { - /* istanbul ignore else */ - if (typeof console === 'object') { - /* istanbul ignore else */ - if (err) { - /* istanbul ignore else */ - if (console.error) { - console.error(err); - } - } else if (console[name]) { /* istanbul ignore else */ - resultArgs.forEach(x => console[name](x)); + percUp(index) { + let p; + + while (index > 0 && smaller(this.heap[index], this.heap[p=parent(index)])) { + let t = this.heap[index]; + this.heap[index] = this.heap[p]; + this.heap[p] = t; + + index = p; + } + } + + percDown(index) { + let l; + + while ((l=leftChi(index)) < this.heap.length) { + if (l+1 < this.heap.length && smaller(this.heap[l+1], this.heap[l])) { + l = l+1; + } + + if (smaller(this.heap[index], this.heap[l])) { + break; } + + let t = this.heap[index]; + this.heap[index] = this.heap[l]; + this.heap[l] = t; + + index = l; } - }) - } + } - /** - * Logs the result of an [`async` function]{@link AsyncFunction} to the - * `console` using `console.dir` to display the properties of the resulting object. - * Only works in Node.js or in browsers that support `console.dir` and - * `console.error` (such as FF and Chrome). - * If multiple arguments are returned from the async function, - * `console.dir` is called on each argument in order. - * - * @name dir - * @static - * @memberOf module:Utils - * @method - * @category Util - * @param {AsyncFunction} function - The function you want to eventually apply - * all arguments to. - * @param {...*} arguments... - Any number of arguments to apply to the function. - * @example - * - * // in a module - * var hello = function(name, callback) { - * setTimeout(function() { - * callback(null, {hello: name}); - * }, 1000); - * }; - * - * // in the node repl - * node> async.dir(hello, 'world'); - * {hello: 'world'} - */ - var dir = consoleFunc('dir'); + push(node) { + node.pushCount = ++this.pushCount; + this.heap.push(node); + this.percUp(this.heap.length-1); + } - /** - * The post-check version of [`whilst`]{@link module:ControlFlow.whilst}. To reflect the difference in - * the order of operations, the arguments `test` and `iteratee` are switched. - * - * `doWhilst` is to `whilst` as `do while` is to `while` in plain JavaScript. - * - * @name doWhilst - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.whilst]{@link module:ControlFlow.whilst} - * @category Control Flow - * @param {AsyncFunction} iteratee - A function which is called each time `test` - * passes. Invoked with (callback). - * @param {AsyncFunction} test - asynchronous truth test to perform after each - * execution of `iteratee`. Invoked with (...args, callback), where `...args` are the - * non-error args from the previous callback of `iteratee`. - * @param {Function} [callback] - A callback which is called after the test - * function has failed and repeated execution of `iteratee` has stopped. - * `callback` will be passed an error and any arguments passed to the final - * `iteratee`'s callback. 
Invoked with (err, [results]); - * @returns {Promise} a promise, if no callback is passed - */ - function doWhilst(iteratee, test, callback) { - callback = onlyOnce(callback); - var _fn = wrapAsync(iteratee); - var _test = wrapAsync(test); - var results; + unshift(node) { + return this.heap.push(node); + } - function next(err, ...args) { - if (err) return callback(err); - if (err === false) return; - results = args; - _test(...args, check); + shift() { + let [top] = this.heap; + + this.heap[0] = this.heap[this.heap.length-1]; + this.heap.pop(); + this.percDown(0); + + return top; } - function check(err, truth) { - if (err) return callback(err); - if (err === false) return; - if (!truth) return callback(null, ...results); - _fn(next); + toArray() { + return [...this]; } - return check(null, true); + *[Symbol.iterator] () { + for (let i = 0; i < this.heap.length; i++) { + yield this.heap[i].data; + } + } + + remove (testFn) { + let j = 0; + for (let i = 0; i < this.heap.length; i++) { + if (!testFn(this.heap[i])) { + this.heap[j] = this.heap[i]; + j++; + } + } + + this.heap.splice(j); + + for (let i = parent(this.heap.length-1); i >= 0; i--) { + this.percDown(i); + } + + return this; + } } - var doWhilst$1 = awaitify(doWhilst, 3); + function leftChi(i) { + return (i<<1)+1; + } + + function parent(i) { + return ((i+1)>>1)-1; + } + + function smaller(x, y) { + if (x.priority !== y.priority) { + return x.priority < y.priority; + } + else { + return x.pushCount < y.pushCount; + } + } /** - * Like ['doWhilst']{@link module:ControlFlow.doWhilst}, except the `test` is inverted. Note the - * argument ordering differs from `until`. + * The same as [async.queue]{@link module:ControlFlow.queue} only tasks are assigned a priority and + * completed in ascending priority order. * - * @name doUntil + * @name priorityQueue * @static * @memberOf module:ControlFlow * @method - * @see [async.doWhilst]{@link module:ControlFlow.doWhilst} + * @see [async.queue]{@link module:ControlFlow.queue} * @category Control Flow - * @param {AsyncFunction} iteratee - An async function which is called each time - * `test` fails. Invoked with (callback). - * @param {AsyncFunction} test - asynchronous truth test to perform after each - * execution of `iteratee`. Invoked with (...args, callback), where `...args` are the - * non-error args from the previous callback of `iteratee` - * @param {Function} [callback] - A callback which is called after the test - * function has passed and repeated execution of `iteratee` has stopped. `callback` - * will be passed an error and any arguments passed to the final `iteratee`'s - * callback. Invoked with (err, [results]); - * @returns {Promise} a promise, if no callback is passed + * @param {AsyncFunction} worker - An async function for processing a queued task. + * If you want to handle errors from an individual task, pass a callback to + * `q.push()`. + * Invoked with (task, callback). + * @param {number} concurrency - An `integer` for determining how many `worker` + * functions should be run in parallel. If omitted, the concurrency defaults to + * `1`. If the concurrency is `0`, an error is thrown. + * @returns {module:ControlFlow.QueueObject} A priorityQueue object to manage the tasks. There are three + * differences between `queue` and `priorityQueue` objects: + * * `push(task, priority, [callback])` - `priority` should be a number. If an + * array of `tasks` is given, all tasks will be assigned the same priority. 
+ * * `pushAsync(task, priority, [callback])` - the same as `priorityQueue.push`, + * except this returns a promise that rejects if an error occurs. + * * The `unshift` and `unshiftAsync` methods were removed. */ - function doUntil(iteratee, test, callback) { - const _test = wrapAsync(test); - return doWhilst$1(iteratee, (...args) => { - const cb = args.pop(); - _test(...args, (err, truth) => cb (err, !truth)); - }, callback); - } + function priorityQueue(worker, concurrency) { + // Start with a normal queue + var q = queue(worker, concurrency); - function _withoutIndex(iteratee) { - return (value, index, callback) => iteratee(value, callback); + var { + push, + pushAsync + } = q; + + q._tasks = new Heap(); + q._createTaskItem = ({data, priority}, callback) => { + return { + data, + priority, + callback + }; + }; + + function createDataItems(tasks, priority) { + if (!Array.isArray(tasks)) { + return {data: tasks, priority}; + } + return tasks.map(data => { return {data, priority}; }); + } + + // Override push to accept second parameter representing priority + q.push = function(data, priority = 0, callback) { + return push(createDataItems(data, priority), callback); + }; + + q.pushAsync = function(data, priority = 0, callback) { + return pushAsync(createDataItems(data, priority), callback); + }; + + // Remove unshift functions + delete q.unshift; + delete q.unshiftAsync; + + return q; } /** - * Applies the function `iteratee` to each item in `coll`, in parallel. - * The `iteratee` is called with an item from the list, and a callback for when - * it has finished. If the `iteratee` passes an error to its `callback`, the - * main `callback` (for the `each` function) is immediately called with the - * error. - * - * Note, that since this function applies `iteratee` to each item in parallel, - * there is no guarantee that the iteratee functions will complete in order. + * Runs the `tasks` array of functions in parallel, without waiting until the + * previous function has completed. Once any of the `tasks` complete or pass an + * error to its callback, the main `callback` is immediately called. It's + * equivalent to `Promise.race()`. * - * @name each + * @name race * @static - * @memberOf module:Collections + * @memberOf module:ControlFlow * @method - * @alias forEach - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async function to apply to - * each item in `coll`. Invoked with (item, callback). - * The array index is not passed to the iteratee. - * If you need the index, use `eachOf`. - * @param {Function} [callback] - A callback which is called when all - * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @category Control Flow + * @param {Array} tasks - An array containing [async functions]{@link AsyncFunction} + * to run. Each function can complete with an optional `result` value. + * @param {Function} callback - A callback to run once any of the functions have + * completed. This function gets an error or result from the first function that + * completed. Invoked with (err, result). 
* @returns {Promise} a promise, if a callback is omitted * @example * - * // dir1 is a directory that contains file1.txt, file2.txt - * // dir2 is a directory that contains file3.txt, file4.txt - * // dir3 is a directory that contains file5.txt - * // dir4 does not exist - * - * const fileList = [ 'dir1/file2.txt', 'dir2/file3.txt', 'dir/file5.txt']; - * const withMissingFileList = ['dir1/file1.txt', 'dir4/file2.txt']; - * - * // asynchronous function that deletes a file - * const deleteFile = function(file, callback) { - * fs.unlink(file, callback); - * }; - * - * // Using callbacks - * async.each(fileList, deleteFile, function(err) { - * if( err ) { - * console.log(err); - * } else { - * console.log('All files have been deleted successfully'); + * async.race([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); * } + * ], + * // main callback + * function(err, result) { + * // the result will be equal to 'two' as it finishes earlier * }); - * - * // Error Handling - * async.each(withMissingFileList, deleteFile, function(err){ - * console.log(err); - * // [ Error: ENOENT: no such file or directory ] - * // since dir4/file2.txt does not exist - * // dir1/file1.txt could have been deleted - * }); - * - * // Using Promises - * async.each(fileList, deleteFile) - * .then( () => { - * console.log('All files have been deleted successfully'); - * }).catch( err => { - * console.log(err); - * }); - * - * // Error Handling - * async.each(fileList, deleteFile) - * .then( () => { - * console.log('All files have been deleted successfully'); - * }).catch( err => { - * console.log(err); - * // [ Error: ENOENT: no such file or directory ] - * // since dir4/file2.txt does not exist - * // dir1/file1.txt could have been deleted - * }); - * - * // Using async/await - * async () => { - * try { - * await async.each(files, deleteFile); - * } - * catch (err) { - * console.log(err); - * } - * } - * - * // Error Handling - * async () => { - * try { - * await async.each(withMissingFileList, deleteFile); - * } - * catch (err) { - * console.log(err); - * // [ Error: ENOENT: no such file or directory ] - * // since dir4/file2.txt does not exist - * // dir1/file1.txt could have been deleted - * } - * } - * */ - function eachLimit$2(coll, iteratee, callback) { - return eachOf$1(coll, _withoutIndex(wrapAsync(iteratee)), callback); + function race(tasks, callback) { + callback = once(callback); + if (!Array.isArray(tasks)) return callback(new TypeError('First argument to race must be an array of functions')); + if (!tasks.length) return callback(); + for (var i = 0, l = tasks.length; i < l; i++) { + wrapAsync(tasks[i])(callback); + } } - var each = awaitify(eachLimit$2, 3); + var race$1 = awaitify(race, 2); /** - * The same as [`each`]{@link module:Collections.each} but runs a maximum of `limit` async operations at a time. + * Same as [`reduce`]{@link module:Collections.reduce}, only operates on `array` in reverse order. * - * @name eachLimit + * @name reduceRight * @static * @memberOf module:Collections * @method - * @see [async.each]{@link module:Collections.each} - * @alias forEachLimit + * @see [async.reduce]{@link module:Collections.reduce} + * @alias foldr * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. 
- * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The array index is not passed to the iteratee. - * If you need the index, use `eachOfLimit`. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called when all - * `iteratee` functions have finished, or an error occurs. Invoked with (err). - * @returns {Promise} a promise, if a callback is omitted + * @param {Array} array - A collection to iterate over. + * @param {*} memo - The initial state of the reduction. + * @param {AsyncFunction} iteratee - A function applied to each item in the + * array to produce the next step in the reduction. + * The `iteratee` should complete with the next state of the reduction. + * If the iteratee completes with an error, the reduction is stopped and the + * main `callback` is immediately called with the error. + * Invoked with (memo, item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result is the reduced value. Invoked with + * (err, result). + * @returns {Promise} a promise, if no callback is passed */ - function eachLimit(coll, limit, iteratee, callback) { - return eachOfLimit$2(limit)(coll, _withoutIndex(wrapAsync(iteratee)), callback); + function reduceRight (array, memo, iteratee, callback) { + var reversed = [...array].reverse(); + return reduce$1(reversed, memo, iteratee, callback); } - var eachLimit$1 = awaitify(eachLimit, 4); /** - * The same as [`each`]{@link module:Collections.each} but runs only a single async operation at a time. + * Wraps the async function in another function that always completes with a + * result object, even when it errors. * - * Note, that unlike [`each`]{@link module:Collections.each}, this function applies iteratee to each item - * in series and therefore the iteratee functions will complete in order. - - * @name eachSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.each]{@link module:Collections.each} - * @alias forEachSeries - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async function to apply to each - * item in `coll`. - * The array index is not passed to the iteratee. - * If you need the index, use `eachOfSeries`. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called when all - * `iteratee` functions have finished, or an error occurs. Invoked with (err). - * @returns {Promise} a promise, if a callback is omitted - */ - function eachSeries(coll, iteratee, callback) { - return eachLimit$1(coll, 1, iteratee, callback) - } - var eachSeries$1 = awaitify(eachSeries, 3); - - /** - * Wrap an async function and ensure it calls its callback on a later tick of - * the event loop. If the function already calls its callback on a next tick, - * no extra deferral is added. This is useful for preventing stack overflows - * (`RangeError: Maximum call stack size exceeded`) and generally keeping - * [Zalgo](http://blog.izs.me/post/59142742143/designing-apis-for-asynchrony) - * contained. ES2017 `async` functions are returned as-is -- they are immune - * to Zalgo's corrupting influences, as they always resolve on a later tick. + * The result object has either the property `error` or `value`. 
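+ * For example (editor's sketch, not upstream text): a wrapped task that calls + * `callback(new Error('boom'))` completes with `{error: Error('boom')}`, one that + * calls `callback(null, 'a')` completes with `{value: 'a'}`, and one that calls + * `callback(null, 'a', 'b')` completes with `{value: ['a', 'b']}`, since multiple + * result arguments are collected into an array (see the implementation below).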
* - * @name ensureAsync + * @name reflect * @static * @memberOf module:Utils * @method * @category Util - * @param {AsyncFunction} fn - an async function, one that expects a node-style - * callback as its last argument. - * @returns {AsyncFunction} Returns a wrapped function with the exact same call - * signature as the function passed in. + * @param {AsyncFunction} fn - The async function you want to wrap + * @returns {Function} - A function that always passes null to its callback as + * the error. The second argument to the callback will be an `object` with + * either an `error` or a `value` property. * @example * - * function sometimesAsync(arg, callback) { - * if (cache[arg]) { - * return callback(null, cache[arg]); // this would be synchronous!! - * } else { - * doSomeIO(arg, callback); // this IO would be asynchronous - * } - * } - * - * // this has a risk of stack overflows if many results are cached in a row - * async.mapSeries(args, sometimesAsync, done); - * - * // this will defer sometimesAsync's callback if necessary, - * // preventing stack overflows - * async.mapSeries(args, async.ensureAsync(sometimesAsync), done); + * async.parallel([ + * async.reflect(function(callback) { + * // do some stuff ... + * callback(null, 'one'); + * }), + * async.reflect(function(callback) { + * // do some more stuff but error ... + * callback('bad stuff happened'); + * }), + * async.reflect(function(callback) { + * // do some more stuff ... + * callback(null, 'two'); + * }) + * ], + * // optional callback + * function(err, results) { + * // values + * // results[0].value = 'one' + * // results[1].error = 'bad stuff happened' + * // results[2].value = 'two' + * }); */ - function ensureAsync(fn) { - if (isAsync(fn)) return fn; - return function (...args/*, callback*/) { - var callback = args.pop(); - var sync = true; - args.push((...innerArgs) => { - if (sync) { - setImmediate$1(() => callback(...innerArgs)); - } else { - callback(...innerArgs); + function reflect(fn) { + var _fn = wrapAsync(fn); + return initialParams(function reflectOn(args, reflectCallback) { + args.push((error, ...cbArgs) => { + let retVal = {}; + if (error) { + retVal.error = error; + } + if (cbArgs.length > 0){ + var value = cbArgs; + if (cbArgs.length <= 1) { + [value] = cbArgs; + } + retVal.value = value; } + reflectCallback(null, retVal); }); - fn.apply(this, args); - sync = false; - }; + + return _fn.apply(this, args); + }); } /** - * Returns `true` if every element in `coll` satisfies an async test. If any - * iteratee call returns `false`, the main `callback` is immediately called. + * A helper function that wraps an array or an object of functions with `reflect`. * - * @name every + * @name reflectAll * @static - * @memberOf module:Collections + * @memberOf module:Utils * @method - * @alias all - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async truth test to apply to each item - * in the collection in parallel. - * The iteratee must complete with a boolean result value. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Result will be either `true` or `false` - * depending on the values of the async tests. Invoked with (err, result).
- * @returns {Promise} a promise, if no callback provided + * @see [async.reflect]{@link module:Utils.reflect} + * @category Util + * @param {Array|Object|Iterable} tasks - The collection of + * [async functions]{@link AsyncFunction} to wrap in `async.reflect`. + * @returns {Array} Returns an array of async functions, each wrapped in + * `async.reflect` * @example * - * // dir1 is a directory that contains file1.txt, file2.txt - * // dir2 is a directory that contains file3.txt, file4.txt - * // dir3 is a directory that contains file5.txt - * // dir4 does not exist - * - * const fileList = ['dir1/file1.txt','dir2/file3.txt','dir3/file5.txt']; - * const withMissingFileList = ['file1.txt','file2.txt','file4.txt']; - * - * // asynchronous function that checks if a file exists - * function fileExists(file, callback) { - * fs.access(file, fs.constants.F_OK, (err) => { - * callback(null, !err); - * }); - * } - * - * // Using callbacks - * async.every(fileList, fileExists, function(err, result) { - * console.log(result); - * // true - * // result is true since every file exists - * }); - * - * async.every(withMissingFileList, fileExists, function(err, result) { - * console.log(result); - * // false - * // result is false since NOT every file exists - * }); - * - * // Using Promises - * async.every(fileList, fileExists) - * .then( result => { - * console.log(result); - * // true - * // result is true since every file exists - * }).catch( err => { - * console.log(err); - * }); + * let tasks = [ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * // do some more stuff but error ... + * callback(new Error('bad stuff happened')); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]; * - * async.every(withMissingFileList, fileExists) - * .then( result => { - * console.log(result); - * // false - * // result is false since NOT every file exists - * }).catch( err => { - * console.log(err); + * async.parallel(async.reflectAll(tasks), + * // optional callback + * function(err, results) { + * // values + * // results[0].value = 'one' + * // results[1].error = Error('bad stuff happened') + * // results[2].value = 'two' * }); * - * // Using async/await - * async () => { - * try { - * let result = await async.every(fileList, fileExists); - * console.log(result); - * // true - * // result is true since every file exists - * } - * catch (err) { - * console.log(err); - * } - * } - * - * async () => { - * try { - * let result = await async.every(withMissingFileList, fileExists); - * console.log(result); - * // false - * // result is false since NOT every file exists - * } - * catch (err) { - * console.log(err); + * // an example using an object instead of an array + * let tasks = { + * one: function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * two: function(callback) { + * callback('two'); + * }, + * three: function(callback) { + * setTimeout(function() { + * callback(null, 'three'); + * }, 100); * } - * } - * - */ - function every(coll, iteratee, callback) { - return _createTester(bool => !bool, res => !res)(eachOf$1, coll, iteratee, callback) - } - var every$1 = awaitify(every, 3); - - /** - * The same as [`every`]{@link module:Collections.every} but runs a maximum of `limit` async operations at a time. 
- * - * @name everyLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.every]{@link module:Collections.every} - * @alias allLimit - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - An async truth test to apply to each item - * in the collection in parallel. - * The iteratee must complete with a boolean result value. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Result will be either `true` or `false` - * depending on the values of the async tests. Invoked with (err, result). - * @returns {Promise} a promise, if no callback provided - */ - function everyLimit(coll, limit, iteratee, callback) { - return _createTester(bool => !bool, res => !res)(eachOfLimit$2(limit), coll, iteratee, callback) - } - var everyLimit$1 = awaitify(everyLimit, 4); - - /** - * The same as [`every`]{@link module:Collections.every} but runs only a single async operation at a time. + * }; * - * @name everySeries - * @static - * @memberOf module:Collections - * @method - * @see [async.every]{@link module:Collections.every} - * @alias allSeries - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async truth test to apply to each item - * in the collection in series. - * The iteratee must complete with a boolean result value. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Result will be either `true` or `false` - * depending on the values of the async tests. Invoked with (err, result). 
- * @returns {Promise} a promise, if no callback provided + * async.parallel(async.reflectAll(tasks), + * // optional callback + * function(err, results) { + * // values + * // results.one.value = 'one' + * // results.two.error = 'two' + * // results.three.value = 'three' + * }); */ - function everySeries(coll, iteratee, callback) { - return _createTester(bool => !bool, res => !res)(eachOfSeries$1, coll, iteratee, callback) - } - var everySeries$1 = awaitify(everySeries, 3); - - function filterArray(eachfn, arr, iteratee, callback) { - var truthValues = new Array(arr.length); - eachfn(arr, (x, index, iterCb) => { - iteratee(x, (err, v) => { - truthValues[index] = !!v; - iterCb(err); + function reflectAll(tasks) { + var results; + if (Array.isArray(tasks)) { + results = tasks.map(reflect); + } else { + results = {}; + Object.keys(tasks).forEach(key => { + results[key] = reflect.call(this, tasks[key]); }); - }, err => { - if (err) return callback(err); - var results = []; - for (var i = 0; i < arr.length; i++) { - if (truthValues[i]) results.push(arr[i]); - } - callback(null, results); - }); + } + return results; } - function filterGeneric(eachfn, coll, iteratee, callback) { - var results = []; - eachfn(coll, (x, index, iterCb) => { - iteratee(x, (err, v) => { - if (err) return iterCb(err); - if (v) { - results.push({index, value: x}); - } - iterCb(err); + function reject$2(eachfn, arr, _iteratee, callback) { + const iteratee = wrapAsync(_iteratee); + return _filter(eachfn, arr, (value, cb) => { + iteratee(value, (err, v) => { + cb(err, !v); }); - }, err => { - if (err) return callback(err); - callback(null, results - .sort((a, b) => a.index - b.index) - .map(v => v.value)); - }); - } - - function _filter(eachfn, coll, iteratee, callback) { - var filter = isArrayLike(coll) ? filterArray : filterGeneric; - return filter(eachfn, coll, wrapAsync(iteratee), callback); + }, callback); } /** - * Returns a new array of all the values in `coll` which pass an async truth - * test. This operation is performed in parallel, but the results array will be - * in the same order as the original. + * The opposite of [`filter`]{@link module:Collections.filter}. Removes values that pass an `async` truth test. * - * @name filter + * @name reject * @static * @memberOf module:Collections * @method - * @alias select + * @see [async.filter]{@link module:Collections.filter} * @category Collection * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {Function} iteratee - A truth test to apply to each item in `coll`. - * The `iteratee` is passed a `callback(err, truthValue)`, which must be called - * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} iteratee - An async truth test to apply to each item in + * `coll`. + * The iteratee should complete with a boolean value as its `result`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the * `iteratee` functions have finished. Invoked with (err, results).
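+ * (Editor's note, grounded in the `reject$2` wrapper above: `reject` is `filter` + * with the truth test negated -- the wrapper calls `cb(err, !v)` -- so, for + * example, `async.reject([1, 2, 3], (n, cb) => cb(null, n > 1), (err, r) => console.log(r))` + * logs `[ 1 ]`.)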
- * @returns {Promise} a promise, if no callback provided + * @returns {Promise} a promise, if no callback is passed * @example * * // dir1 is a directory that contains file1.txt, file2.txt * // dir2 is a directory that contains file3.txt, file4.txt * // dir3 is a directory that contains file5.txt * - * const files = ['dir1/file1.txt','dir2/file3.txt','dir3/file6.txt']; + * const fileList = ['dir1/file1.txt','dir2/file3.txt','dir3/file6.txt']; * * // asynchronous function that checks if a file exists * function fileExists(file, callback) { @@ -52178,33 +44957,28 @@ module.exports = Zip; * } * * // Using callbacks - * async.filter(files, fileExists, function(err, results) { - * if(err) { - * console.log(err); - * } else { - * console.log(results); - * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] - * // results is now an array of the existing files - * } + * async.reject(fileList, fileExists, function(err, results) { + * // [ 'dir3/file6.txt' ] + * // results now equals an array of the non-existing files * }); * * // Using Promises - * async.filter(files, fileExists) - * .then(results => { + * async.reject(fileList, fileExists) + * .then( results => { * console.log(results); - * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] - * // results is now an array of the existing files - * }).catch(err => { + * // [ 'dir3/file6.txt' ] + * // results now equals an array of the non-existing files + * }).catch( err => { * console.log(err); * }); * * // Using async/await * async () => { * try { - * let results = await async.filter(files, fileExists); + * let results = await async.reject(fileList, fileExists); * console.log(results); - * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] - * // results is now an array of the existing files + * // [ 'dir3/file6.txt' ] + * // results now equals an array of the non-existing files * } * catch (err) { * console.log(err); @@ -52212,179 +44986,452 @@ module.exports = Zip; * } * */ - function filter (coll, iteratee, callback) { - return _filter(eachOf$1, coll, iteratee, callback) + function reject (coll, iteratee, callback) { + return reject$2(eachOf$1, coll, iteratee, callback) } - var filter$1 = awaitify(filter, 3); + var reject$1 = awaitify(reject, 3); /** - * The same as [`filter`]{@link module:Collections.filter} but runs a maximum of `limit` async operations at a + * The same as [`reject`]{@link module:Collections.reject} but runs a maximum of `limit` async operations at a * time. * - * @name filterLimit + * @name rejectLimit * @static * @memberOf module:Collections * @method - * @see [async.filter]{@link module:Collections.filter} - * @alias selectLimit + * @see [async.reject]{@link module:Collections.reject} * @category Collection * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. * @param {number} limit - The maximum number of async operations at a time. - * @param {Function} iteratee - A truth test to apply to each item in `coll`. - * The `iteratee` is passed a `callback(err, truthValue)`, which must be called - * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} iteratee - An async truth test to apply to each item in + * `coll`. + * The iteratee should complete with a boolean value as its `result`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the * `iteratee` functions have finished. Invoked with (err, results).
- * @returns {Promise} a promise, if no callback provided + * @returns {Promise} a promise, if no callback is passed */ - function filterLimit (coll, limit, iteratee, callback) { - return _filter(eachOfLimit$2(limit), coll, iteratee, callback) + function rejectLimit (coll, limit, iteratee, callback) { + return reject$2(eachOfLimit$2(limit), coll, iteratee, callback) } - var filterLimit$1 = awaitify(filterLimit, 4); + var rejectLimit$1 = awaitify(rejectLimit, 4); /** - * The same as [`filter`]{@link module:Collections.filter} but runs only a single async operation at a time. + * The same as [`reject`]{@link module:Collections.reject} but runs only a single async operation at a time. * - * @name filterSeries + * @name rejectSeries * @static * @memberOf module:Collections * @method - * @see [async.filter]{@link module:Collections.filter} - * @alias selectSeries + * @see [async.reject]{@link module:Collections.reject} * @category Collection * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {Function} iteratee - A truth test to apply to each item in `coll`. - * The `iteratee` is passed a `callback(err, truthValue)`, which must be called - * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} iteratee - An async truth test to apply to each item in + * `coll`. + * The iteratee should complete with a boolean value as its `result`. + * Invoked with (item, callback). * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Invoked with (err, results) - * @returns {Promise} a promise, if no callback provided + * `iteratee` functions have finished. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed */ - function filterSeries (coll, iteratee, callback) { - return _filter(eachOfSeries$1, coll, iteratee, callback) + function rejectSeries (coll, iteratee, callback) { + return reject$2(eachOfSeries$1, coll, iteratee, callback) } - var filterSeries$1 = awaitify(filterSeries, 3); + var rejectSeries$1 = awaitify(rejectSeries, 3); - /** - * Calls the asynchronous function `fn` with a callback parameter that allows it - * to call itself again, in series, indefinitely. + function constant(value) { + return function () { + return value; + } + } - * If an error is passed to the callback then `errback` is called with the - * error, and execution stops, otherwise it will never be called. + /** + * Attempts to get a successful response from `task` no more than `times` times + * before returning an error. If the task is successful, the `callback` will be + * passed the result of the successful task. If all attempts fail, the callback + * will be passed the error and result (if any) of the final attempt. * - * @name forever + * @name retry * @static * @memberOf module:ControlFlow * @method * @category Control Flow - * @param {AsyncFunction} fn - an async function to call repeatedly. - * Invoked with (next). - * @param {Function} [errback] - when `fn` passes an error to it's callback, - * this function will be called, and execution stops. Invoked with (err). - * @returns {Promise} a promise that rejects if an error occurs and an errback - * is not passed + * @see [async.retryable]{@link module:ControlFlow.retryable} + * @param {Object|number} [opts = {times: 5, interval: 0}| 5] - Can be either an + * object with `times` and `interval` or a number. + * * `times` - The number of attempts to make before giving up. The default + * is `5`.
+ * * `interval` - The time to wait between retries, in milliseconds. The + * default is `0`. The interval may also be specified as a function of the + * retry count (see example). + * * `errorFilter` - An optional synchronous function that is invoked on + * erroneous result. If it returns `true` the retry attempts will continue; + * if the function returns `false` the retry flow is aborted with the current + * attempt's error and result being returned to the final callback. + * Invoked with (err). + * * If `opts` is a number, the number specifies the number of times to retry, + * with the default interval of `0`. + * @param {AsyncFunction} task - An async function to retry. + * Invoked with (callback). + * @param {Function} [callback] - An optional callback which is called when the + * task has succeeded, or after the final failed attempt. It receives the `err` + * and `result` arguments of the last attempt at completing the `task`. Invoked + * with (err, results). + * @returns {Promise} a promise if no callback provided + * * @example * - * async.forever( - * function(next) { - * // next is suitable for passing to things that need a callback(err [, whatever]); - * // it will result in this function being called again. - * }, - * function(err) { - * // if next is called with a value in its first parameter, it will appear - * // in here as 'err', and execution will stop. - * } - * ); - */ - function forever(fn, errback) { - var done = onlyOnce(errback); - var task = wrapAsync(ensureAsync(fn)); - - function next(err) { - if (err) return done(err); - if (err === false) return; - task(next); - } - return next(); + * // The `retry` function can be used as a stand-alone control flow by passing + * // a callback, as shown below: + * + * // try calling apiMethod 3 times + * async.retry(3, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod 3 times, waiting 200 ms between each retry + * async.retry({times: 3, interval: 200}, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod 10 times with exponential backoff + * // (i.e. intervals of 100, 200, 400, 800, 1600, ... 
milliseconds) + * async.retry({ + * times: 10, + * interval: function(retryCount) { + * return 50 * Math.pow(2, retryCount); + * } + * }, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod the default 5 times no delay between each retry + * async.retry(apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod only when error condition satisfies, all other + * // errors will abort the retry control flow and return to final callback + * async.retry({ + * errorFilter: function(err) { + * return err.message === 'Temporary error'; // only retry on a specific error + * } + * }, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // to retry individual methods that are not as reliable within other + * // control flow functions, use the `retryable` wrapper: + * async.auto({ + * users: api.getUsers.bind(api), + * payments: async.retryable(3, api.getPayments.bind(api)) + * }, function(err, results) { + * // do something with the results + * }); + * + */ + const DEFAULT_TIMES = 5; + const DEFAULT_INTERVAL = 0; + + function retry(opts, task, callback) { + var options = { + times: DEFAULT_TIMES, + intervalFunc: constant(DEFAULT_INTERVAL) + }; + + if (arguments.length < 3 && typeof opts === 'function') { + callback = task || promiseCallback(); + task = opts; + } else { + parseTimes(options, opts); + callback = callback || promiseCallback(); + } + + if (typeof task !== 'function') { + throw new Error("Invalid arguments for async.retry"); + } + + var _task = wrapAsync(task); + + var attempt = 1; + function retryAttempt() { + _task((err, ...args) => { + if (err === false) return + if (err && attempt++ < options.times && + (typeof options.errorFilter != 'function' || + options.errorFilter(err))) { + setTimeout(retryAttempt, options.intervalFunc(attempt - 1)); + } else { + callback(err, ...args); + } + }); + } + + retryAttempt(); + return callback[PROMISE_SYMBOL] + } + + function parseTimes(acc, t) { + if (typeof t === 'object') { + acc.times = +t.times || DEFAULT_TIMES; + + acc.intervalFunc = typeof t.interval === 'function' ? + t.interval : + constant(+t.interval || DEFAULT_INTERVAL); + + acc.errorFilter = t.errorFilter; + } else if (typeof t === 'number' || typeof t === 'string') { + acc.times = +t || DEFAULT_TIMES; + } else { + throw new Error("Invalid arguments for async.retry"); + } } - var forever$1 = awaitify(forever, 2); /** - * The same as [`groupBy`]{@link module:Collections.groupBy} but runs a maximum of `limit` async operations at a time. + * A close relative of [`retry`]{@link module:ControlFlow.retry}. This method + * wraps a task and makes it retryable, rather than immediately calling it + * with retries. * - * @name groupByLimit + * @name retryable * @static - * @memberOf module:Collections + * @memberOf module:ControlFlow * @method - * @see [async.groupBy]{@link module:Collections.groupBy} - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The iteratee should complete with a `key` to group the value under. - * Invoked with (value, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. 
Result is an `Object` whoses - * properties are arrays of values which returned the corresponding key. - * @returns {Promise} a promise, if no callback is passed + * @see [async.retry]{@link module:ControlFlow.retry} + * @category Control Flow + * @param {Object|number} [opts = {times: 5, interval: 0}| 5] - optional + * options, exactly the same as from `retry`, except for a `opts.arity` that + * is the arity of the `task` function, defaulting to `task.length` + * @param {AsyncFunction} task - the asynchronous function to wrap. + * This function will be passed any arguments passed to the returned wrapper. + * Invoked with (...args, callback). + * @returns {AsyncFunction} The wrapped function, which when invoked, will + * retry on an error, based on the parameters specified in `opts`. + * This function will accept the same parameters as `task`. + * @example + * + * async.auto({ + * dep1: async.retryable(3, getFromFlakyService), + * process: ["dep1", async.retryable(3, function (results, cb) { + * maybeProcessData(results.dep1, cb); + * })] + * }, callback); */ - function groupByLimit(coll, limit, iteratee, callback) { - var _iteratee = wrapAsync(iteratee); - return mapLimit$1(coll, limit, (val, iterCb) => { - _iteratee(val, (err, key) => { - if (err) return iterCb(err); - return iterCb(err, {key, val}); - }); - }, (err, mapResults) => { - var result = {}; - // from MDN, handle object having an `hasOwnProperty` prop - var {hasOwnProperty} = Object.prototype; - - for (var i = 0; i < mapResults.length; i++) { - if (mapResults[i]) { - var {key} = mapResults[i]; - var {val} = mapResults[i]; - - if (hasOwnProperty.call(result, key)) { - result[key].push(val); - } else { - result[key] = [val]; - } - } + function retryable (opts, task) { + if (!task) { + task = opts; + opts = null; + } + let arity = (opts && opts.arity) || task.length; + if (isAsync(task)) { + arity += 1; + } + var _task = wrapAsync(task); + return initialParams((args, callback) => { + if (args.length < arity - 1 || callback == null) { + args.push(callback); + callback = promiseCallback(); + } + function taskFn(cb) { + _task(...args, cb); } - return callback(err, result); + if (opts) retry(opts, taskFn, callback); + else retry(taskFn, callback); + + return callback[PROMISE_SYMBOL] }); } - var groupByLimit$1 = awaitify(groupByLimit, 4); - /** - * Returns a new object, where each value corresponds to an array of items, from - * `coll`, that returned the corresponding key. That is, the keys of the object - * correspond to the values passed to the `iteratee` callback. + * Run the functions in the `tasks` collection in series, each one running once + * the previous function has completed. If any functions in the series pass an + * error to its callback, no more functions are run, and `callback` is + * immediately called with the value of the error. Otherwise, `callback` + * receives an array of results when `tasks` have completed. * - * Note: Since this function applies the `iteratee` to each item in parallel, - * there is no guarantee that the `iteratee` functions will complete in order. - * However, the values for each key in the `result` will be in the same order as - * the original `coll`. For Objects, the values will roughly be in the order of - * the original Objects' keys (but this can vary across JavaScript engines). + * It is also possible to use an object instead of an array. Each property will + * be run as a function, and the results will be passed to the final `callback` + * as an object instead of an array. 
This can be a more readable way of handling + * results from {@link async.series}. * - * @name groupBy + * **Note** that while many implementations preserve the order of object + * properties, the [ECMAScript Language Specification](http://www.ecma-international.org/ecma-262/5.1/#sec-8.6) + * explicitly states that + * + * > The mechanics and order of enumerating the properties is not specified. + * + * So if you rely on the order in which your series of functions are executed, + * and want this to work on all platforms, consider using an array. + * + * @name series + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection containing + * [async functions]{@link AsyncFunction} to run in series. + * Each function can complete with any number of optional `result` values. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed. This function gets a results array (or object) + * containing all the result arguments passed to the `task` callbacks. Invoked + * with (err, result). + * @return {Promise} a promise, if no callback is passed + * @example + * + * //Using Callbacks + * async.series([ + * function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 'two'); + * }, 100); + * } + * ], function(err, results) { + * console.log(results); + * // results is equal to ['one','two'] + * }); + * + * // an example using objects instead of arrays + * async.series({ + * one: function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 2); + * }, 100); + * } + * }, function(err, results) { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * }); + * + * //Using Promises + * async.series([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]).then(results => { + * console.log(results); + * // results is equal to ['one','two'] + * }).catch(err => { + * console.log(err); + * }); + * + * // an example using an object instead of an array + * async.series({ + * one: function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 2); + * }, 100); + * } + * }).then(results => { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * }).catch(err => { + * console.log(err); + * }); + * + * //Using async/await + * async () => { + * try { + * let results = await async.series([ + * function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 'two'); + * }, 100); + * } + * ]); + * console.log(results); + * // results is equal to ['one','two'] + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // an example using an object instead of an array + * async () => { + * try { + * let results = await async.parallel({ + * 
one: function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 2); + * }, 100); + * } + * }); + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ + function series(tasks, callback) { + return _parallel(eachOfSeries$1, tasks, callback); + } + + /** + * Returns `true` if at least one element in the `coll` satisfies an async test. + * If any iteratee call returns `true`, the main `callback` is immediately + * called. + * + * @name some * @static * @memberOf module:Collections * @method + * @alias any * @category Collection * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The iteratee should complete with a `key` to group the value under. - * Invoked with (value, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. Result is an `Object` whoses - * properties are arrays of values which returned the corresponding key. - * @returns {Promise} a promise, if no callback is passed + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in parallel. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided * @example * * // dir1 is a directory that contains file1.txt, file2.txt @@ -52392,43 +45439,45 @@ module.exports = Zip; * // dir3 is a directory that contains file5.txt * // dir4 does not exist * - * const files = ['dir1/file1.txt','dir2','dir4'] - * - * // asynchronous function that detects file type as none, file, or directory - * function detectFile(file, callback) { - * fs.stat(file, function(err, stat) { - * if (err) { - * return callback(null, 'none'); - * } - * callback(null, stat.isDirectory() ? 
'directory' : 'file'); - * }); + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); * } * - * //Using callbacks - * async.groupBy(files, detectFile, function(err, result) { - * if(err) { - * console.log(err); - * } else { - * console.log(result); - * // { - * // file: [ 'dir1/file1.txt' ], - * // none: [ 'dir4' ], - * // directory: [ 'dir2'] - * // } - * // result is object containing the files grouped by type - * } - * }); + * // Using callbacks + * async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // true + * // result is true since some file in the list exists + * } + *); + * + * async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // false + * // result is false since none of the files exists + * } + *); * * // Using Promises - * async.groupBy(files, detectFile) + * async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists) * .then( result => { * console.log(result); - * // { - * // file: [ 'dir1/file1.txt' ], - * // none: [ 'dir4' ], - * // directory: [ 'dir2'] - * // } - * // result is object containing the files grouped by type + * // true + * // result is true since some file in the list exists + * }).catch( err => { + * console.log(err); + * }); + * + * async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists) + * .then( result => { + * console.log(result); + * // false + * // result is false since none of the files exists * }).catch( err => { * console.log(err); * }); @@ -52436,14 +45485,22 @@ module.exports = Zip; * // Using async/await * async () => { * try { - * let result = await async.groupBy(files, detectFile); + * let result = await async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists); * console.log(result); - * // { - * // file: [ 'dir1/file1.txt' ], - * // none: [ 'dir4' ], - * // directory: [ 'dir2'] - * // } - * // result is object containing the files grouped by type + * // true + * // result is true since some file in the list exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + * async () => { + * try { + * let result = await async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists); + * console.log(result); + * // false + * // result is false since none of the files exists * } * catch (err) { * console.log(err); @@ -52451,148 +45508,92 @@ module.exports = Zip; * } * */ - function groupBy (coll, iteratee, callback) { - return groupByLimit$1(coll, Infinity, iteratee, callback) + function some(coll, iteratee, callback) { + return _createTester(Boolean, res => res)(eachOf$1, coll, iteratee, callback) } + var some$1 = awaitify(some, 3); /** - * The same as [`groupBy`]{@link module:Collections.groupBy} but runs only a single async operation at a time. + * The same as [`some`]{@link module:Collections.some} but runs a maximum of `limit` async operations at a time. * - * @name groupBySeries + * @name someLimit * @static * @memberOf module:Collections * @method - * @see [async.groupBy]{@link module:Collections.groupBy} + * @see [async.some]{@link module:Collections.some} + * @alias anyLimit * @category Collection * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. 
- * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The iteratee should complete with a `key` to group the value under. - * Invoked with (value, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. Result is an `Object` whose - * properties are arrays of values which returned the corresponding key. - * @returns {Promise} a promise, if no callback is passed + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in parallel. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided */ - function groupBySeries (coll, iteratee, callback) { - return groupByLimit$1(coll, 1, iteratee, callback) + function someLimit(coll, limit, iteratee, callback) { + return _createTester(Boolean, res => res)(eachOfLimit$2(limit), coll, iteratee, callback) } + var someLimit$1 = awaitify(someLimit, 4); /** - * Logs the result of an `async` function to the `console`. Only works in - * Node.js or in browsers that support `console.log` and `console.error` (such - * as FF and Chrome). If multiple arguments are returned from the async - * function, `console.log` is called on each argument in order. - * - * @name log - * @static - * @memberOf module:Utils - * @method - * @category Util - * @param {AsyncFunction} function - The function you want to eventually apply - * all arguments to. - * @param {...*} arguments... - Any number of arguments to apply to the function. - * @example - * - * // in a module - * var hello = function(name, callback) { - * setTimeout(function() { - * callback(null, 'hello ' + name); - * }, 1000); - * }; - * - * // in the node repl - * node> async.log(hello, 'world'); - * 'hello world' - */ - var log = consoleFunc('log'); - - /** - * The same as [`mapValues`]{@link module:Collections.mapValues} but runs a maximum of `limit` async operations at a - * time. + * The same as [`some`]{@link module:Collections.some} but runs only a single async operation at a time. * - * @name mapValuesLimit + * @name someSeries * @static * @memberOf module:Collections * @method - * @see [async.mapValues]{@link module:Collections.mapValues} + * @see [async.some]{@link module:Collections.some} + * @alias anySeries * @category Collection - * @param {Object} obj - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - A function to apply to each value and key - * in `coll`. - * The iteratee should complete with the transformed value as its result. - * Invoked with (value, key, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. `result` is a new object consisting - * of each key from `obj`, with each transformed value on the right-hand side. - * Invoked with (err, result). - * @returns {Promise} a promise, if no callback is passed + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. 
+ * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in series. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided */ - function mapValuesLimit(obj, limit, iteratee, callback) { - callback = once(callback); - var newObj = {}; - var _iteratee = wrapAsync(iteratee); - return eachOfLimit$2(limit)(obj, (val, key, next) => { - _iteratee(val, key, (err, result) => { - if (err) return next(err); - newObj[key] = result; - next(err); - }); - }, err => callback(err, newObj)); + function someSeries(coll, iteratee, callback) { + return _createTester(Boolean, res => res)(eachOfSeries$1, coll, iteratee, callback) } - - var mapValuesLimit$1 = awaitify(mapValuesLimit, 4); + var someSeries$1 = awaitify(someSeries, 3); /** - * A relative of [`map`]{@link module:Collections.map}, designed for use with objects. - * - * Produces a new Object by mapping each value of `obj` through the `iteratee` - * function. The `iteratee` is called each `value` and `key` from `obj` and a - * callback for when it has finished processing. Each of these callbacks takes - * two arguments: an `error`, and the transformed item from `obj`. If `iteratee` - * passes an error to its callback, the main `callback` (for the `mapValues` - * function) is immediately called with the error. - * - * Note, the order of the keys in the result is not guaranteed. The keys will - * be roughly in the order they complete, (but this is very engine-specific) + * Sorts a list by the results of running each `coll` value through an async + * `iteratee`. * - * @name mapValues + * @name sortBy * @static * @memberOf module:Collections * @method * @category Collection - * @param {Object} obj - A collection to iterate over. - * @param {AsyncFunction} iteratee - A function to apply to each value and key - * in `coll`. - * The iteratee should complete with the transformed value as its result. - * Invoked with (value, key, callback). - * @param {Function} [callback] - A callback which is called when all `iteratee` - * functions have finished, or an error occurs. `result` is a new object consisting - * of each key from `obj`, with each transformed value on the right-hand side. - * Invoked with (err, result). - * @returns {Promise} a promise, if no callback is passed + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a value to use as the sort criteria as + * its `result`. + * Invoked with (item, callback). + * @param {Function} callback - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is the items + * from the original `coll` sorted by the values returned by the `iteratee` + * calls. Invoked with (err, results). 
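+ * (Editor's note, grounded in the implementation below: `sortBy` decorates each + * item as `{value, criteria}` via a parallel `map`, sorts the decorated results + * with a plain synchronous comparator, then unwraps `value` -- so each `iteratee` + * runs once per item and the sort itself is not asynchronous.)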
+ * @returns {Promise} a promise, if no callback passed
 * @example
 *
- * // file1.txt is a file that is 1000 bytes in size
- * // file2.txt is a file that is 2000 bytes in size
- * // file3.txt is a file that is 3000 bytes in size
- * // file4.txt does not exist
- *
- * const fileMap = {
- *     f1: 'file1.txt',
- *     f2: 'file2.txt',
- *     f3: 'file3.txt'
- * };
- *
- * const withMissingFileMap = {
- *     f1: 'file1.txt',
- *     f2: 'file2.txt',
- *     f3: 'file4.txt'
- * };
+ * // bigfile.txt is a file that is 251100 bytes in size
+ * // mediumfile.txt is a file that is 11000 bytes in size
+ * // smallfile.txt is a file that is 121 bytes in size
 *
 * // asynchronous function that returns the file size in bytes
- * function getFileSizeInBytes(file, key, callback) {
+ * function getFileSizeInBytes(file, callback) {
 *     fs.stat(file, function(err, stat) {
 *         if (err) {
 *             return callback(err);
@@ -52602,75 +45603,111 @@ module.exports = Zip;
 *         }
 *
 * // Using callbacks
- * async.mapValues(fileMap, getFileSizeInBytes, function(err, result) {
- *     if (err) {
- *         console.log(err);
- *     } else {
- *         console.log(result);
- *         // result is now a map of file size in bytes for each file, e.g.
- *         // {
- *         //     f1: 1000,
- *         //     f2: 2000,
- *         //     f3: 3000
- *         // }
+ * async.sortBy(['mediumfile.txt','smallfile.txt','bigfile.txt'], getFileSizeInBytes,
+ *     function(err, results) {
+ *         if (err) {
+ *             console.log(err);
+ *         } else {
+ *             console.log(results);
+ *             // results is now the original array of files sorted by
+ *             // file size (ascending by default), e.g.
+ *             // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt']
+ *         }
 *     }
- * });
+ * );
+ *
+ * // By modifying the callback parameter the
+ * // sorting order can be influenced:
+ *
+ * // ascending order
+ * async.sortBy(['mediumfile.txt','smallfile.txt','bigfile.txt'], function(file, callback) {
+ *     getFileSizeInBytes(file, function(getFileSizeErr, fileSize) {
+ *         if (getFileSizeErr) return callback(getFileSizeErr);
+ *         callback(null, fileSize);
+ *     });
+ * }, function(err, results) {
+ *         if (err) {
+ *             console.log(err);
+ *         } else {
+ *             console.log(results);
+ *             // results is now the original array of files sorted by
+ *             // file size (ascending by default), e.g.
+ *             // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt']
+ *         }
+ *     }
+ * );
+ *
+ * // descending order
+ * async.sortBy(['bigfile.txt','mediumfile.txt','smallfile.txt'], function(file, callback) {
+ *     getFileSizeInBytes(file, function(getFileSizeErr, fileSize) {
+ *         if (getFileSizeErr) {
+ *             return callback(getFileSizeErr);
+ *         }
+ *         callback(null, fileSize * -1);
+ *     });
+ * }, function(err, results) {
+ *         if (err) {
+ *             console.log(err);
+ *         } else {
+ *             console.log(results);
+ *             // results is now the original array of files sorted by
+ *             // file size (descending, since the sort criteria were negated), e.g.
+ * // [ 'bigfile.txt', 'mediumfile.txt', 'smallfile.txt'] + * } + * } + * ); * * // Error handling - * async.mapValues(withMissingFileMap, getFileSizeInBytes, function(err, result) { - * if (err) { - * console.log(err); - * // [ Error: ENOENT: no such file or directory ] - * } else { - * console.log(result); + * async.sortBy(['mediumfile.txt','smallfile.txt','missingfile.txt'], getFileSizeInBytes, + * function(err, results) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } else { + * console.log(results); + * } * } - * }); + * ); * * // Using Promises - * async.mapValues(fileMap, getFileSizeInBytes) - * .then( result => { - * console.log(result); - * // result is now a map of file size in bytes for each file, e.g. - * // { - * // f1: 1000, - * // f2: 2000, - * // f3: 3000 - * // } - * }).catch (err => { + * async.sortBy(['mediumfile.txt','smallfile.txt','bigfile.txt'], getFileSizeInBytes) + * .then( results => { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. + * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * }).catch( err => { * console.log(err); * }); * - * // Error Handling - * async.mapValues(withMissingFileMap, getFileSizeInBytes) - * .then( result => { - * console.log(result); - * }).catch (err => { + * // Error handling + * async.sortBy(['mediumfile.txt','smallfile.txt','missingfile.txt'], getFileSizeInBytes) + * .then( results => { + * console.log(results); + * }).catch( err => { * console.log(err); * // [ Error: ENOENT: no such file or directory ] * }); * * // Using async/await - * async () => { + * (async () => { * try { - * let result = await async.mapValues(fileMap, getFileSizeInBytes); - * console.log(result); - * // result is now a map of file size in bytes for each file, e.g. - * // { - * // f1: 1000, - * // f2: 2000, - * // f3: 3000 - * // } + * let results = await async.sortBy(['bigfile.txt','mediumfile.txt','smallfile.txt'], getFileSizeInBytes); + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. + * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] * } * catch (err) { * console.log(err); * } - * } + * })(); * - * // Error Handling + * // Error handling * async () => { * try { - * let result = await async.mapValues(withMissingFileMap, getFileSizeInBytes); - * console.log(result); + * let results = await async.sortBy(['missingfile.txt','mediumfile.txt','smallfile.txt'], getFileSizeInBytes); + * console.log(results); * } * catch (err) { * console.log(err); @@ -52679,315 +45716,311 @@ module.exports = Zip; * } * */ - function mapValues(obj, iteratee, callback) { - return mapValuesLimit$1(obj, Infinity, iteratee, callback) - } + function sortBy (coll, iteratee, callback) { + var _iteratee = wrapAsync(iteratee); + return map$1(coll, (x, iterCb) => { + _iteratee(x, (err, criteria) => { + if (err) return iterCb(err); + iterCb(err, {value: x, criteria}); + }); + }, (err, results) => { + if (err) return callback(err); + callback(null, results.sort(comparator).map(v => v.value)); + }); - /** - * The same as [`mapValues`]{@link module:Collections.mapValues} but runs only a single async operation at a time. - * - * @name mapValuesSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.mapValues]{@link module:Collections.mapValues} - * @category Collection - * @param {Object} obj - A collection to iterate over. 
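+
+         // comparator for the intermediate {value, criteria} pairs produced
+         // by the map step above; criteria are compared with plain < and >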
- * @param {AsyncFunction} iteratee - A function to apply to each value and key
- * in `coll`.
- * The iteratee should complete with the transformed value as its result.
- * Invoked with (value, key, callback).
- * @param {Function} [callback] - A callback which is called when all `iteratee`
- * functions have finished, or an error occurs. `result` is a new object consisting
- * of each key from `obj`, with each transformed value on the right-hand side.
- * Invoked with (err, result).
- * @returns {Promise} a promise, if no callback is passed
- */
- function mapValuesSeries(obj, iteratee, callback) {
-     return mapValuesLimit$1(obj, 1, iteratee, callback)
+     function comparator(left, right) {
+         var a = left.criteria, b = right.criteria;
+         return a < b ? -1 : a > b ? 1 : 0;
+     }
 }
+ var sortBy$1 = awaitify(sortBy, 3);

 /**
- * Caches the results of an async function. When creating a hash to store
- * function results against, the callback is omitted from the hash and an
- * optional hash function can be used.
- *
- * **Note: if the async function errs, the result will not be cached and
- * subsequent calls will call the wrapped function.**
- *
- * If no hash function is specified, the first argument is used as a hash key,
- * which may work reasonably if it is a string or a data type that converts to a
- * distinct string. Note that objects and arrays will not behave reasonably.
- * Neither will cases where the other arguments are significant. In such cases,
- * specify your own hash function.
- *
- * The cache of results is exposed as the `memo` property of the function
- * returned by `memoize`.
+ * Sets a time limit on an asynchronous function. If the function does not call
+ * its callback within the specified milliseconds, it will be called with a
+ * timeout error. The code property for the error object will be `'ETIMEDOUT'`.
 *
- * @name memoize
+ * @name timeout
 * @static
 * @memberOf module:Utils
 * @method
 * @category Util
- * @param {AsyncFunction} fn - The async function to proxy and cache results from.
- * @param {Function} hasher - An optional function for generating a custom hash
- * for storing results. It has all the arguments applied to it apart from the
- * callback, and must be synchronous.
- * @returns {AsyncFunction} a memoized version of `fn`
+ * @param {AsyncFunction} asyncFn - The async function to limit in time.
+ * @param {number} milliseconds - The specified time limit.
+ * @param {*} [info] - Any variable you want attached (`string`, `object`, etc.)
+ * to the timeout Error, for more information.
+ * @returns {AsyncFunction} Returns a wrapped function that can be used with any
+ * of the control flow functions.
+ * Invoke this function with the same parameters as you would `asyncFn`.
 * @example
 *
- * var slow_fn = function(name, callback) {
- *     // do something
- *     callback(null, result);
- * };
- * var fn = async.memoize(slow_fn);
+ * function myFunction(foo, callback) {
+ *     doAsyncTask(foo, function(err, data) {
+ *         // handle errors
+ *         if (err) return callback(err);
 *
- * // fn can now be used as if it were slow_fn
- * fn('some name', function() {
- *     // callback
+ *         // do some stuff ...
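+ *         // (any work done here still has to fit inside the time limit
+ *         // that `async.timeout` imposes once `myFunction` is wrapped below)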
+ * + * // return processed data + * return callback(null, data); + * }); + * } + * + * var wrapped = async.timeout(myFunction, 1000); + * + * // call `wrapped` as you would `myFunction` + * wrapped({ bar: 'bar' }, function(err, data) { + * // if `myFunction` takes < 1000 ms to execute, `err` + * // and `data` will have their expected values + * + * // else `err` will be an Error with the code 'ETIMEDOUT' * }); */ - function memoize(fn, hasher = v => v) { - var memo = Object.create(null); - var queues = Object.create(null); - var _fn = wrapAsync(fn); - var memoized = initialParams((args, callback) => { - var key = hasher(...args); - if (key in memo) { - setImmediate$1(() => callback(null, ...memo[key])); - } else if (key in queues) { - queues[key].push(callback); - } else { - queues[key] = [callback]; - _fn(...args, (err, ...resultArgs) => { - // #1465 don't memoize if an error occurred - if (!err) { - memo[key] = resultArgs; - } - var q = queues[key]; - delete queues[key]; - for (var i = 0, l = q.length; i < l; i++) { - q[i](err, ...resultArgs); - } - }); + function timeout(asyncFn, milliseconds, info) { + var fn = wrapAsync(asyncFn); + + return initialParams((args, callback) => { + var timedOut = false; + var timer; + + function timeoutCallback() { + var name = asyncFn.name || 'anonymous'; + var error = new Error('Callback function "' + name + '" timed out.'); + error.code = 'ETIMEDOUT'; + if (info) { + error.info = info; + } + timedOut = true; + callback(error); } + + args.push((...cbArgs) => { + if (!timedOut) { + callback(...cbArgs); + clearTimeout(timer); + } + }); + + // setup timer and call original function + timer = setTimeout(timeoutCallback, milliseconds); + fn(...args); }); - memoized.memo = memo; - memoized.unmemoized = fn; - return memoized; } - /* istanbul ignore file */ + function range(size) { + var result = Array(size); + while (size--) { + result[size] = size; + } + return result; + } /** - * Calls `callback` on a later loop around the event loop. In Node.js this just - * calls `process.nextTick`. In the browser it will use `setImmediate` if - * available, otherwise `setTimeout(callback, 0)`, which means other higher - * priority events may precede the execution of `callback`. - * - * This is used internally for browser-compatibility purposes. + * The same as [times]{@link module:ControlFlow.times} but runs a maximum of `limit` async operations at a + * time. * - * @name nextTick + * @name timesLimit * @static - * @memberOf module:Utils + * @memberOf module:ControlFlow * @method - * @see [async.setImmediate]{@link module:Utils.setImmediate} - * @category Util - * @param {Function} callback - The function to call on a later loop around - * the event loop. Invoked with (args...). - * @param {...*} args... - any number of additional arguments to pass to the - * callback on the next tick. - * @example - * - * var call_order = []; - * async.nextTick(function() { - * call_order.push('two'); - * // call_order now equals ['one','two'] - * }); - * call_order.push('one'); - * - * async.setImmediate(function (a, b, c) { - * // a, b, and c equal 1, 2, and 3 - * }, 1, 2, 3); + * @see [async.times]{@link module:ControlFlow.times} + * @category Control Flow + * @param {number} count - The number of times to run the function. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - The async function to call `n` times. + * Invoked with the iteration index and a callback: (n, next). 
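+ * For example, `async.timesLimit(5, 2, iteratee, callback)` runs at most two
+ * calls to `iteratee` at any one time, with `n` taking the values 0 through 4.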
+ * @param {Function} callback - see [async.map]{@link module:Collections.map}. + * @returns {Promise} a promise, if no callback is provided */ - var _defer; - - if (hasNextTick) { - _defer = process.nextTick; - } else if (hasSetImmediate) { - _defer = setImmediate; - } else { - _defer = fallback; + function timesLimit(count, limit, iteratee, callback) { + var _iteratee = wrapAsync(iteratee); + return mapLimit$1(range(count), limit, _iteratee, callback); } - var nextTick = wrap(_defer); - - var _parallel = awaitify((eachfn, tasks, callback) => { - var results = isArrayLike(tasks) ? [] : {}; - - eachfn(tasks, (task, key, taskCb) => { - wrapAsync(task)((err, ...result) => { - if (result.length < 2) { - [result] = result; - } - results[key] = result; - taskCb(err); - }); - }, err => callback(err, results)); - }, 3); - /** - * Run the `tasks` collection of functions in parallel, without waiting until - * the previous function has completed. If any of the functions pass an error to - * its callback, the main `callback` is immediately called with the value of the - * error. Once the `tasks` have completed, the results are passed to the final - * `callback` as an array. - * - * **Note:** `parallel` is about kicking-off I/O tasks in parallel, not about - * parallel execution of code. If your tasks do not use any timers or perform - * any I/O, they will actually be executed in series. Any synchronous setup - * sections for each task will happen one after the other. JavaScript remains - * single-threaded. - * - * **Hint:** Use [`reflect`]{@link module:Utils.reflect} to continue the - * execution of other tasks when a task fails. - * - * It is also possible to use an object instead of an array. Each property will - * be run as a function and the results will be passed to the final `callback` - * as an object instead of an array. This can be a more readable way of handling - * results from {@link async.parallel}. + * Calls the `iteratee` function `n` times, and accumulates results in the same + * manner you would use with [map]{@link module:Collections.map}. * - * @name parallel + * @name times * @static * @memberOf module:ControlFlow * @method + * @see [async.map]{@link module:Collections.map} * @category Control Flow - * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection of - * [async functions]{@link AsyncFunction} to run. - * Each async function can complete with any number of optional `result` values. - * @param {Function} [callback] - An optional callback to run once all the - * functions have completed successfully. This function gets a results array - * (or object) containing all the result arguments passed to the task callbacks. - * Invoked with (err, results). - * @returns {Promise} a promise, if a callback is not passed - * + * @param {number} n - The number of times to run the function. + * @param {AsyncFunction} iteratee - The async function to call `n` times. + * Invoked with the iteration index and a callback: (n, next). + * @param {Function} callback - see {@link module:Collections.map}. + * @returns {Promise} a promise, if no callback is provided * @example * - * //Using Callbacks - * async.parallel([ - * function(callback) { - * setTimeout(function() { - * callback(null, 'one'); - * }, 200); - * }, - * function(callback) { - * setTimeout(function() { - * callback(null, 'two'); - * }, 100); - * } - * ], function(err, results) { - * console.log(results); - * // results is equal to ['one','two'] even though - * // the second function had a shorter timeout. 
- * }); + * // Pretend this is some complicated async factory + * var createUser = function(id, callback) { + * callback(null, { + * id: 'user' + id + * }); + * }; * - * // an example using an object instead of an array - * async.parallel({ - * one: function(callback) { - * setTimeout(function() { - * callback(null, 1); - * }, 200); - * }, - * two: function(callback) { - * setTimeout(function() { - * callback(null, 2); - * }, 100); - * } - * }, function(err, results) { - * console.log(results); - * // results is equal to: { one: 1, two: 2 } + * // generate 5 users + * async.times(5, function(n, next) { + * createUser(n, function(err, user) { + * next(err, user); + * }); + * }, function(err, users) { + * // we should now have 5 users * }); + */ + function times (n, iteratee, callback) { + return timesLimit(n, Infinity, iteratee, callback) + } + + /** + * The same as [times]{@link module:ControlFlow.times} but runs only a single async operation at a time. * - * //Using Promises - * async.parallel([ - * function(callback) { - * setTimeout(function() { - * callback(null, 'one'); - * }, 200); - * }, - * function(callback) { - * setTimeout(function() { - * callback(null, 'two'); - * }, 100); + * @name timesSeries + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.times]{@link module:ControlFlow.times} + * @category Control Flow + * @param {number} n - The number of times to run the function. + * @param {AsyncFunction} iteratee - The async function to call `n` times. + * Invoked with the iteration index and a callback: (n, next). + * @param {Function} callback - see {@link module:Collections.map}. + * @returns {Promise} a promise, if no callback is provided + */ + function timesSeries (n, iteratee, callback) { + return timesLimit(n, 1, iteratee, callback) + } + + /** + * A relative of `reduce`. Takes an Object or Array, and iterates over each + * element in parallel, each step potentially mutating an `accumulator` value. + * The type of the accumulator defaults to the type of collection passed in. + * + * @name transform + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {*} [accumulator] - The initial state of the transform. If omitted, + * it will default to an empty Object or Array, depending on the type of `coll` + * @param {AsyncFunction} iteratee - A function applied to each item in the + * collection that potentially modifies the accumulator. + * Invoked with (accumulator, item, key, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result is the transformed accumulator. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * + * // helper function that returns human-readable size format from bytes + * function formatBytes(bytes, decimals = 2) { + * // implementation not included for brevity + * return humanReadbleFilesize; + * } + * + * const fileList = ['file1.txt','file2.txt','file3.txt']; + * + * // asynchronous function that returns the file size, transformed to human-readable format + * // e.g. 1024 bytes = 1KB, 1234 bytes = 1.21 KB, 1048576 bytes = 1MB, etc. 
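+ * // note that, unlike a map iteratee, transform's iteratee receives the
+ * // accumulator (`acc`) as its first argument and writes into it directly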
+ * function transformFileSize(acc, value, key, callback) { + * fs.stat(value, function(err, stat) { + * if (err) { + * return callback(err); + * } + * acc[key] = formatBytes(stat.size); + * callback(null); + * }); + * } + * + * // Using callbacks + * async.transform(fileList, transformFileSize, function(err, result) { + * if(err) { + * console.log(err); + * } else { + * console.log(result); + * // [ '1000 Bytes', '1.95 KB', '2.93 KB' ] * } - * ]).then(results => { - * console.log(results); - * // results is equal to ['one','two'] even though - * // the second function had a shorter timeout. - * }).catch(err => { - * console.log(err); * }); * - * // an example using an object instead of an array - * async.parallel({ - * one: function(callback) { - * setTimeout(function() { - * callback(null, 1); - * }, 200); - * }, - * two: function(callback) { - * setTimeout(function() { - * callback(null, 2); - * }, 100); - * } - * }).then(results => { - * console.log(results); - * // results is equal to: { one: 1, two: 2 } + * // Using Promises + * async.transform(fileList, transformFileSize) + * .then(result => { + * console.log(result); + * // [ '1000 Bytes', '1.95 KB', '2.93 KB' ] * }).catch(err => { * console.log(err); * }); * - * //Using async/await - * async () => { + * // Using async/await + * (async () => { * try { - * let results = await async.parallel([ - * function(callback) { - * setTimeout(function() { - * callback(null, 'one'); - * }, 200); - * }, - * function(callback) { - * setTimeout(function() { - * callback(null, 'two'); - * }, 100); - * } - * ]); - * console.log(results); - * // results is equal to ['one','two'] even though - * // the second function had a shorter timeout. + * let result = await async.transform(fileList, transformFileSize); + * console.log(result); + * // [ '1000 Bytes', '1.95 KB', '2.93 KB' ] * } * catch (err) { * console.log(err); * } + * })(); + * + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * + * // helper function that returns human-readable size format from bytes + * function formatBytes(bytes, decimals = 2) { + * // implementation not included for brevity + * return humanReadbleFilesize; * } * - * // an example using an object instead of an array + * const fileMap = { f1: 'file1.txt', f2: 'file2.txt', f3: 'file3.txt' }; + * + * // asynchronous function that returns the file size, transformed to human-readable format + * // e.g. 1024 bytes = 1KB, 1234 bytes = 1.21 KB, 1048576 bytes = 1MB, etc. 
+ * function transformFileSize(acc, value, key, callback) {
+ *     fs.stat(value, function(err, stat) {
+ *         if (err) {
+ *             return callback(err);
+ *         }
+ *         acc[key] = formatBytes(stat.size);
+ *         callback(null);
+ *     });
+ * }
+ *
+ * // Using callbacks
+ * async.transform(fileMap, transformFileSize, function(err, result) {
+ *     if(err) {
+ *         console.log(err);
+ *     } else {
+ *         console.log(result);
+ *         // { f1: '1000 Bytes', f2: '1.95 KB', f3: '2.93 KB' }
+ *     }
+ * });
+ *
+ * // Using Promises
+ * async.transform(fileMap, transformFileSize)
+ * .then(result => {
+ *     console.log(result);
+ *     // { f1: '1000 Bytes', f2: '1.95 KB', f3: '2.93 KB' }
+ * }).catch(err => {
+ *     console.log(err);
+ * });
+ *
+ * // Using async/await
 * async () => {
 *     try {
- *         let results = await async.parallel({
- *             one: function(callback) {
- *                 setTimeout(function() {
- *                     callback(null, 1);
- *                 }, 200);
- *             },
- *             two: function(callback) {
- *                 setTimeout(function() {
- *                     callback(null, 2);
- *                 }, 100);
- *             }
- *         });
- *         console.log(results);
- *         // results is equal to: { one: 1, two: 2 }
+ *         let result = await async.transform(fileMap, transformFileSize);
+ *         console.log(result);
+ *         // { f1: '1000 Bytes', f2: '1.95 KB', f3: '2.93 KB' }
 *     }
 *     catch (err) {
 *         console.log(err);
@@ -52995,2641 +46028,954 @@ module.exports = Zip;
 *     }
 * }
 *
 */
- function parallel(tasks, callback) {
-     return _parallel(eachOf$1, tasks, callback);
+ function transform (coll, accumulator, iteratee, callback) {
+     if (arguments.length <= 3 && typeof accumulator === 'function') {
+         callback = iteratee;
+         iteratee = accumulator;
+         accumulator = Array.isArray(coll) ? [] : {};
+     }
+     callback = once(callback || promiseCallback());
+     var _iteratee = wrapAsync(iteratee);
+
+     eachOf$1(coll, (v, k, cb) => {
+         _iteratee(accumulator, v, k, cb);
+     }, err => callback(err, accumulator));
+     return callback[PROMISE_SYMBOL]
 }

 /**
- * The same as [`parallel`]{@link module:ControlFlow.parallel} but runs a maximum of `limit` async operations at a
- * time.
+ * It runs each task in series, stopping as soon as any of the functions
+ * succeeds. If one of the tasks succeeds, the `callback` will be
+ * passed the result of the successful task. If all tasks fail, the callback
+ * will be passed the error and result (if any) of the final attempt.
 *
- * @name parallelLimit
+ * @name tryEach
 * @static
 * @memberOf module:ControlFlow
 * @method
- * @see [async.parallel]{@link module:ControlFlow.parallel}
 * @category Control Flow
- * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection of
- * [async functions]{@link AsyncFunction} to run.
- * Each async function can complete with any number of optional `result` values.
- * @param {number} limit - The maximum number of async operations at a time.
- * @param {Function} [callback] - An optional callback to run once all the
- * functions have completed successfully. This function gets a results array
- * (or object) containing all the result arguments passed to the task callbacks.
- * Invoked with (err, results).
- * @returns {Promise} a promise, if a callback is not passed
+ * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection containing functions to
+ * run. Each function is passed a `callback(err, result)` which it must call on
+ * completion with an error `err` (which can be `null`) and an optional `result`
+ * value.
+ * @param {Function} [callback] - An optional callback which is called when one
+ * of the tasks has succeeded, or all have failed.
It receives the `err` and
+ * `result` arguments of the last attempt at completing the `task`. Invoked with
+ * (err, results).
+ * @returns {Promise} a promise, if no callback is passed
+ * @example
+ * async.tryEach([
+ *     function getDataFromFirstWebsite(callback) {
+ *         // Try getting the data from the first website
+ *         callback(err, data);
+ *     },
+ *     function getDataFromSecondWebsite(callback) {
+ *         // First website failed,
+ *         // Try getting the data from the backup website
+ *         callback(err, data);
+ *     }
+ * ],
+ * // optional callback
+ * function(err, results) {
+ *     // Now do something with the data.
+ * });
+ *
 */
- function parallelLimit(tasks, limit, callback) {
-     return _parallel(eachOfLimit$2(limit), tasks, callback);
+ function tryEach(tasks, callback) {
+     var error = null;
+     var result;
+     return eachSeries$1(tasks, (task, taskCb) => {
+         wrapAsync(task)((err, ...args) => {
+             if (err === false) return taskCb(err);
+
+             if (args.length < 2) {
+                 [result] = args;
+             } else {
+                 result = args;
+             }
+             error = err;
+             taskCb(err ? null : {});
+         });
+     }, () => callback(error, result));
 }

+ var tryEach$1 = awaitify(tryEach);
+
 /**
- * A queue of tasks for the worker function to complete.
- * @typedef {Iterable} QueueObject
- * @memberOf module:ControlFlow
- * @property {Function} length - a function returning the number of items
- * waiting to be processed. Invoke with `queue.length()`.
- * @property {boolean} started - a boolean indicating whether or not any
- * items have been pushed and processed by the queue.
- * @property {Function} running - a function returning the number of items
- * currently being processed. Invoke with `queue.running()`.
- * @property {Function} workersList - a function returning the array of items
- * currently being processed. Invoke with `queue.workersList()`.
- * @property {Function} idle - a function returning false if there are items
- * waiting or being processed, or true if not. Invoke with `queue.idle()`.
- * @property {number} concurrency - an integer for determining how many `worker`
- * functions should be run in parallel. This property can be changed after a
- * `queue` is created to alter the concurrency on-the-fly.
- * @property {number} payload - an integer that specifies how many items are
- * passed to the worker function at a time. only applies if this is a
- * [cargo]{@link module:ControlFlow.cargo} object
- * @property {AsyncFunction} push - add a new task to the `queue`. Calls `callback`
- * once the `worker` has finished processing the task. Instead of a single task,
- * a `tasks` array can be submitted. The respective callback is used for every
- * task in the list. Invoke with `queue.push(task, [callback])`,
- * @property {AsyncFunction} unshift - add a new task to the front of the `queue`.
- * Invoke with `queue.unshift(task, [callback])`.
- * @property {AsyncFunction} pushAsync - the same as `q.push`, except this returns
- * a promise that rejects if an error occurs.
- * @property {AsyncFunction} unshiftAsync - the same as `q.unshift`, except this returns
- * a promise that rejects if an error occurs.
- * @property {Function} remove - remove items from the queue that match a test
- * function. The test function will be passed an object with a `data` property,
- * and a `priority` property, if this is a
- * [priorityQueue]{@link module:ControlFlow.priorityQueue} object.
- * Invoked with `queue.remove(testFn)`, where `testFn` is of the form
- * `function ({data, priority}) {}` and returns a Boolean.
- * @property {Function} saturated - a function that sets a callback that is - * called when the number of running workers hits the `concurrency` limit, and - * further tasks will be queued. If the callback is omitted, `q.saturated()` - * returns a promise for the next occurrence. - * @property {Function} unsaturated - a function that sets a callback that is - * called when the number of running workers is less than the `concurrency` & - * `buffer` limits, and further tasks will not be queued. If the callback is - * omitted, `q.unsaturated()` returns a promise for the next occurrence. - * @property {number} buffer - A minimum threshold buffer in order to say that - * the `queue` is `unsaturated`. - * @property {Function} empty - a function that sets a callback that is called - * when the last item from the `queue` is given to a `worker`. If the callback - * is omitted, `q.empty()` returns a promise for the next occurrence. - * @property {Function} drain - a function that sets a callback that is called - * when the last item from the `queue` has returned from the `worker`. If the - * callback is omitted, `q.drain()` returns a promise for the next occurrence. - * @property {Function} error - a function that sets a callback that is called - * when a task errors. Has the signature `function(error, task)`. If the - * callback is omitted, `error()` returns a promise that rejects on the next - * error. - * @property {boolean} paused - a boolean for determining whether the queue is - * in a paused state. - * @property {Function} pause - a function that pauses the processing of tasks - * until `resume()` is called. Invoke with `queue.pause()`. - * @property {Function} resume - a function that resumes the processing of - * queued tasks when the queue is paused. Invoke with `queue.resume()`. - * @property {Function} kill - a function that removes the `drain` callback and - * empties remaining tasks from the queue forcing it to go idle. No more tasks - * should be pushed to the queue after calling this function. Invoke with `queue.kill()`. - * - * @example - * const q = async.queue(worker, 2) - * q.push(item1) - * q.push(item2) - * q.push(item3) - * // queues are iterable, spread into an array to inspect - * const items = [...q] // [item1, item2, item3] - * // or use for of - * for (let item of q) { - * console.log(item) - * } + * Undoes a [memoize]{@link module:Utils.memoize}d function, reverting it to the original, + * unmemoized form. Handy for testing. * - * q.drain(() => { - * console.log('all done') - * }) - * // or - * await q.drain() + * @name unmemoize + * @static + * @memberOf module:Utils + * @method + * @see [async.memoize]{@link module:Utils.memoize} + * @category Util + * @param {AsyncFunction} fn - the memoized function + * @returns {AsyncFunction} a function that calls the original unmemoized function */ + function unmemoize(fn) { + return (...args) => { + return (fn.unmemoized || fn)(...args); + }; + } /** - * Creates a `queue` object with the specified `concurrency`. Tasks added to the - * `queue` are processed in parallel (up to the `concurrency` limit). If all - * `worker`s are in progress, the task is queued until one becomes available. - * Once a `worker` completes a `task`, that `task`'s callback is called. + * Repeatedly call `iteratee`, while `test` returns `true`. Calls `callback` when + * stopped, or an error occurs. 
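+ * `test` always runs before `iteratee`, so if the first `test` call
+ * completes with `false`, `iteratee` is never invoked; use
+ * [doWhilst]{@link module:ControlFlow.doWhilst} if `iteratee` should run at
+ * least once.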
* - * @name queue + * @name whilst * @static * @memberOf module:ControlFlow * @method * @category Control Flow - * @param {AsyncFunction} worker - An async function for processing a queued task. - * If you want to handle errors from an individual task, pass a callback to - * `q.push()`. Invoked with (task, callback). - * @param {number} [concurrency=1] - An `integer` for determining how many - * `worker` functions should be run in parallel. If omitted, the concurrency - * defaults to `1`. If the concurrency is `0`, an error is thrown. - * @returns {module:ControlFlow.QueueObject} A queue object to manage the tasks. Callbacks can be - * attached as certain properties to listen for specific events during the - * lifecycle of the queue. + * @param {AsyncFunction} test - asynchronous truth test to perform before each + * execution of `iteratee`. Invoked with (callback). + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` passes. Invoked with (callback). + * @param {Function} [callback] - A callback which is called after the test + * function has failed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed * @example * - * // create a queue object with concurrency 2 - * var q = async.queue(function(task, callback) { - * console.log('hello ' + task.name); - * callback(); - * }, 2); + * var count = 0; + * async.whilst( + * function test(cb) { cb(null, count < 5); }, + * function iter(callback) { + * count++; + * setTimeout(function() { + * callback(null, count); + * }, 1000); + * }, + * function (err, n) { + * // 5 seconds have passed, n = 5 + * } + * ); + */ + function whilst(test, iteratee, callback) { + callback = onlyOnce(callback); + var _fn = wrapAsync(iteratee); + var _test = wrapAsync(test); + var results = []; + + function next(err, ...rest) { + if (err) return callback(err); + results = rest; + if (err === false) return; + _test(check); + } + + function check(err, truth) { + if (err) return callback(err); + if (err === false) return; + if (!truth) return callback(null, ...results); + _fn(next); + } + + return _test(check); + } + var whilst$1 = awaitify(whilst, 3); + + /** + * Repeatedly call `iteratee` until `test` returns `true`. Calls `callback` when + * stopped, or an error occurs. `callback` will be passed an error and any + * arguments passed to the final `iteratee`'s callback. * - * // assign a callback - * q.drain(function() { - * console.log('all items have been processed'); - * }); - * // or await the end - * await q.drain() + * The inverse of [whilst]{@link module:ControlFlow.whilst}. 
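+ * As with `whilst`, `test` runs before the first execution of `iteratee`;
+ * see [doUntil]{@link module:ControlFlow.doUntil} for the post-check variant.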
* - * // assign an error callback - * q.error(function(err, task) { - * console.error('task experienced an error'); - * }); - * - * // add some items to the queue - * q.push({name: 'foo'}, function(err) { - * console.log('finished processing foo'); - * }); - * // callback is optional - * q.push({name: 'bar'}); - * - * // add some items to the queue (batch-wise) - * q.push([{name: 'baz'},{name: 'bay'},{name: 'bax'}], function(err) { - * console.log('finished processing item'); - * }); - * - * // add some items to the front of the queue - * q.unshift({name: 'bar'}, function (err) { - * console.log('finished processing bar'); - * }); - */ - function queue (worker, concurrency) { - var _worker = wrapAsync(worker); - return queue$1((items, cb) => { - _worker(items[0], cb); - }, concurrency, 1); - } - - // Binary min-heap implementation used for priority queue. - // Implementation is stable, i.e. push time is considered for equal priorities - class Heap { - constructor() { - this.heap = []; - this.pushCount = Number.MIN_SAFE_INTEGER; - } - - get length() { - return this.heap.length; - } - - empty () { - this.heap = []; - return this; - } - - percUp(index) { - let p; - - while (index > 0 && smaller(this.heap[index], this.heap[p=parent(index)])) { - let t = this.heap[index]; - this.heap[index] = this.heap[p]; - this.heap[p] = t; - - index = p; - } - } - - percDown(index) { - let l; - - while ((l=leftChi(index)) < this.heap.length) { - if (l+1 < this.heap.length && smaller(this.heap[l+1], this.heap[l])) { - l = l+1; - } - - if (smaller(this.heap[index], this.heap[l])) { - break; - } - - let t = this.heap[index]; - this.heap[index] = this.heap[l]; - this.heap[l] = t; - - index = l; - } - } - - push(node) { - node.pushCount = ++this.pushCount; - this.heap.push(node); - this.percUp(this.heap.length-1); - } - - unshift(node) { - return this.heap.push(node); - } - - shift() { - let [top] = this.heap; - - this.heap[0] = this.heap[this.heap.length-1]; - this.heap.pop(); - this.percDown(0); - - return top; - } - - toArray() { - return [...this]; - } - - *[Symbol.iterator] () { - for (let i = 0; i < this.heap.length; i++) { - yield this.heap[i].data; - } - } - - remove (testFn) { - let j = 0; - for (let i = 0; i < this.heap.length; i++) { - if (!testFn(this.heap[i])) { - this.heap[j] = this.heap[i]; - j++; - } - } - - this.heap.splice(j); - - for (let i = parent(this.heap.length-1); i >= 0; i--) { - this.percDown(i); - } - - return this; - } - } - - function leftChi(i) { - return (i<<1)+1; - } - - function parent(i) { - return ((i+1)>>1)-1; - } - - function smaller(x, y) { - if (x.priority !== y.priority) { - return x.priority < y.priority; - } - else { - return x.pushCount < y.pushCount; - } - } - - /** - * The same as [async.queue]{@link module:ControlFlow.queue} only tasks are assigned a priority and - * completed in ascending priority order. - * - * @name priorityQueue + * @name until * @static * @memberOf module:ControlFlow * @method - * @see [async.queue]{@link module:ControlFlow.queue} + * @see [async.whilst]{@link module:ControlFlow.whilst} * @category Control Flow - * @param {AsyncFunction} worker - An async function for processing a queued task. - * If you want to handle errors from an individual task, pass a callback to - * `q.push()`. - * Invoked with (task, callback). - * @param {number} concurrency - An `integer` for determining how many `worker` - * functions should be run in parallel. If omitted, the concurrency defaults to - * `1`. If the concurrency is `0`, an error is thrown. 
- * @returns {module:ControlFlow.QueueObject} A priorityQueue object to manage the tasks. There are three - * differences between `queue` and `priorityQueue` objects: - * * `push(task, priority, [callback])` - `priority` should be a number. If an - * array of `tasks` is given, all tasks will be assigned the same priority. - * * `pushAsync(task, priority, [callback])` - the same as `priorityQueue.push`, - * except this returns a promise that rejects if an error occurs. - * * The `unshift` and `unshiftAsync` methods were removed. + * @param {AsyncFunction} test - asynchronous truth test to perform before each + * execution of `iteratee`. Invoked with (callback). + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` fails. Invoked with (callback). + * @param {Function} [callback] - A callback which is called after the test + * function has passed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. Invoked with (err, [results]); + * @returns {Promise} a promise, if a callback is not passed + * + * @example + * const results = [] + * let finished = false + * async.until(function test(cb) { + * cb(null, finished) + * }, function iter(next) { + * fetchPage(url, (err, body) => { + * if (err) return next(err) + * results = results.concat(body.objects) + * finished = !!body.next + * next(err) + * }) + * }, function done (err) { + * // all pages have been fetched + * }) */ - function priorityQueue(worker, concurrency) { - // Start with a normal queue - var q = queue(worker, concurrency); - - var { - push, - pushAsync - } = q; - - q._tasks = new Heap(); - q._createTaskItem = ({data, priority}, callback) => { - return { - data, - priority, - callback - }; - }; - - function createDataItems(tasks, priority) { - if (!Array.isArray(tasks)) { - return {data: tasks, priority}; - } - return tasks.map(data => { return {data, priority}; }); - } - - // Override push to accept second parameter representing priority - q.push = function(data, priority = 0, callback) { - return push(createDataItems(data, priority), callback); - }; - - q.pushAsync = function(data, priority = 0, callback) { - return pushAsync(createDataItems(data, priority), callback); - }; - - // Remove unshift functions - delete q.unshift; - delete q.unshiftAsync; - - return q; + function until(test, iteratee, callback) { + const _test = wrapAsync(test); + return whilst$1((cb) => _test((err, truth) => cb (err, !truth)), iteratee, callback); } /** - * Runs the `tasks` array of functions in parallel, without waiting until the - * previous function has completed. Once any of the `tasks` complete or pass an - * error to its callback, the main `callback` is immediately called. It's - * equivalent to `Promise.race()`. + * Runs the `tasks` array of functions in series, each passing their results to + * the next in the array. However, if any of the `tasks` pass an error to their + * own callback, the next function is not executed, and the main `callback` is + * immediately called with the error. * - * @name race + * @name waterfall * @static * @memberOf module:ControlFlow * @method * @category Control Flow - * @param {Array} tasks - An array containing [async functions]{@link AsyncFunction} - * to run. Each function can complete with an optional `result` value. - * @param {Function} callback - A callback to run once any of the functions have - * completed. 
This function gets an error or result from the first function that - * completed. Invoked with (err, result). + * @param {Array} tasks - An array of [async functions]{@link AsyncFunction} + * to run. + * Each function should complete with any number of `result` values. + * The `result` values will be passed as arguments, in order, to the next task. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed. This will be passed the results of the last task's + * callback. Invoked with (err, [results]). * @returns {Promise} a promise, if a callback is omitted * @example * - * async.race([ + * async.waterfall([ * function(callback) { - * setTimeout(function() { - * callback(null, 'one'); - * }, 200); + * callback(null, 'one', 'two'); * }, - * function(callback) { - * setTimeout(function() { - * callback(null, 'two'); - * }, 100); + * function(arg1, arg2, callback) { + * // arg1 now equals 'one' and arg2 now equals 'two' + * callback(null, 'three'); + * }, + * function(arg1, callback) { + * // arg1 now equals 'three' + * callback(null, 'done'); * } - * ], - * // main callback - * function(err, result) { - * // the result will be equal to 'two' as it finishes earlier + * ], function (err, result) { + * // result now equals 'done' + * }); + * + * // Or, with named functions: + * async.waterfall([ + * myFirstFunction, + * mySecondFunction, + * myLastFunction, + * ], function (err, result) { + * // result now equals 'done' * }); + * function myFirstFunction(callback) { + * callback(null, 'one', 'two'); + * } + * function mySecondFunction(arg1, arg2, callback) { + * // arg1 now equals 'one' and arg2 now equals 'two' + * callback(null, 'three'); + * } + * function myLastFunction(arg1, callback) { + * // arg1 now equals 'three' + * callback(null, 'done'); + * } */ - function race(tasks, callback) { + function waterfall (tasks, callback) { callback = once(callback); - if (!Array.isArray(tasks)) return callback(new TypeError('First argument to race must be an array of functions')); + if (!Array.isArray(tasks)) return callback(new Error('First argument to waterfall must be an array of functions')); if (!tasks.length) return callback(); - for (var i = 0, l = tasks.length; i < l; i++) { - wrapAsync(tasks[i])(callback); + var taskIndex = 0; + + function nextTask(args) { + var task = wrapAsync(tasks[taskIndex++]); + task(...args, onlyOnce(next)); } - } - var race$1 = awaitify(race, 2); + function next(err, ...args) { + if (err === false) return + if (err || taskIndex === tasks.length) { + return callback(err, ...args); + } + nextTask(args); + } - /** - * Same as [`reduce`]{@link module:Collections.reduce}, only operates on `array` in reverse order. - * - * @name reduceRight - * @static - * @memberOf module:Collections - * @method - * @see [async.reduce]{@link module:Collections.reduce} - * @alias foldr - * @category Collection - * @param {Array} array - A collection to iterate over. - * @param {*} memo - The initial state of the reduction. - * @param {AsyncFunction} iteratee - A function applied to each item in the - * array to produce the next step in the reduction. - * The `iteratee` should complete with the next state of the reduction. - * If the iteratee completes with an error, the reduction is stopped and the - * main `callback` is immediately called with the error. - * Invoked with (memo, item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Result is the reduced value. 
Invoked with - * (err, result). - * @returns {Promise} a promise, if no callback is passed - */ - function reduceRight (array, memo, iteratee, callback) { - var reversed = [...array].reverse(); - return reduce$1(reversed, memo, iteratee, callback); + nextTask([]); } - /** - * Wraps the async function in another function that always completes with a - * result object, even when it errors. - * - * The result object has either the property `error` or `value`. - * - * @name reflect - * @static - * @memberOf module:Utils - * @method - * @category Util - * @param {AsyncFunction} fn - The async function you want to wrap - * @returns {Function} - A function that always passes null to it's callback as - * the error. The second argument to the callback will be an `object` with - * either an `error` or a `value` property. - * @example - * - * async.parallel([ - * async.reflect(function(callback) { - * // do some stuff ... - * callback(null, 'one'); - * }), - * async.reflect(function(callback) { - * // do some more stuff but error ... - * callback('bad stuff happened'); - * }), - * async.reflect(function(callback) { - * // do some more stuff ... - * callback(null, 'two'); - * }) - * ], - * // optional callback - * function(err, results) { - * // values - * // results[0].value = 'one' - * // results[1].error = 'bad stuff happened' - * // results[2].value = 'two' - * }); - */ - function reflect(fn) { - var _fn = wrapAsync(fn); - return initialParams(function reflectOn(args, reflectCallback) { - args.push((error, ...cbArgs) => { - let retVal = {}; - if (error) { - retVal.error = error; - } - if (cbArgs.length > 0){ - var value = cbArgs; - if (cbArgs.length <= 1) { - [value] = cbArgs; - } - retVal.value = value; - } - reflectCallback(null, retVal); - }); - - return _fn.apply(this, args); - }); - } + var waterfall$1 = awaitify(waterfall); /** - * A helper function that wraps an array or an object of functions with `reflect`. - * - * @name reflectAll - * @static - * @memberOf module:Utils - * @method - * @see [async.reflect]{@link module:Utils.reflect} - * @category Util - * @param {Array|Object|Iterable} tasks - The collection of - * [async functions]{@link AsyncFunction} to wrap in `async.reflect`. - * @returns {Array} Returns an array of async functions, each wrapped in - * `async.reflect` - * @example + * An "async function" in the context of Async is an asynchronous function with + * a variable number of parameters, with the final parameter being a callback. + * (`function (arg1, arg2, ..., callback) {}`) + * The final callback is of the form `callback(err, results...)`, which must be + * called once the function is completed. The callback should be called with a + * Error as its first argument to signal that an error occurred. + * Otherwise, if no error occurred, it should be called with `null` as the first + * argument, and any additional `result` arguments that may apply, to signal + * successful completion. + * The callback must be called exactly once, ideally on a later tick of the + * JavaScript event loop. * - * let tasks = [ - * function(callback) { - * setTimeout(function() { - * callback(null, 'one'); - * }, 200); - * }, - * function(callback) { - * // do some more stuff but error ... - * callback(new Error('bad stuff happened')); - * }, - * function(callback) { - * setTimeout(function() { - * callback(null, 'two'); - * }, 100); - * } - * ]; + * This type of function is also referred to as a "Node-style async function", + * or a "continuation passing-style function" (CPS). 
Most of the methods of this
+ * library are themselves CPS/Node-style async functions, or functions that
+ * return CPS/Node-style async functions.
 *
- * async.parallel(async.reflectAll(tasks),
- * // optional callback
- * function(err, results) {
- *     // values
- *     // results[0].value = 'one'
- *     // results[1].error = Error('bad stuff happened')
- *     // results[2].value = 'two'
- * });
+ * Wherever we accept a Node-style async function, we also directly accept an
+ * [ES2017 `async` function]{@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function}.
+ * In this case, the `async` function will not be passed a final callback
+ * argument, and any thrown error will be used as the `err` argument of the
+ * implicit callback, and the return value will be used as the `result` value.
+ * (i.e. a rejection of the returned Promise becomes the `err` callback
+ * argument, and a `resolved` value becomes the `result`.)
 *
- * // an example using an object instead of an array
- * let tasks = {
- *     one: function(callback) {
- *         setTimeout(function() {
- *             callback(null, 'one');
- *         }, 200);
- *     },
- *     two: function(callback) {
- *         callback('two');
- *     },
- *     three: function(callback) {
- *         setTimeout(function() {
- *             callback(null, 'three');
- *         }, 100);
- *     }
- * };
+ * Note, due to JavaScript limitations, we can only detect native `async`
+ * functions and not transpiled implementations.
+ * Your environment must have `async`/`await` support for this to work.
+ * (e.g. Node > v7.6, or a recent version of a modern browser).
+ * If you are using `async` functions through a transpiler (e.g. Babel), you
+ * must still wrap the function with [asyncify]{@link module:Utils.asyncify},
+ * because the `async function` will be compiled to an ordinary function that
+ * returns a promise.
 *
- * async.parallel(async.reflectAll(tasks),
- * // optional callback
- * function(err, results) {
- *     // values
- *     // results.one.value = 'one'
- *     // results.two.error = 'two'
- *     // results.three.value = 'three'
- * });
+ * @typedef {Function} AsyncFunction
+ * @static
 */
- function reflectAll(tasks) {
-     var results;
-     if (Array.isArray(tasks)) {
-         results = tasks.map(reflect);
-     } else {
-         results = {};
-         Object.keys(tasks).forEach(key => {
-             results[key] = reflect.call(this, tasks[key]);
-         });
-     }
-     return results;
- }

 function reject$2(eachfn, arr, _iteratee, callback) {
     const iteratee = wrapAsync(_iteratee);
     return _filter(eachfn, arr, (value, cb) => {
         iteratee(value, (err, v) => {
             cb(err, !v);
         });
     }, callback);
 }

 /**
- * The opposite of [`filter`]{@link module:Collections.filter}. Removes values that pass an `async` truth test.
- *
- * @name reject
- * @static
- * @memberOf module:Collections
- * @method
- * @see [async.filter]{@link module:Collections.filter}
- * @category Collection
- * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
- * @param {Function} iteratee - An async truth test to apply to each item in
- * `coll`.
- * The should complete with a boolean value as its `result`.
- * Invoked with (item, callback).
- * @param {Function} [callback] - A callback which is called after all the
- * `iteratee` functions have finished. Invoked with (err, results).
- * @returns {Promise} a promise, if no callback is passed - * @example - * - * // dir1 is a directory that contains file1.txt, file2.txt - * // dir2 is a directory that contains file3.txt, file4.txt - * // dir3 is a directory that contains file5.txt - * - * const fileList = ['dir1/file1.txt','dir2/file3.txt','dir3/file6.txt']; - * - * // asynchronous function that checks if a file exists - * function fileExists(file, callback) { - * fs.access(file, fs.constants.F_OK, (err) => { - * callback(null, !err); - * }); - * } - * - * // Using callbacks - * async.reject(fileList, fileExists, function(err, results) { - * // [ 'dir3/file6.txt' ] - * // results now equals an array of the non-existing files - * }); - * - * // Using Promises - * async.reject(fileList, fileExists) - * .then( results => { - * console.log(results); - * // [ 'dir3/file6.txt' ] - * // results now equals an array of the non-existing files - * }).catch( err => { - * console.log(err); - * }); - * - * // Using async/await - * async () => { - * try { - * let results = await async.reject(fileList, fileExists); - * console.log(results); - * // [ 'dir3/file6.txt' ] - * // results now equals an array of the non-existing files - * } - * catch (err) { - * console.log(err); - * } - * } - * - */ - function reject (coll, iteratee, callback) { - return reject$2(eachOf$1, coll, iteratee, callback) - } - var reject$1 = awaitify(reject, 3); + var index = { + apply, + applyEach, + applyEachSeries, + asyncify, + auto, + autoInject, + cargo: cargo$1, + cargoQueue: cargo, + compose, + concat: concat$1, + concatLimit: concatLimit$1, + concatSeries: concatSeries$1, + constant: constant$1, + detect: detect$1, + detectLimit: detectLimit$1, + detectSeries: detectSeries$1, + dir, + doUntil, + doWhilst: doWhilst$1, + each, + eachLimit: eachLimit$1, + eachOf: eachOf$1, + eachOfLimit: eachOfLimit$1, + eachOfSeries: eachOfSeries$1, + eachSeries: eachSeries$1, + ensureAsync, + every: every$1, + everyLimit: everyLimit$1, + everySeries: everySeries$1, + filter: filter$1, + filterLimit: filterLimit$1, + filterSeries: filterSeries$1, + forever: forever$1, + groupBy, + groupByLimit: groupByLimit$1, + groupBySeries, + log, + map: map$1, + mapLimit: mapLimit$1, + mapSeries: mapSeries$1, + mapValues, + mapValuesLimit: mapValuesLimit$1, + mapValuesSeries, + memoize, + nextTick, + parallel, + parallelLimit, + priorityQueue, + queue, + race: race$1, + reduce: reduce$1, + reduceRight, + reflect, + reflectAll, + reject: reject$1, + rejectLimit: rejectLimit$1, + rejectSeries: rejectSeries$1, + retry, + retryable, + seq, + series, + setImmediate: setImmediate$1, + some: some$1, + someLimit: someLimit$1, + someSeries: someSeries$1, + sortBy: sortBy$1, + timeout, + times, + timesLimit, + timesSeries, + transform, + tryEach: tryEach$1, + unmemoize, + until, + waterfall: waterfall$1, + whilst: whilst$1, - /** - * The same as [`reject`]{@link module:Collections.reject} but runs a maximum of `limit` async operations at a - * time. - * - * @name rejectLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.reject]{@link module:Collections.reject} - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {Function} iteratee - An async truth test to apply to each item in - * `coll`. - * The should complete with a boolean value as its `result`. - * Invoked with (item, callback). 
- * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Invoked with (err, results). - * @returns {Promise} a promise, if no callback is passed - */ - function rejectLimit (coll, limit, iteratee, callback) { - return reject$2(eachOfLimit$2(limit), coll, iteratee, callback) - } - var rejectLimit$1 = awaitify(rejectLimit, 4); + // aliases + all: every$1, + allLimit: everyLimit$1, + allSeries: everySeries$1, + any: some$1, + anyLimit: someLimit$1, + anySeries: someSeries$1, + find: detect$1, + findLimit: detectLimit$1, + findSeries: detectSeries$1, + flatMap: concat$1, + flatMapLimit: concatLimit$1, + flatMapSeries: concatSeries$1, + forEach: each, + forEachSeries: eachSeries$1, + forEachLimit: eachLimit$1, + forEachOf: eachOf$1, + forEachOfSeries: eachOfSeries$1, + forEachOfLimit: eachOfLimit$1, + inject: reduce$1, + foldl: reduce$1, + foldr: reduceRight, + select: filter$1, + selectLimit: filterLimit$1, + selectSeries: filterSeries$1, + wrapSync: asyncify, + during: whilst$1, + doDuring: doWhilst$1 + }; - /** - * The same as [`reject`]{@link module:Collections.reject} but runs only a single async operation at a time. - * - * @name rejectSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.reject]{@link module:Collections.reject} - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {Function} iteratee - An async truth test to apply to each item in - * `coll`. - * The should complete with a boolean value as its `result`. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Invoked with (err, results). - * @returns {Promise} a promise, if no callback is passed - */ - function rejectSeries (coll, iteratee, callback) { - return reject$2(eachOfSeries$1, coll, iteratee, callback) - } - var rejectSeries$1 = awaitify(rejectSeries, 3); + exports.all = every$1; + exports.allLimit = everyLimit$1; + exports.allSeries = everySeries$1; + exports.any = some$1; + exports.anyLimit = someLimit$1; + exports.anySeries = someSeries$1; + exports.apply = apply; + exports.applyEach = applyEach; + exports.applyEachSeries = applyEachSeries; + exports.asyncify = asyncify; + exports.auto = auto; + exports.autoInject = autoInject; + exports.cargo = cargo$1; + exports.cargoQueue = cargo; + exports.compose = compose; + exports.concat = concat$1; + exports.concatLimit = concatLimit$1; + exports.concatSeries = concatSeries$1; + exports.constant = constant$1; + exports.default = index; + exports.detect = detect$1; + exports.detectLimit = detectLimit$1; + exports.detectSeries = detectSeries$1; + exports.dir = dir; + exports.doDuring = doWhilst$1; + exports.doUntil = doUntil; + exports.doWhilst = doWhilst$1; + exports.during = whilst$1; + exports.each = each; + exports.eachLimit = eachLimit$1; + exports.eachOf = eachOf$1; + exports.eachOfLimit = eachOfLimit$1; + exports.eachOfSeries = eachOfSeries$1; + exports.eachSeries = eachSeries$1; + exports.ensureAsync = ensureAsync; + exports.every = every$1; + exports.everyLimit = everyLimit$1; + exports.everySeries = everySeries$1; + exports.filter = filter$1; + exports.filterLimit = filterLimit$1; + exports.filterSeries = filterSeries$1; + exports.find = detect$1; + exports.findLimit = detectLimit$1; + exports.findSeries = detectSeries$1; + exports.flatMap = concat$1; + exports.flatMapLimit = concatLimit$1; + exports.flatMapSeries = 
concatSeries$1; + exports.foldl = reduce$1; + exports.foldr = reduceRight; + exports.forEach = each; + exports.forEachLimit = eachLimit$1; + exports.forEachOf = eachOf$1; + exports.forEachOfLimit = eachOfLimit$1; + exports.forEachOfSeries = eachOfSeries$1; + exports.forEachSeries = eachSeries$1; + exports.forever = forever$1; + exports.groupBy = groupBy; + exports.groupByLimit = groupByLimit$1; + exports.groupBySeries = groupBySeries; + exports.inject = reduce$1; + exports.log = log; + exports.map = map$1; + exports.mapLimit = mapLimit$1; + exports.mapSeries = mapSeries$1; + exports.mapValues = mapValues; + exports.mapValuesLimit = mapValuesLimit$1; + exports.mapValuesSeries = mapValuesSeries; + exports.memoize = memoize; + exports.nextTick = nextTick; + exports.parallel = parallel; + exports.parallelLimit = parallelLimit; + exports.priorityQueue = priorityQueue; + exports.queue = queue; + exports.race = race$1; + exports.reduce = reduce$1; + exports.reduceRight = reduceRight; + exports.reflect = reflect; + exports.reflectAll = reflectAll; + exports.reject = reject$1; + exports.rejectLimit = rejectLimit$1; + exports.rejectSeries = rejectSeries$1; + exports.retry = retry; + exports.retryable = retryable; + exports.select = filter$1; + exports.selectLimit = filterLimit$1; + exports.selectSeries = filterSeries$1; + exports.seq = seq; + exports.series = series; + exports.setImmediate = setImmediate$1; + exports.some = some$1; + exports.someLimit = someLimit$1; + exports.someSeries = someSeries$1; + exports.sortBy = sortBy$1; + exports.timeout = timeout; + exports.times = times; + exports.timesLimit = timesLimit; + exports.timesSeries = timesSeries; + exports.transform = transform; + exports.tryEach = tryEach$1; + exports.unmemoize = unmemoize; + exports.until = until; + exports.waterfall = waterfall$1; + exports.whilst = whilst$1; + exports.wrapSync = asyncify; - function constant(value) { - return function () { - return value; - } - } + Object.defineProperty(exports, '__esModule', { value: true }); - /** - * Attempts to get a successful response from `task` no more than `times` times - * before returning an error. If the task is successful, the `callback` will be - * passed the result of the successful task. If all attempts fail, the callback - * will be passed the error and result (if any) of the final attempt. - * - * @name retry - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @see [async.retryable]{@link module:ControlFlow.retryable} - * @param {Object|number} [opts = {times: 5, interval: 0}| 5] - Can be either an - * object with `times` and `interval` or a number. - * * `times` - The number of attempts to make before giving up. The default - * is `5`. - * * `interval` - The time to wait between retries, in milliseconds. The - * default is `0`. The interval may also be specified as a function of the - * retry count (see example). - * * `errorFilter` - An optional synchronous function that is invoked on - * erroneous result. If it returns `true` the retry attempts will continue; - * if the function returns `false` the retry flow is aborted with the current - * attempt's error and result being returned to the final callback. - * Invoked with (err). - * * If `opts` is a number, the number specifies the number of times to retry, - * with the default interval of `0`. - * @param {AsyncFunction} task - An async function to retry. - * Invoked with (callback). 
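// --- Editor's sketch (not part of the bundle) ---
// The alias map above re-exports the same function objects under several
// names, so the pairs below compare identical. Assumes the bundled module is
// loadable as `async`.
const async = require('async');

console.log(async.all === async.every);         // true
console.log(async.select === async.filter);     // true
console.log(async.wrapSync === async.asyncify); // true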
- * @param {Function} [callback] - An optional callback which is called when the - * task has succeeded, or after the final failed attempt. It receives the `err` - * and `result` arguments of the last attempt at completing the `task`. Invoked - * with (err, results). - * @returns {Promise} a promise if no callback provided - * - * @example - * - * // The `retry` function can be used as a stand-alone control flow by passing - * // a callback, as shown below: - * - * // try calling apiMethod 3 times - * async.retry(3, apiMethod, function(err, result) { - * // do something with the result - * }); - * - * // try calling apiMethod 3 times, waiting 200 ms between each retry - * async.retry({times: 3, interval: 200}, apiMethod, function(err, result) { - * // do something with the result - * }); - * - * // try calling apiMethod 10 times with exponential backoff - * // (i.e. intervals of 100, 200, 400, 800, 1600, ... milliseconds) - * async.retry({ - * times: 10, - * interval: function(retryCount) { - * return 50 * Math.pow(2, retryCount); - * } - * }, apiMethod, function(err, result) { - * // do something with the result - * }); - * - * // try calling apiMethod the default 5 times no delay between each retry - * async.retry(apiMethod, function(err, result) { - * // do something with the result - * }); - * - * // try calling apiMethod only when error condition satisfies, all other - * // errors will abort the retry control flow and return to final callback - * async.retry({ - * errorFilter: function(err) { - * return err.message === 'Temporary error'; // only retry on a specific error - * } - * }, apiMethod, function(err, result) { - * // do something with the result - * }); - * - * // to retry individual methods that are not as reliable within other - * // control flow functions, use the `retryable` wrapper: - * async.auto({ - * users: api.getUsers.bind(api), - * payments: async.retryable(3, api.getPayments.bind(api)) - * }, function(err, results) { - * // do something with the results - * }); - * - */ - const DEFAULT_TIMES = 5; - const DEFAULT_INTERVAL = 0; +})); - function retry(opts, task, callback) { - var options = { - times: DEFAULT_TIMES, - intervalFunc: constant(DEFAULT_INTERVAL) - }; - if (arguments.length < 3 && typeof opts === 'function') { - callback = task || promiseCallback(); - task = opts; - } else { - parseTimes(options, opts); - callback = callback || promiseCallback(); - } +/***/ }), - if (typeof task !== 'function') { - throw new Error("Invalid arguments for async.retry"); - } +/***/ 40785: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - var _task = wrapAsync(task); +module.exports = +{ + parallel : __nccwpck_require__(37585), + serial : __nccwpck_require__(75758), + serialOrdered : __nccwpck_require__(23209) +}; - var attempt = 1; - function retryAttempt() { - _task((err, ...args) => { - if (err === false) return - if (err && attempt++ < options.times && - (typeof options.errorFilter != 'function' || - options.errorFilter(err))) { - setTimeout(retryAttempt, options.intervalFunc(attempt - 1)); - } else { - callback(err, ...args); - } - }); - } - retryAttempt(); - return callback[PROMISE_SYMBOL] - } +/***/ }), - function parseTimes(acc, t) { - if (typeof t === 'object') { - acc.times = +t.times || DEFAULT_TIMES; +/***/ 15953: +/***/ ((module) => { - acc.intervalFunc = typeof t.interval === 'function' ? 
- t.interval : - constant(+t.interval || DEFAULT_INTERVAL); +// API +module.exports = abort; - acc.errorFilter = t.errorFilter; - } else if (typeof t === 'number' || typeof t === 'string') { - acc.times = +t || DEFAULT_TIMES; - } else { - throw new Error("Invalid arguments for async.retry"); - } - } +/** + * Aborts leftover active jobs + * + * @param {object} state - current state object + */ +function abort(state) +{ + Object.keys(state.jobs).forEach(clean.bind(state)); - /** - * A close relative of [`retry`]{@link module:ControlFlow.retry}. This method - * wraps a task and makes it retryable, rather than immediately calling it - * with retries. - * - * @name retryable - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.retry]{@link module:ControlFlow.retry} - * @category Control Flow - * @param {Object|number} [opts = {times: 5, interval: 0}| 5] - optional - * options, exactly the same as from `retry`, except for a `opts.arity` that - * is the arity of the `task` function, defaulting to `task.length` - * @param {AsyncFunction} task - the asynchronous function to wrap. - * This function will be passed any arguments passed to the returned wrapper. - * Invoked with (...args, callback). - * @returns {AsyncFunction} The wrapped function, which when invoked, will - * retry on an error, based on the parameters specified in `opts`. - * This function will accept the same parameters as `task`. - * @example - * - * async.auto({ - * dep1: async.retryable(3, getFromFlakyService), - * process: ["dep1", async.retryable(3, function (results, cb) { - * maybeProcessData(results.dep1, cb); - * })] - * }, callback); - */ - function retryable (opts, task) { - if (!task) { - task = opts; - opts = null; - } - let arity = (opts && opts.arity) || task.length; - if (isAsync(task)) { - arity += 1; - } - var _task = wrapAsync(task); - return initialParams((args, callback) => { - if (args.length < arity - 1 || callback == null) { - args.push(callback); - callback = promiseCallback(); - } - function taskFn(cb) { - _task(...args, cb); - } + // reset leftover jobs + state.jobs = {}; +} - if (opts) retry(opts, taskFn, callback); - else retry(taskFn, callback); +/** + * Cleans up leftover job by invoking abort function for the provided job id + * + * @this state + * @param {string|number} key - job id to abort + */ +function clean(key) +{ + if (typeof this.jobs[key] == 'function') + { + this.jobs[key](); + } +} - return callback[PROMISE_SYMBOL] - }); - } - /** - * Run the functions in the `tasks` collection in series, each one running once - * the previous function has completed. If any functions in the series pass an - * error to its callback, no more functions are run, and `callback` is - * immediately called with the value of the error. Otherwise, `callback` - * receives an array of results when `tasks` have completed. - * - * It is also possible to use an object instead of an array. Each property will - * be run as a function, and the results will be passed to the final `callback` - * as an object instead of an array. This can be a more readable way of handling - * results from {@link async.series}. - * - * **Note** that while many implementations preserve the order of object - * properties, the [ECMAScript Language Specification](http://www.ecma-international.org/ecma-262/5.1/#sec-8.6) - * explicitly states that - * - * > The mechanics and order of enumerating the properties is not specified. 
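// --- Editor's sketch (not part of the bundle) ---
// In asynckit, `state.jobs` maps each in-flight job id to that job's abort
// function, so the abort()/clean() pair defined above can cancel whatever is
// still running. The demo state object is hypothetical.
var demoState = { jobs: {
  'a': function() { console.log('job a aborted'); },
  'b': function() { console.log('job b aborted'); }
} };

abort(demoState);            // invokes both abort functions
console.log(demoState.jobs); // {} -- leftover jobs were reset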
- * - * So if you rely on the order in which your series of functions are executed, - * and want this to work on all platforms, consider using an array. - * - * @name series - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection containing - * [async functions]{@link AsyncFunction} to run in series. - * Each function can complete with any number of optional `result` values. - * @param {Function} [callback] - An optional callback to run once all the - * functions have completed. This function gets a results array (or object) - * containing all the result arguments passed to the `task` callbacks. Invoked - * with (err, result). - * @return {Promise} a promise, if no callback is passed - * @example - * - * //Using Callbacks - * async.series([ - * function(callback) { - * setTimeout(function() { - * // do some async task - * callback(null, 'one'); - * }, 200); - * }, - * function(callback) { - * setTimeout(function() { - * // then do another async task - * callback(null, 'two'); - * }, 100); - * } - * ], function(err, results) { - * console.log(results); - * // results is equal to ['one','two'] - * }); - * - * // an example using objects instead of arrays - * async.series({ - * one: function(callback) { - * setTimeout(function() { - * // do some async task - * callback(null, 1); - * }, 200); - * }, - * two: function(callback) { - * setTimeout(function() { - * // then do another async task - * callback(null, 2); - * }, 100); - * } - * }, function(err, results) { - * console.log(results); - * // results is equal to: { one: 1, two: 2 } - * }); - * - * //Using Promises - * async.series([ - * function(callback) { - * setTimeout(function() { - * callback(null, 'one'); - * }, 200); - * }, - * function(callback) { - * setTimeout(function() { - * callback(null, 'two'); - * }, 100); - * } - * ]).then(results => { - * console.log(results); - * // results is equal to ['one','two'] - * }).catch(err => { - * console.log(err); - * }); - * - * // an example using an object instead of an array - * async.series({ - * one: function(callback) { - * setTimeout(function() { - * // do some async task - * callback(null, 1); - * }, 200); - * }, - * two: function(callback) { - * setTimeout(function() { - * // then do another async task - * callback(null, 2); - * }, 100); - * } - * }).then(results => { - * console.log(results); - * // results is equal to: { one: 1, two: 2 } - * }).catch(err => { - * console.log(err); - * }); - * - * //Using async/await - * async () => { - * try { - * let results = await async.series([ - * function(callback) { - * setTimeout(function() { - * // do some async task - * callback(null, 'one'); - * }, 200); - * }, - * function(callback) { - * setTimeout(function() { - * // then do another async task - * callback(null, 'two'); - * }, 100); - * } - * ]); - * console.log(results); - * // results is equal to ['one','two'] - * } - * catch (err) { - * console.log(err); - * } - * } - * - * // an example using an object instead of an array - * async () => { - * try { - * let results = await async.parallel({ - * one: function(callback) { - * setTimeout(function() { - * // do some async task - * callback(null, 1); - * }, 200); - * }, - * two: function(callback) { - * setTimeout(function() { - * // then do another async task - * callback(null, 2); - * }, 100); - * } - * }); - * console.log(results); - * // results is equal to: { one: 1, two: 2 } - * } - * catch (err) { - * console.log(err); - * } - * } - * 
- */ - function series(tasks, callback) { - return _parallel(eachOfSeries$1, tasks, callback); - } +/***/ }), - /** - * Returns `true` if at least one element in the `coll` satisfies an async test. - * If any iteratee call returns `true`, the main `callback` is immediately - * called. - * - * @name some - * @static - * @memberOf module:Collections - * @method - * @alias any - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async truth test to apply to each item - * in the collections in parallel. - * The iteratee should complete with a boolean `result` value. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called as soon as any - * iteratee returns `true`, or after all the iteratee functions have finished. - * Result will be either `true` or `false` depending on the values of the async - * tests. Invoked with (err, result). - * @returns {Promise} a promise, if no callback provided - * @example - * - * // dir1 is a directory that contains file1.txt, file2.txt - * // dir2 is a directory that contains file3.txt, file4.txt - * // dir3 is a directory that contains file5.txt - * // dir4 does not exist - * - * // asynchronous function that checks if a file exists - * function fileExists(file, callback) { - * fs.access(file, fs.constants.F_OK, (err) => { - * callback(null, !err); - * }); - * } - * - * // Using callbacks - * async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists, - * function(err, result) { - * console.log(result); - * // true - * // result is true since some file in the list exists - * } - *); - * - * async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists, - * function(err, result) { - * console.log(result); - * // false - * // result is false since none of the files exists - * } - *); - * - * // Using Promises - * async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists) - * .then( result => { - * console.log(result); - * // true - * // result is true since some file in the list exists - * }).catch( err => { - * console.log(err); - * }); - * - * async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists) - * .then( result => { - * console.log(result); - * // false - * // result is false since none of the files exists - * }).catch( err => { - * console.log(err); - * }); - * - * // Using async/await - * async () => { - * try { - * let result = await async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists); - * console.log(result); - * // true - * // result is true since some file in the list exists - * } - * catch (err) { - * console.log(err); - * } - * } - * - * async () => { - * try { - * let result = await async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists); - * console.log(result); - * // false - * // result is false since none of the files exists - * } - * catch (err) { - * console.log(err); - * } - * } - * - */ - function some(coll, iteratee, callback) { - return _createTester(Boolean, res => res)(eachOf$1, coll, iteratee, callback) +/***/ 92094: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var defer = __nccwpck_require__(83563); + +// API +module.exports = async; + +/** + * Runs provided callback asynchronously + * even if callback itself is not + * + * @param {function} callback - callback to invoke + * @returns {function} - augmented callback + */ 
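// --- Editor's sketch (not part of the bundle) ---
// async() below wraps a callback so that it always fires on a later tick of
// the event loop, even when the wrapped callback is invoked synchronously.
// Demo values are hypothetical.
var wrapped = async(function(err, result) {
  console.log('callback ran with', result);
});

wrapped(null, 42);                      // invoked synchronously...
console.log('logged before callback'); // ...but deferred past this line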
+function async(callback) +{ + var isAsync = false; + + // check if async happened + defer(function() { isAsync = true; }); + + return function async_callback(err, result) + { + if (isAsync) + { + callback(err, result); } - var some$1 = awaitify(some, 3); + else + { + defer(function nextTick_callback() + { + callback(err, result); + }); + } + }; +} - /** - * The same as [`some`]{@link module:Collections.some} but runs a maximum of `limit` async operations at a time. - * - * @name someLimit - * @static - * @memberOf module:Collections - * @method - * @see [async.some]{@link module:Collections.some} - * @alias anyLimit - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - An async truth test to apply to each item - * in the collections in parallel. - * The iteratee should complete with a boolean `result` value. - * Invoked with (item, callback). - * @param {Function} [callback] - A callback which is called as soon as any - * iteratee returns `true`, or after all the iteratee functions have finished. - * Result will be either `true` or `false` depending on the values of the async - * tests. Invoked with (err, result). - * @returns {Promise} a promise, if no callback provided - */ - function someLimit(coll, limit, iteratee, callback) { - return _createTester(Boolean, res => res)(eachOfLimit$2(limit), coll, iteratee, callback) + +/***/ }), + +/***/ 83563: +/***/ ((module) => { + +module.exports = defer; + +/** + * Runs provided function on next iteration of the event loop + * + * @param {function} fn - function to run + */ +function defer(fn) +{ + var nextTick = typeof setImmediate == 'function' + ? setImmediate + : ( + typeof process == 'object' && typeof process.nextTick == 'function' + ? process.nextTick + : null + ); + + if (nextTick) + { + nextTick(fn); + } + else + { + setTimeout(fn, 0); + } +} + + +/***/ }), + +/***/ 47413: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var async = __nccwpck_require__(92094) + , abort = __nccwpck_require__(15953) + ; + +// API +module.exports = iterate; + +/** + * Iterates over each job object + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {object} state - current job status + * @param {function} callback - invoked when all elements processed + */ +function iterate(list, iterator, state, callback) +{ + // store current index + var key = state['keyedList'] ? state['keyedList'][state.index] : state.index; + + state.jobs[key] = runJob(iterator, key, list[key], function(error, output) + { + // don't repeat yourself + // skip secondary callbacks + if (!(key in state.jobs)) + { + return; } - var someLimit$1 = awaitify(someLimit, 4); - /** - * The same as [`some`]{@link module:Collections.some} but runs only a single async operation at a time. - * - * @name someSeries - * @static - * @memberOf module:Collections - * @method - * @see [async.some]{@link module:Collections.some} - * @alias anySeries - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async truth test to apply to each item - * in the collections in series. - * The iteratee should complete with a boolean `result` value. - * Invoked with (item, callback). 
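// --- Editor's sketch (not part of the bundle) ---
// `someLimit` resolves `true` as soon as one test passes, keeping at most
// `limit` tests in flight. Input values and iteratee are hypothetical;
// assumes the bundled module is loadable as `async`.
const async = require('async');

async.someLimit([1, 3, 5, 6], 2, function(n, callback) {
    setImmediate(() => callback(null, n % 2 === 0));
}, function(err, result) {
    console.log(result);
    // true -- 6 is even
});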
- * @param {Function} [callback] - A callback which is called as soon as any - * iteratee returns `true`, or after all the iteratee functions have finished. - * Result will be either `true` or `false` depending on the values of the async - * tests. Invoked with (err, result). - * @returns {Promise} a promise, if no callback provided - */ - function someSeries(coll, iteratee, callback) { - return _createTester(Boolean, res => res)(eachOfSeries$1, coll, iteratee, callback) + // clean up jobs + delete state.jobs[key]; + + if (error) + { + // don't process rest of the results + // stop still active jobs + // and reset the list + abort(state); + } + else + { + state.results[key] = output; } - var someSeries$1 = awaitify(someSeries, 3); - /** - * Sorts a list by the results of running each `coll` value through an async - * `iteratee`. - * - * @name sortBy - * @static - * @memberOf module:Collections - * @method - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {AsyncFunction} iteratee - An async function to apply to each item in - * `coll`. - * The iteratee should complete with a value to use as the sort criteria as - * its `result`. - * Invoked with (item, callback). - * @param {Function} callback - A callback which is called after all the - * `iteratee` functions have finished, or an error occurs. Results is the items - * from the original `coll` sorted by the values returned by the `iteratee` - * calls. Invoked with (err, results). - * @returns {Promise} a promise, if no callback passed - * @example - * - * // bigfile.txt is a file that is 251100 bytes in size - * // mediumfile.txt is a file that is 11000 bytes in size - * // smallfile.txt is a file that is 121 bytes in size - * - * // asynchronous function that returns the file size in bytes - * function getFileSizeInBytes(file, callback) { - * fs.stat(file, function(err, stat) { - * if (err) { - * return callback(err); - * } - * callback(null, stat.size); - * }); - * } - * - * // Using callbacks - * async.sortBy(['mediumfile.txt','smallfile.txt','bigfile.txt'], getFileSizeInBytes, - * function(err, results) { - * if (err) { - * console.log(err); - * } else { - * console.log(results); - * // results is now the original array of files sorted by - * // file size (ascending by default), e.g. - * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] - * } - * } - * ); - * - * // By modifying the callback parameter the - * // sorting order can be influenced: - * - * // ascending order - * async.sortBy(['mediumfile.txt','smallfile.txt','bigfile.txt'], function(file, callback) { - * getFileSizeInBytes(file, function(getFileSizeErr, fileSize) { - * if (getFileSizeErr) return callback(getFileSizeErr); - * callback(null, fileSize); - * }); - * }, function(err, results) { - * if (err) { - * console.log(err); - * } else { - * console.log(results); - * // results is now the original array of files sorted by - * // file size (ascending by default), e.g. 
- * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] - * } - * } - * ); - * - * // descending order - * async.sortBy(['bigfile.txt','mediumfile.txt','smallfile.txt'], function(file, callback) { - * getFileSizeInBytes(file, function(getFileSizeErr, fileSize) { - * if (getFileSizeErr) { - * return callback(getFileSizeErr); - * } - * callback(null, fileSize * -1); - * }); - * }, function(err, results) { - * if (err) { - * console.log(err); - * } else { - * console.log(results); - * // results is now the original array of files sorted by - * // file size (ascending by default), e.g. - * // [ 'bigfile.txt', 'mediumfile.txt', 'smallfile.txt'] - * } - * } - * ); - * - * // Error handling - * async.sortBy(['mediumfile.txt','smallfile.txt','missingfile.txt'], getFileSizeInBytes, - * function(err, results) { - * if (err) { - * console.log(err); - * // [ Error: ENOENT: no such file or directory ] - * } else { - * console.log(results); - * } - * } - * ); - * - * // Using Promises - * async.sortBy(['mediumfile.txt','smallfile.txt','bigfile.txt'], getFileSizeInBytes) - * .then( results => { - * console.log(results); - * // results is now the original array of files sorted by - * // file size (ascending by default), e.g. - * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] - * }).catch( err => { - * console.log(err); - * }); - * - * // Error handling - * async.sortBy(['mediumfile.txt','smallfile.txt','missingfile.txt'], getFileSizeInBytes) - * .then( results => { - * console.log(results); - * }).catch( err => { - * console.log(err); - * // [ Error: ENOENT: no such file or directory ] - * }); - * - * // Using async/await - * (async () => { - * try { - * let results = await async.sortBy(['bigfile.txt','mediumfile.txt','smallfile.txt'], getFileSizeInBytes); - * console.log(results); - * // results is now the original array of files sorted by - * // file size (ascending by default), e.g. - * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] - * } - * catch (err) { - * console.log(err); - * } - * })(); - * - * // Error handling - * async () => { - * try { - * let results = await async.sortBy(['missingfile.txt','mediumfile.txt','smallfile.txt'], getFileSizeInBytes); - * console.log(results); - * } - * catch (err) { - * console.log(err); - * // [ Error: ENOENT: no such file or directory ] - * } - * } - * - */ - function sortBy (coll, iteratee, callback) { - var _iteratee = wrapAsync(iteratee); - return map$1(coll, (x, iterCb) => { - _iteratee(x, (err, criteria) => { - if (err) return iterCb(err); - iterCb(err, {value: x, criteria}); - }); - }, (err, results) => { - if (err) return callback(err); - callback(null, results.sort(comparator).map(v => v.value)); - }); + // return salvaged results + callback(error, state.results); + }); +} - function comparator(left, right) { - var a = left.criteria, b = right.criteria; - return a < b ? -1 : a > b ? 1 : 0; - } - } - var sortBy$1 = awaitify(sortBy, 3); +/** + * Runs iterator over provided job element + * + * @param {function} iterator - iterator to invoke + * @param {string|number} key - key/index of the element in the list of jobs + * @param {mixed} item - job description + * @param {function} callback - invoked after iterator is done with the job + * @returns {function|mixed} - job abort function or something else + */ +function runJob(iterator, key, item, callback) +{ + var aborter; - /** - * Sets a time limit on an asynchronous function. 
If the function does not call - * its callback within the specified milliseconds, it will be called with a - * timeout error. The code property for the error object will be `'ETIMEDOUT'`. - * - * @name timeout - * @static - * @memberOf module:Utils - * @method - * @category Util - * @param {AsyncFunction} asyncFn - The async function to limit in time. - * @param {number} milliseconds - The specified time limit. - * @param {*} [info] - Any variable you want attached (`string`, `object`, etc) - * to timeout Error for more information.. - * @returns {AsyncFunction} Returns a wrapped function that can be used with any - * of the control flow functions. - * Invoke this function with the same parameters as you would `asyncFunc`. - * @example - * - * function myFunction(foo, callback) { - * doAsyncTask(foo, function(err, data) { - * // handle errors - * if (err) return callback(err); - * - * // do some stuff ... - * - * // return processed data - * return callback(null, data); - * }); - * } - * - * var wrapped = async.timeout(myFunction, 1000); - * - * // call `wrapped` as you would `myFunction` - * wrapped({ bar: 'bar' }, function(err, data) { - * // if `myFunction` takes < 1000 ms to execute, `err` - * // and `data` will have their expected values - * - * // else `err` will be an Error with the code 'ETIMEDOUT' - * }); - */ - function timeout(asyncFn, milliseconds, info) { - var fn = wrapAsync(asyncFn); + // allow shortcut if iterator expects only two arguments + if (iterator.length == 2) + { + aborter = iterator(item, async(callback)); + } + // otherwise go with full three arguments + else + { + aborter = iterator(item, key, async(callback)); + } - return initialParams((args, callback) => { - var timedOut = false; - var timer; + return aborter; +} - function timeoutCallback() { - var name = asyncFn.name || 'anonymous'; - var error = new Error('Callback function "' + name + '" timed out.'); - error.code = 'ETIMEDOUT'; - if (info) { - error.info = info; - } - timedOut = true; - callback(error); - } - args.push((...cbArgs) => { - if (!timedOut) { - callback(...cbArgs); - clearTimeout(timer); - } - }); +/***/ }), - // setup timer and call original function - timer = setTimeout(timeoutCallback, milliseconds); - fn(...args); - }); - } +/***/ 24618: +/***/ ((module) => { - function range(size) { - var result = Array(size); - while (size--) { - result[size] = size; - } - return result; - } +// API +module.exports = state; - /** - * The same as [times]{@link module:ControlFlow.times} but runs a maximum of `limit` async operations at a - * time. - * - * @name timesLimit - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.times]{@link module:ControlFlow.times} - * @category Control Flow - * @param {number} count - The number of times to run the function. - * @param {number} limit - The maximum number of async operations at a time. - * @param {AsyncFunction} iteratee - The async function to call `n` times. - * Invoked with the iteration index and a callback: (n, next). - * @param {Function} callback - see [async.map]{@link module:Collections.map}. 
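// --- Editor's sketch (not part of the bundle) ---
// `timesLimit` invokes the iteratee `count` times with bounded concurrency
// and collects the results like `map`. Timings are hypothetical; assumes the
// bundled module is loadable as `async`.
const async = require('async');

async.timesLimit(5, 2, function(n, next) {
    setTimeout(() => next(null, n * n), 10);
}, function(err, squares) {
    console.log(squares);
    // [ 0, 1, 4, 9, 16 ]
});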
- * @returns {Promise} a promise, if no callback is provided - */ - function timesLimit(count, limit, iteratee, callback) { - var _iteratee = wrapAsync(iteratee); - return mapLimit$1(range(count), limit, _iteratee, callback); +/** + * Creates initial state object + * for iteration over list + * + * @param {array|object} list - list to iterate over + * @param {function|null} sortMethod - function to use for keys sort, + * or `null` to keep them as is + * @returns {object} - initial state object + */ +function state(list, sortMethod) +{ + var isNamedList = !Array.isArray(list) + , initState = + { + index : 0, + keyedList: isNamedList || sortMethod ? Object.keys(list) : null, + jobs : {}, + results : isNamedList ? {} : [], + size : isNamedList ? Object.keys(list).length : list.length } + ; - /** - * Calls the `iteratee` function `n` times, and accumulates results in the same - * manner you would use with [map]{@link module:Collections.map}. - * - * @name times - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.map]{@link module:Collections.map} - * @category Control Flow - * @param {number} n - The number of times to run the function. - * @param {AsyncFunction} iteratee - The async function to call `n` times. - * Invoked with the iteration index and a callback: (n, next). - * @param {Function} callback - see {@link module:Collections.map}. - * @returns {Promise} a promise, if no callback is provided - * @example - * - * // Pretend this is some complicated async factory - * var createUser = function(id, callback) { - * callback(null, { - * id: 'user' + id - * }); - * }; - * - * // generate 5 users - * async.times(5, function(n, next) { - * createUser(n, function(err, user) { - * next(err, user); - * }); - * }, function(err, users) { - * // we should now have 5 users - * }); - */ - function times (n, iteratee, callback) { - return timesLimit(n, Infinity, iteratee, callback) - } + if (sortMethod) + { + // sort array keys based on it's values + // sort object's keys just on own merit + initState.keyedList.sort(isNamedList ? sortMethod : function(a, b) + { + return sortMethod(list[a], list[b]); + }); + } - /** - * The same as [times]{@link module:ControlFlow.times} but runs only a single async operation at a time. - * - * @name timesSeries - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.times]{@link module:ControlFlow.times} - * @category Control Flow - * @param {number} n - The number of times to run the function. - * @param {AsyncFunction} iteratee - The async function to call `n` times. - * Invoked with the iteration index and a callback: (n, next). - * @param {Function} callback - see {@link module:Collections.map}. - * @returns {Promise} a promise, if no callback is provided - */ - function timesSeries (n, iteratee, callback) { - return timesLimit(n, 1, iteratee, callback) - } + return initState; +} - /** - * A relative of `reduce`. Takes an Object or Array, and iterates over each - * element in parallel, each step potentially mutating an `accumulator` value. - * The type of the accumulator defaults to the type of collection passed in. - * - * @name transform - * @static - * @memberOf module:Collections - * @method - * @category Collection - * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. - * @param {*} [accumulator] - The initial state of the transform. 
If omitted, - * it will default to an empty Object or Array, depending on the type of `coll` - * @param {AsyncFunction} iteratee - A function applied to each item in the - * collection that potentially modifies the accumulator. - * Invoked with (accumulator, item, key, callback). - * @param {Function} [callback] - A callback which is called after all the - * `iteratee` functions have finished. Result is the transformed accumulator. - * Invoked with (err, result). - * @returns {Promise} a promise, if no callback provided - * @example - * - * // file1.txt is a file that is 1000 bytes in size - * // file2.txt is a file that is 2000 bytes in size - * // file3.txt is a file that is 3000 bytes in size - * - * // helper function that returns human-readable size format from bytes - * function formatBytes(bytes, decimals = 2) { - * // implementation not included for brevity - * return humanReadbleFilesize; - * } - * - * const fileList = ['file1.txt','file2.txt','file3.txt']; - * - * // asynchronous function that returns the file size, transformed to human-readable format - * // e.g. 1024 bytes = 1KB, 1234 bytes = 1.21 KB, 1048576 bytes = 1MB, etc. - * function transformFileSize(acc, value, key, callback) { - * fs.stat(value, function(err, stat) { - * if (err) { - * return callback(err); - * } - * acc[key] = formatBytes(stat.size); - * callback(null); - * }); - * } - * - * // Using callbacks - * async.transform(fileList, transformFileSize, function(err, result) { - * if(err) { - * console.log(err); - * } else { - * console.log(result); - * // [ '1000 Bytes', '1.95 KB', '2.93 KB' ] - * } - * }); - * - * // Using Promises - * async.transform(fileList, transformFileSize) - * .then(result => { - * console.log(result); - * // [ '1000 Bytes', '1.95 KB', '2.93 KB' ] - * }).catch(err => { - * console.log(err); - * }); - * - * // Using async/await - * (async () => { - * try { - * let result = await async.transform(fileList, transformFileSize); - * console.log(result); - * // [ '1000 Bytes', '1.95 KB', '2.93 KB' ] - * } - * catch (err) { - * console.log(err); - * } - * })(); - * - * @example - * - * // file1.txt is a file that is 1000 bytes in size - * // file2.txt is a file that is 2000 bytes in size - * // file3.txt is a file that is 3000 bytes in size - * - * // helper function that returns human-readable size format from bytes - * function formatBytes(bytes, decimals = 2) { - * // implementation not included for brevity - * return humanReadbleFilesize; - * } - * - * const fileMap = { f1: 'file1.txt', f2: 'file2.txt', f3: 'file3.txt' }; - * - * // asynchronous function that returns the file size, transformed to human-readable format - * // e.g. 1024 bytes = 1KB, 1234 bytes = 1.21 KB, 1048576 bytes = 1MB, etc. 
- * function transformFileSize(acc, value, key, callback) { - * fs.stat(value, function(err, stat) { - * if (err) { - * return callback(err); - * } - * acc[key] = formatBytes(stat.size); - * callback(null); - * }); - * } - * - * // Using callbacks - * async.transform(fileMap, transformFileSize, function(err, result) { - * if(err) { - * console.log(err); - * } else { - * console.log(result); - * // { f1: '1000 Bytes', f2: '1.95 KB', f3: '2.93 KB' } - * } - * }); - * - * // Using Promises - * async.transform(fileMap, transformFileSize) - * .then(result => { - * console.log(result); - * // { f1: '1000 Bytes', f2: '1.95 KB', f3: '2.93 KB' } - * }).catch(err => { - * console.log(err); - * }); - * - * // Using async/await - * async () => { - * try { - * let result = await async.transform(fileMap, transformFileSize); - * console.log(result); - * // { f1: '1000 Bytes', f2: '1.95 KB', f3: '2.93 KB' } - * } - * catch (err) { - * console.log(err); - * } - * } - * - */ - function transform (coll, accumulator, iteratee, callback) { - if (arguments.length <= 3 && typeof accumulator === 'function') { - callback = iteratee; - iteratee = accumulator; - accumulator = Array.isArray(coll) ? [] : {}; - } - callback = once(callback || promiseCallback()); - var _iteratee = wrapAsync(iteratee); - eachOf$1(coll, (v, k, cb) => { - _iteratee(accumulator, v, k, cb); - }, err => callback(err, accumulator)); - return callback[PROMISE_SYMBOL] - } +/***/ }), - /** - * It runs each task in series but stops whenever any of the functions were - * successful. If one of the tasks were successful, the `callback` will be - * passed the result of the successful task. If all tasks fail, the callback - * will be passed the error and result (if any) of the final attempt. - * - * @name tryEach - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection containing functions to - * run, each function is passed a `callback(err, result)` it must call on - * completion with an error `err` (which can be `null`) and an optional `result` - * value. - * @param {Function} [callback] - An optional callback which is called when one - * of the tasks has succeeded, or all have failed. It receives the `err` and - * `result` arguments of the last attempt at completing the `task`. Invoked with - * (err, results). - * @returns {Promise} a promise, if no callback is passed - * @example - * async.tryEach([ - * function getDataFromFirstWebsite(callback) { - * // Try getting the data from the first website - * callback(err, data); - * }, - * function getDataFromSecondWebsite(callback) { - * // First website failed, - * // Try getting the data from the backup website - * callback(err, data); - * } - * ], - * // optional callback - * function(err, results) { - * Now do something with the data. - * }); - * - */ - function tryEach(tasks, callback) { - var error = null; - var result; - return eachSeries$1(tasks, (task, taskCb) => { - wrapAsync(task)((err, ...args) => { - if (err === false) return taskCb(err); +/***/ 57276: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (args.length < 2) { - [result] = args; - } else { - result = args; - } - error = err; - taskCb(err ? 
null : {}); - }); - }, () => callback(error, result)); - } +var abort = __nccwpck_require__(15953) + , async = __nccwpck_require__(92094) + ; - var tryEach$1 = awaitify(tryEach); +// API +module.exports = terminator; - /** - * Undoes a [memoize]{@link module:Utils.memoize}d function, reverting it to the original, - * unmemoized form. Handy for testing. - * - * @name unmemoize - * @static - * @memberOf module:Utils - * @method - * @see [async.memoize]{@link module:Utils.memoize} - * @category Util - * @param {AsyncFunction} fn - the memoized function - * @returns {AsyncFunction} a function that calls the original unmemoized function - */ - function unmemoize(fn) { - return (...args) => { - return (fn.unmemoized || fn)(...args); - }; - } +/** + * Terminates jobs in the attached state context + * + * @this AsyncKitState# + * @param {function} callback - final callback to invoke after termination + */ +function terminator(callback) +{ + if (!Object.keys(this.jobs).length) + { + return; + } - /** - * Repeatedly call `iteratee`, while `test` returns `true`. Calls `callback` when - * stopped, or an error occurs. - * - * @name whilst - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {AsyncFunction} test - asynchronous truth test to perform before each - * execution of `iteratee`. Invoked with (callback). - * @param {AsyncFunction} iteratee - An async function which is called each time - * `test` passes. Invoked with (callback). - * @param {Function} [callback] - A callback which is called after the test - * function has failed and repeated execution of `iteratee` has stopped. `callback` - * will be passed an error and any arguments passed to the final `iteratee`'s - * callback. Invoked with (err, [results]); - * @returns {Promise} a promise, if no callback is passed - * @example - * - * var count = 0; - * async.whilst( - * function test(cb) { cb(null, count < 5); }, - * function iter(callback) { - * count++; - * setTimeout(function() { - * callback(null, count); - * }, 1000); - * }, - * function (err, n) { - * // 5 seconds have passed, n = 5 - * } - * ); - */ - function whilst(test, iteratee, callback) { - callback = onlyOnce(callback); - var _fn = wrapAsync(iteratee); - var _test = wrapAsync(test); - var results = []; + // fast forward iteration index + this.index = this.size; - function next(err, ...rest) { - if (err) return callback(err); - results = rest; - if (err === false) return; - _test(check); - } + // abort jobs + abort(this); - function check(err, truth) { - if (err) return callback(err); - if (err === false) return; - if (!truth) return callback(null, ...results); - _fn(next); - } + // send back results we have so far + async(callback)(null, this.results); +} - return _test(check); - } - var whilst$1 = awaitify(whilst, 3); - /** - * Repeatedly call `iteratee` until `test` returns `true`. Calls `callback` when - * stopped, or an error occurs. `callback` will be passed an error and any - * arguments passed to the final `iteratee`'s callback. - * - * The inverse of [whilst]{@link module:ControlFlow.whilst}. - * - * @name until - * @static - * @memberOf module:ControlFlow - * @method - * @see [async.whilst]{@link module:ControlFlow.whilst} - * @category Control Flow - * @param {AsyncFunction} test - asynchronous truth test to perform before each - * execution of `iteratee`. Invoked with (callback). - * @param {AsyncFunction} iteratee - An async function which is called each time - * `test` fails. Invoked with (callback). 
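// --- Editor's sketch (not part of the bundle) ---
// Pairing memoize/unmemoize: `unmemoize` returns a wrapper that calls the
// original (uncached) function on every invocation. `slowDouble` is a
// hypothetical async function; assumes the bundled module is loadable as
// `async`.
const async = require('async');

function slowDouble(x, callback) {
    setTimeout(() => callback(null, x * 2), 50);
}

const memoized = async.memoize(slowDouble);
memoized(21, (err, v) => console.log(v)); // 42, computed once then cached

const plain = async.unmemoize(memoized);
plain(21, (err, v) => console.log(v));    // 42, recomputed on every call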
- * @param {Function} [callback] - A callback which is called after the test - * function has passed and repeated execution of `iteratee` has stopped. `callback` - * will be passed an error and any arguments passed to the final `iteratee`'s - * callback. Invoked with (err, [results]); - * @returns {Promise} a promise, if a callback is not passed - * - * @example - * const results = [] - * let finished = false - * async.until(function test(cb) { - * cb(null, finished) - * }, function iter(next) { - * fetchPage(url, (err, body) => { - * if (err) return next(err) - * results = results.concat(body.objects) - * finished = !!body.next - * next(err) - * }) - * }, function done (err) { - * // all pages have been fetched - * }) - */ - function until(test, iteratee, callback) { - const _test = wrapAsync(test); - return whilst$1((cb) => _test((err, truth) => cb (err, !truth)), iteratee, callback); - } +/***/ }), - /** - * Runs the `tasks` array of functions in series, each passing their results to - * the next in the array. However, if any of the `tasks` pass an error to their - * own callback, the next function is not executed, and the main `callback` is - * immediately called with the error. - * - * @name waterfall - * @static - * @memberOf module:ControlFlow - * @method - * @category Control Flow - * @param {Array} tasks - An array of [async functions]{@link AsyncFunction} - * to run. - * Each function should complete with any number of `result` values. - * The `result` values will be passed as arguments, in order, to the next task. - * @param {Function} [callback] - An optional callback to run once all the - * functions have completed. This will be passed the results of the last task's - * callback. Invoked with (err, [results]). - * @returns {Promise} a promise, if a callback is omitted - * @example - * - * async.waterfall([ - * function(callback) { - * callback(null, 'one', 'two'); - * }, - * function(arg1, arg2, callback) { - * // arg1 now equals 'one' and arg2 now equals 'two' - * callback(null, 'three'); - * }, - * function(arg1, callback) { - * // arg1 now equals 'three' - * callback(null, 'done'); - * } - * ], function (err, result) { - * // result now equals 'done' - * }); - * - * // Or, with named functions: - * async.waterfall([ - * myFirstFunction, - * mySecondFunction, - * myLastFunction, - * ], function (err, result) { - * // result now equals 'done' - * }); - * function myFirstFunction(callback) { - * callback(null, 'one', 'two'); - * } - * function mySecondFunction(arg1, arg2, callback) { - * // arg1 now equals 'one' and arg2 now equals 'two' - * callback(null, 'three'); - * } - * function myLastFunction(arg1, callback) { - * // arg1 now equals 'three' - * callback(null, 'done'); - * } - */ - function waterfall (tasks, callback) { - callback = once(callback); - if (!Array.isArray(tasks)) return callback(new Error('First argument to waterfall must be an array of functions')); - if (!tasks.length) return callback(); - var taskIndex = 0; +/***/ 37585: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - function nextTask(args) { - var task = wrapAsync(tasks[taskIndex++]); - task(...args, onlyOnce(next)); - } +var iterate = __nccwpck_require__(47413) + , initState = __nccwpck_require__(24618) + , terminator = __nccwpck_require__(57276) + ; - function next(err, ...args) { - if (err === false) return - if (err || taskIndex === tasks.length) { - return callback(err, ...args); - } - nextTask(args); - } +// Public API +module.exports = parallel; - nextTask([]); +/** + * 
Runs iterator over provided array elements in parallel + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator + */ +function parallel(list, iterator, callback) +{ + var state = initState(list); + + while (state.index < (state['keyedList'] || list).length) + { + iterate(list, iterator, state, function(error, result) + { + if (error) + { + callback(error, result); + return; + } + + // looks like it's the last one + if (Object.keys(state.jobs).length === 0) + { + callback(null, state.results); + return; + } + }); + + state.index++; + } + + return terminator.bind(state, callback); +} + + +/***/ }), + +/***/ 75758: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var serialOrdered = __nccwpck_require__(23209); + +// Public API +module.exports = serial; + +/** + * Runs iterator over provided array elements in series + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator + */ +function serial(list, iterator, callback) +{ + return serialOrdered(list, iterator, null, callback); +} + + +/***/ }), + +/***/ 23209: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var iterate = __nccwpck_require__(47413) + , initState = __nccwpck_require__(24618) + , terminator = __nccwpck_require__(57276) + ; + +// Public API +module.exports = serialOrdered; +// sorting helpers +module.exports.ascending = ascending; +module.exports.descending = descending; + +/** + * Runs iterator over provided sorted array elements in series + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} sortMethod - custom sort function + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator + */ +function serialOrdered(list, iterator, sortMethod, callback) +{ + var state = initState(list, sortMethod); + + iterate(list, iterator, state, function iteratorHandler(error, result) + { + if (error) + { + callback(error, result); + return; } - var waterfall$1 = awaitify(waterfall); + state.index++; - /** - * An "async function" in the context of Async is an asynchronous function with - * a variable number of parameters, with the final parameter being a callback. - * (`function (arg1, arg2, ..., callback) {}`) - * The final callback is of the form `callback(err, results...)`, which must be - * called once the function is completed. The callback should be called with a - * Error as its first argument to signal that an error occurred. - * Otherwise, if no error occurred, it should be called with `null` as the first - * argument, and any additional `result` arguments that may apply, to signal - * successful completion. - * The callback must be called exactly once, ideally on a later tick of the - * JavaScript event loop. - * - * This type of function is also referred to as a "Node-style async function", - * or a "continuation passing-style function" (CPS). Most of the methods of this - * library are themselves CPS/Node-style async functions, or functions that - * return CPS/Node-style async functions. 
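// --- Editor's sketch (not part of the bundle) ---
// Driving the asynckit parallel() defined above: with a three-argument
// iterator each job receives (item, key, callback), and results land at the
// matching index even when jobs finish out of order. Delays are hypothetical.
parallel([30, 20, 10], function(item, key, callback)
{
  setTimeout(function() { callback(null, item + key); }, item);
}, function(err, results)
{
  console.log(results); // [ 30, 21, 12 ] -- positions preserved by key
});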
- * - * Wherever we accept a Node-style async function, we also directly accept an - * [ES2017 `async` function]{@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function}. - * In this case, the `async` function will not be passed a final callback - * argument, and any thrown error will be used as the `err` argument of the - * implicit callback, and the return value will be used as the `result` value. - * (i.e. a `rejected` of the returned Promise becomes the `err` callback - * argument, and a `resolved` value becomes the `result`.) - * - * Note, due to JavaScript limitations, we can only detect native `async` - * functions and not transpilied implementations. - * Your environment must have `async`/`await` support for this to work. - * (e.g. Node > v7.6, or a recent version of a modern browser). - * If you are using `async` functions through a transpiler (e.g. Babel), you - * must still wrap the function with [asyncify]{@link module:Utils.asyncify}, - * because the `async function` will be compiled to an ordinary function that - * returns a promise. - * - * @typedef {Function} AsyncFunction - * @static - */ + // are we there yet? + if (state.index < (state['keyedList'] || list).length) + { + iterate(list, iterator, state, iteratorHandler); + return; + } + // done here + callback(null, state.results); + }); - var index = { - apply, - applyEach, - applyEachSeries, - asyncify, - auto, - autoInject, - cargo: cargo$1, - cargoQueue: cargo, - compose, - concat: concat$1, - concatLimit: concatLimit$1, - concatSeries: concatSeries$1, - constant: constant$1, - detect: detect$1, - detectLimit: detectLimit$1, - detectSeries: detectSeries$1, - dir, - doUntil, - doWhilst: doWhilst$1, - each, - eachLimit: eachLimit$1, - eachOf: eachOf$1, - eachOfLimit: eachOfLimit$1, - eachOfSeries: eachOfSeries$1, - eachSeries: eachSeries$1, - ensureAsync, - every: every$1, - everyLimit: everyLimit$1, - everySeries: everySeries$1, - filter: filter$1, - filterLimit: filterLimit$1, - filterSeries: filterSeries$1, - forever: forever$1, - groupBy, - groupByLimit: groupByLimit$1, - groupBySeries, - log, - map: map$1, - mapLimit: mapLimit$1, - mapSeries: mapSeries$1, - mapValues, - mapValuesLimit: mapValuesLimit$1, - mapValuesSeries, - memoize, - nextTick, - parallel, - parallelLimit, - priorityQueue, - queue, - race: race$1, - reduce: reduce$1, - reduceRight, - reflect, - reflectAll, - reject: reject$1, - rejectLimit: rejectLimit$1, - rejectSeries: rejectSeries$1, - retry, - retryable, - seq, - series, - setImmediate: setImmediate$1, - some: some$1, - someLimit: someLimit$1, - someSeries: someSeries$1, - sortBy: sortBy$1, - timeout, - times, - timesLimit, - timesSeries, - transform, - tryEach: tryEach$1, - unmemoize, - until, - waterfall: waterfall$1, - whilst: whilst$1, + return terminator.bind(state, callback); +} - // aliases - all: every$1, - allLimit: everyLimit$1, - allSeries: everySeries$1, - any: some$1, - anyLimit: someLimit$1, - anySeries: someSeries$1, - find: detect$1, - findLimit: detectLimit$1, - findSeries: detectSeries$1, - flatMap: concat$1, - flatMapLimit: concatLimit$1, - flatMapSeries: concatSeries$1, - forEach: each, - forEachSeries: eachSeries$1, - forEachLimit: eachLimit$1, - forEachOf: eachOf$1, - forEachOfSeries: eachOfSeries$1, - forEachOfLimit: eachOfLimit$1, - inject: reduce$1, - foldl: reduce$1, - foldr: reduceRight, - select: filter$1, - selectLimit: filterLimit$1, - selectSeries: filterSeries$1, - wrapSync: asyncify, - during: whilst$1, - doDuring: 
doWhilst$1 - }; - - exports.all = every$1; - exports.allLimit = everyLimit$1; - exports.allSeries = everySeries$1; - exports.any = some$1; - exports.anyLimit = someLimit$1; - exports.anySeries = someSeries$1; - exports.apply = apply; - exports.applyEach = applyEach; - exports.applyEachSeries = applyEachSeries; - exports.asyncify = asyncify; - exports.auto = auto; - exports.autoInject = autoInject; - exports.cargo = cargo$1; - exports.cargoQueue = cargo; - exports.compose = compose; - exports.concat = concat$1; - exports.concatLimit = concatLimit$1; - exports.concatSeries = concatSeries$1; - exports.constant = constant$1; - exports.default = index; - exports.detect = detect$1; - exports.detectLimit = detectLimit$1; - exports.detectSeries = detectSeries$1; - exports.dir = dir; - exports.doDuring = doWhilst$1; - exports.doUntil = doUntil; - exports.doWhilst = doWhilst$1; - exports.during = whilst$1; - exports.each = each; - exports.eachLimit = eachLimit$1; - exports.eachOf = eachOf$1; - exports.eachOfLimit = eachOfLimit$1; - exports.eachOfSeries = eachOfSeries$1; - exports.eachSeries = eachSeries$1; - exports.ensureAsync = ensureAsync; - exports.every = every$1; - exports.everyLimit = everyLimit$1; - exports.everySeries = everySeries$1; - exports.filter = filter$1; - exports.filterLimit = filterLimit$1; - exports.filterSeries = filterSeries$1; - exports.find = detect$1; - exports.findLimit = detectLimit$1; - exports.findSeries = detectSeries$1; - exports.flatMap = concat$1; - exports.flatMapLimit = concatLimit$1; - exports.flatMapSeries = concatSeries$1; - exports.foldl = reduce$1; - exports.foldr = reduceRight; - exports.forEach = each; - exports.forEachLimit = eachLimit$1; - exports.forEachOf = eachOf$1; - exports.forEachOfLimit = eachOfLimit$1; - exports.forEachOfSeries = eachOfSeries$1; - exports.forEachSeries = eachSeries$1; - exports.forever = forever$1; - exports.groupBy = groupBy; - exports.groupByLimit = groupByLimit$1; - exports.groupBySeries = groupBySeries; - exports.inject = reduce$1; - exports.log = log; - exports.map = map$1; - exports.mapLimit = mapLimit$1; - exports.mapSeries = mapSeries$1; - exports.mapValues = mapValues; - exports.mapValuesLimit = mapValuesLimit$1; - exports.mapValuesSeries = mapValuesSeries; - exports.memoize = memoize; - exports.nextTick = nextTick; - exports.parallel = parallel; - exports.parallelLimit = parallelLimit; - exports.priorityQueue = priorityQueue; - exports.queue = queue; - exports.race = race$1; - exports.reduce = reduce$1; - exports.reduceRight = reduceRight; - exports.reflect = reflect; - exports.reflectAll = reflectAll; - exports.reject = reject$1; - exports.rejectLimit = rejectLimit$1; - exports.rejectSeries = rejectSeries$1; - exports.retry = retry; - exports.retryable = retryable; - exports.select = filter$1; - exports.selectLimit = filterLimit$1; - exports.selectSeries = filterSeries$1; - exports.seq = seq; - exports.series = series; - exports.setImmediate = setImmediate$1; - exports.some = some$1; - exports.someLimit = someLimit$1; - exports.someSeries = someSeries$1; - exports.sortBy = sortBy$1; - exports.timeout = timeout; - exports.times = times; - exports.timesLimit = timesLimit; - exports.timesSeries = timesSeries; - exports.transform = transform; - exports.tryEach = tryEach$1; - exports.unmemoize = unmemoize; - exports.until = until; - exports.waterfall = waterfall$1; - exports.whilst = whilst$1; - exports.wrapSync = asyncify; - - Object.defineProperty(exports, '__esModule', { value: true }); - -})); - - -/***/ }), - -/***/ 
41417: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -module.exports = -{ - parallel : __nccwpck_require__(82181), - serial : __nccwpck_require__(25684), - serialOrdered : __nccwpck_require__(13215) -}; - - -/***/ }), - -/***/ 85203: -/***/ ((module) => { - -// API -module.exports = abort; - -/** - * Aborts leftover active jobs - * - * @param {object} state - current state object - */ -function abort(state) -{ - Object.keys(state.jobs).forEach(clean.bind(state)); - - // reset leftover jobs - state.jobs = {}; -} - -/** - * Cleans up leftover job by invoking abort function for the provided job id - * - * @this state - * @param {string|number} key - job id to abort - */ -function clean(key) -{ - if (typeof this.jobs[key] == 'function') - { - this.jobs[key](); - } -} - - -/***/ }), - -/***/ 6966: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var defer = __nccwpck_require__(36815); - -// API -module.exports = async; - -/** - * Runs provided callback asynchronously - * even if callback itself is not - * - * @param {function} callback - callback to invoke - * @returns {function} - augmented callback - */ -function async(callback) -{ - var isAsync = false; - - // check if async happened - defer(function() { isAsync = true; }); - - return function async_callback(err, result) - { - if (isAsync) - { - callback(err, result); - } - else - { - defer(function nextTick_callback() - { - callback(err, result); - }); - } - }; -} - - -/***/ }), - -/***/ 36815: -/***/ ((module) => { - -module.exports = defer; - -/** - * Runs provided function on next iteration of the event loop - * - * @param {function} fn - function to run - */ -function defer(fn) -{ - var nextTick = typeof setImmediate == 'function' - ? setImmediate - : ( - typeof process == 'object' && typeof process.nextTick == 'function' - ? process.nextTick - : null - ); - - if (nextTick) - { - nextTick(fn); - } - else - { - setTimeout(fn, 0); - } -} - - -/***/ }), - -/***/ 4799: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var async = __nccwpck_require__(6966) - , abort = __nccwpck_require__(85203) - ; - -// API -module.exports = iterate; - -/** - * Iterates over each job object - * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {object} state - current job status - * @param {function} callback - invoked when all elements processed - */ -function iterate(list, iterator, state, callback) -{ - // store current index - var key = state['keyedList'] ? 
state['keyedList'][state.index] : state.index; - - state.jobs[key] = runJob(iterator, key, list[key], function(error, output) - { - // don't repeat yourself - // skip secondary callbacks - if (!(key in state.jobs)) - { - return; - } - - // clean up jobs - delete state.jobs[key]; - - if (error) - { - // don't process rest of the results - // stop still active jobs - // and reset the list - abort(state); - } - else - { - state.results[key] = output; - } - - // return salvaged results - callback(error, state.results); - }); -} - -/** - * Runs iterator over provided job element - * - * @param {function} iterator - iterator to invoke - * @param {string|number} key - key/index of the element in the list of jobs - * @param {mixed} item - job description - * @param {function} callback - invoked after iterator is done with the job - * @returns {function|mixed} - job abort function or something else - */ -function runJob(iterator, key, item, callback) -{ - var aborter; - - // allow shortcut if iterator expects only two arguments - if (iterator.length == 2) - { - aborter = iterator(item, async(callback)); - } - // otherwise go with full three arguments - else - { - aborter = iterator(item, key, async(callback)); - } - - return aborter; -} - - -/***/ }), - -/***/ 39698: -/***/ ((module) => { - -// API -module.exports = state; - -/** - * Creates initial state object - * for iteration over list - * - * @param {array|object} list - list to iterate over - * @param {function|null} sortMethod - function to use for keys sort, - * or `null` to keep them as is - * @returns {object} - initial state object - */ -function state(list, sortMethod) -{ - var isNamedList = !Array.isArray(list) - , initState = - { - index : 0, - keyedList: isNamedList || sortMethod ? Object.keys(list) : null, - jobs : {}, - results : isNamedList ? {} : [], - size : isNamedList ? Object.keys(list).length : list.length - } - ; - - if (sortMethod) - { - // sort array keys based on it's values - // sort object's keys just on own merit - initState.keyedList.sort(isNamedList ? 
sortMethod : function(a, b) - { - return sortMethod(list[a], list[b]); - }); - } - - return initState; -} - - -/***/ }), - -/***/ 27958: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var abort = __nccwpck_require__(85203) - , async = __nccwpck_require__(6966) - ; - -// API -module.exports = terminator; - -/** - * Terminates jobs in the attached state context - * - * @this AsyncKitState# - * @param {function} callback - final callback to invoke after termination - */ -function terminator(callback) -{ - if (!Object.keys(this.jobs).length) - { - return; - } - - // fast forward iteration index - this.index = this.size; - - // abort jobs - abort(this); - - // send back results we have so far - async(callback)(null, this.results); -} - - -/***/ }), - -/***/ 82181: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var iterate = __nccwpck_require__(4799) - , initState = __nccwpck_require__(39698) - , terminator = __nccwpck_require__(27958) - ; - -// Public API -module.exports = parallel; - -/** - * Runs iterator over provided array elements in parallel - * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {function} callback - invoked when all elements processed - * @returns {function} - jobs terminator - */ -function parallel(list, iterator, callback) -{ - var state = initState(list); - - while (state.index < (state['keyedList'] || list).length) - { - iterate(list, iterator, state, function(error, result) - { - if (error) - { - callback(error, result); - return; - } - - // looks like it's the last one - if (Object.keys(state.jobs).length === 0) - { - callback(null, state.results); - return; - } - }); - - state.index++; - } - - return terminator.bind(state, callback); -} - - -/***/ }), - -/***/ 25684: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var serialOrdered = __nccwpck_require__(13215); - -// Public API -module.exports = serial; - -/** - * Runs iterator over provided array elements in series - * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {function} callback - invoked when all elements processed - * @returns {function} - jobs terminator - */ -function serial(list, iterator, callback) -{ - return serialOrdered(list, iterator, null, callback); -} - - -/***/ }), - -/***/ 13215: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var iterate = __nccwpck_require__(4799) - , initState = __nccwpck_require__(39698) - , terminator = __nccwpck_require__(27958) - ; - -// Public API -module.exports = serialOrdered; -// sorting helpers -module.exports.ascending = ascending; -module.exports.descending = descending; - -/** - * Runs iterator over provided sorted array elements in series - * - * @param {array|object} list - array or object (named list) to iterate over - * @param {function} iterator - iterator to run - * @param {function} sortMethod - custom sort function - * @param {function} callback - invoked when all elements processed - * @returns {function} - jobs terminator - */ -function serialOrdered(list, iterator, sortMethod, callback) -{ - var state = initState(list, sortMethod); - - iterate(list, iterator, state, function iteratorHandler(error, result) - { - if (error) - { - callback(error, result); - return; - } - - state.index++; - - // are we there yet? 
- if (state.index < (state['keyedList'] || list).length) - { - iterate(list, iterator, state, iteratorHandler); - return; - } - - // done here - callback(null, state.results); - }); - - return terminator.bind(state, callback); -} - -/* - * -- Sort methods - */ +/* + * -- Sort methods + */ /** * sort helper to sort array elements in ascending order @@ -55658,7 +47004,7 @@ function descending(a, b) /***/ }), -/***/ 72068: +/***/ 67252: /***/ ((module) => { function isBuffer (value) { @@ -55813,7 +47159,7 @@ module.exports = { /***/ }), -/***/ 26472: +/***/ 73819: /***/ ((module) => { "use strict"; @@ -55883,71 +47229,75 @@ function range(a, b, str) { /***/ }), -/***/ 5015: +/***/ 73108: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var register = __nccwpck_require__(45381) -var addHook = __nccwpck_require__(47254) -var removeHook = __nccwpck_require__(1243) +var register = __nccwpck_require__(9676); +var addHook = __nccwpck_require__(13862); +var removeHook = __nccwpck_require__(67704); // bind with array of arguments: https://stackoverflow.com/a/21792913 -var bind = Function.bind -var bindable = bind.bind(bind) +var bind = Function.bind; +var bindable = bind.bind(bind); -function bindApi (hook, state, name) { - var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state]) - hook.api = { remove: removeHookRef } - hook.remove = removeHookRef - - ;['before', 'error', 'after', 'wrap'].forEach(function (kind) { - var args = name ? [state, kind, name] : [state, kind] - hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args) - }) +function bindApi(hook, state, name) { + var removeHookRef = bindable(removeHook, null).apply( + null, + name ? [state, name] : [state] + ); + hook.api = { remove: removeHookRef }; + hook.remove = removeHookRef; + ["before", "error", "after", "wrap"].forEach(function (kind) { + var args = name ? [state, kind, name] : [state, kind]; + hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args); + }); } -function HookSingular () { - var singularHookName = 'h' +function HookSingular() { + var singularHookName = "h"; var singularHookState = { - registry: {} - } - var singularHook = register.bind(null, singularHookState, singularHookName) - bindApi(singularHook, singularHookState, singularHookName) - return singularHook + registry: {}, + }; + var singularHook = register.bind(null, singularHookState, singularHookName); + bindApi(singularHook, singularHookState, singularHookName); + return singularHook; } -function HookCollection () { +function HookCollection() { var state = { - registry: {} - } + registry: {}, + }; - var hook = register.bind(null, state) - bindApi(hook, state) + var hook = register.bind(null, state); + bindApi(hook, state); - return hook + return hook; } -var collectionHookDeprecationMessageDisplayed = false -function Hook () { +var collectionHookDeprecationMessageDisplayed = false; +function Hook() { if (!collectionHookDeprecationMessageDisplayed) { - console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4') - collectionHookDeprecationMessageDisplayed = true + console.warn( + '[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". 
Read more: https://git.io/upgrade-before-after-hook-to-1.4' + ); + collectionHookDeprecationMessageDisplayed = true; } - return HookCollection() + return HookCollection(); } -Hook.Singular = HookSingular.bind() -Hook.Collection = HookCollection.bind() +Hook.Singular = HookSingular.bind(); +Hook.Collection = HookCollection.bind(); -module.exports = Hook +module.exports = Hook; // expose constructors as a named property for TypeScript -module.exports.Hook = Hook -module.exports.Singular = Hook.Singular -module.exports.Collection = Hook.Collection +module.exports.Hook = Hook; +module.exports.Singular = Hook.Singular; +module.exports.Collection = Hook.Collection; /***/ }), -/***/ 47254: +/***/ 13862: /***/ ((module) => { module.exports = addHook; @@ -56000,7 +47350,7 @@ function addHook(state, kind, name, hook) { /***/ }), -/***/ 45381: +/***/ 9676: /***/ ((module) => { module.exports = register; @@ -56034,7 +47384,7 @@ function register(state, name, method, options) { /***/ }), -/***/ 1243: +/***/ 67704: /***/ ((module) => { module.exports = removeHook; @@ -56060,13 +47410,13 @@ function removeHook(state, name, method) { /***/ }), -/***/ 76790: +/***/ 11052: /***/ ((module, exports, __nccwpck_require__) => { -var Chainsaw = __nccwpck_require__(40275); +var Chainsaw = __nccwpck_require__(93361); var EventEmitter = (__nccwpck_require__(82361).EventEmitter); -var Buffers = __nccwpck_require__(4467); -var Vars = __nccwpck_require__(85037); +var Buffers = __nccwpck_require__(41713); +var Vars = __nccwpck_require__(78698); var Stream = (__nccwpck_require__(12781).Stream); exports = module.exports = function (bufOrEm, eventName) { @@ -56464,7 +47814,7 @@ function words (decode) { /***/ }), -/***/ 85037: +/***/ 78698: /***/ ((module) => { module.exports = function (store) { @@ -56499,7 +47849,7 @@ module.exports = function (store) { /***/ }), -/***/ 35553: +/***/ 59111: /***/ (function(module) { /** @@ -58029,10 +49379,10 @@ module.exports = function (store) { /***/ }), -/***/ 44416: +/***/ 1190: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var balanced = __nccwpck_require__(26472); +var balanced = __nccwpck_require__(73819); module.exports = expandTop; @@ -58239,7 +49589,7 @@ function expand(str, isTop) { /***/ }), -/***/ 4467: +/***/ 41713: /***/ ((module) => { module.exports = Buffers; @@ -58515,10 +49865,10 @@ Buffers.prototype.toString = function(encoding, start, end) { /***/ }), -/***/ 40275: +/***/ 93361: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var Traverse = __nccwpck_require__(49528); +var Traverse = __nccwpck_require__(97904); var EventEmitter = (__nccwpck_require__(82361).EventEmitter); module.exports = Chainsaw; @@ -58667,12 +50017,12 @@ function upgradeChainsaw(saw) { /***/ }), -/***/ 12738: +/***/ 87105: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var util = __nccwpck_require__(73837); var Stream = (__nccwpck_require__(12781).Stream); -var DelayedStream = __nccwpck_require__(74970); +var DelayedStream = __nccwpck_require__(17386); module.exports = CombinedStream; function CombinedStream() { @@ -58882,7 +50232,7 @@ CombinedStream.prototype._emitError = function(err) { /***/ }), -/***/ 47487: +/***/ 96818: /***/ ((module) => { /** @@ -58904,7 +50254,7 @@ ArchiveEntry.prototype.isDirectory = function() {}; /***/ }), -/***/ 901: +/***/ 28466: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { /** @@ -58915,11 +50265,11 @@ ArchiveEntry.prototype.isDirectory = function() {}; * 
https://github.com/archiverjs/node-compress-commons/blob/master/LICENSE-MIT */ var inherits = (__nccwpck_require__(73837).inherits); -var isStream = __nccwpck_require__(78239); -var Transform = (__nccwpck_require__(92567).Transform); +var isStream = __nccwpck_require__(16425); +var Transform = (__nccwpck_require__(32555).Transform); -var ArchiveEntry = __nccwpck_require__(47487); -var util = __nccwpck_require__(62682); +var ArchiveEntry = __nccwpck_require__(96818); +var util = __nccwpck_require__(62277); var ArchiveOutputStream = module.exports = function(options) { if (!(this instanceof ArchiveOutputStream)) { @@ -59028,7 +50378,7 @@ ArchiveOutputStream.prototype.write = function(chunk, cb) { /***/ }), -/***/ 4563: +/***/ 50593: /***/ ((module) => { /** @@ -59106,7 +50456,7 @@ module.exports = { /***/ }), -/***/ 92638: +/***/ 76029: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { /** @@ -59116,7 +50466,7 @@ module.exports = { * Licensed under the MIT license. * https://github.com/archiverjs/node-compress-commons/blob/master/LICENSE-MIT */ -var zipUtil = __nccwpck_require__(74971); +var zipUtil = __nccwpck_require__(82065); var DATA_DESCRIPTOR_FLAG = 1 << 3; var ENCRYPTION_FLAG = 1 << 0; @@ -59213,7 +50563,7 @@ GeneralPurposeBit.prototype.usesUTF8ForNames = function() { /***/ }), -/***/ 95781: +/***/ 74417: /***/ ((module) => { /** @@ -59272,7 +50622,7 @@ module.exports = { /***/ }), -/***/ 74971: +/***/ 82065: /***/ ((module) => { /** @@ -59352,7 +50702,7 @@ util.toDosTime = function(d) { /***/ }), -/***/ 29002: +/***/ 79050: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { /** @@ -59363,14 +50713,14 @@ util.toDosTime = function(d) { * https://github.com/archiverjs/node-compress-commons/blob/master/LICENSE-MIT */ var inherits = (__nccwpck_require__(73837).inherits); -var normalizePath = __nccwpck_require__(51587); +var normalizePath = __nccwpck_require__(54882); -var ArchiveEntry = __nccwpck_require__(47487); -var GeneralPurposeBit = __nccwpck_require__(92638); -var UnixStat = __nccwpck_require__(95781); +var ArchiveEntry = __nccwpck_require__(96818); +var GeneralPurposeBit = __nccwpck_require__(76029); +var UnixStat = __nccwpck_require__(74417); -var constants = __nccwpck_require__(4563); -var zipUtil = __nccwpck_require__(74971); +var constants = __nccwpck_require__(50593); +var zipUtil = __nccwpck_require__(82065); var ZipArchiveEntry = module.exports = function(name) { if (!(this instanceof ZipArchiveEntry)) { @@ -59772,7 +51122,7 @@ ZipArchiveEntry.prototype.isZip64 = function() { /***/ }), -/***/ 91645: +/***/ 83681: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { /** @@ -59783,17 +51133,17 @@ ZipArchiveEntry.prototype.isZip64 = function() { * https://github.com/archiverjs/node-compress-commons/blob/master/LICENSE-MIT */ var inherits = (__nccwpck_require__(73837).inherits); -var crc32 = __nccwpck_require__(34198); -var {CRC32Stream} = __nccwpck_require__(72772); -var {DeflateCRC32Stream} = __nccwpck_require__(72772); +var crc32 = __nccwpck_require__(5702); +var {CRC32Stream} = __nccwpck_require__(36640); +var {DeflateCRC32Stream} = __nccwpck_require__(36640); -var ArchiveOutputStream = __nccwpck_require__(901); -var ZipArchiveEntry = __nccwpck_require__(29002); -var GeneralPurposeBit = __nccwpck_require__(92638); +var ArchiveOutputStream = __nccwpck_require__(28466); +var ZipArchiveEntry = __nccwpck_require__(79050); +var GeneralPurposeBit = __nccwpck_require__(76029); -var constants = __nccwpck_require__(4563); -var util = 
__nccwpck_require__(62682); -var zipUtil = __nccwpck_require__(74971); +var constants = __nccwpck_require__(50593); +var util = __nccwpck_require__(62277); +var zipUtil = __nccwpck_require__(82065); var ZipArchiveOutputStream = module.exports = function(options) { if (!(this instanceof ZipArchiveOutputStream)) { @@ -60216,7 +51566,7 @@ ZipArchiveOutputStream.prototype.setComment = function(comment) { /***/ }), -/***/ 20786: +/***/ 69046: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { /** @@ -60227,15 +51577,15 @@ ZipArchiveOutputStream.prototype.setComment = function(comment) { * https://github.com/archiverjs/node-compress-commons/blob/master/LICENSE-MIT */ module.exports = { - ArchiveEntry: __nccwpck_require__(47487), - ZipArchiveEntry: __nccwpck_require__(29002), - ArchiveOutputStream: __nccwpck_require__(901), - ZipArchiveOutputStream: __nccwpck_require__(91645) + ArchiveEntry: __nccwpck_require__(96818), + ZipArchiveEntry: __nccwpck_require__(79050), + ArchiveOutputStream: __nccwpck_require__(28466), + ZipArchiveOutputStream: __nccwpck_require__(83681) }; /***/ }), -/***/ 62682: +/***/ 62277: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { /** @@ -60246,8 +51596,8 @@ module.exports = { * https://github.com/archiverjs/node-compress-commons/blob/master/LICENSE-MIT */ var Stream = (__nccwpck_require__(12781).Stream); -var PassThrough = (__nccwpck_require__(92567).PassThrough); -var isStream = __nccwpck_require__(78239); +var PassThrough = (__nccwpck_require__(32555).PassThrough); +var isStream = __nccwpck_require__(16425); var util = module.exports = {}; @@ -60268,7 +51618,7 @@ util.normalizeInputSource = function(source) { /***/ }), -/***/ 76844: +/***/ 68610: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { // Copyright Joyent, Inc. and other Node contributors. @@ -60382,7 +51732,7 @@ function objectToString(o) { /***/ }), -/***/ 34198: +/***/ 5702: /***/ ((__unused_webpack_module, exports) => { /*! 
crc32.js (C) 2014-present SheetJS -- http://sheetjs.com */ @@ -60496,7 +51846,7 @@ CRC32.str = crc32_str; /***/ }), -/***/ 29722: +/***/ 90047: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; @@ -60510,9 +51860,9 @@ CRC32.str = crc32_str; -const {Transform} = __nccwpck_require__(92567); +const {Transform} = __nccwpck_require__(32555); -const crc32 = __nccwpck_require__(34198); +const crc32 = __nccwpck_require__(5702); class CRC32Stream extends Transform { constructor(options) { @@ -60552,7 +51902,7 @@ module.exports = CRC32Stream; /***/ }), -/***/ 12701: +/***/ 62764: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; @@ -60568,7 +51918,7 @@ module.exports = CRC32Stream; const {DeflateRaw} = __nccwpck_require__(59796); -const crc32 = __nccwpck_require__(34198); +const crc32 = __nccwpck_require__(5702); class DeflateCRC32Stream extends DeflateRaw { constructor(options) { @@ -60622,7 +51972,7 @@ module.exports = DeflateCRC32Stream; /***/ }), -/***/ 72772: +/***/ 36640: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; @@ -60637,14 +51987,858 @@ module.exports = DeflateCRC32Stream; module.exports = { - CRC32Stream: __nccwpck_require__(29722), - DeflateCRC32Stream: __nccwpck_require__(12701) + CRC32Stream: __nccwpck_require__(90047), + DeflateCRC32Stream: __nccwpck_require__(62764) } /***/ }), -/***/ 74970: +/***/ 93433: +/***/ ((module, exports, __nccwpck_require__) => { + +/* eslint-env browser */ + +/** + * This is the web browser implementation of `debug()`. + */ + +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.storage = localstorage(); +exports.destroy = (() => { + let warned = false; + + return () => { + if (!warned) { + warned = true; + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } + }; +})(); + +/** + * Colors. + */ + +exports.colors = [ + '#0000CC', + '#0000FF', + '#0033CC', + '#0033FF', + '#0066CC', + '#0066FF', + '#0099CC', + '#0099FF', + '#00CC00', + '#00CC33', + '#00CC66', + '#00CC99', + '#00CCCC', + '#00CCFF', + '#3300CC', + '#3300FF', + '#3333CC', + '#3333FF', + '#3366CC', + '#3366FF', + '#3399CC', + '#3399FF', + '#33CC00', + '#33CC33', + '#33CC66', + '#33CC99', + '#33CCCC', + '#33CCFF', + '#6600CC', + '#6600FF', + '#6633CC', + '#6633FF', + '#66CC00', + '#66CC33', + '#9900CC', + '#9900FF', + '#9933CC', + '#9933FF', + '#99CC00', + '#99CC33', + '#CC0000', + '#CC0033', + '#CC0066', + '#CC0099', + '#CC00CC', + '#CC00FF', + '#CC3300', + '#CC3333', + '#CC3366', + '#CC3399', + '#CC33CC', + '#CC33FF', + '#CC6600', + '#CC6633', + '#CC9900', + '#CC9933', + '#CCCC00', + '#CCCC33', + '#FF0000', + '#FF0033', + '#FF0066', + '#FF0099', + '#FF00CC', + '#FF00FF', + '#FF3300', + '#FF3333', + '#FF3366', + '#FF3399', + '#FF33CC', + '#FF33FF', + '#FF6600', + '#FF6633', + '#FF9900', + '#FF9933', + '#FFCC00', + '#FFCC33' +]; + +/** + * Currently only WebKit-based Web Inspectors, Firefox >= v31, + * and the Firebug extension (any Firefox version) are known + * to support "%c" CSS customizations. + * + * TODO: add a `localStorage` variable to explicitly enable/disable colors + */ + +// eslint-disable-next-line complexity +function useColors() { + // NB: In an Electron preload script, document will be defined but not fully + // initialized. 
Since we know we're in Chrome, we'll just detect this case + // explicitly + if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) { + return true; + } + + // Internet Explorer and Edge do not support colors. + if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { + return false; + } + + // Is webkit? http://stackoverflow.com/a/16459606/376773 + // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 + return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) || + // Is firebug? http://stackoverflow.com/a/398120/376773 + (typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) || + // Is firefox >= v31? + // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) || + // Double check webkit in userAgent just in case we are in a worker + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)); +} + +/** + * Colorize log arguments if enabled. + * + * @api public + */ + +function formatArgs(args) { + args[0] = (this.useColors ? '%c' : '') + + this.namespace + + (this.useColors ? ' %c' : ' ') + + args[0] + + (this.useColors ? '%c ' : ' ') + + '+' + module.exports.humanize(this.diff); + + if (!this.useColors) { + return; + } + + const c = 'color: ' + this.color; + args.splice(1, 0, c, 'color: inherit'); + + // The final "%c" is somewhat tricky, because there could be other + // arguments passed either before or after the %c, so we need to + // figure out the correct index to insert the CSS into + let index = 0; + let lastC = 0; + args[0].replace(/%[a-zA-Z%]/g, match => { + if (match === '%%') { + return; + } + index++; + if (match === '%c') { + // We only are interested in the *last* %c + // (the user may have provided their own) + lastC = index; + } + }); + + args.splice(lastC, 0, c); +} + +/** + * Invokes `console.debug()` when available. + * No-op when `console.debug` is not a "function". + * If `console.debug` is not available, falls back + * to `console.log`. + * + * @api public + */ +exports.log = console.debug || console.log || (() => {}); + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + try { + if (namespaces) { + exports.storage.setItem('debug', namespaces); + } else { + exports.storage.removeItem('debug'); + } + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ +function load() { + let r; + try { + r = exports.storage.getItem('debug'); + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } + + // If debug isn't set in LS, and we're in Electron, try to load $DEBUG + if (!r && typeof process !== 'undefined' && 'env' in process) { + r = process.env.DEBUG; + } + + return r; +} + +/** + * Localstorage attempts to return the localstorage. + * + * This is necessary because safari throws + * when a user disables cookies/localstorage + * and you attempt to access it. 
+ * + * @return {LocalStorage} + * @api private + */ + +function localstorage() { + try { + // TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context + // The Browser also has localStorage in the global context. + return localStorage; + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +module.exports = __nccwpck_require__(31807)(exports); + +const {formatters} = module.exports; + +/** + * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. + */ + +formatters.j = function (v) { + try { + return JSON.stringify(v); + } catch (error) { + return '[UnexpectedJSONParseError]: ' + error.message; + } +}; + + +/***/ }), + +/***/ 31807: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + + +/** + * This is the common logic for both the Node.js and web browser + * implementations of `debug()`. + */ + +function setup(env) { + createDebug.debug = createDebug; + createDebug.default = createDebug; + createDebug.coerce = coerce; + createDebug.disable = disable; + createDebug.enable = enable; + createDebug.enabled = enabled; + createDebug.humanize = __nccwpck_require__(10154); + createDebug.destroy = destroy; + + Object.keys(env).forEach(key => { + createDebug[key] = env[key]; + }); + + /** + * The currently active debug mode names, and names to skip. + */ + + createDebug.names = []; + createDebug.skips = []; + + /** + * Map of special "%n" handling functions, for the debug "format" argument. + * + * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". + */ + createDebug.formatters = {}; + + /** + * Selects a color for a debug namespace + * @param {String} namespace The namespace string for the debug instance to be colored + * @return {Number|String} An ANSI color code for the given namespace + * @api private + */ + function selectColor(namespace) { + let hash = 0; + + for (let i = 0; i < namespace.length; i++) { + hash = ((hash << 5) - hash) + namespace.charCodeAt(i); + hash |= 0; // Convert to 32bit integer + } + + return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; + } + createDebug.selectColor = selectColor; + + /** + * Create a debugger with the given `namespace`. + * + * @param {String} namespace + * @return {Function} + * @api public + */ + function createDebug(namespace) { + let prevTime; + let enableOverride = null; + let namespacesCache; + let enabledCache; + + function debug(...args) { + // Disabled? + if (!debug.enabled) { + return; + } + + const self = debug; + + // Set `diff` timestamp + const curr = Number(new Date()); + const ms = curr - (prevTime || curr); + self.diff = ms; + self.prev = prevTime; + self.curr = curr; + prevTime = curr; + + args[0] = createDebug.coerce(args[0]); + + if (typeof args[0] !== 'string') { + // Anything else let's inspect with %O + args.unshift('%O'); + } + + // Apply any `formatters` transformations + let index = 0; + args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => { + // If we encounter an escaped % then don't increase the array index + if (match === '%%') { + return '%'; + } + index++; + const formatter = createDebug.formatters[format]; + if (typeof formatter === 'function') { + const val = args[index]; + match = formatter.call(self, val); + + // Now we need to remove `args[index]` since it's inlined in the `format` + args.splice(index, 1); + index--; + } + return match; + }); + + // Apply env-specific formatting (colors, etc.) 
+ createDebug.formatArgs.call(self, args); + + const logFn = self.log || createDebug.log; + logFn.apply(self, args); + } + + debug.namespace = namespace; + debug.useColors = createDebug.useColors(); + debug.color = createDebug.selectColor(namespace); + debug.extend = extend; + debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release. + + Object.defineProperty(debug, 'enabled', { + enumerable: true, + configurable: false, + get: () => { + if (enableOverride !== null) { + return enableOverride; + } + if (namespacesCache !== createDebug.namespaces) { + namespacesCache = createDebug.namespaces; + enabledCache = createDebug.enabled(namespace); + } + + return enabledCache; + }, + set: v => { + enableOverride = v; + } + }); + + // Env-specific initialization logic for debug instances + if (typeof createDebug.init === 'function') { + createDebug.init(debug); + } + + return debug; + } + + function extend(namespace, delimiter) { + const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace); + newDebug.log = this.log; + return newDebug; + } + + /** + * Enables a debug mode by namespaces. This can include modes + * separated by a colon and wildcards. + * + * @param {String} namespaces + * @api public + */ + function enable(namespaces) { + createDebug.save(namespaces); + createDebug.namespaces = namespaces; + + createDebug.names = []; + createDebug.skips = []; + + let i; + const split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/); + const len = split.length; + + for (i = 0; i < len; i++) { + if (!split[i]) { + // ignore empty strings + continue; + } + + namespaces = split[i].replace(/\*/g, '.*?'); + + if (namespaces[0] === '-') { + createDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$')); + } else { + createDebug.names.push(new RegExp('^' + namespaces + '$')); + } + } + } + + /** + * Disable debug output. + * + * @return {String} namespaces + * @api public + */ + function disable() { + const namespaces = [ + ...createDebug.names.map(toNamespace), + ...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace) + ].join(','); + createDebug.enable(''); + return namespaces; + } + + /** + * Returns true if the given mode name is enabled, false otherwise. + * + * @param {String} name + * @return {Boolean} + * @api public + */ + function enabled(name) { + if (name[name.length - 1] === '*') { + return true; + } + + let i; + let len; + + for (i = 0, len = createDebug.skips.length; i < len; i++) { + if (createDebug.skips[i].test(name)) { + return false; + } + } + + for (i = 0, len = createDebug.names.length; i < len; i++) { + if (createDebug.names[i].test(name)) { + return true; + } + } + + return false; + } + + /** + * Convert regexp to namespace + * + * @param {RegExp} regxep + * @return {String} namespace + * @api private + */ + function toNamespace(regexp) { + return regexp.toString() + .substring(2, regexp.toString().length - 2) + .replace(/\.\*\?$/, '*'); + } + + /** + * Coerce `val`. + * + * @param {Mixed} val + * @return {Mixed} + * @api private + */ + function coerce(val) { + if (val instanceof Error) { + return val.stack || val.message; + } + return val; + } + + /** + * XXX DO NOT USE. This is a temporary stub function. + * XXX It WILL be removed in the next major release. + */ + function destroy() { + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. 
It will be removed in the next major version of `debug`.'); + } + + createDebug.enable(createDebug.load()); + + return createDebug; +} + +module.exports = setup; + + +/***/ }), + +/***/ 97785: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/** + * Detect Electron renderer / nwjs process, which is node, but we should + * treat as a browser. + */ + +if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) { + module.exports = __nccwpck_require__(93433); +} else { + module.exports = __nccwpck_require__(8347); +} + + +/***/ }), + +/***/ 8347: +/***/ ((module, exports, __nccwpck_require__) => { + +/** + * Module dependencies. + */ + +const tty = __nccwpck_require__(76224); +const util = __nccwpck_require__(73837); + +/** + * This is the Node.js implementation of `debug()`. + */ + +exports.init = init; +exports.log = log; +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.destroy = util.deprecate( + () => {}, + 'Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.' +); + +/** + * Colors. + */ + +exports.colors = [6, 2, 3, 4, 5, 1]; + +try { + // Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json) + // eslint-disable-next-line import/no-extraneous-dependencies + const supportsColor = __nccwpck_require__(55441); + + if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) { + exports.colors = [ + 20, + 21, + 26, + 27, + 32, + 33, + 38, + 39, + 40, + 41, + 42, + 43, + 44, + 45, + 56, + 57, + 62, + 63, + 68, + 69, + 74, + 75, + 76, + 77, + 78, + 79, + 80, + 81, + 92, + 93, + 98, + 99, + 112, + 113, + 128, + 129, + 134, + 135, + 148, + 149, + 160, + 161, + 162, + 163, + 164, + 165, + 166, + 167, + 168, + 169, + 170, + 171, + 172, + 173, + 178, + 179, + 184, + 185, + 196, + 197, + 198, + 199, + 200, + 201, + 202, + 203, + 204, + 205, + 206, + 207, + 208, + 209, + 214, + 215, + 220, + 221 + ]; + } +} catch (error) { + // Swallow - we only care if `supports-color` is available; it doesn't have to be. +} + +/** + * Build up the default `inspectOpts` object from the environment variables. + * + * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js + */ + +exports.inspectOpts = Object.keys(process.env).filter(key => { + return /^debug_/i.test(key); +}).reduce((obj, key) => { + // Camel-case + const prop = key + .substring(6) + .toLowerCase() + .replace(/_([a-z])/g, (_, k) => { + return k.toUpperCase(); + }); + + // Coerce string value into JS value + let val = process.env[key]; + if (/^(yes|on|true|enabled)$/i.test(val)) { + val = true; + } else if (/^(no|off|false|disabled)$/i.test(val)) { + val = false; + } else if (val === 'null') { + val = null; + } else { + val = Number(val); + } + + obj[prop] = val; + return obj; +}, {}); + +/** + * Is stdout a TTY? Colored output is enabled when `true`. + */ + +function useColors() { + return 'colors' in exports.inspectOpts ? + Boolean(exports.inspectOpts.colors) : + tty.isatty(process.stderr.fd); +} + +/** + * Adds ANSI color escape codes if enabled. + * + * @api public + */ + +function formatArgs(args) { + const {namespace: name, useColors} = this; + + if (useColors) { + const c = this.color; + const colorCode = '\u001B[3' + (c < 8 ? 
c : '8;5;' + c); + const prefix = ` ${colorCode};1m${name} \u001B[0m`; + + args[0] = prefix + args[0].split('\n').join('\n' + prefix); + args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\u001B[0m'); + } else { + args[0] = getDate() + name + ' ' + args[0]; + } +} + +function getDate() { + if (exports.inspectOpts.hideDate) { + return ''; + } + return new Date().toISOString() + ' '; +} + +/** + * Invokes `util.formatWithOptions()` with the specified arguments and writes to stderr. + */ + +function log(...args) { + return process.stderr.write(util.formatWithOptions(exports.inspectOpts, ...args) + '\n'); +} + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + if (namespaces) { + process.env.DEBUG = namespaces; + } else { + // If you set a process.env field to null or undefined, it gets cast to the + // string 'null' or 'undefined'. Just delete instead. + delete process.env.DEBUG; + } +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ + +function load() { + return process.env.DEBUG; +} + +/** + * Init logic for `debug` instances. + * + * Create a new `inspectOpts` object in case `useColors` is set + * differently for a particular `debug` instance. + */ + +function init(debug) { + debug.inspectOpts = {}; + + const keys = Object.keys(exports.inspectOpts); + for (let i = 0; i < keys.length; i++) { + debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]]; + } +} + +module.exports = __nccwpck_require__(31807)(exports); + +const {formatters} = module.exports; + +/** + * Map %o to `util.inspect()`, all on a single line. + */ + +formatters.o = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts) + .split('\n') + .map(str => str.trim()) + .join(' '); +}; + +/** + * Map %O to `util.inspect()`, allowing multiple lines if needed. 
+ */ + +formatters.O = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts); +}; + + +/***/ }), + +/***/ 17386: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var Stream = (__nccwpck_require__(12781).Stream); @@ -60758,7 +52952,7 @@ DelayedStream.prototype._checkIfMaxDataSizeExceeded = function() { /***/ }), -/***/ 76604: +/***/ 25800: /***/ ((__unused_webpack_module, exports) => { "use strict"; @@ -60786,7 +52980,7 @@ exports.Deprecation = Deprecation; /***/ }), -/***/ 19166: +/***/ 27965: /***/ ((module) => { "use strict"; @@ -61379,7 +53573,7 @@ module.exports = DotObject /***/ }), -/***/ 79752: +/***/ 68963: /***/ ((module, exports) => { "use strict"; @@ -62258,7 +54452,7 @@ module.exports.defineEventAttribute = defineEventAttribute /***/ }), -/***/ 59251: +/***/ 51362: /***/ ((module) => { module.exports = class FixedFIFO { @@ -62304,10 +54498,10 @@ module.exports = class FixedFIFO { /***/ }), -/***/ 85923: +/***/ 93406: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -const FixedFIFO = __nccwpck_require__(59251) +const FixedFIFO = __nccwpck_require__(51362) module.exports = class FastFIFO { constructor (hwm) { @@ -62359,3600 +54553,4856 @@ module.exports = class FastFIFO { /***/ }), -/***/ 18800: +/***/ 32370: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var debug; +"use strict"; -module.exports = function () { - if (!debug) { - try { - /* eslint global-require: off */ - debug = __nccwpck_require__(65168)("follow-redirects"); - } - catch (error) { /* */ } - if (typeof debug !== "function") { - debug = function () { /* */ }; + +const validator = __nccwpck_require__(37717); +const XMLParser = __nccwpck_require__(61919); +const XMLBuilder = __nccwpck_require__(17430); + +module.exports = { + XMLParser: XMLParser, + XMLValidator: validator, + XMLBuilder: XMLBuilder +} + +/***/ }), + +/***/ 85674: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +const nameStartChar = ':A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD'; +const nameChar = nameStartChar + '\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040'; +const nameRegexp = '[' + nameStartChar + '][' + nameChar + ']*' +const regexName = new RegExp('^' + nameRegexp + '$'); + +const getAllMatches = function(string, regex) { + const matches = []; + let match = regex.exec(string); + while (match) { + const allmatches = []; + allmatches.startIndex = regex.lastIndex - match[0].length; + const len = match.length; + for (let index = 0; index < len; index++) { + allmatches.push(match[index]); } + matches.push(allmatches); + match = regex.exec(string); } - debug.apply(null, arguments); + return matches; }; +const isName = function(string) { + const match = regexName.exec(string); + return !(match === null || typeof match === 'undefined'); +}; -/***/ }), +exports.isExist = function(v) { + return typeof v !== 'undefined'; +}; -/***/ 87758: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +exports.isEmptyObject = function(obj) { + return Object.keys(obj).length === 0; +}; -var url = __nccwpck_require__(57310); -var URL = url.URL; -var http = __nccwpck_require__(13685); -var https = __nccwpck_require__(95687); -var Writable = (__nccwpck_require__(12781).Writable); -var assert = __nccwpck_require__(39491); -var debug = __nccwpck_require__(18800); +/** + * Copy all the properties of a 
into b. + * @param {*} target + * @param {*} a + */ +exports.merge = function(target, a, arrayMode) { + if (a) { + const keys = Object.keys(a); // will return an array of own properties + const len = keys.length; //don't make it inline + for (let i = 0; i < len; i++) { + if (arrayMode === 'strict') { + target[keys[i]] = [ a[keys[i]] ]; + } else { + target[keys[i]] = a[keys[i]]; + } + } + } +}; +/* exports.merge =function (b,a){ + return Object.assign(b,a); +} */ -// Create handlers that pass events from native requests -var events = ["abort", "aborted", "connect", "error", "socket", "timeout"]; -var eventHandlers = Object.create(null); -events.forEach(function (event) { - eventHandlers[event] = function (arg1, arg2, arg3) { - this._redirectable.emit(event, arg1, arg2, arg3); - }; -}); +exports.getValue = function(v) { + if (exports.isExist(v)) { + return v; + } else { + return ''; + } +}; -var InvalidUrlError = createErrorType( - "ERR_INVALID_URL", - "Invalid URL", - TypeError -); -// Error types with codes -var RedirectionError = createErrorType( - "ERR_FR_REDIRECTION_FAILURE", - "Redirected request failed" -); -var TooManyRedirectsError = createErrorType( - "ERR_FR_TOO_MANY_REDIRECTS", - "Maximum number of redirects exceeded" -); -var MaxBodyLengthExceededError = createErrorType( - "ERR_FR_MAX_BODY_LENGTH_EXCEEDED", - "Request body larger than maxBodyLength limit" -); -var WriteAfterEndError = createErrorType( - "ERR_STREAM_WRITE_AFTER_END", - "write after end" -); +// const fakeCall = function(a) {return a;}; +// const fakeCallNoReturn = function() {}; -// istanbul ignore next -var destroy = Writable.prototype.destroy || noop; +exports.isName = isName; +exports.getAllMatches = getAllMatches; +exports.nameRegexp = nameRegexp; -// An HTTP(S) request that can be redirected -function RedirectableRequest(options, responseCallback) { - // Initialize the request - Writable.call(this); - this._sanitizeOptions(options); - this._options = options; - this._ended = false; - this._ending = false; - this._redirectCount = 0; - this._redirects = []; - this._requestBodyLength = 0; - this._requestBodyBuffers = []; - // Attach a callback if passed - if (responseCallback) { - this.on("response", responseCallback); - } +/***/ }), - // React to responses of native requests - var self = this; - this._onNativeResponse = function (response) { - self._processResponse(response); - }; +/***/ 37717: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - // Perform the first request - this._performRequest(); -} -RedirectableRequest.prototype = Object.create(Writable.prototype); +"use strict"; -RedirectableRequest.prototype.abort = function () { - destroyRequest(this._currentRequest); - this._currentRequest.abort(); - this.emit("abort"); -}; -RedirectableRequest.prototype.destroy = function (error) { - destroyRequest(this._currentRequest, error); - destroy.call(this, error); - return this; +const util = __nccwpck_require__(85674); + +const defaultOptions = { + allowBooleanAttributes: false, //A tag can have attributes without any value + unpairedTags: [] }; -// Writes buffered data to the current native request -RedirectableRequest.prototype.write = function (data, encoding, callback) { - // Writing is not allowed if end has been called - if (this._ending) { - throw new WriteAfterEndError(); - } +//const tagsPattern = new RegExp("<\\/?([\\w:\\-_\.]+)\\s*\/?>","g"); +exports.validate = function (xmlData, options) { + options = Object.assign({}, defaultOptions, options); - // Validate input and shift 
parameters if necessary - if (!isString(data) && !isBuffer(data)) { - throw new TypeError("data should be a string, Buffer or Uint8Array"); - } - if (isFunction(encoding)) { - callback = encoding; - encoding = null; + //xmlData = xmlData.replace(/(\r\n|\n|\r)/gm,"");//make it single line + //xmlData = xmlData.replace(/(^\s*<\?xml.*?\?>)/g,"");//Remove XML starting tag + //xmlData = xmlData.replace(/()/g,"");//Remove DOCTYPE + const tags = []; + let tagFound = false; + + //indicates that the root tag has been closed (aka. depth 0 has been reached) + let reachedRoot = false; + + if (xmlData[0] === '\ufeff') { + // check for byte order mark (BOM) + xmlData = xmlData.substr(1); } + + for (let i = 0; i < xmlData.length; i++) { + + if (xmlData[i] === '<' && xmlData[i+1] === '?') { + i+=2; + i = readPI(xmlData,i); + if (i.err) return i; + }else if (xmlData[i] === '<') { + //starting of tag + //read until you reach to '>' avoiding any '>' in attribute value + let tagStartPos = i; + i++; + + if (xmlData[i] === '!') { + i = readCommentAndCDATA(xmlData, i); + continue; + } else { + let closingTag = false; + if (xmlData[i] === '/') { + //closing tag + closingTag = true; + i++; + } + //read tagname + let tagName = ''; + for (; i < xmlData.length && + xmlData[i] !== '>' && + xmlData[i] !== ' ' && + xmlData[i] !== '\t' && + xmlData[i] !== '\n' && + xmlData[i] !== '\r'; i++ + ) { + tagName += xmlData[i]; + } + tagName = tagName.trim(); + //console.log(tagName); - // Ignore empty buffers, since writing them doesn't invoke the callback - // https://github.com/nodejs/node/issues/22066 - if (data.length === 0) { - if (callback) { - callback(); + if (tagName[tagName.length - 1] === '/') { + //self closing tag without attributes + tagName = tagName.substring(0, tagName.length - 1); + //continue; + i--; + } + if (!validateTagName(tagName)) { + let msg; + if (tagName.trim().length === 0) { + msg = "Invalid space after '<'."; + } else { + msg = "Tag '"+tagName+"' is an invalid name."; + } + return getErrorObject('InvalidTag', msg, getLineNumberForPosition(xmlData, i)); + } + + const result = readAttributeStr(xmlData, i); + if (result === false) { + return getErrorObject('InvalidAttr', "Attributes for '"+tagName+"' have open quote.", getLineNumberForPosition(xmlData, i)); + } + let attrStr = result.value; + i = result.index; + + if (attrStr[attrStr.length - 1] === '/') { + //self closing tag + const attrStrStart = i - attrStr.length; + attrStr = attrStr.substring(0, attrStr.length - 1); + const isValid = validateAttributeString(attrStr, options); + if (isValid === true) { + tagFound = true; + //continue; //text may presents after self closing tag + } else { + //the result from the nested function returns the position of the error within the attribute + //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute + //this gives us the absolute index in the entire xml, which we can use to find the line at last + return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, attrStrStart + isValid.err.line)); + } + } else if (closingTag) { + if (!result.tagClosed) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' doesn't have proper closing.", getLineNumberForPosition(xmlData, i)); + } else if (attrStr.trim().length > 0) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' can't have attributes or invalid starting.", getLineNumberForPosition(xmlData, 
tagStartPos)); + } else if (tags.length === 0) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' has not been opened.", getLineNumberForPosition(xmlData, tagStartPos)); + } else { + const otg = tags.pop(); + if (tagName !== otg.tagName) { + let openPos = getLineNumberForPosition(xmlData, otg.tagStartPos); + return getErrorObject('InvalidTag', + "Expected closing tag '"+otg.tagName+"' (opened in line "+openPos.line+", col "+openPos.col+") instead of closing tag '"+tagName+"'.", + getLineNumberForPosition(xmlData, tagStartPos)); + } + + //when there are no more tags, we reached the root level. + if (tags.length == 0) { + reachedRoot = true; + } + } + } else { + const isValid = validateAttributeString(attrStr, options); + if (isValid !== true) { + //the result from the nested function returns the position of the error within the attribute + //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute + //this gives us the absolute index in the entire xml, which we can use to find the line at last + return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, i - attrStr.length + isValid.err.line)); + } + + //if the root level has been reached before ... + if (reachedRoot === true) { + return getErrorObject('InvalidXml', 'Multiple possible root nodes found.', getLineNumberForPosition(xmlData, i)); + } else if(options.unpairedTags.indexOf(tagName) !== -1){ + //don't push into stack + } else { + tags.push({tagName, tagStartPos}); + } + tagFound = true; + } + + //skip tag text value + //It may include comments and CDATA value + for (i++; i < xmlData.length; i++) { + if (xmlData[i] === '<') { + if (xmlData[i + 1] === '!') { + //comment or CADATA + i++; + i = readCommentAndCDATA(xmlData, i); + continue; + } else if (xmlData[i+1] === '?') { + i = readPI(xmlData, ++i); + if (i.err) return i; + } else{ + break; + } + } else if (xmlData[i] === '&') { + const afterAmp = validateAmpersand(xmlData, i); + if (afterAmp == -1) + return getErrorObject('InvalidChar', "char '&' is not expected.", getLineNumberForPosition(xmlData, i)); + i = afterAmp; + }else{ + if (reachedRoot === true && !isWhiteSpace(xmlData[i])) { + return getErrorObject('InvalidXml', "Extra text at the end", getLineNumberForPosition(xmlData, i)); + } + } + } //end of reading tag text value + if (xmlData[i] === '<') { + i--; + } + } + } else { + if ( isWhiteSpace(xmlData[i])) { + continue; + } + return getErrorObject('InvalidChar', "char '"+xmlData[i]+"' is not expected.", getLineNumberForPosition(xmlData, i)); } - return; } - // Only write when we don't exceed the maximum body length - if (this._requestBodyLength + data.length <= this._options.maxBodyLength) { - this._requestBodyLength += data.length; - this._requestBodyBuffers.push({ data: data, encoding: encoding }); - this._currentRequest.write(data, encoding, callback); - } - // Error when we exceed the maximum body length - else { - this.emit("error", new MaxBodyLengthExceededError()); - this.abort(); + + if (!tagFound) { + return getErrorObject('InvalidXml', 'Start tag expected.', 1); + }else if (tags.length == 1) { + return getErrorObject('InvalidTag', "Unclosed tag '"+tags[0].tagName+"'.", getLineNumberForPosition(xmlData, tags[0].tagStartPos)); + }else if (tags.length > 0) { + return getErrorObject('InvalidXml', "Invalid '"+ + JSON.stringify(tags.map(t => t.tagName), null, 4).replace(/\r?\n/g, '')+ + "' found.", {line: 1, col: 
1}); } + + return true; }; -// Ends the current native request -RedirectableRequest.prototype.end = function (data, encoding, callback) { - // Shift parameters if necessary - if (isFunction(data)) { - callback = data; - data = encoding = null; +function isWhiteSpace(char){ + return char === ' ' || char === '\t' || char === '\n' || char === '\r'; +} +/** + * Read Processing insstructions and skip + * @param {*} xmlData + * @param {*} i + */ +function readPI(xmlData, i) { + const start = i; + for (; i < xmlData.length; i++) { + if (xmlData[i] == '?' || xmlData[i] == ' ') { + //tagname + const tagname = xmlData.substr(start, i - start); + if (i > 5 && tagname === 'xml') { + return getErrorObject('InvalidXml', 'XML declaration allowed only at the start of the document.', getLineNumberForPosition(xmlData, i)); + } else if (xmlData[i] == '?' && xmlData[i + 1] == '>') { + //check if valid attribut string + i++; + break; + } else { + continue; + } + } } - else if (isFunction(encoding)) { - callback = encoding; - encoding = null; + return i; +} + +function readCommentAndCDATA(xmlData, i) { + if (xmlData.length > i + 5 && xmlData[i + 1] === '-' && xmlData[i + 2] === '-') { + //comment + for (i += 3; i < xmlData.length; i++) { + if (xmlData[i] === '-' && xmlData[i + 1] === '-' && xmlData[i + 2] === '>') { + i += 2; + break; + } + } + } else if ( + xmlData.length > i + 8 && + xmlData[i + 1] === 'D' && + xmlData[i + 2] === 'O' && + xmlData[i + 3] === 'C' && + xmlData[i + 4] === 'T' && + xmlData[i + 5] === 'Y' && + xmlData[i + 6] === 'P' && + xmlData[i + 7] === 'E' + ) { + let angleBracketsCount = 1; + for (i += 8; i < xmlData.length; i++) { + if (xmlData[i] === '<') { + angleBracketsCount++; + } else if (xmlData[i] === '>') { + angleBracketsCount--; + if (angleBracketsCount === 0) { + break; + } + } + } + } else if ( + xmlData.length > i + 9 && + xmlData[i + 1] === '[' && + xmlData[i + 2] === 'C' && + xmlData[i + 3] === 'D' && + xmlData[i + 4] === 'A' && + xmlData[i + 5] === 'T' && + xmlData[i + 6] === 'A' && + xmlData[i + 7] === '[' + ) { + for (i += 8; i < xmlData.length; i++) { + if (xmlData[i] === ']' && xmlData[i + 1] === ']' && xmlData[i + 2] === '>') { + i += 2; + break; + } + } } - // Write data if needed and end - if (!data) { - this._ended = this._ending = true; - this._currentRequest.end(null, null, callback); + return i; +} + +const doubleQuote = '"'; +const singleQuote = "'"; + +/** + * Keep reading xmlData until '<' is found outside the attribute value. 
+ * @param {string} xmlData + * @param {number} i + */ +function readAttributeStr(xmlData, i) { + let attrStr = ''; + let startChar = ''; + let tagClosed = false; + for (; i < xmlData.length; i++) { + if (xmlData[i] === doubleQuote || xmlData[i] === singleQuote) { + if (startChar === '') { + startChar = xmlData[i]; + } else if (startChar !== xmlData[i]) { + //if vaue is enclosed with double quote then single quotes are allowed inside the value and vice versa + } else { + startChar = ''; + } + } else if (xmlData[i] === '>') { + if (startChar === '') { + tagClosed = true; + break; + } + } + attrStr += xmlData[i]; } - else { - var self = this; - var currentRequest = this._currentRequest; - this.write(data, encoding, function () { - self._ended = true; - currentRequest.end(null, null, callback); - }); - this._ending = true; + if (startChar !== '') { + return false; } -}; -// Sets a header value on the current native request -RedirectableRequest.prototype.setHeader = function (name, value) { - this._options.headers[name] = value; - this._currentRequest.setHeader(name, value); -}; + return { + value: attrStr, + index: i, + tagClosed: tagClosed + }; +} -// Clears a header value on the current native request -RedirectableRequest.prototype.removeHeader = function (name) { - delete this._options.headers[name]; - this._currentRequest.removeHeader(name); -}; +/** + * Select all the attributes whether valid or invalid. + */ +const validAttrStrRegxp = new RegExp('(\\s*)([^\\s=]+)(\\s*=)?(\\s*([\'"])(([\\s\\S])*?)\\5)?', 'g'); -// Global timeout for all underlying requests -RedirectableRequest.prototype.setTimeout = function (msecs, callback) { - var self = this; +//attr, ="sd", a="amit's", a="sd"b="saf", ab cd="" - // Destroys the socket on timeout - function destroyOnTimeout(socket) { - socket.setTimeout(msecs); - socket.removeListener("timeout", socket.destroy); - socket.addListener("timeout", socket.destroy); - } +function validateAttributeString(attrStr, options) { + //console.log("start:"+attrStr+":end"); - // Sets up a timer to trigger a timeout event - function startTimer(socket) { - if (self._timeout) { - clearTimeout(self._timeout); - } - self._timeout = setTimeout(function () { - self.emit("timeout"); - clearTimer(); - }, msecs); - destroyOnTimeout(socket); - } + //if(attrStr.trim().length === 0) return true; //empty string - // Stops a timeout from triggering - function clearTimer() { - // Clear the timeout - if (self._timeout) { - clearTimeout(self._timeout); - self._timeout = null; - } + const matches = util.getAllMatches(attrStr, validAttrStrRegxp); + const attrNames = {}; - // Clean up all attached listeners - self.removeListener("abort", clearTimer); - self.removeListener("error", clearTimer); - self.removeListener("response", clearTimer); - self.removeListener("close", clearTimer); - if (callback) { - self.removeListener("timeout", callback); + for (let i = 0; i < matches.length; i++) { + if (matches[i][1].length === 0) { + //nospace before attribute name: a="sd"b="saf" + return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' has no space in starting.", getPositionFromMatch(matches[i])) + } else if (matches[i][3] !== undefined && matches[i][4] === undefined) { + return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' is without value.", getPositionFromMatch(matches[i])); + } else if (matches[i][3] === undefined && !options.allowBooleanAttributes) { + //independent attribute: ab + return getErrorObject('InvalidAttr', "boolean attribute '"+matches[i][2]+"' is not 
allowed.", getPositionFromMatch(matches[i])); } - if (!self.socket) { - self._currentRequest.removeListener("socket", startTimer); + /* else if(matches[i][6] === undefined){//attribute without value: ab= + return { err: { code:"InvalidAttr",msg:"attribute " + matches[i][2] + " has no value assigned."}}; + } */ + const attrName = matches[i][2]; + if (!validateAttrName(attrName)) { + return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is an invalid name.", getPositionFromMatch(matches[i])); + } + if (!attrNames.hasOwnProperty(attrName)) { + //check for duplicate attribute. + attrNames[attrName] = 1; + } else { + return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is repeated.", getPositionFromMatch(matches[i])); } } - // Attach callback if passed - if (callback) { - this.on("timeout", callback); + return true; +} + +function validateNumberAmpersand(xmlData, i) { + let re = /\d/; + if (xmlData[i] === 'x') { + i++; + re = /[\da-fA-F]/; + } + for (; i < xmlData.length; i++) { + if (xmlData[i] === ';') + return i; + if (!xmlData[i].match(re)) + break; } + return -1; +} - // Start the timer if or when the socket is opened - if (this.socket) { - startTimer(this.socket); +function validateAmpersand(xmlData, i) { + // https://www.w3.org/TR/xml/#dt-charref + i++; + if (xmlData[i] === ';') + return -1; + if (xmlData[i] === '#') { + i++; + return validateNumberAmpersand(xmlData, i); } - else { - this._currentRequest.once("socket", startTimer); + let count = 0; + for (; i < xmlData.length; i++, count++) { + if (xmlData[i].match(/\w/) && count < 20) + continue; + if (xmlData[i] === ';') + break; + return -1; } + return i; +} - // Clean up on events - this.on("socket", destroyOnTimeout); - this.on("abort", clearTimer); - this.on("error", clearTimer); - this.on("response", clearTimer); - this.on("close", clearTimer); +function getErrorObject(code, message, lineNumber) { + return { + err: { + code: code, + msg: message, + line: lineNumber.line || lineNumber, + col: lineNumber.col, + }, + }; +} - return this; -}; +function validateAttrName(attrName) { + return util.isName(attrName); +} -// Proxy all other public ClientRequest methods -[ - "flushHeaders", "getHeader", - "setNoDelay", "setSocketKeepAlive", -].forEach(function (method) { - RedirectableRequest.prototype[method] = function (a, b) { - return this._currentRequest[method](a, b); +// const startsWithXML = /^xml/i; + +function validateTagName(tagname) { + return util.isName(tagname) /* && !tagname.match(startsWithXML) */; +} + +//this function returns the line number for the character at the given index +function getLineNumberForPosition(xmlData, index) { + const lines = xmlData.substring(0, index).split(/\r?\n/); + return { + line: lines.length, + + // column number is last line's length + 1, because column numbering starts at 1: + col: lines[lines.length - 1].length + 1 }; -}); +} -// Proxy all public ClientRequest properties -["aborted", "connection", "socket"].forEach(function (property) { - Object.defineProperty(RedirectableRequest.prototype, property, { - get: function () { return this._currentRequest[property]; }, - }); -}); +//this function returns the position of the first character of match within attrStr +function getPositionFromMatch(match) { + return match.startIndex + match[1].length; +} -RedirectableRequest.prototype._sanitizeOptions = function (options) { - // Ensure headers are always present - if (!options.headers) { - options.headers = {}; - } - // Since http.request treats host as an alias of hostname, - // 
but the url module interprets host as hostname plus port,
-  // eliminate the host property to avoid confusion.
-  if (options.host) {
-    // Use hostname if set, because it has precedence
-    if (!options.hostname) {
-      options.hostname = options.host;
-    }
-    delete options.host;
-  }
-
-  // Complete the URL object when necessary
-  if (!options.pathname && options.path) {
-    var searchPos = options.path.indexOf("?");
-    if (searchPos < 0) {
-      options.pathname = options.path;
-    }
-    else {
-      options.pathname = options.path.substring(0, searchPos);
-      options.search = options.path.substring(searchPos);
-    }
-  }
-};
+
+/***/ }),
+
+/***/ 17430:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
-// Executes the next native request (initial or redirect)
-RedirectableRequest.prototype._performRequest = function () {
-  // Load the native protocol
-  var protocol = this._options.protocol;
-  var nativeProtocol = this._options.nativeProtocols[protocol];
-  if (!nativeProtocol) {
-    this.emit("error", new TypeError("Unsupported protocol " + protocol));
-    return;
+//parse Empty Node as self closing node
+const buildFromOrderedJs = __nccwpck_require__(69947);
+
+const defaultOptions = {
+  attributeNamePrefix: '@_',
+  attributesGroupName: false,
+  textNodeName: '#text',
+  ignoreAttributes: true,
+  cdataPropName: false,
+  format: false,
+  indentBy: '  ',
+  suppressEmptyNode: false,
+  suppressUnpairedNode: true,
+  suppressBooleanAttributes: true,
+  tagValueProcessor: function(key, a) {
+    return a;
+  },
+  attributeValueProcessor: function(attrName, a) {
+    return a;
+  },
+  preserveOrder: false,
+  commentPropName: false,
+  unpairedTags: [],
+  entities: [
+    { regex: new RegExp("&", "g"), val: "&amp;" },//it must be on top
+    { regex: new RegExp(">", "g"), val: "&gt;" },
+    { regex: new RegExp("<", "g"), val: "&lt;" },
+    { regex: new RegExp("\'", "g"), val: "&apos;" },
+    { regex: new RegExp("\"", "g"), val: "&quot;" }
+  ],
+  processEntities: true,
+  stopNodes: [],
+  // transformTagName: false,
+  // transformAttributeName: false,
+  oneListGroup: false
+};
+
+function Builder(options) {
+  this.options = Object.assign({}, defaultOptions, options);
+  if (this.options.ignoreAttributes || this.options.attributesGroupName) {
+    this.isAttribute = function(/*a*/) {
+      return false;
+    };
+  } else {
+    this.attrPrefixLen = this.options.attributeNamePrefix.length;
+    this.isAttribute = isAttribute;
+  }
-  }
-  // If specified, use the agent corresponding to the protocol
-  // (HTTP and HTTPS use different types of agents)
-  if (this._options.agents) {
-    var scheme = protocol.slice(0, -1);
-    this._options.agent = this._options.agents[scheme];
-  }
+
+  this.processTextOrObjNode = processTextOrObjNode
+
+  if (this.options.format) {
+    this.indentate = indentate;
+    this.tagEndChar = '>\n';
+    this.newLine = '\n';
+  } else {
+    this.indentate = function() {
+      return '';
+    };
+    this.tagEndChar = '>';
+    this.newLine = '';
+  }
+}
-
-  // Create the native request and set up its event handlers
-  var request = this._currentRequest =
-    nativeProtocol.request(this._options, this._onNativeResponse);
-  request._redirectable = this;
-  for (var event of events) {
-    request.on(event, eventHandlers[event]);
-  }
-
-  // RFC7230§5.3.1: When making a request directly to an origin server, […]
-  // a client MUST send only the absolute path […] as the request-target.
-  this._currentUrl = /^\//.test(this._options.path) ?
-    url.format(this._options) :
-    // When making a request to a proxy, […]
-    // a client MUST send the target URI in absolute-form […].
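The entity table above is order-sensitive: "&" must be replaced first, otherwise the "&" introduced by the later replacements would itself be re-escaped. A minimal sketch of the effect through the public fast-xml-parser API (XMLBuilder is the package's documented export that wraps this bundled module; it is not defined in this hunk):

    const { XMLBuilder } = require("fast-xml-parser");

    const builder = new XMLBuilder({ processEntities: true });
    // "&" is escaped before "<", so the result is escaped exactly once:
    console.log(builder.build({ note: "a < b & c" }));
    // <note>a &lt; b &amp; c</note>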
- this._options.path; +Builder.prototype.build = function(jObj) { + if(this.options.preserveOrder){ + return buildFromOrderedJs(jObj, this.options); + }else { + if(Array.isArray(jObj) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1){ + jObj = { + [this.options.arrayNodeName] : jObj + } + } + return this.j2x(jObj, 0).val; + } +}; - // End a redirected request - // (The first request must be ended explicitly with RedirectableRequest#end) - if (this._isRedirect) { - // Write the request entity and end - var i = 0; - var self = this; - var buffers = this._requestBodyBuffers; - (function writeNext(error) { - // Only write if this request has not been redirected yet - /* istanbul ignore else */ - if (request === self._currentRequest) { - // Report any write errors - /* istanbul ignore if */ - if (error) { - self.emit("error", error); - } - // Write the next buffer if there are still left - else if (i < buffers.length) { - var buffer = buffers[i++]; - /* istanbul ignore else */ - if (!request.finished) { - request.write(buffer.data, buffer.encoding, writeNext); +Builder.prototype.j2x = function(jObj, level) { + let attrStr = ''; + let val = ''; + for (let key in jObj) { + if(!Object.prototype.hasOwnProperty.call(jObj, key)) continue; + if (typeof jObj[key] === 'undefined') { + // supress undefined node only if it is not an attribute + if (this.isAttribute(key)) { + val += ''; + } + } else if (jObj[key] === null) { + // null attribute should be ignored by the attribute list, but should not cause the tag closing + if (this.isAttribute(key)) { + val += ''; + } else if (key[0] === '?') { + val += this.indentate(level) + '<' + key + '?' + this.tagEndChar; + } else { + val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } + // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } else if (jObj[key] instanceof Date) { + val += this.buildTextValNode(jObj[key], key, '', level); + } else if (typeof jObj[key] !== 'object') { + //premitive type + const attr = this.isAttribute(key); + if (attr) { + attrStr += this.buildAttrPairStr(attr, '' + jObj[key]); + }else { + //tag value + if (key === this.options.textNodeName) { + let newval = this.options.tagValueProcessor(key, '' + jObj[key]); + val += this.replaceEntitiesValue(newval); + } else { + val += this.buildTextValNode(jObj[key], key, '', level); + } + } + } else if (Array.isArray(jObj[key])) { + //repeated nodes + const arrLen = jObj[key].length; + let listTagVal = ""; + for (let j = 0; j < arrLen; j++) { + const item = jObj[key][j]; + if (typeof item === 'undefined') { + // supress undefined node + } else if (item === null) { + if(key[0] === "?") val += this.indentate(level) + '<' + key + '?' 
+ this.tagEndChar; + else val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } else if (typeof item === 'object') { + if(this.options.oneListGroup ){ + listTagVal += this.j2x(item, level + 1).val; + }else{ + listTagVal += this.processTextOrObjNode(item, key, level) } + } else { + listTagVal += this.buildTextValNode(item, key, '', level); } - // End the request if `end` has been called on us - else if (self._ended) { - request.end(); + } + if(this.options.oneListGroup){ + listTagVal = this.buildObjectNode(listTagVal, key, '', level); + } + val += listTagVal; + } else { + //nested node + if (this.options.attributesGroupName && key === this.options.attributesGroupName) { + const Ks = Object.keys(jObj[key]); + const L = Ks.length; + for (let j = 0; j < L; j++) { + attrStr += this.buildAttrPairStr(Ks[j], '' + jObj[key][Ks[j]]); } + } else { + val += this.processTextOrObjNode(jObj[key], key, level) } - }()); + } } + return {attrStr: attrStr, val: val}; }; -// Processes a response from the current native request -RedirectableRequest.prototype._processResponse = function (response) { - // Store the redirected response - var statusCode = response.statusCode; - if (this._options.trackRedirects) { - this._redirects.push({ - url: this._currentUrl, - headers: response.headers, - statusCode: statusCode, - }); - } +Builder.prototype.buildAttrPairStr = function(attrName, val){ + val = this.options.attributeValueProcessor(attrName, '' + val); + val = this.replaceEntitiesValue(val); + if (this.options.suppressBooleanAttributes && val === "true") { + return ' ' + attrName; + } else return ' ' + attrName + '="' + val + '"'; +} - // RFC7231§6.4: The 3xx (Redirection) class of status code indicates - // that further action needs to be taken by the user agent in order to - // fulfill the request. If a Location header field is provided, - // the user agent MAY automatically redirect its request to the URI - // referenced by the Location field value, - // even if the specific status code is not understood. +function processTextOrObjNode (object, key, level) { + const result = this.j2x(object, level + 1); + if (object[this.options.textNodeName] !== undefined && Object.keys(object).length === 1) { + return this.buildTextValNode(object[this.options.textNodeName], key, result.attrStr, level); + } else { + return this.buildObjectNode(result.val, key, result.attrStr, level); + } +} - // If the response is not a redirect; return it as-is - var location = response.headers.location; - if (!location || this._options.followRedirects === false || - statusCode < 300 || statusCode >= 400) { - response.responseUrl = this._currentUrl; - response.redirects = this._redirects; - this.emit("response", response); +Builder.prototype.buildObjectNode = function(val, key, attrStr, level) { + if(val === ""){ + if(key[0] === "?") return this.indentate(level) + '<' + key + attrStr+ '?' 
+ this.tagEndChar;
+    else {
+      return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar;
+    }
+  }else{
+
+    let tagEndExp = '</' + key + this.tagEndChar;
+    let piClosingChar = "";
+
+    if(key[0] === "?") {
+      piClosingChar = "?";
+      tagEndExp = "";
+    }
+
+    // attrStr is an empty string in case the attribute came as undefined or null
+    if ((attrStr || attrStr === '') && val.indexOf('<') === -1) {
+      return ( this.indentate(level) + '<' + key + attrStr + piClosingChar + '>' + val + tagEndExp );
+    } else if (this.options.commentPropName !== false && key === this.options.commentPropName && piClosingChar.length === 0) {
+      return this.indentate(level) + `<!--${val}-->` + this.newLine;
+    }else {
+      return (
+        this.indentate(level) + '<' + key + attrStr + piClosingChar + this.tagEndChar +
+        val +
+        this.indentate(level) + tagEndExp );
+    }
+  }
+}
-  // The response is a redirect, so abort the current request
-  destroyRequest(this._currentRequest);
-  // Discard the remainder of the response to avoid waiting for data
-  response.destroy();
+
+Builder.prototype.closeTag = function(key){
+  let closeTag = "";
+  if(this.options.unpairedTags.indexOf(key) !== -1){ //unpaired
+    if(!this.options.suppressUnpairedNode) closeTag = "/"
+  }else if(this.options.suppressEmptyNode){ //empty
+    closeTag = "/";
+  }else{
+    closeTag = `></${key}`;
+  }
+  return closeTag;
+}
-
-  // RFC7231§6.4: A client SHOULD detect and intervene
-  // in cyclical redirections (i.e., "infinite" redirection loops).
-  if (++this._redirectCount > this._options.maxRedirects) {
-    this.emit("error", new TooManyRedirectsError());
-    return;
-  }
+
+function buildEmptyObjNode(val, key, attrStr, level) {
+  if (val !== '') {
+    return this.buildObjectNode(val, key, attrStr, level);
+  } else {
+    if(key[0] === "?") return this.indentate(level) + '<' + key + attrStr+ '?' + this.tagEndChar;
+    else {
+      return this.indentate(level) + '<' + key + attrStr + '/' + this.tagEndChar;
+      // return this.buildTagStr(level,key, attrStr);
+    }
+  }
+}
-
-  // Store the request headers if applicable
-  var requestHeaders;
-  var beforeRedirect = this._options.beforeRedirect;
-  if (beforeRedirect) {
-    requestHeaders = Object.assign({
-      // The Host header was set by nativeProtocol.request
-      Host: response.req.getHeader("host"),
-    }, this._options.headers);
+
+Builder.prototype.buildTextValNode = function(val, key, attrStr, level) {
+  if (this.options.cdataPropName !== false && key === this.options.cdataPropName) {
+    return this.indentate(level) + `<![CDATA[${val}]]>` + this.newLine;
+  }else if (this.options.commentPropName !== false && key === this.options.commentPropName) {
+    return this.indentate(level) + `<!--${val}-->` + this.newLine;
+  }else if(key[0] === "?") {//PI tag
+    return this.indentate(level) + '<' + key + attrStr+ '?' + this.tagEndChar;
+  }else{
+    let textValue = this.options.tagValueProcessor(key, val);
+    textValue = this.replaceEntitiesValue(textValue);
+
+    if( textValue === ''){
+      return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar;
+    }else{
+      return this.indentate(level) + '<' + key + attrStr + '>' +
+        textValue +
+        '</' + key + this.tagEndChar;
+    }
+  }
+}
+
+Builder.prototype.replaceEntitiesValue = function(textValue){
+  if( textValue && textValue.length > 0 && this.options.processEntities){
+    for (let i=0; i<this.options.entities.length; i++) {
+      const entity = this.options.entities[i];
+      textValue = textValue.replace(entity.regex, entity.val);
+    }
+  }
+  return textValue;
+}
+
+function indentate(level) {
+  return this.options.indentBy.repeat(level);
+}
+
+function isAttribute(name /*, options*/) {
+  if (name.startsWith(this.options.attributeNamePrefix) && name !== this.options.textNodeName) {
+    return name.substr(this.attrPrefixLen);
+  } else {
+    return false;
+  }
+}
+
+module.exports = Builder;
+
+/***/ }),
+
+/***/ 69947:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+const EOL = "\n";
+
+/**
+ *
+ * @param {array} jArray
+ * @param {any} options
+ * @returns
+ */
+function toXml(jArray, options) {
+  let indentation = "";
+  if (options.format && options.indentBy.length > 0) {
+    indentation = EOL;
+  }
-    this._sanitizeOptions(this._options);
-  }
+  return arrToStr(jArray, options, "", indentation);
+}
-
-  // Perform the redirected request
-  try {
-    this._performRequest();
-  }
-  catch (cause) {
-    this.emit("error", new RedirectionError({ cause: cause }));
-  }
-};
+
+function arrToStr(arr, options, jPath, indentation) {
+  let xmlStr = "";
+  let isPreviousElementTag = false;
+
+  for (let i = 0; i < arr.length; i++) {
+    const tagObj = arr[i];
+    const tagName = propName(tagObj);
+    if(tagName === undefined) continue;
+
+    let newJPath = "";
+    if (jPath.length === 0) newJPath = tagName
+    else newJPath = `${jPath}.${tagName}`;
+
+    if (tagName === options.textNodeName) {
+      let tagText = tagObj[tagName];
+      if (!isStopNode(newJPath, options)) {
+        tagText = options.tagValueProcessor(tagName, tagText);
+        tagText = replaceEntitiesValue(tagText, options);
+      }
+      if (isPreviousElementTag) {
+        xmlStr += indentation;
+      }
+      xmlStr += tagText;
+      isPreviousElementTag = false;
+      continue;
+    } else if (tagName === options.cdataPropName) {
+      if (isPreviousElementTag) {
+        xmlStr += indentation;
+      }
+      xmlStr += `<![CDATA[${tagObj[tagName][0][options.textNodeName]}]]>`;
+      isPreviousElementTag = false;
+      continue;
+    } else if (tagName === options.commentPropName) {
+      xmlStr += indentation + `<!--${tagObj[tagName][0][options.textNodeName]}-->`;
+      isPreviousElementTag = true;
+      continue;
+    } else if (tagName[0] === "?") {
+      const attStr = attr_to_str(tagObj[":@"], options);
+      const tempInd = tagName === "?xml" ? "" : indentation;
+      let piTextNodeName = tagObj[tagName][0][options.textNodeName];
+      piTextNodeName = piTextNodeName.length !== 0 ? " " + piTextNodeName : ""; //remove extra spacing
+      xmlStr += tempInd + `<${tagName}${piTextNodeName}${attStr}?>`;
+      isPreviousElementTag = true;
+      continue;
+    }
+    let newIdentation = indentation;
+    if (newIdentation !== "") {
+      newIdentation += options.indentBy;
+    }
+    const attStr = attr_to_str(tagObj[":@"], options);
+    const tagStart = indentation + `<${tagName}${attStr}`;
+    const tagValue = arrToStr(tagObj[tagName], options, newJPath, newIdentation);
+    if (options.unpairedTags.indexOf(tagName) !== -1) {
+      if (options.suppressUnpairedNode) xmlStr += tagStart + ">";
+      else xmlStr += tagStart + "/>";
+    } else if ((!tagValue || tagValue.length === 0) && options.suppressEmptyNode) {
+      xmlStr += tagStart + "/>";
+    } else if (tagValue && tagValue.endsWith(">")) {
+      xmlStr += tagStart + `>${tagValue}${indentation}</${tagName}>`;
+    } else {
+      xmlStr += tagStart + ">";
+      if (tagValue && indentation !== "" && (tagValue.includes("/>") || tagValue.includes("</"))) {
+        xmlStr += indentation + options.indentBy + tagValue + indentation;
+      } else {
+        xmlStr += tagValue;
+      }
+      xmlStr += `</${tagName}>`;
+    }
-        input = parsed;
-      }
-      else if (URL && (input instanceof URL)) {
-        input = urlToOptions(input);
-      }
-      else {
-        callback = options;
-        options = input;
-        input = { protocol: protocol };
-      }
-      if (isFunction(options)) {
-        callback = options;
-        options = null;
-      }
+    isPreviousElementTag = true;
+  }
-
-      // Set defaults
-      options = Object.assign({
-        maxRedirects: exports.maxRedirects,
-        maxBodyLength: exports.maxBodyLength,
-      }, input, options);
-      options.nativeProtocols = nativeProtocols;
-      if (!isString(options.host) && !isString(options.hostname)) {
-        options.hostname = "::1";
-      }
+
+  return xmlStr;
+}
-
-      assert.equal(options.protocol, protocol, "protocol mismatch");
-      debug("options", options);
-      return new RedirectableRequest(options, callback);
-    }
+
+function propName(obj) {
+  const keys = Object.keys(obj);
+  for (let i = 0; i < keys.length; i++) {
+    const key = keys[i];
+    if(!obj.hasOwnProperty(key)) continue;
+    if (key !== ":@") return key;
+  }
+}
-
-    // Executes a GET request, following redirects
-    function get(input, options, callback) {
-      var wrappedRequest = wrappedProtocol.request(input, options, callback);
-      wrappedRequest.end();
-      return wrappedRequest;
-    }
+
+function attr_to_str(attrMap, options) {
+  let attrStr = "";
+  if (attrMap && !options.ignoreAttributes) {
+    for (let attr in attrMap) {
+      if(!attrMap.hasOwnProperty(attr)) continue;
+      let attrVal = options.attributeValueProcessor(attr, attrMap[attr]);
+      attrVal = replaceEntitiesValue(attrVal, options);
+      if (attrVal === true && options.suppressBooleanAttributes) {
+        attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}`;
+      } else {
+        attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}="${attrVal}"`;
+      }
+    }
+  }
+  return attrStr;
+}
-
-    // Expose the properties on the wrapped protocol
-    Object.defineProperties(wrappedProtocol, {
-      request: { value: request, configurable: true, enumerable: true, writable: true },
-      get: { value: get, configurable: true, enumerable: true, writable: true },
-    });
-  });
-  return exports;
-}
-
-/* istanbul ignore next */
-function noop() { /* empty */ }
-
-// from https://github.com/nodejs/node/blob/master/lib/internal/url.js
-function urlToOptions(urlObject) {
-  var options = {
-    protocol: urlObject.protocol,
-    hostname: urlObject.hostname.startsWith("[") ?
-      /* istanbul ignore next */
-      urlObject.hostname.slice(1, -1) :
-      urlObject.hostname,
-    hash: urlObject.hash,
-    search: urlObject.search,
-    pathname: urlObject.pathname,
-    path: urlObject.pathname + urlObject.search,
-    href: urlObject.href,
-  };
-  if (urlObject.port !== "") {
-    options.port = Number(urlObject.port);
-  }
-  return options;
-}
+
+function isStopNode(jPath, options) {
+  jPath = jPath.substr(0, jPath.length - options.textNodeName.length - 1);
+  let tagName = jPath.substr(jPath.lastIndexOf(".") + 1);
+  for (let index in options.stopNodes) {
+    if (options.stopNodes[index] === jPath || options.stopNodes[index] === "*." + tagName) return true;
+  }
+  return false;
+}
-
-function removeMatchingHeaders(regex, headers) {
-  var lastValue;
-  for (var header in headers) {
-    if (regex.test(header)) {
-      lastValue = headers[header];
-      delete headers[header];
-    }
-  }
-  return (lastValue === null || typeof lastValue === "undefined") ?
-    undefined : String(lastValue).trim();
-}
+
+function replaceEntitiesValue(textValue, options) {
+  if (textValue && textValue.length > 0 && options.processEntities) {
+    for (let i = 0; i < options.entities.length; i++) {
+      const entity = options.entities[i];
+      textValue = textValue.replace(entity.regex, entity.val);
+    }
+  }
+  return textValue;
+}
+module.exports = toXml;
-
-function createErrorType(code, message, baseClass) {
-  // Create constructor
-  function CustomError(properties) {
-    Error.captureStackTrace(this, this.constructor);
-    Object.assign(this, properties || {});
-    this.code = code;
-    this.message = this.cause ? message + ": " + this.cause.message : message;
-  }
-
-  // Attach constructor and set default properties
-  CustomError.prototype = new (baseClass || Error)();
-  CustomError.prototype.constructor = CustomError;
-  CustomError.prototype.name = "Error [" + code + "]";
-  return CustomError;
-}
-
-function destroyRequest(request, error) {
-  for (var event of events) {
-    request.removeListener(event, eventHandlers[event]);
-  }
-  request.on("error", noop);
-  request.destroy(error);
-}
+
+/***/ }),
+
+/***/ 18215:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+const util = __nccwpck_require__(85674);
+
+//TODO: handle comments
+function readDocType(xmlData, i){
+
+  const entities = {};
+  if( xmlData[i + 3] === 'O' &&
+      xmlData[i + 4] === 'C' &&
+      xmlData[i + 5] === 'T' &&
+      xmlData[i + 6] === 'Y' &&
+      xmlData[i + 7] === 'P' &&
+      xmlData[i + 8] === 'E')
+  {
+    i = i+9;
+    let angleBracketsCount = 1;
+    let hasBody = false, comment = false;
+    let exp = "";
+    for(;i<xmlData.length;i++){
+      if (xmlData[i] === '<' && !comment) { //Determine the tag type
+        if( hasBody && isEntity(xmlData, i)){
+          i += 7;
+          let entityName, val;
+          [entityName, val, i] = readEntityExp(xmlData,i+1);
+          if(val.indexOf("&") === -1) //Parameter entities are not supported
+            entities[ validateEntityName(entityName) ] = {
+              regx : RegExp( `&${entityName};`,"g"),
+              val: val
+            };
+        }
+        else if( hasBody && isElement(xmlData, i)) i += 8; //Not supported
+        else if( hasBody && isAttlist(xmlData, i)) i += 8; //Not supported
+        else if( hasBody && isNotation(xmlData, i)) i += 9; //Not supported
+        else if( isComment(xmlData, i)) comment = true;
+        else throw new Error("Invalid DOCTYPE");
+
+        angleBracketsCount++;
+        exp = "";
+      } else if (xmlData[i] === '>') { //Read tag content
+        if(comment){
+          if( xmlData[i - 1] === "-" && xmlData[i - 2] === "-"){
+            comment = false;
+            angleBracketsCount--;
+          }
+        }else{
+          angleBracketsCount--;
+        }
+        if (angleBracketsCount === 0) {
+          break;
+        }
+      }else if( xmlData[i] === '['){
+        hasBody = true;
+      }else{
+        exp += xmlData[i];
+      }
+    }
+    if(angleBracketsCount !== 0){
+      throw new Error(`Unclosed DOCTYPE`);
+    }
+  }else{
+    throw new Error(`Invalid Tag instead of DOCTYPE`);
+  }
+  return {entities, i};
+}
-
-function isSubdomain(subdomain, domain) {
-  assert(isString(subdomain) && isString(domain));
-  var dot = subdomain.length - domain.length - 1;
-  return dot > 0 && subdomain[dot] === "."
&& subdomain.endsWith(domain); +function readEntityExp(xmlData,i){ + //External entities are not supported + // + + //Parameter entities are not supported + // + + //Internal entities are supported + // + + //read EntityName + let entityName = ""; + for (; i < xmlData.length && (xmlData[i] !== "'" && xmlData[i] !== '"' ); i++) { + // if(xmlData[i] === " ") continue; + // else + entityName += xmlData[i]; + } + entityName = entityName.trim(); + if(entityName.indexOf(" ") !== -1) throw new Error("External entites are not supported"); + + //read Entity Value + const startChar = xmlData[i++]; + let val = "" + for (; i < xmlData.length && xmlData[i] !== startChar ; i++) { + val += xmlData[i]; + } + return [entityName, val, i]; } -function isString(value) { - return typeof value === "string" || value instanceof String; +function isComment(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === '-' && + xmlData[i+3] === '-') return true + return false +} +function isEntity(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'E' && + xmlData[i+3] === 'N' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'I' && + xmlData[i+6] === 'T' && + xmlData[i+7] === 'Y') return true + return false +} +function isElement(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'E' && + xmlData[i+3] === 'L' && + xmlData[i+4] === 'E' && + xmlData[i+5] === 'M' && + xmlData[i+6] === 'E' && + xmlData[i+7] === 'N' && + xmlData[i+8] === 'T') return true + return false } -function isFunction(value) { - return typeof value === "function"; +function isAttlist(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'A' && + xmlData[i+3] === 'T' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'L' && + xmlData[i+6] === 'I' && + xmlData[i+7] === 'S' && + xmlData[i+8] === 'T') return true + return false +} +function isNotation(xmlData, i){ + if(xmlData[i+1] === '!' 
&& + xmlData[i+2] === 'N' && + xmlData[i+3] === 'O' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'A' && + xmlData[i+6] === 'T' && + xmlData[i+7] === 'I' && + xmlData[i+8] === 'O' && + xmlData[i+9] === 'N') return true + return false } -function isBuffer(value) { - return typeof value === "object" && ("length" in value); +function validateEntityName(name){ + if (util.isName(name)) + return name; + else + throw new Error(`Invalid entity name ${name}`); } -// Exports -module.exports = wrap({ http: http, https: https }); -module.exports.wrap = wrap; +module.exports = readDocType; /***/ }), -/***/ 85544: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 1839: +/***/ ((__unused_webpack_module, exports) => { -var CombinedStream = __nccwpck_require__(12738); -var util = __nccwpck_require__(73837); -var path = __nccwpck_require__(71017); -var http = __nccwpck_require__(13685); -var https = __nccwpck_require__(95687); -var parseUrl = (__nccwpck_require__(57310).parse); -var fs = __nccwpck_require__(57147); -var Stream = (__nccwpck_require__(12781).Stream); -var mime = __nccwpck_require__(24651); -var asynckit = __nccwpck_require__(41417); -var populate = __nccwpck_require__(13110); -// Public API -module.exports = FormData; +const defaultOptions = { + preserveOrder: false, + attributeNamePrefix: '@_', + attributesGroupName: false, + textNodeName: '#text', + ignoreAttributes: true, + removeNSPrefix: false, // remove NS from tag name or attribute name if true + allowBooleanAttributes: false, //a tag can have attributes without any value + //ignoreRootElement : false, + parseTagValue: true, + parseAttributeValue: false, + trimValues: true, //Trim string values of tag and attributes + cdataPropName: false, + numberParseOptions: { + hex: true, + leadingZeros: true, + eNotation: true + }, + tagValueProcessor: function(tagName, val) { + return val; + }, + attributeValueProcessor: function(attrName, val) { + return val; + }, + stopNodes: [], //nested tags will not be parsed even for errors + alwaysCreateTextNode: false, + isArray: () => false, + commentPropName: false, + unpairedTags: [], + processEntities: true, + htmlEntities: false, + ignoreDeclaration: false, + ignorePiTags: false, + transformTagName: false, + transformAttributeName: false, + updateTag: function(tagName, jPath, attrs){ + return tagName + }, + // skipEmptyListItem: false +}; + +const buildOptions = function(options) { + return Object.assign({}, defaultOptions, options); +}; -// make it a Stream -util.inherits(FormData, CombinedStream); +exports.buildOptions = buildOptions; +exports.defaultOptions = defaultOptions; -/** - * Create readable "multipart/form-data" streams. - * Can be used to submit forms - * and file uploads to other web applications. 
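buildOptions() above is a single shallow Object.assign over these defaults, so any option that is not supplied keeps the listed value. A small sketch through the public API (assuming this bundle tracks fast-xml-parser v4, whose package entry exports XMLParser):

    const { XMLParser } = require("fast-xml-parser");

    // ignoreAttributes and parseAttributeValue are overridden; everything else
    // (trimValues, parseTagValue, textNodeName "#text", prefix "@_", ...)
    // keeps the defaults listed above.
    const parser = new XMLParser({ ignoreAttributes: false, parseAttributeValue: true });
    console.log(parser.parse('<a id="7">x</a>'));
    // { a: { "#text": "x", "@_id": 7 } }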
- * - * @constructor - * @param {Object} options - Properties to be added/overriden for FormData and CombinedStream - */ -function FormData(options) { - if (!(this instanceof FormData)) { - return new FormData(options); - } +/***/ }), - this._overheadLength = 0; - this._valueLength = 0; - this._valuesToMeasure = []; +/***/ 37270: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - CombinedStream.call(this); +"use strict"; - options = options || {}; - for (var option in options) { - this[option] = options[option]; +///@ts-check + +const util = __nccwpck_require__(85674); +const xmlNode = __nccwpck_require__(49778); +const readDocType = __nccwpck_require__(18215); +const toNumber = __nccwpck_require__(98307); + +// const regx = +// '<((!\\[CDATA\\[([\\s\\S]*?)(]]>))|((NAME:)?(NAME))([^>]*)>|((\\/)(NAME)\\s*>))([^<]*)' +// .replace(/NAME/g, util.nameRegexp); + +//const tagsRegx = new RegExp("<(\\/?[\\w:\\-\._]+)([^>]*)>(\\s*"+cdataRegx+")*([^<]+)?","g"); +//const tagsRegx = new RegExp("<(\\/?)((\\w*:)?([\\w:\\-\._]+))([^>]*)>([^<]*)("+cdataRegx+"([^<]*))*([^<]+)?","g"); + +class OrderedObjParser{ + constructor(options){ + this.options = options; + this.currentNode = null; + this.tagsNodeStack = []; + this.docTypeEntities = {}; + this.lastEntities = { + "apos" : { regex: /&(apos|#39|#x27);/g, val : "'"}, + "gt" : { regex: /&(gt|#62|#x3E);/g, val : ">"}, + "lt" : { regex: /&(lt|#60|#x3C);/g, val : "<"}, + "quot" : { regex: /&(quot|#34|#x22);/g, val : "\""}, + }; + this.ampEntity = { regex: /&(amp|#38|#x26);/g, val : "&"}; + this.htmlEntities = { + "space": { regex: /&(nbsp|#160);/g, val: " " }, + // "lt" : { regex: /&(lt|#60);/g, val: "<" }, + // "gt" : { regex: /&(gt|#62);/g, val: ">" }, + // "amp" : { regex: /&(amp|#38);/g, val: "&" }, + // "quot" : { regex: /&(quot|#34);/g, val: "\"" }, + // "apos" : { regex: /&(apos|#39);/g, val: "'" }, + "cent" : { regex: /&(cent|#162);/g, val: "¢" }, + "pound" : { regex: /&(pound|#163);/g, val: "£" }, + "yen" : { regex: /&(yen|#165);/g, val: "¥" }, + "euro" : { regex: /&(euro|#8364);/g, val: "€" }, + "copyright" : { regex: /&(copy|#169);/g, val: "©" }, + "reg" : { regex: /&(reg|#174);/g, val: "®" }, + "inr" : { regex: /&(inr|#8377);/g, val: "₹" }, + "num_dec": { regex: /&#([0-9]{1,7});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 10)) }, + "num_hex": { regex: /&#x([0-9a-fA-F]{1,6});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 16)) }, + }; + this.addExternalEntities = addExternalEntities; + this.parseXml = parseXml; + this.parseTextData = parseTextData; + this.resolveNameSpace = resolveNameSpace; + this.buildAttributesMap = buildAttributesMap; + this.isItStopNode = isItStopNode; + this.replaceEntitiesValue = replaceEntitiesValue; + this.readStopNodeData = readStopNodeData; + this.saveTextToParentTag = saveTextToParentTag; + this.addChild = addChild; } -} -FormData.LINE_BREAK = '\r\n'; -FormData.DEFAULT_CONTENT_TYPE = 'application/octet-stream'; +} -FormData.prototype.append = function(field, value, options) { +function addExternalEntities(externalEntities){ + const entKeys = Object.keys(externalEntities); + for (let i = 0; i < entKeys.length; i++) { + const ent = entKeys[i]; + this.lastEntities[ent] = { + regex: new RegExp("&"+ent+";","g"), + val : externalEntities[ent] + } + } +} - options = options || {}; +/** + * @param {string} val + * @param {string} tagName + * @param {string} jPath + * @param {boolean} dontTrim + * @param {boolean} hasAttributes + * @param {boolean} isLeafNode + * @param 
{boolean} escapeEntities + */ +function parseTextData(val, tagName, jPath, dontTrim, hasAttributes, isLeafNode, escapeEntities) { + if (val !== undefined) { + if (this.options.trimValues && !dontTrim) { + val = val.trim(); + } + if(val.length > 0){ + if(!escapeEntities) val = this.replaceEntitiesValue(val); + + const newval = this.options.tagValueProcessor(tagName, val, jPath, hasAttributes, isLeafNode); + if(newval === null || newval === undefined){ + //don't parse + return val; + }else if(typeof newval !== typeof val || newval !== val){ + //overwrite + return newval; + }else if(this.options.trimValues){ + return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); + }else{ + const trimmedVal = val.trim(); + if(trimmedVal === val){ + return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); + }else{ + return val; + } + } + } + } +} - // allow filename as single option - if (typeof options == 'string') { - options = {filename: options}; +function resolveNameSpace(tagname) { + if (this.options.removeNSPrefix) { + const tags = tagname.split(':'); + const prefix = tagname.charAt(0) === '/' ? '/' : ''; + if (tags[0] === 'xmlns') { + return ''; + } + if (tags.length === 2) { + tagname = prefix + tags[1]; + } } + return tagname; +} - var append = CombinedStream.prototype.append.bind(this); +//TODO: change regex to capture NS +//const attrsRegx = new RegExp("([\\w\\-\\.\\:]+)\\s*=\\s*(['\"])((.|\n)*?)\\2","gm"); +const attrsRegx = new RegExp('([^\\s=]+)\\s*(=\\s*([\'"])([\\s\\S]*?)\\3)?', 'gm'); - // all that streamy business can't handle numbers - if (typeof value == 'number') { - value = '' + value; - } +function buildAttributesMap(attrStr, jPath, tagName) { + if (!this.options.ignoreAttributes && typeof attrStr === 'string') { + // attrStr = attrStr.replace(/\r?\n/g, ' '); + //attrStr = attrStr || attrStr.trim(); - // https://github.com/felixge/node-form-data/issues/38 - if (util.isArray(value)) { - // Please convert your array into string - // the way web server expects it - this._error(new Error('Arrays are not supported.')); - return; - } + const matches = util.getAllMatches(attrStr, attrsRegx); + const len = matches.length; //don't make it inline + const attrs = {}; + for (let i = 0; i < len; i++) { + const attrName = this.resolveNameSpace(matches[i][1]); + let oldVal = matches[i][4]; + let aName = this.options.attributeNamePrefix + attrName; + if (attrName.length) { + if (this.options.transformAttributeName) { + aName = this.options.transformAttributeName(aName); + } + if(aName === "__proto__") aName = "#__proto__"; + if (oldVal !== undefined) { + if (this.options.trimValues) { + oldVal = oldVal.trim(); + } + oldVal = this.replaceEntitiesValue(oldVal); + const newVal = this.options.attributeValueProcessor(attrName, oldVal, jPath); + if(newVal === null || newVal === undefined){ + //don't parse + attrs[aName] = oldVal; + }else if(typeof newVal !== typeof oldVal || newVal !== oldVal){ + //overwrite + attrs[aName] = newVal; + }else{ + //parse + attrs[aName] = parseValue( + oldVal, + this.options.parseAttributeValue, + this.options.numberParseOptions + ); + } + } else if (this.options.allowBooleanAttributes) { + attrs[aName] = true; + } + } + } + if (!Object.keys(attrs).length) { + return; + } + if (this.options.attributesGroupName) { + const attrCollection = {}; + attrCollection[this.options.attributesGroupName] = attrs; + return attrCollection; + } + return attrs + } +} + +const parseXml = function(xmlData) { + xmlData = 
xmlData.replace(/\r\n?/g, "\n"); //TODO: remove this line
+  const xmlObj = new xmlNode('!xml');
+  let currentNode = xmlObj;
+  let textData = "";
+  let jPath = "";
+  for(let i=0; i< xmlData.length; i++){//for each char in XML data
+    const ch = xmlData[i];
+    if(ch === '<'){
+      // const nextIndex = i+1;
+      // const _2ndChar = xmlData[nextIndex];
+      if( xmlData[i+1] === '/') {//Closing Tag
+        const closeIndex = findClosingIndex(xmlData, ">", i, "Closing Tag is not closed.")
+        let tagName = xmlData.substring(i+2,closeIndex).trim();
+
+        if(this.options.removeNSPrefix){
+          const colonIndex = tagName.indexOf(":");
+          if(colonIndex !== -1){
+            tagName = tagName.substr(colonIndex+1);
+          }
+        }
+
+        if(this.options.transformTagName) {
+          tagName = this.options.transformTagName(tagName);
+        }
+
+        if(currentNode){
+          textData = this.saveTextToParentTag(textData, currentNode, jPath);
+        }
+
+        //check if last tag of nested tag was unpaired tag
+        const lastTagName = jPath.substring(jPath.lastIndexOf(".")+1);
+        if(tagName && this.options.unpairedTags.indexOf(tagName) !== -1 ){
+          throw new Error(`Unpaired tag can not be used as closing tag: </${tagName}>`);
+        }
+        let propIndex = 0
+        if(lastTagName && this.options.unpairedTags.indexOf(lastTagName) !== -1 ){
+          propIndex = jPath.lastIndexOf('.', jPath.lastIndexOf('.')-1)
+          this.tagsNodeStack.pop();
+        }else{
+          propIndex = jPath.lastIndexOf(".");
+        }
+        jPath = jPath.substring(0, propIndex);
+
+        currentNode = this.tagsNodeStack.pop();//avoid recursion, set the parent tag scope
+        textData = "";
+        i = closeIndex;
+      } else if( xmlData[i+1] === '?') {
-
-FormData.prototype._trackLength = function(header, value, options) {
-  var valueLength = 0;
-
-  // used w/ getLengthSync(), when length is known.
-  // e.g. for streaming directly from a remote server,
-  // w/ a known file a size, and not wanting to wait for
-  // incoming file to finish to get its size.
-  if (options.knownLength != null) {
-    valueLength += +options.knownLength;
-  } else if (Buffer.isBuffer(value)) {
-    valueLength = value.length;
-  } else if (typeof value === 'string') {
-    valueLength = Buffer.byteLength(value);
-  }
-
-  this._valueLength += valueLength;
+        let tagData = readTagExp(xmlData,i, false, "?>");
+        if(!tagData) throw new Error("Pi Tag is not closed.");
+
+        textData = this.saveTextToParentTag(textData, currentNode, jPath);
+        if( (this.options.ignoreDeclaration && tagData.tagName === "?xml") || this.options.ignorePiTags){
-
-  // @check why add CRLF? does this account for custom/multiple CRLFs?
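One consequence of the closing-tag branch above: a tag listed in unpairedTags closes implicitly, so an explicit closing tag for it is rejected with the "Unpaired tag can not be used as closing tag" error. A sketch against the public API (the tag name "br" is just an illustrative choice):

    const { XMLParser } = require("fast-xml-parser");

    const parser = new XMLParser({ unpairedTags: ["br"] });
    parser.parse("<p>one<br>two</p>");       // ok: <br> needs no closing tag
    parser.parse("<p>one<br></br>two</p>");  // throws: </br> hits the branch above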
- this._overheadLength += - Buffer.byteLength(header) + - FormData.LINE_BREAK.length; + }else{ + + const childNode = new xmlNode(tagData.tagName); + childNode.add(this.options.textNodeName, ""); + + if(tagData.tagName !== tagData.tagExp && tagData.attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagData.tagExp, jPath, tagData.tagName); + } + this.addChild(currentNode, childNode, jPath) - // empty or either doesn't have path or not an http response or not a stream - if (!value || ( !value.path && !(value.readable && value.hasOwnProperty('httpVersion')) && !(value instanceof Stream))) { - return; - } + } - // no need to bother with the length - if (!options.knownLength) { - this._valuesToMeasure.push(value); - } -}; -FormData.prototype._lengthRetriever = function(value, callback) { + i = tagData.closeIndex + 1; + } else if(xmlData.substr(i + 1, 3) === '!--') { + const endIndex = findClosingIndex(xmlData, "-->", i+4, "Comment is not closed.") + if(this.options.commentPropName){ + const comment = xmlData.substring(i + 4, endIndex - 2); - if (value.hasOwnProperty('fd')) { + textData = this.saveTextToParentTag(textData, currentNode, jPath); - // take read range into a account - // `end` = Infinity –> read file till the end - // - // TODO: Looks like there is bug in Node fs.createReadStream - // it doesn't respect `end` options without `start` options - // Fix it when node fixes it. - // https://github.com/joyent/node/issues/7819 - if (value.end != undefined && value.end != Infinity && value.start != undefined) { + currentNode.add(this.options.commentPropName, [ { [this.options.textNodeName] : comment } ]); + } + i = endIndex; + } else if( xmlData.substr(i + 1, 2) === '!D') { + const result = readDocType(xmlData, i); + this.docTypeEntities = result.entities; + i = result.i; + }else if(xmlData.substr(i + 1, 2) === '![') { + const closeIndex = findClosingIndex(xmlData, "]]>", i, "CDATA is not closed.") - 2; + const tagExp = xmlData.substring(i + 9,closeIndex); - // when end specified - // no need to calculate range - // inclusive, starts with 0 - callback(null, value.end + 1 - (value.start ? 
value.start : 0)); + textData = this.saveTextToParentTag(textData, currentNode, jPath); - // not that fast snoopy - } else { - // still need to fetch file size from fs - fs.stat(value.path, function(err, stat) { + let val = this.parseTextData(tagExp, currentNode.tagname, jPath, true, false, true, true); + if(val == undefined) val = ""; - var fileSize; + //cdata should be set even if it is 0 length string + if(this.options.cdataPropName){ + currentNode.add(this.options.cdataPropName, [ { [this.options.textNodeName] : tagExp } ]); + }else{ + currentNode.add(this.options.textNodeName, val); + } + + i = closeIndex + 2; + }else {//Opening tag + let result = readTagExp(xmlData,i, this.options.removeNSPrefix); + let tagName= result.tagName; + const rawTagName = result.rawTagName; + let tagExp = result.tagExp; + let attrExpPresent = result.attrExpPresent; + let closeIndex = result.closeIndex; + + if (this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + //save text as child node + if (currentNode && textData) { + if(currentNode.tagname !== '!xml'){ + //when nested tag is found + textData = this.saveTextToParentTag(textData, currentNode, jPath, false); + } + } - if (err) { - callback(err); - return; + //check if last tag was unpaired tag + const lastTag = currentNode; + if(lastTag && this.options.unpairedTags.indexOf(lastTag.tagname) !== -1 ){ + currentNode = this.tagsNodeStack.pop(); + jPath = jPath.substring(0, jPath.lastIndexOf(".")); } + if(tagName !== xmlObj.tagname){ + jPath += jPath ? "." + tagName : tagName; + } + if (this.isItStopNode(this.options.stopNodes, jPath, tagName)) { + let tagContent = ""; + //self-closing tag + if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ + if(tagName[tagName.length - 1] === "/"){ //remove trailing '/' + tagName = tagName.substr(0, tagName.length - 1); + jPath = jPath.substr(0, jPath.length - 1); + tagExp = tagName; + }else{ + tagExp = tagExp.substr(0, tagExp.length - 1); + } + i = result.closeIndex; + } + //unpaired tag + else if(this.options.unpairedTags.indexOf(tagName) !== -1){ + + i = result.closeIndex; + } + //normal tag + else{ + //read until closing tag is found + const result = this.readStopNodeData(xmlData, rawTagName, closeIndex + 1); + if(!result) throw new Error(`Unexpected end of ${rawTagName}`); + i = result.i; + tagContent = result.tagContent; + } - // update final size based on the range options - fileSize = stat.size - (value.start ? 
value.start : 0); - callback(null, fileSize); - }); + const childNode = new xmlNode(tagName); + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + if(tagContent) { + tagContent = this.parseTextData(tagContent, tagName, jPath, true, attrExpPresent, true, true); + } + + jPath = jPath.substr(0, jPath.lastIndexOf(".")); + childNode.add(this.options.textNodeName, tagContent); + + this.addChild(currentNode, childNode, jPath) + }else{ + //selfClosing tag + if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ + if(tagName[tagName.length - 1] === "/"){ //remove trailing '/' + tagName = tagName.substr(0, tagName.length - 1); + jPath = jPath.substr(0, jPath.length - 1); + tagExp = tagName; + }else{ + tagExp = tagExp.substr(0, tagExp.length - 1); + } + + if(this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + const childNode = new xmlNode(tagName); + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + this.addChild(currentNode, childNode, jPath) + jPath = jPath.substr(0, jPath.lastIndexOf(".")); + } + //opening tag + else{ + const childNode = new xmlNode( tagName); + this.tagsNodeStack.push(currentNode); + + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + this.addChild(currentNode, childNode, jPath) + currentNode = childNode; + } + textData = ""; + i = closeIndex; + } + } + }else{ + textData += xmlData[i]; } + } + return xmlObj.child; +} - // or http response - } else if (value.hasOwnProperty('httpVersion')) { - callback(null, +value.headers['content-length']); +function addChild(currentNode, childNode, jPath){ + const result = this.options.updateTag(childNode.tagname, jPath, childNode[":@"]) + if(result === false){ + }else if(typeof result === "string"){ + childNode.tagname = result + currentNode.addChild(childNode); + }else{ + currentNode.addChild(childNode); + } +} - // or request stream http://github.com/mikeal/request - } else if (value.hasOwnProperty('httpModule')) { - // wait till response come back - value.on('response', function(response) { - value.pause(); - callback(null, +response.headers['content-length']); - }); - value.resume(); +const replaceEntitiesValue = function(val){ - // something else - } else { - callback('Unknown stream'); + if(this.options.processEntities){ + for(let entityName in this.docTypeEntities){ + const entity = this.docTypeEntities[entityName]; + val = val.replace( entity.regx, entity.val); + } + for(let entityName in this.lastEntities){ + const entity = this.lastEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + if(this.options.htmlEntities){ + for(let entityName in this.htmlEntities){ + const entity = this.htmlEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + } + val = val.replace( this.ampEntity.regex, this.ampEntity.val); } -}; + return val; +} +function saveTextToParentTag(textData, currentNode, jPath, isLeafNode) { + if (textData) { //store previously collected data as textNode + if(isLeafNode === undefined) isLeafNode = Object.keys(currentNode.child).length === 0 + + textData = this.parseTextData(textData, + currentNode.tagname, + jPath, + false, + currentNode[":@"] ? Object.keys(currentNode[":@"]).length !== 0 : false, + isLeafNode); -FormData.prototype._multiPartHeader = function(field, value, options) { - // custom header specified (as string)? 
- // it becomes responsible for boundary - // (e.g. to handle extra CRLFs on .NET servers) - if (typeof options.header == 'string') { - return options.header; + if (textData !== undefined && textData !== "") + currentNode.add(this.options.textNodeName, textData); + textData = ""; } + return textData; +} - var contentDisposition = this._getContentDisposition(value, options); - var contentType = this._getContentType(value, options); +//TODO: use jPath to simplify the logic +/** + * + * @param {string[]} stopNodes + * @param {string} jPath + * @param {string} currentTagName + */ +function isItStopNode(stopNodes, jPath, currentTagName){ + const allNodesExp = "*." + currentTagName; + for (const stopNodePath in stopNodes) { + const stopNodeExp = stopNodes[stopNodePath]; + if( allNodesExp === stopNodeExp || jPath === stopNodeExp ) return true; + } + return false; +} - var contents = ''; - var headers = { - // add custom disposition as third element or keep it two elements if not - 'Content-Disposition': ['form-data', 'name="' + field + '"'].concat(contentDisposition || []), - // if no content type. allow it to be empty array - 'Content-Type': [].concat(contentType || []) - }; +/** + * Returns the tag Expression and where it is ending handling single-double quotes situation + * @param {string} xmlData + * @param {number} i starting index + * @returns + */ +function tagExpWithClosingIndex(xmlData, i, closingChar = ">"){ + let attrBoundary; + let tagExp = ""; + for (let index = i; index < xmlData.length; index++) { + let ch = xmlData[index]; + if (attrBoundary) { + if (ch === attrBoundary) attrBoundary = "";//reset + } else if (ch === '"' || ch === "'") { + attrBoundary = ch; + } else if (ch === closingChar[0]) { + if(closingChar[1]){ + if(xmlData[index + 1] === closingChar[1]){ + return { + data: tagExp, + index: index + } + } + }else{ + return { + data: tagExp, + index: index + } + } + } else if (ch === '\t') { + ch = " " + } + tagExp += ch; + } +} - // allow custom headers. - if (typeof options.header == 'object') { - populate(headers, options.header); +function findClosingIndex(xmlData, str, i, errMsg){ + const closingIndex = xmlData.indexOf(str, i); + if(closingIndex === -1){ + throw new Error(errMsg) + }else{ + return closingIndex + str.length - 1; } +} - var header; - for (var prop in headers) { - if (!headers.hasOwnProperty(prop)) continue; - header = headers[prop]; +function readTagExp(xmlData,i, removeNSPrefix, closingChar = ">"){ + const result = tagExpWithClosingIndex(xmlData, i+1, closingChar); + if(!result) return; + let tagExp = result.data; + const closeIndex = result.index; + const separatorIndex = tagExp.search(/\s/); + let tagName = tagExp; + let attrExpPresent = true; + if(separatorIndex !== -1){//separate tag name and attributes expression + tagName = tagExp.substring(0, separatorIndex); + tagExp = tagExp.substring(separatorIndex + 1).trimStart(); + } - // skip nullish headers. - if (header == null) { - continue; + const rawTagName = tagName; + if(removeNSPrefix){ + const colonIndex = tagName.indexOf(":"); + if(colonIndex !== -1){ + tagName = tagName.substr(colonIndex+1); + attrExpPresent = tagName !== result.data.substr(colonIndex + 1); } + } - // convert all headers to arrays. 
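readTagExp() and readStopNodeData() above are what make stopNodes work: the body of a configured stop node is captured verbatim, with only same-named nested tags balanced against each other. A hedged sketch via the public API:

    const { XMLParser } = require("fast-xml-parser");

    const parser = new XMLParser({ stopNodes: ["root.pre"] });
    console.log(parser.parse("<root><pre><b>bold</b></pre></root>"));
    // { root: { pre: "<b>bold</b>" } }, the inner markup is kept raw, not parsed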
- if (!Array.isArray(header)) { - header = [header]; - } + return { + tagName: tagName, + tagExp: tagExp, + closeIndex: closeIndex, + attrExpPresent: attrExpPresent, + rawTagName: rawTagName, + } +} +/** + * find paired tag for a stop node + * @param {string} xmlData + * @param {string} tagName + * @param {number} i + */ +function readStopNodeData(xmlData, tagName, i){ + const startIndex = i; + // Starting at 1 since we already have an open tag + let openTagCount = 1; + + for (; i < xmlData.length; i++) { + if( xmlData[i] === "<"){ + if (xmlData[i+1] === "/") {//close tag + const closeIndex = findClosingIndex(xmlData, ">", i, `${tagName} is not closed`); + let closeTagName = xmlData.substring(i+2,closeIndex).trim(); + if(closeTagName === tagName){ + openTagCount--; + if (openTagCount === 0) { + return { + tagContent: xmlData.substring(startIndex, i), + i : closeIndex + } + } + } + i=closeIndex; + } else if(xmlData[i+1] === '?') { + const closeIndex = findClosingIndex(xmlData, "?>", i+1, "StopNode is not closed.") + i=closeIndex; + } else if(xmlData.substr(i + 1, 3) === '!--') { + const closeIndex = findClosingIndex(xmlData, "-->", i+3, "StopNode is not closed.") + i=closeIndex; + } else if(xmlData.substr(i + 1, 2) === '![') { + const closeIndex = findClosingIndex(xmlData, "]]>", i, "StopNode is not closed.") - 2; + i=closeIndex; + } else { + const tagData = readTagExp(xmlData, i, '>') - // add non-empty headers. - if (header.length) { - contents += prop + ': ' + header.join('; ') + FormData.LINE_BREAK; + if (tagData) { + const openTagName = tagData && tagData.tagName; + if (openTagName === tagName && tagData.tagExp[tagData.tagExp.length-1] !== "/") { + openTagCount++; + } + i=tagData.closeIndex; + } + } + } + }//end for loop +} + +function parseValue(val, shouldParse, options) { + if (shouldParse && typeof val === 'string') { + //console.log(options) + const newval = val.trim(); + if(newval === 'true' ) return true; + else if(newval === 'false' ) return false; + else return toNumber(val, options); + } else { + if (util.isExist(val)) { + return val; + } else { + return ''; } } +} - return '--' + this.getBoundary() + FormData.LINE_BREAK + contents + FormData.LINE_BREAK; -}; -FormData.prototype._getContentDisposition = function(value, options) { +module.exports = OrderedObjParser; - var filename - , contentDisposition - ; - if (typeof options.filepath === 'string') { - // custom filepath for relative paths - filename = path.normalize(options.filepath).replace(/\\/g, '/'); - } else if (options.filename || value.name || value.path) { - // custom filename take precedence - // formidable and the browser add a name property - // fs- and request- streams have path property - filename = path.basename(options.filename || value.name || value.path); - } else if (value.readable && value.hasOwnProperty('httpVersion')) { - // or try http response - filename = path.basename(value.client._httpMessage.path || ''); - } +/***/ }), - if (filename) { - contentDisposition = 'filename="' + filename + '"'; - } - - return contentDisposition; -}; - -FormData.prototype._getContentType = function(value, options) { - - // use custom content-type above all - var contentType = options.contentType; +/***/ 61919: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // or try `name` from formidable, browser - if (!contentType && value.name) { - contentType = mime.lookup(value.name); - } +const { buildOptions} = __nccwpck_require__(1839); +const OrderedObjParser = __nccwpck_require__(37270); +const { 
prettify} = __nccwpck_require__(44105); +const validator = __nccwpck_require__(37717); - // or try `path` from fs-, request- streams - if (!contentType && value.path) { - contentType = mime.lookup(value.path); - } +class XMLParser{ + + constructor(options){ + this.externalEntities = {}; + this.options = buildOptions(options); + + } + /** + * Parse XML dats to JS object + * @param {string|Buffer} xmlData + * @param {boolean|Object} validationOption + */ + parse(xmlData,validationOption){ + if(typeof xmlData === "string"){ + }else if( xmlData.toString){ + xmlData = xmlData.toString(); + }else{ + throw new Error("XML data is accepted in String or Bytes[] form.") + } + if( validationOption){ + if(validationOption === true) validationOption = {}; //validate with default options + + const result = validator.validate(xmlData, validationOption); + if (result !== true) { + throw Error( `${result.err.msg}:${result.err.line}:${result.err.col}` ) + } + } + const orderedObjParser = new OrderedObjParser(this.options); + orderedObjParser.addExternalEntities(this.externalEntities); + const orderedResult = orderedObjParser.parseXml(xmlData); + if(this.options.preserveOrder || orderedResult === undefined) return orderedResult; + else return prettify(orderedResult, this.options); + } - // or if it's http-reponse - if (!contentType && value.readable && value.hasOwnProperty('httpVersion')) { - contentType = value.headers['content-type']; - } + /** + * Add Entity which is not by default supported by this library + * @param {string} key + * @param {string} value + */ + addEntity(key, value){ + if(value.indexOf("&") !== -1){ + throw new Error("Entity value can't have '&'") + }else if(key.indexOf("&") !== -1 || key.indexOf(";") !== -1){ + throw new Error("An entity must be set without '&' and ';'. Eg. use '#xD' for ' '") + }else if(value === "&"){ + throw new Error("An entity with value '&' is not permitted"); + }else{ + this.externalEntities[key] = value; + } + } +} - // or guess it from the filepath or filename - if (!contentType && (options.filepath || options.filename)) { - contentType = mime.lookup(options.filepath || options.filename); - } +module.exports = XMLParser; - // fallback to the default content type if `value` is not simple value - if (!contentType && typeof value == 'object') { - contentType = FormData.DEFAULT_CONTENT_TYPE; - } +/***/ }), - return contentType; -}; +/***/ 44105: +/***/ ((__unused_webpack_module, exports) => { -FormData.prototype._multiPartFooter = function() { - return function(next) { - var footer = FormData.LINE_BREAK; +"use strict"; - var lastPart = (this._streams.length === 0); - if (lastPart) { - footer += this._lastBoundary(); - } - next(footer); - }.bind(this); -}; +/** + * + * @param {array} node + * @param {any} options + * @returns + */ +function prettify(node, options){ + return compress( node, options); +} -FormData.prototype._lastBoundary = function() { - return '--' + this.getBoundary() + '--' + FormData.LINE_BREAK; -}; +/** + * + * @param {array} arr + * @param {object} options + * @param {string} jPath + * @returns object + */ +function compress(arr, options, jPath){ + let text; + const compressedObj = {}; + for (let i = 0; i < arr.length; i++) { + const tagObj = arr[i]; + const property = propName(tagObj); + let newJpath = ""; + if(jPath === undefined) newJpath = property; + else newJpath = jPath + "." 
+ property; + + if(property === options.textNodeName){ + if(text === undefined) text = tagObj[property]; + else text += "" + tagObj[property]; + }else if(property === undefined){ + continue; + }else if(tagObj[property]){ + + let val = compress(tagObj[property], options, newJpath); + const isLeaf = isLeafTag(val, options); -FormData.prototype.getHeaders = function(userHeaders) { - var header; - var formHeaders = { - 'content-type': 'multipart/form-data; boundary=' + this.getBoundary() - }; + if(tagObj[":@"]){ + assignAttributes( val, tagObj[":@"], newJpath, options); + }else if(Object.keys(val).length === 1 && val[options.textNodeName] !== undefined && !options.alwaysCreateTextNode){ + val = val[options.textNodeName]; + }else if(Object.keys(val).length === 0){ + if(options.alwaysCreateTextNode) val[options.textNodeName] = ""; + else val = ""; + } - for (header in userHeaders) { - if (userHeaders.hasOwnProperty(header)) { - formHeaders[header.toLowerCase()] = userHeaders[header]; + if(compressedObj[property] !== undefined && compressedObj.hasOwnProperty(property)) { + if(!Array.isArray(compressedObj[property])) { + compressedObj[property] = [ compressedObj[property] ]; + } + compressedObj[property].push(val); + }else{ + //TODO: if a node is not an array, then check if it should be an array + //also determine if it is a leaf node + if (options.isArray(property, newJpath, isLeaf )) { + compressedObj[property] = [val]; + }else{ + compressedObj[property] = val; + } + } } + } + // if(text && text.length > 0) compressedObj[options.textNodeName] = text; + if(typeof text === "string"){ + if(text.length > 0) compressedObj[options.textNodeName] = text; + }else if(text !== undefined) compressedObj[options.textNodeName] = text; + return compressedObj; +} - return formHeaders; -}; - -FormData.prototype.setBoundary = function(boundary) { - this._boundary = boundary; -}; - -FormData.prototype.getBoundary = function() { - if (!this._boundary) { - this._generateBoundary(); +function propName(obj){ + const keys = Object.keys(obj); + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + if(key !== ":@") return key; } +} - return this._boundary; -}; - -FormData.prototype.getBuffer = function() { - var dataBuffer = new Buffer.alloc( 0 ); - var boundary = this.getBoundary(); - - // Create the form content. Add Line breaks to the end of data. - for (var i = 0, len = this._streams.length; i < len; i++) { - if (typeof this._streams[i] !== 'function') { - - // Add content to the buffer. - if(Buffer.isBuffer(this._streams[i])) { - dataBuffer = Buffer.concat( [dataBuffer, this._streams[i]]); - }else { - dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(this._streams[i])]); - } - - // Add break after content. - if (typeof this._streams[i] !== 'string' || this._streams[i].substring( 2, boundary.length + 2 ) !== boundary) { - dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(FormData.LINE_BREAK)] ); +function assignAttributes(obj, attrMap, jpath, options){ + if (attrMap) { + const keys = Object.keys(attrMap); + const len = keys.length; //don't make it inline + for (let i = 0; i < len; i++) { + const atrrName = keys[i]; + if (options.isArray(atrrName, jpath + "." + atrrName, true, true)) { + obj[atrrName] = [ attrMap[atrrName] ]; + } else { + obj[atrrName] = attrMap[atrrName]; } } } +} - // Add the footer and return the Buffer object. 
- return Buffer.concat( [dataBuffer, Buffer.from(this._lastBoundary())] ); -}; - -FormData.prototype._generateBoundary = function() { - // This generates a 50 character boundary similar to those used by Firefox. - // They are optimized for boyer-moore parsing. - var boundary = '--------------------------'; - for (var i = 0; i < 24; i++) { - boundary += Math.floor(Math.random() * 10).toString(16); +function isLeafTag(obj, options){ + const { textNodeName } = options; + const propCount = Object.keys(obj).length; + + if (propCount === 0) { + return true; } - this._boundary = boundary; -}; - -// Note: getLengthSync DOESN'T calculate streams length -// As workaround one can calculate file size manually -// and add it as knownLength option -FormData.prototype.getLengthSync = function() { - var knownLength = this._overheadLength + this._valueLength; - - // Don't get confused, there are 3 "internal" streams for each keyval pair - // so it basically checks if there is any value added to the form - if (this._streams.length) { - knownLength += this._lastBoundary().length; + if ( + propCount === 1 && + (obj[textNodeName] || typeof obj[textNodeName] === "boolean" || obj[textNodeName] === 0) + ) { + return true; } - // https://github.com/form-data/form-data/issues/40 - if (!this.hasKnownLength()) { - // Some async length retrievers are present - // therefore synchronous length calculation is false. - // Please use getLength(callback) to get proper length - this._error(new Error('Cannot calculate proper length in synchronous way.')); - } + return false; +} +exports.prettify = prettify; - return knownLength; -}; -// Public API to check if length of added values is known -// https://github.com/form-data/form-data/issues/196 -// https://github.com/form-data/form-data/issues/262 -FormData.prototype.hasKnownLength = function() { - var hasKnownLength = true; +/***/ }), - if (this._valuesToMeasure.length) { - hasKnownLength = false; - } +/***/ 49778: +/***/ ((module) => { - return hasKnownLength; -}; +"use strict"; -FormData.prototype.getLength = function(cb) { - var knownLength = this._overheadLength + this._valueLength; - if (this._streams.length) { - knownLength += this._lastBoundary().length; +class XmlNode{ + constructor(tagname) { + this.tagname = tagname; + this.child = []; //nested tags, text, cdata, comments in order + this[":@"] = {}; //attributes map } - - if (!this._valuesToMeasure.length) { - process.nextTick(cb.bind(this, null, knownLength)); - return; + add(key,val){ + // this.child.push( {name : key, val: val, isCdata: isCdata }); + if(key === "__proto__") key = "#__proto__"; + this.child.push( {[key]: val }); } - - asynckit.parallel(this._valuesToMeasure, this._lengthRetriever, function(err, values) { - if (err) { - cb(err); - return; + addChild(node) { + if(node.tagname === "__proto__") node.tagname = "#__proto__"; + if(node[":@"] && Object.keys(node[":@"]).length > 0){ + this.child.push( { [node.tagname]: node.child, [":@"]: node[":@"] }); + }else{ + this.child.push( { [node.tagname]: node.child }); } - - values.forEach(function(length) { - knownLength += length; - }); - - cb(null, knownLength); - }); + }; }; -FormData.prototype.submit = function(params, cb) { - var request - , options - , defaults = {method: 'post'} - ; - // parse provided url if it's string - // or treat it as options object - if (typeof params == 'string') { +module.exports = XmlNode; - params = parseUrl(params); - options = populate({ - port: params.port, - path: params.pathname, - host: params.hostname, - protocol: 
params.protocol - }, defaults); +/***/ }), - // use custom params - } else { +/***/ 61285: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - options = populate(params, defaults); - // if no port provided use default one - if (!options.port) { - options.port = options.protocol == 'https:' ? 443 : 80; +var debug; + +module.exports = function () { + if (!debug) { + try { + /* eslint global-require: off */ + debug = __nccwpck_require__(97785)("follow-redirects"); + } + catch (error) { /* */ } + if (typeof debug !== "function") { + debug = function () { /* */ }; } } + debug.apply(null, arguments); +}; - // put that good code in getHeaders to some use - options.headers = this.getHeaders(params.headers); - // https if specified, fallback to http in any other case - if (options.protocol == 'https:') { - request = https.request(options); - } else { - request = http.request(options); - } +/***/ }), - // get content length and fire away - this.getLength(function(err, length) { - if (err && err !== 'Unknown stream') { - this._error(err); - return; - } +/***/ 42803: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // add content length - if (length) { - request.setHeader('Content-Length', length); - } +var url = __nccwpck_require__(57310); +var URL = url.URL; +var http = __nccwpck_require__(13685); +var https = __nccwpck_require__(95687); +var Writable = (__nccwpck_require__(12781).Writable); +var assert = __nccwpck_require__(39491); +var debug = __nccwpck_require__(61285); - this.pipe(request); - if (cb) { - var onResponse; +// Whether to use the native URL object or the legacy url module +var useNativeURL = false; +try { + assert(new URL()); +} +catch (error) { + useNativeURL = error.code === "ERR_INVALID_URL"; +} + +// URL fields to preserve in copy operations +var preservedUrlFields = [ + "auth", + "host", + "hostname", + "href", + "path", + "pathname", + "port", + "protocol", + "query", + "search", + "hash", +]; - var callback = function (error, responce) { - request.removeListener('error', callback); - request.removeListener('response', onResponse); +// Create handlers that pass events from native requests +var events = ["abort", "aborted", "connect", "error", "socket", "timeout"]; +var eventHandlers = Object.create(null); +events.forEach(function (event) { + eventHandlers[event] = function (arg1, arg2, arg3) { + this._redirectable.emit(event, arg1, arg2, arg3); + }; +}); - return cb.call(this, error, responce); - }; +// Error types with codes +var InvalidUrlError = createErrorType( + "ERR_INVALID_URL", + "Invalid URL", + TypeError +); +var RedirectionError = createErrorType( + "ERR_FR_REDIRECTION_FAILURE", + "Redirected request failed" +); +var TooManyRedirectsError = createErrorType( + "ERR_FR_TOO_MANY_REDIRECTS", + "Maximum number of redirects exceeded", + RedirectionError +); +var MaxBodyLengthExceededError = createErrorType( + "ERR_FR_MAX_BODY_LENGTH_EXCEEDED", + "Request body larger than maxBodyLength limit" +); +var WriteAfterEndError = createErrorType( + "ERR_STREAM_WRITE_AFTER_END", + "write after end" +); - onResponse = callback.bind(this, null); +// istanbul ignore next +var destroy = Writable.prototype.destroy || noop; - request.on('error', callback); - request.on('response', onResponse); +// An HTTP(S) request that can be redirected +function RedirectableRequest(options, responseCallback) { + // Initialize the request + Writable.call(this); + this._sanitizeOptions(options); + this._options = options; + this._ended = false; + this._ending = 
false; + this._redirectCount = 0; + this._redirects = []; + this._requestBodyLength = 0; + this._requestBodyBuffers = []; + + // Attach a callback if passed + if (responseCallback) { + this.on("response", responseCallback); + } + + // React to responses of native requests + var self = this; + this._onNativeResponse = function (response) { + try { + self._processResponse(response); } - }.bind(this)); + catch (cause) { + self.emit("error", cause instanceof RedirectionError ? + cause : new RedirectionError({ cause: cause })); + } + }; - return request; -}; + // Perform the first request + this._performRequest(); +} +RedirectableRequest.prototype = Object.create(Writable.prototype); -FormData.prototype._error = function(err) { - if (!this.error) { - this.error = err; - this.pause(); - this.emit('error', err); - } +RedirectableRequest.prototype.abort = function () { + destroyRequest(this._currentRequest); + this._currentRequest.abort(); + this.emit("abort"); }; -FormData.prototype.toString = function () { - return '[object FormData]'; +RedirectableRequest.prototype.destroy = function (error) { + destroyRequest(this._currentRequest, error); + destroy.call(this, error); + return this; }; +// Writes buffered data to the current native request +RedirectableRequest.prototype.write = function (data, encoding, callback) { + // Writing is not allowed if end has been called + if (this._ending) { + throw new WriteAfterEndError(); + } -/***/ }), - -/***/ 13110: -/***/ ((module) => { + // Validate input and shift parameters if necessary + if (!isString(data) && !isBuffer(data)) { + throw new TypeError("data should be a string, Buffer or Uint8Array"); + } + if (isFunction(encoding)) { + callback = encoding; + encoding = null; + } -// populates missing values -module.exports = function(dst, src) { + // Ignore empty buffers, since writing them doesn't invoke the callback + // https://github.com/nodejs/node/issues/22066 + if (data.length === 0) { + if (callback) { + callback(); + } + return; + } + // Only write when we don't exceed the maximum body length + if (this._requestBodyLength + data.length <= this._options.maxBodyLength) { + this._requestBodyLength += data.length; + this._requestBodyBuffers.push({ data: data, encoding: encoding }); + this._currentRequest.write(data, encoding, callback); + } + // Error when we exceed the maximum body length + else { + this.emit("error", new MaxBodyLengthExceededError()); + this.abort(); + } +}; - Object.keys(src).forEach(function(prop) - { - dst[prop] = dst[prop] || src[prop]; - }); +// Ends the current native request +RedirectableRequest.prototype.end = function (data, encoding, callback) { + // Shift parameters if necessary + if (isFunction(data)) { + callback = data; + data = encoding = null; + } + else if (isFunction(encoding)) { + callback = encoding; + encoding = null; + } - return dst; + // Write data if needed and end + if (!data) { + this._ended = this._ending = true; + this._currentRequest.end(null, null, callback); + } + else { + var self = this; + var currentRequest = this._currentRequest; + this.write(data, encoding, function () { + self._ended = true; + currentRequest.end(null, null, callback); + }); + this._ending = true; + } }; +// Sets a header value on the current native request +RedirectableRequest.prototype.setHeader = function (name, value) { + this._options.headers[name] = value; + this._currentRequest.setHeader(name, value); +}; -/***/ }), +// Clears a header value on the current native request +RedirectableRequest.prototype.removeHeader = 
function (name) { + delete this._options.headers[name]; + this._currentRequest.removeHeader(name); +}; -/***/ 78095: -/***/ ((module) => { +// Global timeout for all underlying requests +RedirectableRequest.prototype.setTimeout = function (msecs, callback) { + var self = this; -"use strict"; + // Destroys the socket on timeout + function destroyOnTimeout(socket) { + socket.setTimeout(msecs); + socket.removeListener("timeout", socket.destroy); + socket.addListener("timeout", socket.destroy); + } + // Sets up a timer to trigger a timeout event + function startTimer(socket) { + if (self._timeout) { + clearTimeout(self._timeout); + } + self._timeout = setTimeout(function () { + self.emit("timeout"); + clearTimer(); + }, msecs); + destroyOnTimeout(socket); + } -module.exports = clone + // Stops a timeout from triggering + function clearTimer() { + // Clear the timeout + if (self._timeout) { + clearTimeout(self._timeout); + self._timeout = null; + } -var getPrototypeOf = Object.getPrototypeOf || function (obj) { - return obj.__proto__ -} + // Clean up all attached listeners + self.removeListener("abort", clearTimer); + self.removeListener("error", clearTimer); + self.removeListener("response", clearTimer); + self.removeListener("close", clearTimer); + if (callback) { + self.removeListener("timeout", callback); + } + if (!self.socket) { + self._currentRequest.removeListener("socket", startTimer); + } + } -function clone (obj) { - if (obj === null || typeof obj !== 'object') - return obj + // Attach callback if passed + if (callback) { + this.on("timeout", callback); + } - if (obj instanceof Object) - var copy = { __proto__: getPrototypeOf(obj) } - else - var copy = Object.create(null) + // Start the timer if or when the socket is opened + if (this.socket) { + startTimer(this.socket); + } + else { + this._currentRequest.once("socket", startTimer); + } - Object.getOwnPropertyNames(obj).forEach(function (key) { - Object.defineProperty(copy, key, Object.getOwnPropertyDescriptor(obj, key)) - }) + // Clean up on events + this.on("socket", destroyOnTimeout); + this.on("abort", clearTimer); + this.on("error", clearTimer); + this.on("response", clearTimer); + this.on("close", clearTimer); - return copy -} + return this; +}; +// Proxy all other public ClientRequest methods +[ + "flushHeaders", "getHeader", + "setNoDelay", "setSocketKeepAlive", +].forEach(function (method) { + RedirectableRequest.prototype[method] = function (a, b) { + return this._currentRequest[method](a, b); + }; +}); -/***/ }), - -/***/ 94412: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +// Proxy all public ClientRequest properties +["aborted", "connection", "socket"].forEach(function (property) { + Object.defineProperty(RedirectableRequest.prototype, property, { + get: function () { return this._currentRequest[property]; }, + }); +}); -var fs = __nccwpck_require__(57147) -var polyfills = __nccwpck_require__(52387) -var legacy = __nccwpck_require__(89243) -var clone = __nccwpck_require__(78095) +RedirectableRequest.prototype._sanitizeOptions = function (options) { + // Ensure headers are always present + if (!options.headers) { + options.headers = {}; + } -var util = __nccwpck_require__(73837) + // Since http.request treats host as an alias of hostname, + // but the url module interprets host as hostname plus port, + // eliminate the host property to avoid confusion. 
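+  // Illustrative note (not in the original comments): given { host: "example.org" }
+  // with no hostname set, the block below yields { hostname: "example.org" } and
+  // deletes host before the options reach the native http/https request.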
+ if (options.host) { + // Use hostname if set, because it has precedence + if (!options.hostname) { + options.hostname = options.host; + } + delete options.host; + } -/* istanbul ignore next - node 0.x polyfill */ -var gracefulQueue -var previousSymbol + // Complete the URL object when necessary + if (!options.pathname && options.path) { + var searchPos = options.path.indexOf("?"); + if (searchPos < 0) { + options.pathname = options.path; + } + else { + options.pathname = options.path.substring(0, searchPos); + options.search = options.path.substring(searchPos); + } + } +}; -/* istanbul ignore else - node 0.x polyfill */ -if (typeof Symbol === 'function' && typeof Symbol.for === 'function') { - gracefulQueue = Symbol.for('graceful-fs.queue') - // This is used in testing by future versions - previousSymbol = Symbol.for('graceful-fs.previous') -} else { - gracefulQueue = '___graceful-fs.queue' - previousSymbol = '___graceful-fs.previous' -} -function noop () {} +// Executes the next native request (initial or redirect) +RedirectableRequest.prototype._performRequest = function () { + // Load the native protocol + var protocol = this._options.protocol; + var nativeProtocol = this._options.nativeProtocols[protocol]; + if (!nativeProtocol) { + throw new TypeError("Unsupported protocol " + protocol); + } -function publishQueue(context, queue) { - Object.defineProperty(context, gracefulQueue, { - get: function() { - return queue - } - }) -} + // If specified, use the agent corresponding to the protocol + // (HTTP and HTTPS use different types of agents) + if (this._options.agents) { + var scheme = protocol.slice(0, -1); + this._options.agent = this._options.agents[scheme]; + } -var debug = noop -if (util.debuglog) - debug = util.debuglog('gfs4') -else if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) - debug = function() { - var m = util.format.apply(util, arguments) - m = 'GFS4: ' + m.split(/\n/).join('\nGFS4: ') - console.error(m) + // Create the native request and set up its event handlers + var request = this._currentRequest = + nativeProtocol.request(this._options, this._onNativeResponse); + request._redirectable = this; + for (var event of events) { + request.on(event, eventHandlers[event]); } -// Once time initialization -if (!fs[gracefulQueue]) { - // This queue can be shared by multiple loaded instances - var queue = global[gracefulQueue] || [] - publishQueue(fs, queue) + // RFC7230§5.3.1: When making a request directly to an origin server, […] + // a client MUST send only the absolute path […] as the request-target. + this._currentUrl = /^\//.test(this._options.path) ? + url.format(this._options) : + // When making a request to a proxy, […] + // a client MUST send the target URI in absolute-form […]. + this._options.path; - // Patch fs.close/closeSync to shared queue version, because we need - // to retry() whenever a close happens *anywhere* in the program. - // This is essential when multiple graceful-fs instances are - // in play at the same time. 
- fs.close = (function (fs$close) { - function close (fd, cb) { - return fs$close.call(fs, fd, function (err) { - // This function uses the graceful-fs shared queue - if (!err) { - resetQueue() + // End a redirected request + // (The first request must be ended explicitly with RedirectableRequest#end) + if (this._isRedirect) { + // Write the request entity and end + var i = 0; + var self = this; + var buffers = this._requestBodyBuffers; + (function writeNext(error) { + // Only write if this request has not been redirected yet + /* istanbul ignore else */ + if (request === self._currentRequest) { + // Report any write errors + /* istanbul ignore if */ + if (error) { + self.emit("error", error); + } + // Write the next buffer if there are still left + else if (i < buffers.length) { + var buffer = buffers[i++]; + /* istanbul ignore else */ + if (!request.finished) { + request.write(buffer.data, buffer.encoding, writeNext); + } + } + // End the request if `end` has been called on us + else if (self._ended) { + request.end(); } + } + }()); + } +}; - if (typeof cb === 'function') - cb.apply(this, arguments) - }) - } +// Processes a response from the current native request +RedirectableRequest.prototype._processResponse = function (response) { + // Store the redirected response + var statusCode = response.statusCode; + if (this._options.trackRedirects) { + this._redirects.push({ + url: this._currentUrl, + headers: response.headers, + statusCode: statusCode, + }); + } - Object.defineProperty(close, previousSymbol, { - value: fs$close - }) - return close - })(fs.close) + // RFC7231§6.4: The 3xx (Redirection) class of status code indicates + // that further action needs to be taken by the user agent in order to + // fulfill the request. If a Location header field is provided, + // the user agent MAY automatically redirect its request to the URI + // referenced by the Location field value, + // even if the specific status code is not understood. - fs.closeSync = (function (fs$closeSync) { - function closeSync (fd) { - // This function uses the graceful-fs shared queue - fs$closeSync.apply(fs, arguments) - resetQueue() - } + // If the response is not a redirect; return it as-is + var location = response.headers.location; + if (!location || this._options.followRedirects === false || + statusCode < 300 || statusCode >= 400) { + response.responseUrl = this._currentUrl; + response.redirects = this._redirects; + this.emit("response", response); - Object.defineProperty(closeSync, previousSymbol, { - value: fs$closeSync - }) - return closeSync - })(fs.closeSync) + // Clean up + this._requestBodyBuffers = []; + return; + } - if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) { - process.on('exit', function() { - debug(fs[gracefulQueue]) - __nccwpck_require__(39491).equal(fs[gracefulQueue].length, 0) - }) + // The response is a redirect, so abort the current request + destroyRequest(this._currentRequest); + // Discard the remainder of the response to avoid waiting for data + response.destroy(); + + // RFC7231§6.4: A client SHOULD detect and intervene + // in cyclical redirections (i.e., "infinite" redirection loops). 
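+  // Loop protection is a plain counter check against the maxRedirects option
+  // (wrap() below defaults it to 21); the URL chain itself is not inspected.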
+ if (++this._redirectCount > this._options.maxRedirects) { + throw new TooManyRedirectsError(); } -} -if (!global[gracefulQueue]) { - publishQueue(global, fs[gracefulQueue]); -} + // Store the request headers if applicable + var requestHeaders; + var beforeRedirect = this._options.beforeRedirect; + if (beforeRedirect) { + requestHeaders = Object.assign({ + // The Host header was set by nativeProtocol.request + Host: response.req.getHeader("host"), + }, this._options.headers); + } -module.exports = patch(clone(fs)) -if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs.__patched) { - module.exports = patch(fs) - fs.__patched = true; -} + // RFC7231§6.4: Automatic redirection needs to done with + // care for methods not known to be safe, […] + // RFC7231§6.4.2–3: For historical reasons, a user agent MAY change + // the request method from POST to GET for the subsequent request. + var method = this._options.method; + if ((statusCode === 301 || statusCode === 302) && this._options.method === "POST" || + // RFC7231§6.4.4: The 303 (See Other) status code indicates that + // the server is redirecting the user agent to a different resource […] + // A user agent can perform a retrieval request targeting that URI + // (a GET or HEAD request if using HTTP) […] + (statusCode === 303) && !/^(?:GET|HEAD)$/.test(this._options.method)) { + this._options.method = "GET"; + // Drop a possible entity and headers related to it + this._requestBodyBuffers = []; + removeMatchingHeaders(/^content-/i, this._options.headers); + } -function patch (fs) { - // Everything that references the open() function needs to be in here - polyfills(fs) - fs.gracefulify = patch + // Drop the Host header, as the redirect might lead to a different host + var currentHostHeader = removeMatchingHeaders(/^host$/i, this._options.headers); - fs.createReadStream = createReadStream - fs.createWriteStream = createWriteStream - var fs$readFile = fs.readFile - fs.readFile = readFile - function readFile (path, options, cb) { - if (typeof options === 'function') - cb = options, options = null + // If the redirect is relative, carry over the host of the last request + var currentUrlParts = parseUrl(this._currentUrl); + var currentHost = currentHostHeader || currentUrlParts.host; + var currentUrl = /^\w+:/.test(location) ? 
this._currentUrl : + url.format(Object.assign(currentUrlParts, { host: currentHost })); - return go$readFile(path, options, cb) + // Create the redirected request + var redirectUrl = resolveUrl(location, currentUrl); + debug("redirecting to", redirectUrl.href); + this._isRedirect = true; + spreadUrlObject(redirectUrl, this._options); - function go$readFile (path, options, cb, startTime) { - return fs$readFile(path, options, function (err) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([go$readFile, [path, options, cb], err, startTime || Date.now(), Date.now()]) - else { - if (typeof cb === 'function') - cb.apply(this, arguments) - } - }) - } + // Drop confidential headers when redirecting to a less secure protocol + // or to a different domain that is not a superdomain + if (redirectUrl.protocol !== currentUrlParts.protocol && + redirectUrl.protocol !== "https:" || + redirectUrl.host !== currentHost && + !isSubdomain(redirectUrl.host, currentHost)) { + removeMatchingHeaders(/^(?:(?:proxy-)?authorization|cookie)$/i, this._options.headers); } - var fs$writeFile = fs.writeFile - fs.writeFile = writeFile - function writeFile (path, data, options, cb) { - if (typeof options === 'function') - cb = options, options = null + // Evaluate the beforeRedirect callback + if (isFunction(beforeRedirect)) { + var responseDetails = { + headers: response.headers, + statusCode: statusCode, + }; + var requestDetails = { + url: currentUrl, + method: method, + headers: requestHeaders, + }; + beforeRedirect(this._options, responseDetails, requestDetails); + this._sanitizeOptions(this._options); + } - return go$writeFile(path, data, options, cb) + // Perform the redirected request + this._performRequest(); +}; - function go$writeFile (path, data, options, cb, startTime) { - return fs$writeFile(path, data, options, function (err) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([go$writeFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()]) - else { - if (typeof cb === 'function') - cb.apply(this, arguments) - } - }) - } - } +// Wraps the key/value object of protocols with redirect functionality +function wrap(protocols) { + // Default settings + var exports = { + maxRedirects: 21, + maxBodyLength: 10 * 1024 * 1024, + }; - var fs$appendFile = fs.appendFile - if (fs$appendFile) - fs.appendFile = appendFile - function appendFile (path, data, options, cb) { - if (typeof options === 'function') - cb = options, options = null + // Wrap each protocol + var nativeProtocols = {}; + Object.keys(protocols).forEach(function (scheme) { + var protocol = scheme + ":"; + var nativeProtocol = nativeProtocols[protocol] = protocols[scheme]; + var wrappedProtocol = exports[scheme] = Object.create(nativeProtocol); - return go$appendFile(path, data, options, cb) + // Executes a request, following redirects + function request(input, options, callback) { + // Parse parameters, ensuring that input is an object + if (isURL(input)) { + input = spreadUrlObject(input); + } + else if (isString(input)) { + input = spreadUrlObject(parseUrl(input)); + } + else { + callback = options; + options = validateUrl(input); + input = { protocol: protocol }; + } + if (isFunction(options)) { + callback = options; + options = null; + } - function go$appendFile (path, data, options, cb, startTime) { - return fs$appendFile(path, data, options, function (err) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([go$appendFile, [path, data, options, cb], err, 
startTime || Date.now(), Date.now()]) - else { - if (typeof cb === 'function') - cb.apply(this, arguments) - } - }) - } - } + // Set defaults + options = Object.assign({ + maxRedirects: exports.maxRedirects, + maxBodyLength: exports.maxBodyLength, + }, input, options); + options.nativeProtocols = nativeProtocols; + if (!isString(options.host) && !isString(options.hostname)) { + options.hostname = "::1"; + } - var fs$copyFile = fs.copyFile - if (fs$copyFile) - fs.copyFile = copyFile - function copyFile (src, dest, flags, cb) { - if (typeof flags === 'function') { - cb = flags - flags = 0 + assert.equal(options.protocol, protocol, "protocol mismatch"); + debug("options", options); + return new RedirectableRequest(options, callback); } - return go$copyFile(src, dest, flags, cb) - function go$copyFile (src, dest, flags, cb, startTime) { - return fs$copyFile(src, dest, flags, function (err) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([go$copyFile, [src, dest, flags, cb], err, startTime || Date.now(), Date.now()]) - else { - if (typeof cb === 'function') - cb.apply(this, arguments) - } - }) + // Executes a GET request, following redirects + function get(input, options, callback) { + var wrappedRequest = wrappedProtocol.request(input, options, callback); + wrappedRequest.end(); + return wrappedRequest; } - } - - var fs$readdir = fs.readdir - fs.readdir = readdir - var noReaddirOptionVersions = /^v[0-5]\./ - function readdir (path, options, cb) { - if (typeof options === 'function') - cb = options, options = null - - var go$readdir = noReaddirOptionVersions.test(process.version) - ? function go$readdir (path, options, cb, startTime) { - return fs$readdir(path, fs$readdirCallback( - path, options, cb, startTime - )) - } - : function go$readdir (path, options, cb, startTime) { - return fs$readdir(path, options, fs$readdirCallback( - path, options, cb, startTime - )) - } - return go$readdir(path, options, cb) + // Expose the properties on the wrapped protocol + Object.defineProperties(wrappedProtocol, { + request: { value: request, configurable: true, enumerable: true, writable: true }, + get: { value: get, configurable: true, enumerable: true, writable: true }, + }); + }); + return exports; +} - function fs$readdirCallback (path, options, cb, startTime) { - return function (err, files) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([ - go$readdir, - [path, options, cb], - err, - startTime || Date.now(), - Date.now() - ]) - else { - if (files && files.sort) - files.sort() +function noop() { /* empty */ } - if (typeof cb === 'function') - cb.call(this, err, files) - } - } +function parseUrl(input) { + var parsed; + /* istanbul ignore else */ + if (useNativeURL) { + parsed = new URL(input); + } + else { + // Ensure the URL is valid and absolute + parsed = validateUrl(url.parse(input)); + if (!isString(parsed.protocol)) { + throw new InvalidUrlError({ input }); } } + return parsed; +} - if (process.version.substr(0, 4) === 'v0.8') { - var legStreams = legacy(fs) - ReadStream = legStreams.ReadStream - WriteStream = legStreams.WriteStream +function resolveUrl(relative, base) { + /* istanbul ignore next */ + return useNativeURL ? 
new URL(relative, base) : parseUrl(url.resolve(base, relative)); +} + +function validateUrl(input) { + if (/^\[/.test(input.hostname) && !/^\[[:0-9a-f]+\]$/i.test(input.hostname)) { + throw new InvalidUrlError({ input: input.href || input }); + } + if (/^\[/.test(input.host) && !/^\[[:0-9a-f]+\](:\d+)?$/i.test(input.host)) { + throw new InvalidUrlError({ input: input.href || input }); } + return input; +} - var fs$ReadStream = fs.ReadStream - if (fs$ReadStream) { - ReadStream.prototype = Object.create(fs$ReadStream.prototype) - ReadStream.prototype.open = ReadStream$open +function spreadUrlObject(urlObject, target) { + var spread = target || {}; + for (var key of preservedUrlFields) { + spread[key] = urlObject[key]; } - var fs$WriteStream = fs.WriteStream - if (fs$WriteStream) { - WriteStream.prototype = Object.create(fs$WriteStream.prototype) - WriteStream.prototype.open = WriteStream$open + // Fix IPv6 hostname + if (spread.hostname.startsWith("[")) { + spread.hostname = spread.hostname.slice(1, -1); + } + // Ensure port is a number + if (spread.port !== "") { + spread.port = Number(spread.port); } + // Concatenate path + spread.path = spread.search ? spread.pathname + spread.search : spread.pathname; - Object.defineProperty(fs, 'ReadStream', { - get: function () { - return ReadStream - }, - set: function (val) { - ReadStream = val - }, - enumerable: true, - configurable: true - }) - Object.defineProperty(fs, 'WriteStream', { - get: function () { - return WriteStream - }, - set: function (val) { - WriteStream = val - }, - enumerable: true, - configurable: true - }) + return spread; +} - // legacy names - var FileReadStream = ReadStream - Object.defineProperty(fs, 'FileReadStream', { - get: function () { - return FileReadStream - }, - set: function (val) { - FileReadStream = val - }, - enumerable: true, - configurable: true - }) - var FileWriteStream = WriteStream - Object.defineProperty(fs, 'FileWriteStream', { - get: function () { - return FileWriteStream - }, - set: function (val) { - FileWriteStream = val - }, - enumerable: true, - configurable: true - }) +function removeMatchingHeaders(regex, headers) { + var lastValue; + for (var header in headers) { + if (regex.test(header)) { + lastValue = headers[header]; + delete headers[header]; + } + } + return (lastValue === null || typeof lastValue === "undefined") ? + undefined : String(lastValue).trim(); +} - function ReadStream (path, options) { - if (this instanceof ReadStream) - return fs$ReadStream.apply(this, arguments), this - else - return ReadStream.apply(Object.create(ReadStream.prototype), arguments) +function createErrorType(code, message, baseClass) { + // Create constructor + function CustomError(properties) { + Error.captureStackTrace(this, this.constructor); + Object.assign(this, properties || {}); + this.code = code; + this.message = this.cause ? 
message + ": " + this.cause.message : message; } - function ReadStream$open () { - var that = this - open(that.path, that.flags, that.mode, function (err, fd) { - if (err) { - if (that.autoClose) - that.destroy() + // Attach constructor and set default properties + CustomError.prototype = new (baseClass || Error)(); + Object.defineProperties(CustomError.prototype, { + constructor: { + value: CustomError, + enumerable: false, + }, + name: { + value: "Error [" + code + "]", + enumerable: false, + }, + }); + return CustomError; +} - that.emit('error', err) - } else { - that.fd = fd - that.emit('open', fd) - that.read() - } - }) +function destroyRequest(request, error) { + for (var event of events) { + request.removeListener(event, eventHandlers[event]); } + request.on("error", noop); + request.destroy(error); +} - function WriteStream (path, options) { - if (this instanceof WriteStream) - return fs$WriteStream.apply(this, arguments), this - else - return WriteStream.apply(Object.create(WriteStream.prototype), arguments) - } +function isSubdomain(subdomain, domain) { + assert(isString(subdomain) && isString(domain)); + var dot = subdomain.length - domain.length - 1; + return dot > 0 && subdomain[dot] === "." && subdomain.endsWith(domain); +} - function WriteStream$open () { - var that = this - open(that.path, that.flags, that.mode, function (err, fd) { - if (err) { - that.destroy() - that.emit('error', err) - } else { - that.fd = fd - that.emit('open', fd) - } - }) - } +function isString(value) { + return typeof value === "string" || value instanceof String; +} - function createReadStream (path, options) { - return new fs.ReadStream(path, options) - } +function isFunction(value) { + return typeof value === "function"; +} - function createWriteStream (path, options) { - return new fs.WriteStream(path, options) - } +function isBuffer(value) { + return typeof value === "object" && ("length" in value); +} - var fs$open = fs.open - fs.open = open - function open (path, flags, mode, cb) { - if (typeof mode === 'function') - cb = mode, mode = null +function isURL(value) { + return URL && value instanceof URL; +} - return go$open(path, flags, mode, cb) +// Exports +module.exports = wrap({ http: http, https: https }); +module.exports.wrap = wrap; - function go$open (path, flags, mode, cb, startTime) { - return fs$open(path, flags, mode, function (err, fd) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([go$open, [path, flags, mode, cb], err, startTime || Date.now(), Date.now()]) - else { - if (typeof cb === 'function') - cb.apply(this, arguments) - } - }) - } - } - return fs -} +/***/ }), -function enqueue (elem) { - debug('ENQUEUE', elem[0].name, elem[1]) - fs[gracefulQueue].push(elem) - retry() -} +/***/ 95676: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -// keep track of the timeout between retry() calls -var retryTimer +var CombinedStream = __nccwpck_require__(87105); +var util = __nccwpck_require__(73837); +var path = __nccwpck_require__(71017); +var http = __nccwpck_require__(13685); +var https = __nccwpck_require__(95687); +var parseUrl = (__nccwpck_require__(57310).parse); +var fs = __nccwpck_require__(57147); +var Stream = (__nccwpck_require__(12781).Stream); +var mime = __nccwpck_require__(36032); +var asynckit = __nccwpck_require__(40785); +var populate = __nccwpck_require__(23978); -// reset the startTime and lastTime to now -// this resets the start of the 60 second overall timeout as well as the -// delay between attempts so that we'll 
retry these jobs sooner -function resetQueue () { - var now = Date.now() - for (var i = 0; i < fs[gracefulQueue].length; ++i) { - // entries that are only a length of 2 are from an older version, don't - // bother modifying those since they'll be retried anyway. - if (fs[gracefulQueue][i].length > 2) { - fs[gracefulQueue][i][3] = now // startTime - fs[gracefulQueue][i][4] = now // lastTime - } +// Public API +module.exports = FormData; + +// make it a Stream +util.inherits(FormData, CombinedStream); + +/** + * Create readable "multipart/form-data" streams. + * Can be used to submit forms + * and file uploads to other web applications. + * + * @constructor + * @param {Object} options - Properties to be added/overriden for FormData and CombinedStream + */ +function FormData(options) { + if (!(this instanceof FormData)) { + return new FormData(options); + } + + this._overheadLength = 0; + this._valueLength = 0; + this._valuesToMeasure = []; + + CombinedStream.call(this); + + options = options || {}; + for (var option in options) { + this[option] = options[option]; } - // call retry to make sure we're actively processing the queue - retry() } -function retry () { - // clear the timer and remove it to help prevent unintended concurrency - clearTimeout(retryTimer) - retryTimer = undefined +FormData.LINE_BREAK = '\r\n'; +FormData.DEFAULT_CONTENT_TYPE = 'application/octet-stream'; - if (fs[gracefulQueue].length === 0) - return +FormData.prototype.append = function(field, value, options) { - var elem = fs[gracefulQueue].shift() - var fn = elem[0] - var args = elem[1] - // these items may be unset if they were added by an older graceful-fs - var err = elem[2] - var startTime = elem[3] - var lastTime = elem[4] + options = options || {}; - // if we don't have a startTime we have no way of knowing if we've waited - // long enough, so go ahead and retry this item now - if (startTime === undefined) { - debug('RETRY', fn.name, args) - fn.apply(null, args) - } else if (Date.now() - startTime >= 60000) { - // it's been more than 60 seconds total, bail now - debug('TIMEOUT', fn.name, args) - var cb = args.pop() - if (typeof cb === 'function') - cb.call(null, err) - } else { - // the amount of time between the last attempt and right now - var sinceAttempt = Date.now() - lastTime - // the amount of time between when we first tried, and when we last tried - // rounded up to at least 1 - var sinceStart = Math.max(lastTime - startTime, 1) - // backoff. 
wait longer than the total time we've been retrying, but only - // up to a maximum of 100ms - var desiredDelay = Math.min(sinceStart * 1.2, 100) - // it's been long enough since the last retry, do it again - if (sinceAttempt >= desiredDelay) { - debug('RETRY', fn.name, args) - fn.apply(null, args.concat([startTime])) - } else { - // if we can't do this job yet, push it to the end of the queue - // and let the next iteration check again - fs[gracefulQueue].push(elem) - } + // allow filename as single option + if (typeof options == 'string') { + options = {filename: options}; } - // schedule our next run if one isn't already scheduled - if (retryTimer === undefined) { - retryTimer = setTimeout(retry, 0) + var append = CombinedStream.prototype.append.bind(this); + + // all that streamy business can't handle numbers + if (typeof value == 'number') { + value = '' + value; } -} + // https://github.com/felixge/node-form-data/issues/38 + if (util.isArray(value)) { + // Please convert your array into string + // the way web server expects it + this._error(new Error('Arrays are not supported.')); + return; + } -/***/ }), + var header = this._multiPartHeader(field, value, options); + var footer = this._multiPartFooter(); -/***/ 89243: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + append(header); + append(value); + append(footer); -var Stream = (__nccwpck_require__(12781).Stream) + // pass along options.knownLength + this._trackLength(header, value, options); +}; -module.exports = legacy +FormData.prototype._trackLength = function(header, value, options) { + var valueLength = 0; -function legacy (fs) { - return { - ReadStream: ReadStream, - WriteStream: WriteStream + // used w/ getLengthSync(), when length is known. + // e.g. for streaming directly from a remote server, + // w/ a known file a size, and not wanting to wait for + // incoming file to finish to get its size. + if (options.knownLength != null) { + valueLength += +options.knownLength; + } else if (Buffer.isBuffer(value)) { + valueLength = value.length; + } else if (typeof value === 'string') { + valueLength = Buffer.byteLength(value); } - function ReadStream (path, options) { - if (!(this instanceof ReadStream)) return new ReadStream(path, options); + this._valueLength += valueLength; - Stream.call(this); + // @check why add CRLF? does this account for custom/multiple CRLFs? 
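+  // The single LINE_BREAK added below appears to cover the CRLF that
+  // _multiPartFooter emits after each appended value (see the @check note above).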
+  this._overheadLength +=
+    Buffer.byteLength(header) +
+    FormData.LINE_BREAK.length;
+
+  // empty or either doesn't have path or not an http response or not a stream
+  if (!value || ( !value.path && !(value.readable && value.hasOwnProperty('httpVersion')) && !(value instanceof Stream))) {
+    return;
+  }
+
+  // no need to bother with the length
+  if (!options.knownLength) {
+    this._valuesToMeasure.push(value);
+  }
+};
+
+FormData.prototype._lengthRetriever = function(value, callback) {
+
+  if (value.hasOwnProperty('fd')) {
+
+    // take read range into account
+    // `end` = Infinity –> read file till the end
+    //
+    // TODO: Looks like there is bug in Node fs.createReadStream
+    // it doesn't respect `end` options without `start` options
+    // Fix it when node fixes it.
+    // https://github.com/joyent/node/issues/7819
+    if (value.end != undefined && value.end != Infinity && value.start != undefined) {
+
+      // when end specified
+      // no need to calculate range
+      // inclusive, starts with 0
+      callback(null, value.end + 1 - (value.start ? value.start : 0));
+
+    // not that fast snoopy
+    } else {
+      // still need to fetch file size from fs
+      fs.stat(value.path, function(err, stat) {
+
+        var fileSize;
+
+        if (err) {
+          callback(err);
+          return;
+        }
+
+        // update final size based on the range options
+        fileSize = stat.size - (value.start ? value.start : 0);
+        callback(null, fileSize);
+      });
+    }
+
+  // or http response
+  } else if (value.hasOwnProperty('httpVersion')) {
+    callback(null, +value.headers['content-length']);
+
+  // or request stream http://github.com/mikeal/request
+  } else if (value.hasOwnProperty('httpModule')) {
+
+    // wait till response comes back
+    value.on('response', function(response) {
+      value.pause();
+      callback(null, +response.headers['content-length']);
+    });
+    value.resume();
+
+  // something else
+  } else {
+    callback('Unknown stream');
+  }
+};
+
+FormData.prototype._multiPartHeader = function(field, value, options) {
+  // custom header specified (as string)?
+  // it becomes responsible for boundary
+  // (e.g. to handle extra CRLFs on .NET servers)
+  if (typeof options.header == 'string') {
+    return options.header;
+  }
+
+  var contentDisposition = this._getContentDisposition(value, options);
+  var contentType = this._getContentType(value, options);
+
+  var contents = '';
+  var headers = {
+    // add custom disposition as third element or keep it two elements if not
+    'Content-Disposition': ['form-data', 'name="' + field + '"'].concat(contentDisposition || []),
+    // if no content type, allow it to be empty array
+    'Content-Type': [].concat(contentType || [])
+  };
+
+  // allow custom headers.
+  if (typeof options.header == 'object') {
+    populate(headers, options.header);
+  }
+
+  var header;
+  for (var prop in headers) {
+    if (!headers.hasOwnProperty(prop)) continue;
+    header = headers[prop];
+
+    // skip nullish headers.
+    if (header == null) {
+      continue;
+    }
+
+    // convert all headers to arrays.
+    if (!Array.isArray(header)) {
+      header = [header];
+    }
+
+    // add non-empty headers.
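+    // Illustration: a typical emitted line is
+    //   Content-Disposition: form-data; name="field"; filename="upload.txt"
+    // terminated by FormData.LINE_BREAK ("\r\n"); the names here are examples only.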
+    if (header.length) {
+      contents += prop + ': ' + header.join('; ') + FormData.LINE_BREAK;
+    }
+  }
+
+  return '--' + this.getBoundary() + FormData.LINE_BREAK + contents + FormData.LINE_BREAK;
+};
+
+FormData.prototype._getContentDisposition = function(value, options) {
+
+  var filename
+    , contentDisposition
+    ;
+
+  if (typeof options.filepath === 'string') {
+    // custom filepath for relative paths
+    filename = path.normalize(options.filepath).replace(/\\/g, '/');
+  } else if (options.filename || value.name || value.path) {
+    // custom filename takes precedence
+    // formidable and the browser add a name property
+    // fs- and request- streams have path property
+    filename = path.basename(options.filename || value.name || value.path);
+  } else if (value.readable && value.hasOwnProperty('httpVersion')) {
+    // or try http response
+    filename = path.basename(value.client._httpMessage.path || '');
+  }
+
+  if (filename) {
+    contentDisposition = 'filename="' + filename + '"';
+  }
+
+  return contentDisposition;
+};
+
+FormData.prototype._getContentType = function(value, options) {
+
+  // use custom content-type above all
+  var contentType = options.contentType;
+
+  // or try `name` from formidable, browser
+  if (!contentType && value.name) {
+    contentType = mime.lookup(value.name);
+  }
+
+  // or try `path` from fs-, request- streams
+  if (!contentType && value.path) {
+    contentType = mime.lookup(value.path);
+  }
+
+  // or if it's http-response
+  if (!contentType && value.readable && value.hasOwnProperty('httpVersion')) {
+    contentType = value.headers['content-type'];
+  }
+
+  // or guess it from the filepath or filename
+  if (!contentType && (options.filepath || options.filename)) {
+    contentType = mime.lookup(options.filepath || options.filename);
+  }
+
+  // fallback to the default content type if `value` is not simple value
+  if (!contentType && typeof value == 'object') {
+    contentType = FormData.DEFAULT_CONTENT_TYPE;
+  }

-/***/ }),

-/***/ 52387:
-/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

-var constants = __nccwpck_require__(22057)

-var origCwd = process.cwd
-var cwd = null

-var platform = process.env.GRACEFUL_FS_PLATFORM || process.platform

-process.cwd = function() {
-  if (!cwd)
-    cwd = origCwd.call(process)
-  return cwd
-}
-try {
-  process.cwd()
-} catch (er) {}

-// This check is needed until node.js 12 is required
-if (typeof process.chdir === 'function') {
-  var chdir = process.chdir
-  process.chdir = function (d) {
-    cwd = null
-    chdir.call(process, d)
-  }
-  if (Object.setPrototypeOf) Object.setPrototypeOf(process.chdir, chdir)
-}

-module.exports = patch

-function patch (fs) {
-  // (re-)implement some things that are known busted or missing.

-  // lchmod, broken prior to 0.6.2
-  // back-port the fix here.
-  if (constants.hasOwnProperty('O_SYMLINK') &&
-      process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) {
-    patchLchmod(fs)
-  }

-  // lutimes implementation, or no-op
-  if (!fs.lutimes) {
-    patchLutimes(fs)
-  }

-  // https://github.com/isaacs/node-graceful-fs/issues/4
-  // Chown should not fail on einval or eperm if non-root.
-  // It should not fail on enosys ever, as this just indicates
-  // that a fs doesn't support the intended operation.
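+  // If contentType is still undefined at this point, _multiPartHeader omits the
+  // Content-Type line entirely for that part (plain string values take this path).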
+ return contentType; +}; - fs.chown = chownFix(fs.chown) - fs.fchown = chownFix(fs.fchown) - fs.lchown = chownFix(fs.lchown) +FormData.prototype._multiPartFooter = function() { + return function(next) { + var footer = FormData.LINE_BREAK; - fs.chmod = chmodFix(fs.chmod) - fs.fchmod = chmodFix(fs.fchmod) - fs.lchmod = chmodFix(fs.lchmod) + var lastPart = (this._streams.length === 0); + if (lastPart) { + footer += this._lastBoundary(); + } - fs.chownSync = chownFixSync(fs.chownSync) - fs.fchownSync = chownFixSync(fs.fchownSync) - fs.lchownSync = chownFixSync(fs.lchownSync) + next(footer); + }.bind(this); +}; - fs.chmodSync = chmodFixSync(fs.chmodSync) - fs.fchmodSync = chmodFixSync(fs.fchmodSync) - fs.lchmodSync = chmodFixSync(fs.lchmodSync) - - fs.stat = statFix(fs.stat) - fs.fstat = statFix(fs.fstat) - fs.lstat = statFix(fs.lstat) +FormData.prototype._lastBoundary = function() { + return '--' + this.getBoundary() + '--' + FormData.LINE_BREAK; +}; - fs.statSync = statFixSync(fs.statSync) - fs.fstatSync = statFixSync(fs.fstatSync) - fs.lstatSync = statFixSync(fs.lstatSync) +FormData.prototype.getHeaders = function(userHeaders) { + var header; + var formHeaders = { + 'content-type': 'multipart/form-data; boundary=' + this.getBoundary() + }; - // if lchmod/lchown do not exist, then make them no-ops - if (fs.chmod && !fs.lchmod) { - fs.lchmod = function (path, mode, cb) { - if (cb) process.nextTick(cb) - } - fs.lchmodSync = function () {} - } - if (fs.chown && !fs.lchown) { - fs.lchown = function (path, uid, gid, cb) { - if (cb) process.nextTick(cb) + for (header in userHeaders) { + if (userHeaders.hasOwnProperty(header)) { + formHeaders[header.toLowerCase()] = userHeaders[header]; } - fs.lchownSync = function () {} } - // on Windows, A/V software can lock the directory, causing this - // to fail with an EACCES or EPERM if the directory contains newly - // created files. Try again on failure, for up to 60 seconds. + return formHeaders; +}; - // Set the timeout this long because some Windows Anti-Virus, such as Parity - // bit9, may lock files for up to a minute, causing npm package install - // failures. Also, take care to yield the scheduler. Windows scheduling gives - // CPU to a busy looping process, which can cause the program causing the lock - // contention to be starved of CPU by node, so the contention doesn't resolve. - if (platform === "win32") { - fs.rename = typeof fs.rename !== 'function' ? fs.rename - : (function (fs$rename) { - function rename (from, to, cb) { - var start = Date.now() - var backoff = 0; - fs$rename(from, to, function CB (er) { - if (er - && (er.code === "EACCES" || er.code === "EPERM" || er.code === "EBUSY") - && Date.now() - start < 60000) { - setTimeout(function() { - fs.stat(to, function (stater, st) { - if (stater && stater.code === "ENOENT") - fs$rename(from, to, CB); - else - cb(er) - }) - }, backoff) - if (backoff < 100) - backoff += 10; - return; - } - if (cb) cb(er) - }) - } - if (Object.setPrototypeOf) Object.setPrototypeOf(rename, fs$rename) - return rename - })(fs.rename) +FormData.prototype.setBoundary = function(boundary) { + this._boundary = boundary; +}; + +FormData.prototype.getBoundary = function() { + if (!this._boundary) { + this._generateBoundary(); } - // if read() returns EAGAIN, then just try it again. - fs.read = typeof fs.read !== 'function' ? 
fs.read - : (function (fs$read) { - function read (fd, buffer, offset, length, position, callback_) { - var callback - if (callback_ && typeof callback_ === 'function') { - var eagCounter = 0 - callback = function (er, _, __) { - if (er && er.code === 'EAGAIN' && eagCounter < 10) { - eagCounter ++ - return fs$read.call(fs, fd, buffer, offset, length, position, callback) - } - callback_.apply(this, arguments) - } - } - return fs$read.call(fs, fd, buffer, offset, length, position, callback) - } + return this._boundary; +}; - // This ensures `util.promisify` works as it does for native `fs.read`. - if (Object.setPrototypeOf) Object.setPrototypeOf(read, fs$read) - return read - })(fs.read) +FormData.prototype.getBuffer = function() { + var dataBuffer = new Buffer.alloc( 0 ); + var boundary = this.getBoundary(); - fs.readSync = typeof fs.readSync !== 'function' ? fs.readSync - : (function (fs$readSync) { return function (fd, buffer, offset, length, position) { - var eagCounter = 0 - while (true) { - try { - return fs$readSync.call(fs, fd, buffer, offset, length, position) - } catch (er) { - if (er.code === 'EAGAIN' && eagCounter < 10) { - eagCounter ++ - continue - } - throw er + // Create the form content. Add Line breaks to the end of data. + for (var i = 0, len = this._streams.length; i < len; i++) { + if (typeof this._streams[i] !== 'function') { + + // Add content to the buffer. + if(Buffer.isBuffer(this._streams[i])) { + dataBuffer = Buffer.concat( [dataBuffer, this._streams[i]]); + }else { + dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(this._streams[i])]); } - } - }})(fs.readSync) - function patchLchmod (fs) { - fs.lchmod = function (path, mode, callback) { - fs.open( path - , constants.O_WRONLY | constants.O_SYMLINK - , mode - , function (err, fd) { - if (err) { - if (callback) callback(err) - return - } - // prefer to return the chmod error, if one occurs, - // but still try to close, and report closing errors if they occur. - fs.fchmod(fd, mode, function (err) { - fs.close(fd, function(err2) { - if (callback) callback(err || err2) - }) - }) - }) + // Add break after content. + if (typeof this._streams[i] !== 'string' || this._streams[i].substring( 2, boundary.length + 2 ) !== boundary) { + dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(FormData.LINE_BREAK)] ); + } } + } - fs.lchmodSync = function (path, mode) { - var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode) + // Add the footer and return the Buffer object. + return Buffer.concat( [dataBuffer, Buffer.from(this._lastBoundary())] ); +}; - // prefer to return the chmod error, if one occurs, - // but still try to close, and report closing errors if they occur. - var threw = true - var ret - try { - ret = fs.fchmodSync(fd, mode) - threw = false - } finally { - if (threw) { - try { - fs.closeSync(fd) - } catch (er) {} - } else { - fs.closeSync(fd) - } - } - return ret - } +FormData.prototype._generateBoundary = function() { + // This generates a 50 character boundary similar to those used by Firefox. + // They are optimized for boyer-moore parsing. 
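+  // Illustration: 26 dashes followed by 24 pseudo-random decimal digits,
+  // e.g. "--------------------------512940873326159408227531" (50 chars total).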
+ var boundary = '--------------------------'; + for (var i = 0; i < 24; i++) { + boundary += Math.floor(Math.random() * 10).toString(16); } - function patchLutimes (fs) { - if (constants.hasOwnProperty("O_SYMLINK") && fs.futimes) { - fs.lutimes = function (path, at, mt, cb) { - fs.open(path, constants.O_SYMLINK, function (er, fd) { - if (er) { - if (cb) cb(er) - return - } - fs.futimes(fd, at, mt, function (er) { - fs.close(fd, function (er2) { - if (cb) cb(er || er2) - }) - }) - }) - } + this._boundary = boundary; +}; - fs.lutimesSync = function (path, at, mt) { - var fd = fs.openSync(path, constants.O_SYMLINK) - var ret - var threw = true - try { - ret = fs.futimesSync(fd, at, mt) - threw = false - } finally { - if (threw) { - try { - fs.closeSync(fd) - } catch (er) {} - } else { - fs.closeSync(fd) - } - } - return ret - } +// Note: getLengthSync DOESN'T calculate streams length +// As workaround one can calculate file size manually +// and add it as knownLength option +FormData.prototype.getLengthSync = function() { + var knownLength = this._overheadLength + this._valueLength; - } else if (fs.futimes) { - fs.lutimes = function (_a, _b, _c, cb) { if (cb) process.nextTick(cb) } - fs.lutimesSync = function () {} - } + // Don't get confused, there are 3 "internal" streams for each keyval pair + // so it basically checks if there is any value added to the form + if (this._streams.length) { + knownLength += this._lastBoundary().length; } - function chmodFix (orig) { - if (!orig) return orig - return function (target, mode, cb) { - return orig.call(fs, target, mode, function (er) { - if (chownErOk(er)) er = null - if (cb) cb.apply(this, arguments) - }) - } + // https://github.com/form-data/form-data/issues/40 + if (!this.hasKnownLength()) { + // Some async length retrievers are present + // therefore synchronous length calculation is false. + // Please use getLength(callback) to get proper length + this._error(new Error('Cannot calculate proper length in synchronous way.')); } - function chmodFixSync (orig) { - if (!orig) return orig - return function (target, mode) { - try { - return orig.call(fs, target, mode) - } catch (er) { - if (!chownErOk(er)) throw er - } - } - } + return knownLength; +}; +// Public API to check if length of added values is known +// https://github.com/form-data/form-data/issues/196 +// https://github.com/form-data/form-data/issues/262 +FormData.prototype.hasKnownLength = function() { + var hasKnownLength = true; - function chownFix (orig) { - if (!orig) return orig - return function (target, uid, gid, cb) { - return orig.call(fs, target, uid, gid, function (er) { - if (chownErOk(er)) er = null - if (cb) cb.apply(this, arguments) - }) - } + if (this._valuesToMeasure.length) { + hasKnownLength = false; } - function chownFixSync (orig) { - if (!orig) return orig - return function (target, uid, gid) { - try { - return orig.call(fs, target, uid, gid) - } catch (er) { - if (!chownErOk(er)) throw er - } - } - } + return hasKnownLength; +}; - function statFix (orig) { - if (!orig) return orig - // Older versions of Node erroneously returned signed integers for - // uid + gid. - return function (target, options, cb) { - if (typeof options === 'function') { - cb = options - options = null - } - function callback (er, stats) { - if (stats) { - if (stats.uid < 0) stats.uid += 0x100000000 - if (stats.gid < 0) stats.gid += 0x100000000 - } - if (cb) cb.apply(this, arguments) - } - return options ? 
orig.call(fs, target, options, callback) - : orig.call(fs, target, callback) - } - } +FormData.prototype.getLength = function(cb) { + var knownLength = this._overheadLength + this._valueLength; - function statFixSync (orig) { - if (!orig) return orig - // Older versions of Node erroneously returned signed integers for - // uid + gid. - return function (target, options) { - var stats = options ? orig.call(fs, target, options) - : orig.call(fs, target) - if (stats) { - if (stats.uid < 0) stats.uid += 0x100000000 - if (stats.gid < 0) stats.gid += 0x100000000 - } - return stats; - } + if (this._streams.length) { + knownLength += this._lastBoundary().length; } - // ENOSYS means that the fs doesn't support the op. Just ignore - // that, because it doesn't matter. - // - // if there's no getuid, or if getuid() is something other - // than 0, and the error is EINVAL or EPERM, then just ignore - // it. - // - // This specific case is a silent failure in cp, install, tar, - // and most other unix tools that manage permissions. - // - // When running as root, or if other types of errors are - // encountered, then it's strict. - function chownErOk (er) { - if (!er) - return true - - if (er.code === "ENOSYS") - return true + if (!this._valuesToMeasure.length) { + process.nextTick(cb.bind(this, null, knownLength)); + return; + } - var nonroot = !process.getuid || process.getuid() !== 0 - if (nonroot) { - if (er.code === "EINVAL" || er.code === "EPERM") - return true + asynckit.parallel(this._valuesToMeasure, this._lengthRetriever, function(err, values) { + if (err) { + cb(err); + return; } - return false - } -} + values.forEach(function(length) { + knownLength += length; + }); + cb(null, knownLength); + }); +}; -/***/ }), +FormData.prototype.submit = function(params, cb) { + var request + , options + , defaults = {method: 'post'} + ; -/***/ 81619: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // parse provided url if it's string + // or treat it as options object + if (typeof params == 'string') { -try { - var util = __nccwpck_require__(73837); - /* istanbul ignore next */ - if (typeof util.inherits !== 'function') throw ''; - module.exports = util.inherits; -} catch (e) { - /* istanbul ignore next */ - module.exports = __nccwpck_require__(92945); -} + params = parseUrl(params); + options = populate({ + port: params.port, + path: params.pathname, + host: params.hostname, + protocol: params.protocol + }, defaults); + // use custom params + } else { -/***/ }), + options = populate(params, defaults); + // if no port provided use default one + if (!options.port) { + options.port = options.protocol == 'https:' ? 
443 : 80;
+    }
+  }

-/***/ 92945:
-/***/ ((module) => {

+  // put that good code in getHeaders to some use
+  options.headers = this.getHeaders(params.headers);

-if (typeof Object.create === 'function') {
-  // implementation from standard node.js 'util' module
-  module.exports = function inherits(ctor, superCtor) {
-    if (superCtor) {
-      ctor.super_ = superCtor
-      ctor.prototype = Object.create(superCtor.prototype, {
-        constructor: {
-          value: ctor,
-          enumerable: false,
-          writable: true,
-          configurable: true
-        }
-      })
-    }
-  };
-} else {
-  // old school shim for old browsers
-  module.exports = function inherits(ctor, superCtor) {
-    if (superCtor) {
-      ctor.super_ = superCtor
-      var TempCtor = function () {}
-      TempCtor.prototype = superCtor.prototype
-      ctor.prototype = new TempCtor()
-      ctor.prototype.constructor = ctor
-    }
+  // https if specified, fallback to http in any other case
+  if (options.protocol == 'https:') {
+    request = https.request(options);
+  } else {
+    request = http.request(options);
  }
-}

+  // get content length and fire away
+  this.getLength(function(err, length) {
+    if (err && err !== 'Unknown stream') {
+      this._error(err);
+      return;
+    }

-/***/ }),

+    // add content length
+    if (length) {
+      request.setHeader('Content-Length', length);
+    }

-/***/ 21671:
-/***/ ((__unused_webpack_module, exports) => {

+    this.pipe(request);
+    if (cb) {
+      var onResponse;

-"use strict";

+      var callback = function (error, response) {
+        request.removeListener('error', callback);
+        request.removeListener('response', onResponse);
+        return cb.call(this, error, response);
+      };

-Object.defineProperty(exports, "__esModule", ({ value: true }));

+      onResponse = callback.bind(this, null);

-/*!
- * is-plain-object
- *
- * Copyright (c) 2014-2017, Jon Schlinkert.
- * Released under the MIT License.
- */ + request.on('error', callback); + request.on('response', onResponse); + } + }.bind(this)); -function isObject(o) { - return Object.prototype.toString.call(o) === '[object Object]'; -} + return request; +}; -function isPlainObject(o) { - var ctor,prot; +FormData.prototype._error = function(err) { + if (!this.error) { + this.error = err; + this.pause(); + this.emit('error', err); + } +}; - if (isObject(o) === false) return false; +FormData.prototype.toString = function () { + return '[object FormData]'; +}; - // If has modified constructor - ctor = o.constructor; - if (ctor === undefined) return true; - // If has modified prototype - prot = ctor.prototype; - if (isObject(prot) === false) return false; +/***/ }), - // If constructor does not have an Object-specific method - if (prot.hasOwnProperty('isPrototypeOf') === false) { - return false; - } +/***/ 23978: +/***/ ((module) => { - // Most likely a plain Object - return true; -} +// populates missing values +module.exports = function(dst, src) { -exports.isPlainObject = isPlainObject; + Object.keys(src).forEach(function(prop) + { + dst[prop] = dst[prop] || src[prop]; + }); + + return dst; +}; /***/ }), -/***/ 78239: +/***/ 67171: /***/ ((module) => { "use strict"; -const isStream = stream => - stream !== null && - typeof stream === 'object' && - typeof stream.pipe === 'function'; +module.exports = clone -isStream.writable = stream => - isStream(stream) && - stream.writable !== false && - typeof stream._write === 'function' && - typeof stream._writableState === 'object'; +var getPrototypeOf = Object.getPrototypeOf || function (obj) { + return obj.__proto__ +} -isStream.readable = stream => - isStream(stream) && - stream.readable !== false && - typeof stream._read === 'function' && - typeof stream._readableState === 'object'; +function clone (obj) { + if (obj === null || typeof obj !== 'object') + return obj -isStream.duplex = stream => - isStream.writable(stream) && - isStream.readable(stream); + if (obj instanceof Object) + var copy = { __proto__: getPrototypeOf(obj) } + else + var copy = Object.create(null) -isStream.transform = stream => - isStream.duplex(stream) && - typeof stream._transform === 'function'; + Object.getOwnPropertyNames(obj).forEach(function (key) { + Object.defineProperty(copy, key, Object.getOwnPropertyDescriptor(obj, key)) + }) -module.exports = isStream; + return copy +} /***/ }), -/***/ 87209: -/***/ ((module) => { - -var toString = {}.toString; +/***/ 29303: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -module.exports = Array.isArray || function (arr) { - return toString.call(arr) == '[object Array]'; -}; +var fs = __nccwpck_require__(57147) +var polyfills = __nccwpck_require__(20307) +var legacy = __nccwpck_require__(22492) +var clone = __nccwpck_require__(67171) +var util = __nccwpck_require__(73837) -/***/ }), +/* istanbul ignore next - node 0.x polyfill */ +var gracefulQueue +var previousSymbol -/***/ 44485: -/***/ ((module) => { +/* istanbul ignore else - node 0.x polyfill */ +if (typeof Symbol === 'function' && typeof Symbol.for === 'function') { + gracefulQueue = Symbol.for('graceful-fs.queue') + // This is used in testing by future versions + previousSymbol = Symbol.for('graceful-fs.previous') +} else { + gracefulQueue = '___graceful-fs.queue' + previousSymbol = '___graceful-fs.previous' +} -"use strict"; -function e(e){this.message=e}e.prototype=new Error,e.prototype.name="InvalidCharacterError";var r="undefined"!=typeof 
window&&window.atob&&window.atob.bind(window)||function(r){var t=String(r).replace(/=+$/,"");if(t.length%4==1)throw new e("'atob' failed: The string to be decoded is not correctly encoded.");for(var n,o,a=0,i=0,c="";o=t.charAt(i++);~o&&(n=a%4?64*n+o:o,a++%4)?c+=String.fromCharCode(255&n>>(-2*a&6)):0)o="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=".indexOf(o);return c};function t(e){var t=e.replace(/-/g,"+").replace(/_/g,"/");switch(t.length%4){case 0:break;case 2:t+="==";break;case 3:t+="=";break;default:throw"Illegal base64url string!"}try{return function(e){return decodeURIComponent(r(e).replace(/(.)/g,(function(e,r){var t=r.charCodeAt(0).toString(16).toUpperCase();return t.length<2&&(t="0"+t),"%"+t})))}(t)}catch(e){return r(t)}}function n(e){this.message=e}function o(e,r){if("string"!=typeof e)throw new n("Invalid token specified");var o=!0===(r=r||{}).header?0:1;try{return JSON.parse(t(e.split(".")[o]))}catch(e){throw new n("Invalid token specified: "+e.message)}}n.prototype=new Error,n.prototype.name="InvalidTokenError";const a=o;a.default=o,a.InvalidTokenError=n,module.exports=a; -//# sourceMappingURL=jwt-decode.cjs.js.map +function noop () {} +function publishQueue(context, queue) { + Object.defineProperty(context, gracefulQueue, { + get: function() { + return queue + } + }) +} -/***/ }), +var debug = noop +if (util.debuglog) + debug = util.debuglog('gfs4') +else if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) + debug = function() { + var m = util.format.apply(util, arguments) + m = 'GFS4: ' + m.split(/\n/).join('\nGFS4: ') + console.error(m) + } -/***/ 64148: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +// Once time initialization +if (!fs[gracefulQueue]) { + // This queue can be shared by multiple loaded instances + var queue = global[gracefulQueue] || [] + publishQueue(fs, queue) -var util = __nccwpck_require__(73837); -var PassThrough = __nccwpck_require__(58721); + // Patch fs.close/closeSync to shared queue version, because we need + // to retry() whenever a close happens *anywhere* in the program. + // This is essential when multiple graceful-fs instances are + // in play at the same time. + fs.close = (function (fs$close) { + function close (fd, cb) { + return fs$close.call(fs, fd, function (err) { + // This function uses the graceful-fs shared queue + if (!err) { + resetQueue() + } -module.exports = { - Readable: Readable, - Writable: Writable -}; + if (typeof cb === 'function') + cb.apply(this, arguments) + }) + } -util.inherits(Readable, PassThrough); -util.inherits(Writable, PassThrough); + Object.defineProperty(close, previousSymbol, { + value: fs$close + }) + return close + })(fs.close) -// Patch the given method of instance so that the callback -// is executed once, before the actual method is called the -// first time. 
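-// Roughly: the patched method deletes itself on first use, invokes the setup
-// callback (which wires up the lazy source/destination stream), then falls
-// through to the prototype implementation.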
-function beforeFirstCall(instance, method, callback) { - instance[method] = function() { - delete instance[method]; - callback.apply(this, arguments); - return this[method].apply(this, arguments); - }; -} + fs.closeSync = (function (fs$closeSync) { + function closeSync (fd) { + // This function uses the graceful-fs shared queue + fs$closeSync.apply(fs, arguments) + resetQueue() + } -function Readable(fn, options) { - if (!(this instanceof Readable)) - return new Readable(fn, options); + Object.defineProperty(closeSync, previousSymbol, { + value: fs$closeSync + }) + return closeSync + })(fs.closeSync) - PassThrough.call(this, options); + if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) { + process.on('exit', function() { + debug(fs[gracefulQueue]) + __nccwpck_require__(39491).equal(fs[gracefulQueue].length, 0) + }) + } +} - beforeFirstCall(this, '_read', function() { - var source = fn.call(this, options); - var emit = this.emit.bind(this, 'error'); - source.on('error', emit); - source.pipe(this); - }); +if (!global[gracefulQueue]) { + publishQueue(global, fs[gracefulQueue]); +} - this.emit('readable'); +module.exports = patch(clone(fs)) +if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs.__patched) { + module.exports = patch(fs) + fs.__patched = true; } -function Writable(fn, options) { - if (!(this instanceof Writable)) - return new Writable(fn, options); +function patch (fs) { + // Everything that references the open() function needs to be in here + polyfills(fs) + fs.gracefulify = patch - PassThrough.call(this, options); + fs.createReadStream = createReadStream + fs.createWriteStream = createWriteStream + var fs$readFile = fs.readFile + fs.readFile = readFile + function readFile (path, options, cb) { + if (typeof options === 'function') + cb = options, options = null - beforeFirstCall(this, '_write', function() { - var destination = fn.call(this, options); - var emit = this.emit.bind(this, 'error'); - destination.on('error', emit); - this.pipe(destination); - }); + return go$readFile(path, options, cb) - this.emit('writable'); -} + function go$readFile (path, options, cb, startTime) { + return fs$readFile(path, options, function (err) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$readFile, [path, options, cb], err, startTime || Date.now(), Date.now()]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + } + }) + } + } + var fs$writeFile = fs.writeFile + fs.writeFile = writeFile + function writeFile (path, data, options, cb) { + if (typeof options === 'function') + cb = options, options = null + return go$writeFile(path, data, options, cb) -/***/ }), + function go$writeFile (path, data, options, cb, startTime) { + return fs$writeFile(path, data, options, function (err) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$writeFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + } + }) + } + } -/***/ 2541: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + var fs$appendFile = fs.appendFile + if (fs$appendFile) + fs.appendFile = appendFile + function appendFile (path, data, options, cb) { + if (typeof options === 'function') + cb = options, options = null -"use strict"; -// Copyright Joyent, Inc. and other Node contributors. 
-// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. + return go$appendFile(path, data, options, cb) -// a duplex stream is just a stream that is both readable and writable. -// Since JS doesn't have multiple prototypal inheritance, this class -// prototypally inherits from Readable, and then parasitically from -// Writable. + function go$appendFile (path, data, options, cb, startTime) { + return fs$appendFile(path, data, options, function (err) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$appendFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + } + }) + } + } + var fs$copyFile = fs.copyFile + if (fs$copyFile) + fs.copyFile = copyFile + function copyFile (src, dest, flags, cb) { + if (typeof flags === 'function') { + cb = flags + flags = 0 + } + return go$copyFile(src, dest, flags, cb) + function go$copyFile (src, dest, flags, cb, startTime) { + return fs$copyFile(src, dest, flags, function (err) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$copyFile, [src, dest, flags, cb], err, startTime || Date.now(), Date.now()]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + } + }) + } + } -/**/ + var fs$readdir = fs.readdir + fs.readdir = readdir + var noReaddirOptionVersions = /^v[0-5]\./ + function readdir (path, options, cb) { + if (typeof options === 'function') + cb = options, options = null -var pna = __nccwpck_require__(51831); -/**/ + var go$readdir = noReaddirOptionVersions.test(process.version) + ? 
function go$readdir (path, options, cb, startTime) { + return fs$readdir(path, fs$readdirCallback( + path, options, cb, startTime + )) + } + : function go$readdir (path, options, cb, startTime) { + return fs$readdir(path, options, fs$readdirCallback( + path, options, cb, startTime + )) + } -/**/ -var objectKeys = Object.keys || function (obj) { - var keys = []; - for (var key in obj) { - keys.push(key); - }return keys; -}; -/**/ + return go$readdir(path, options, cb) -module.exports = Duplex; + function fs$readdirCallback (path, options, cb, startTime) { + return function (err, files) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([ + go$readdir, + [path, options, cb], + err, + startTime || Date.now(), + Date.now() + ]) + else { + if (files && files.sort) + files.sort() -/**/ -var util = Object.create(__nccwpck_require__(76844)); -util.inherits = __nccwpck_require__(81619); -/**/ + if (typeof cb === 'function') + cb.call(this, err, files) + } + } + } + } -var Readable = __nccwpck_require__(95952); -var Writable = __nccwpck_require__(5618); + if (process.version.substr(0, 4) === 'v0.8') { + var legStreams = legacy(fs) + ReadStream = legStreams.ReadStream + WriteStream = legStreams.WriteStream + } -util.inherits(Duplex, Readable); + var fs$ReadStream = fs.ReadStream + if (fs$ReadStream) { + ReadStream.prototype = Object.create(fs$ReadStream.prototype) + ReadStream.prototype.open = ReadStream$open + } -{ - // avoid scope creep, the keys array can then be collected - var keys = objectKeys(Writable.prototype); - for (var v = 0; v < keys.length; v++) { - var method = keys[v]; - if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + var fs$WriteStream = fs.WriteStream + if (fs$WriteStream) { + WriteStream.prototype = Object.create(fs$WriteStream.prototype) + WriteStream.prototype.open = WriteStream$open } -} -function Duplex(options) { - if (!(this instanceof Duplex)) return new Duplex(options); + Object.defineProperty(fs, 'ReadStream', { + get: function () { + return ReadStream + }, + set: function (val) { + ReadStream = val + }, + enumerable: true, + configurable: true + }) + Object.defineProperty(fs, 'WriteStream', { + get: function () { + return WriteStream + }, + set: function (val) { + WriteStream = val + }, + enumerable: true, + configurable: true + }) - Readable.call(this, options); - Writable.call(this, options); + // legacy names + var FileReadStream = ReadStream + Object.defineProperty(fs, 'FileReadStream', { + get: function () { + return FileReadStream + }, + set: function (val) { + FileReadStream = val + }, + enumerable: true, + configurable: true + }) + var FileWriteStream = WriteStream + Object.defineProperty(fs, 'FileWriteStream', { + get: function () { + return FileWriteStream + }, + set: function (val) { + FileWriteStream = val + }, + enumerable: true, + configurable: true + }) - if (options && options.readable === false) this.readable = false; + function ReadStream (path, options) { + if (this instanceof ReadStream) + return fs$ReadStream.apply(this, arguments), this + else + return ReadStream.apply(Object.create(ReadStream.prototype), arguments) + } - if (options && options.writable === false) this.writable = false; + function ReadStream$open () { + var that = this + open(that.path, that.flags, that.mode, function (err, fd) { + if (err) { + if (that.autoClose) + that.destroy() - this.allowHalfOpen = true; - if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; + that.emit('error', err) + } else 
{ + that.fd = fd + that.emit('open', fd) + that.read() + } + }) + } - this.once('end', onend); -} + function WriteStream (path, options) { + if (this instanceof WriteStream) + return fs$WriteStream.apply(this, arguments), this + else + return WriteStream.apply(Object.create(WriteStream.prototype), arguments) + } -Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function () { - return this._writableState.highWaterMark; + function WriteStream$open () { + var that = this + open(that.path, that.flags, that.mode, function (err, fd) { + if (err) { + that.destroy() + that.emit('error', err) + } else { + that.fd = fd + that.emit('open', fd) + } + }) } -}); -// the no-half-open enforcer -function onend() { - // if we allow half-open state, or if the writable side ended, - // then we're ok. - if (this.allowHalfOpen || this._writableState.ended) return; + function createReadStream (path, options) { + return new fs.ReadStream(path, options) + } - // no more data can be written. - // But allow more writes to happen in this tick. - pna.nextTick(onEndNT, this); -} + function createWriteStream (path, options) { + return new fs.WriteStream(path, options) + } -function onEndNT(self) { - self.end(); -} + var fs$open = fs.open + fs.open = open + function open (path, flags, mode, cb) { + if (typeof mode === 'function') + cb = mode, mode = null -Object.defineProperty(Duplex.prototype, 'destroyed', { - get: function () { - if (this._readableState === undefined || this._writableState === undefined) { - return false; - } - return this._readableState.destroyed && this._writableState.destroyed; - }, - set: function (value) { - // we ignore the value if the stream - // has not been initialized yet - if (this._readableState === undefined || this._writableState === undefined) { - return; - } + return go$open(path, flags, mode, cb) - // backward compatibility, the user is explicitly - // managing destroyed - this._readableState.destroyed = value; - this._writableState.destroyed = value; + function go$open (path, flags, mode, cb, startTime) { + return fs$open(path, flags, mode, function (err, fd) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$open, [path, flags, mode, cb], err, startTime || Date.now(), Date.now()]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + } + }) + } } -}); - -Duplex.prototype._destroy = function (err, cb) { - this.push(null); - this.end(); - pna.nextTick(cb, err); -}; + return fs +} -/***/ }), +function enqueue (elem) { + debug('ENQUEUE', elem[0].name, elem[1]) + fs[gracefulQueue].push(elem) + retry() +} -/***/ 73018: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +// keep track of the timeout between retry() calls +var retryTimer -"use strict"; -// Copyright Joyent, Inc. and other Node contributors. 
-// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -// a passthrough stream. -// basically just the most minimal sort of Transform stream. -// Every written chunk gets output as-is. - - - -module.exports = PassThrough; +// reset the startTime and lastTime to now +// this resets the start of the 60 second overall timeout as well as the +// delay between attempts so that we'll retry these jobs sooner +function resetQueue () { + var now = Date.now() + for (var i = 0; i < fs[gracefulQueue].length; ++i) { + // entries that are only a length of 2 are from an older version, don't + // bother modifying those since they'll be retried anyway. + if (fs[gracefulQueue][i].length > 2) { + fs[gracefulQueue][i][3] = now // startTime + fs[gracefulQueue][i][4] = now // lastTime + } + } + // call retry to make sure we're actively processing the queue + retry() +} -var Transform = __nccwpck_require__(1909); +function retry () { + // clear the timer and remove it to help prevent unintended concurrency + clearTimeout(retryTimer) + retryTimer = undefined -/**/ -var util = Object.create(__nccwpck_require__(76844)); -util.inherits = __nccwpck_require__(81619); -/**/ + if (fs[gracefulQueue].length === 0) + return -util.inherits(PassThrough, Transform); + var elem = fs[gracefulQueue].shift() + var fn = elem[0] + var args = elem[1] + // these items may be unset if they were added by an older graceful-fs + var err = elem[2] + var startTime = elem[3] + var lastTime = elem[4] -function PassThrough(options) { - if (!(this instanceof PassThrough)) return new PassThrough(options); + // if we don't have a startTime we have no way of knowing if we've waited + // long enough, so go ahead and retry this item now + if (startTime === undefined) { + debug('RETRY', fn.name, args) + fn.apply(null, args) + } else if (Date.now() - startTime >= 60000) { + // it's been more than 60 seconds total, bail now + debug('TIMEOUT', fn.name, args) + var cb = args.pop() + if (typeof cb === 'function') + cb.call(null, err) + } else { + // the amount of time between the last attempt and right now + var sinceAttempt = Date.now() - lastTime + // the amount of time between when we first tried, and when we last tried + // rounded up to at least 1 + var sinceStart = Math.max(lastTime - startTime, 1) + // backoff. 
wait longer than the total time we've been retrying, but only + // up to a maximum of 100ms + var desiredDelay = Math.min(sinceStart * 1.2, 100) + // it's been long enough since the last retry, do it again + if (sinceAttempt >= desiredDelay) { + debug('RETRY', fn.name, args) + fn.apply(null, args.concat([startTime])) + } else { + // if we can't do this job yet, push it to the end of the queue + // and let the next iteration check again + fs[gracefulQueue].push(elem) + } + } - Transform.call(this, options); + // schedule our next run if one isn't already scheduled + if (retryTimer === undefined) { + retryTimer = setTimeout(retry, 0) + } } -PassThrough.prototype._transform = function (chunk, encoding, cb) { - cb(null, chunk); -}; /***/ }), -/***/ 95952: +/***/ 22492: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+var Stream = (__nccwpck_require__(12781).Stream) +module.exports = legacy +function legacy (fs) { + return { + ReadStream: ReadStream, + WriteStream: WriteStream + } -/**/ + function ReadStream (path, options) { + if (!(this instanceof ReadStream)) return new ReadStream(path, options); -var pna = __nccwpck_require__(51831); -/**/ + Stream.call(this); -module.exports = Readable; + var self = this; -/**/ -var isArray = __nccwpck_require__(87209); -/**/ + this.path = path; + this.fd = null; + this.readable = true; + this.paused = false; -/**/ -var Duplex; -/**/ + this.flags = 'r'; + this.mode = 438; /*=0666*/ + this.bufferSize = 64 * 1024; -Readable.ReadableState = ReadableState; + options = options || {}; -/**/ -var EE = (__nccwpck_require__(82361).EventEmitter); + // Mixin options into this + var keys = Object.keys(options); + for (var index = 0, length = keys.length; index < length; index++) { + var key = keys[index]; + this[key] = options[key]; + } -var EElistenerCount = function (emitter, type) { - return emitter.listeners(type).length; -}; -/**/ + if (this.encoding) this.setEncoding(this.encoding); -/**/ -var Stream = __nccwpck_require__(84785); -/**/ + if (this.start !== undefined) { + if ('number' !== typeof this.start) { + throw TypeError('start must be a Number'); + } + if (this.end === undefined) { + this.end = Infinity; + } else if ('number' !== typeof this.end) { + throw TypeError('end must be a Number'); + } -/**/ + if (this.start > this.end) { + throw new Error('start must be <= end'); + } -var Buffer = (__nccwpck_require__(4539).Buffer); -var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; -function _uint8ArrayToBuffer(chunk) { - return Buffer.from(chunk); -} -function _isUint8Array(obj) { - return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; -} + this.pos = this.start; + } -/**/ + if (this.fd !== null) { + process.nextTick(function() { + self._read(); + }); + return; + } -/**/ -var util = Object.create(__nccwpck_require__(76844)); -util.inherits = __nccwpck_require__(81619); -/**/ + fs.open(this.path, this.flags, this.mode, function (err, fd) { + if (err) { + self.emit('error', err); + self.readable = false; + return; + } -/**/ -var debugUtil = __nccwpck_require__(73837); -var debug = void 0; -if (debugUtil && debugUtil.debuglog) { - debug = debugUtil.debuglog('stream'); -} else { - debug = function () {}; -} -/**/ + self.fd = fd; + self.emit('open', fd); + self._read(); + }) + } -var BufferList = __nccwpck_require__(97778); -var destroyImpl = __nccwpck_require__(39859); -var StringDecoder; + function WriteStream (path, options) { + if (!(this instanceof WriteStream)) return new WriteStream(path, options); -util.inherits(Readable, Stream); + Stream.call(this); -var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; + this.path = path; + this.fd = null; + this.writable = true; -function prependListener(emitter, event, fn) { - // Sadly this is not cacheable as some libraries bundle their own - // event emitter implementation with them. - if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); + this.flags = 'w'; + this.encoding = 'binary'; + this.mode = 438; /*=0666*/ + this.bytesWritten = 0; - // This is a hack to make sure that our error handler is attached before any - // userland ones. NEVER DO THIS. 
This is here only because this code needs - // to continue to work with older versions of Node.js that do not include - // the prependListener() method. The goal is to eventually remove this hack. - if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; -} + options = options || {}; -function ReadableState(options, stream) { - Duplex = Duplex || __nccwpck_require__(2541); + // Mixin options into this + var keys = Object.keys(options); + for (var index = 0, length = keys.length; index < length; index++) { + var key = keys[index]; + this[key] = options[key]; + } - options = options || {}; + if (this.start !== undefined) { + if ('number' !== typeof this.start) { + throw TypeError('start must be a Number'); + } + if (this.start < 0) { + throw new Error('start must be >= zero'); + } - // Duplex streams are both readable and writable, but share - // the same options object. - // However, some cases require setting options to different - // values for the readable and the writable sides of the duplex stream. - // These options can be provided separately as readableXXX and writableXXX. - var isDuplex = stream instanceof Duplex; + this.pos = this.start; + } - // object stream flag. Used to make read(n) ignore n and to - // make all the buffer merging and length checks go away - this.objectMode = !!options.objectMode; + this.busy = false; + this._queue = []; - if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; + if (this.fd === null) { + this._open = fs.open; + this._queue.push([this._open, this.path, this.flags, this.mode, undefined]); + this.flush(); + } + } +} - // the point at which it stops calling _read() to fill the buffer - // Note: 0 is a valid value, means "don't call _read preemptively ever" - var hwm = options.highWaterMark; - var readableHwm = options.readableHighWaterMark; - var defaultHwm = this.objectMode ? 16 : 16 * 1024; - if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm; +/***/ }), - // cast to ints. - this.highWaterMark = Math.floor(this.highWaterMark); +/***/ 20307: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // A linked list is used to store data chunks instead of an array because the - // linked list can remove elements from the beginning faster than - // array.shift() - this.buffer = new BufferList(); - this.length = 0; - this.pipes = null; - this.pipesCount = 0; - this.flowing = null; - this.ended = false; - this.endEmitted = false; - this.reading = false; +var constants = __nccwpck_require__(22057) - // a flag to be able to tell if the event 'readable'/'data' is emitted - // immediately, or on a later tick. We set this to true at first, because - // any actions that shouldn't happen until "later" should generally also - // not happen before the first read call. - this.sync = true; +var origCwd = process.cwd +var cwd = null - // whenever we return null, then we set a flag to say - // that we're awaiting a 'readable' event emission. 
- this.needReadable = false; - this.emittedReadable = false; - this.readableListening = false; - this.resumeScheduled = false; +var platform = process.env.GRACEFUL_FS_PLATFORM || process.platform - // has it been destroyed - this.destroyed = false; +process.cwd = function() { + if (!cwd) + cwd = origCwd.call(process) + return cwd +} +try { + process.cwd() +} catch (er) {} - // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. - // Everything else in the universe uses 'utf8', though. - this.defaultEncoding = options.defaultEncoding || 'utf8'; +// This check is needed until node.js 12 is required +if (typeof process.chdir === 'function') { + var chdir = process.chdir + process.chdir = function (d) { + cwd = null + chdir.call(process, d) + } + if (Object.setPrototypeOf) Object.setPrototypeOf(process.chdir, chdir) +} - // the number of writers that are awaiting a drain event in .pipe()s - this.awaitDrain = 0; +module.exports = patch - // if true, a maybeReadMore has been scheduled - this.readingMore = false; +function patch (fs) { + // (re-)implement some things that are known busted or missing. - this.decoder = null; - this.encoding = null; - if (options.encoding) { - if (!StringDecoder) StringDecoder = (__nccwpck_require__(79335)/* .StringDecoder */ .s); - this.decoder = new StringDecoder(options.encoding); - this.encoding = options.encoding; + // lchmod, broken prior to 0.6.2 + // back-port the fix here. + if (constants.hasOwnProperty('O_SYMLINK') && + process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) { + patchLchmod(fs) } -} -function Readable(options) { - Duplex = Duplex || __nccwpck_require__(2541); + // lutimes implementation, or no-op + if (!fs.lutimes) { + patchLutimes(fs) + } - if (!(this instanceof Readable)) return new Readable(options); + // https://github.com/isaacs/node-graceful-fs/issues/4 + // Chown should not fail on einval or eperm if non-root. + // It should not fail on enosys ever, as this just indicates + // that a fs doesn't support the intended operation. 
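+  // e.g. chownFix() below turns an EPERM/EINVAL failure from a non-root
+  // process (and any ENOSYS) into a success by nulling the error.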
- this._readableState = new ReadableState(options, this); + fs.chown = chownFix(fs.chown) + fs.fchown = chownFix(fs.fchown) + fs.lchown = chownFix(fs.lchown) - // legacy - this.readable = true; + fs.chmod = chmodFix(fs.chmod) + fs.fchmod = chmodFix(fs.fchmod) + fs.lchmod = chmodFix(fs.lchmod) - if (options) { - if (typeof options.read === 'function') this._read = options.read; + fs.chownSync = chownFixSync(fs.chownSync) + fs.fchownSync = chownFixSync(fs.fchownSync) + fs.lchownSync = chownFixSync(fs.lchownSync) - if (typeof options.destroy === 'function') this._destroy = options.destroy; - } + fs.chmodSync = chmodFixSync(fs.chmodSync) + fs.fchmodSync = chmodFixSync(fs.fchmodSync) + fs.lchmodSync = chmodFixSync(fs.lchmodSync) - Stream.call(this); -} + fs.stat = statFix(fs.stat) + fs.fstat = statFix(fs.fstat) + fs.lstat = statFix(fs.lstat) -Object.defineProperty(Readable.prototype, 'destroyed', { - get: function () { - if (this._readableState === undefined) { - return false; + fs.statSync = statFixSync(fs.statSync) + fs.fstatSync = statFixSync(fs.fstatSync) + fs.lstatSync = statFixSync(fs.lstatSync) + + // if lchmod/lchown do not exist, then make them no-ops + if (fs.chmod && !fs.lchmod) { + fs.lchmod = function (path, mode, cb) { + if (cb) process.nextTick(cb) } - return this._readableState.destroyed; - }, - set: function (value) { - // we ignore the value if the stream - // has not been initialized yet - if (!this._readableState) { - return; + fs.lchmodSync = function () {} + } + if (fs.chown && !fs.lchown) { + fs.lchown = function (path, uid, gid, cb) { + if (cb) process.nextTick(cb) } - - // backward compatibility, the user is explicitly - // managing destroyed - this._readableState.destroyed = value; + fs.lchownSync = function () {} } -}); -Readable.prototype.destroy = destroyImpl.destroy; -Readable.prototype._undestroy = destroyImpl.undestroy; -Readable.prototype._destroy = function (err, cb) { - this.push(null); - cb(err); -}; + // on Windows, A/V software can lock the directory, causing this + // to fail with an EACCES or EPERM if the directory contains newly + // created files. Try again on failure, for up to 60 seconds. -// Manually shove something into the read() buffer. -// This returns true if the highWaterMark has not been hit yet, -// similar to how Writable.write() returns true if you should -// write() some more. -Readable.prototype.push = function (chunk, encoding) { - var state = this._readableState; - var skipChunkCheck; + // Set the timeout this long because some Windows Anti-Virus, such as Parity + // bit9, may lock files for up to a minute, causing npm package install + // failures. Also, take care to yield the scheduler. Windows scheduling gives + // CPU to a busy looping process, which can cause the program causing the lock + // contention to be starved of CPU by node, so the contention doesn't resolve. + if (platform === "win32") { + fs.rename = typeof fs.rename !== 'function' ? 
fs.rename + : (function (fs$rename) { + function rename (from, to, cb) { + var start = Date.now() + var backoff = 0; + fs$rename(from, to, function CB (er) { + if (er + && (er.code === "EACCES" || er.code === "EPERM" || er.code === "EBUSY") + && Date.now() - start < 60000) { + setTimeout(function() { + fs.stat(to, function (stater, st) { + if (stater && stater.code === "ENOENT") + fs$rename(from, to, CB); + else + cb(er) + }) + }, backoff) + if (backoff < 100) + backoff += 10; + return; + } + if (cb) cb(er) + }) + } + if (Object.setPrototypeOf) Object.setPrototypeOf(rename, fs$rename) + return rename + })(fs.rename) + } - if (!state.objectMode) { - if (typeof chunk === 'string') { - encoding = encoding || state.defaultEncoding; - if (encoding !== state.encoding) { - chunk = Buffer.from(chunk, encoding); - encoding = ''; + // if read() returns EAGAIN, then just try it again. + fs.read = typeof fs.read !== 'function' ? fs.read + : (function (fs$read) { + function read (fd, buffer, offset, length, position, callback_) { + var callback + if (callback_ && typeof callback_ === 'function') { + var eagCounter = 0 + callback = function (er, _, __) { + if (er && er.code === 'EAGAIN' && eagCounter < 10) { + eagCounter ++ + return fs$read.call(fs, fd, buffer, offset, length, position, callback) + } + callback_.apply(this, arguments) + } } - skipChunkCheck = true; + return fs$read.call(fs, fd, buffer, offset, length, position, callback) } - } else { - skipChunkCheck = true; - } - return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); -}; + // This ensures `util.promisify` works as it does for native `fs.read`. + if (Object.setPrototypeOf) Object.setPrototypeOf(read, fs$read) + return read + })(fs.read) -// Unshift should *always* be something directly out of read() -Readable.prototype.unshift = function (chunk) { - return readableAddChunk(this, chunk, null, true, false); -}; + fs.readSync = typeof fs.readSync !== 'function' ? fs.readSync + : (function (fs$readSync) { return function (fd, buffer, offset, length, position) { + var eagCounter = 0 + while (true) { + try { + return fs$readSync.call(fs, fd, buffer, offset, length, position) + } catch (er) { + if (er.code === 'EAGAIN' && eagCounter < 10) { + eagCounter ++ + continue + } + throw er + } + } + }})(fs.readSync) -function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { - var state = stream._readableState; - if (chunk === null) { - state.reading = false; - onEofChunk(stream, state); - } else { - var er; - if (!skipChunkCheck) er = chunkInvalid(state, chunk); - if (er) { - stream.emit('error', er); - } else if (state.objectMode || chunk && chunk.length > 0) { - if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { - chunk = _uint8ArrayToBuffer(chunk); - } + function patchLchmod (fs) { + fs.lchmod = function (path, mode, callback) { + fs.open( path + , constants.O_WRONLY | constants.O_SYMLINK + , mode + , function (err, fd) { + if (err) { + if (callback) callback(err) + return + } + // prefer to return the chmod error, if one occurs, + // but still try to close, and report closing errors if they occur. 
+ fs.fchmod(fd, mode, function (err) { + fs.close(fd, function(err2) { + if (callback) callback(err || err2) + }) + }) + }) + } - if (addToFront) { - if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true); - } else if (state.ended) { - stream.emit('error', new Error('stream.push() after EOF')); - } else { - state.reading = false; - if (state.decoder && !encoding) { - chunk = state.decoder.write(chunk); - if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + fs.lchmodSync = function (path, mode) { + var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode) + + // prefer to return the chmod error, if one occurs, + // but still try to close, and report closing errors if they occur. + var threw = true + var ret + try { + ret = fs.fchmodSync(fd, mode) + threw = false + } finally { + if (threw) { + try { + fs.closeSync(fd) + } catch (er) {} } else { - addChunk(stream, state, chunk, false); + fs.closeSync(fd) } } - } else if (!addToFront) { - state.reading = false; + return ret } } - return needMoreData(state); -} + function patchLutimes (fs) { + if (constants.hasOwnProperty("O_SYMLINK") && fs.futimes) { + fs.lutimes = function (path, at, mt, cb) { + fs.open(path, constants.O_SYMLINK, function (er, fd) { + if (er) { + if (cb) cb(er) + return + } + fs.futimes(fd, at, mt, function (er) { + fs.close(fd, function (er2) { + if (cb) cb(er || er2) + }) + }) + }) + } -function addChunk(stream, state, chunk, addToFront) { - if (state.flowing && state.length === 0 && !state.sync) { - stream.emit('data', chunk); - stream.read(0); - } else { - // update the buffer info. - state.length += state.objectMode ? 1 : chunk.length; - if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + fs.lutimesSync = function (path, at, mt) { + var fd = fs.openSync(path, constants.O_SYMLINK) + var ret + var threw = true + try { + ret = fs.futimesSync(fd, at, mt) + threw = false + } finally { + if (threw) { + try { + fs.closeSync(fd) + } catch (er) {} + } else { + fs.closeSync(fd) + } + } + return ret + } - if (state.needReadable) emitReadable(stream); + } else if (fs.futimes) { + fs.lutimes = function (_a, _b, _c, cb) { if (cb) process.nextTick(cb) } + fs.lutimesSync = function () {} + } } - maybeReadMore(stream, state); -} -function chunkInvalid(state, chunk) { - var er; - if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { - er = new TypeError('Invalid non-string/buffer chunk'); + function chmodFix (orig) { + if (!orig) return orig + return function (target, mode, cb) { + return orig.call(fs, target, mode, function (er) { + if (chownErOk(er)) er = null + if (cb) cb.apply(this, arguments) + }) + } } - return er; -} - -// if it's past the high water mark, we can push in some more. -// Also, if we have no data yet, we can stand some -// more bytes. This is to work around cases where hwm=0, -// such as the repl. Also, if the push() triggered a -// readable event, and the user called read(largeNumber) such that -// needReadable was set, then we ought to push more, so that another -// 'readable' event will be triggered. 
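-// e.g. with hwm=0 and an empty, un-ended buffer, needMoreData() returns true,
-// so push() keeps reporting that more data is wanted.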
-function needMoreData(state) { - return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); -} -Readable.prototype.isPaused = function () { - return this._readableState.flowing === false; -}; + function chmodFixSync (orig) { + if (!orig) return orig + return function (target, mode) { + try { + return orig.call(fs, target, mode) + } catch (er) { + if (!chownErOk(er)) throw er + } + } + } -// backwards compatibility. -Readable.prototype.setEncoding = function (enc) { - if (!StringDecoder) StringDecoder = (__nccwpck_require__(79335)/* .StringDecoder */ .s); - this._readableState.decoder = new StringDecoder(enc); - this._readableState.encoding = enc; - return this; -}; -// Don't raise the hwm > 8MB -var MAX_HWM = 0x800000; -function computeNewHighWaterMark(n) { - if (n >= MAX_HWM) { - n = MAX_HWM; - } else { - // Get the next highest power of 2 to prevent increasing hwm excessively in - // tiny amounts - n--; - n |= n >>> 1; - n |= n >>> 2; - n |= n >>> 4; - n |= n >>> 8; - n |= n >>> 16; - n++; + function chownFix (orig) { + if (!orig) return orig + return function (target, uid, gid, cb) { + return orig.call(fs, target, uid, gid, function (er) { + if (chownErOk(er)) er = null + if (cb) cb.apply(this, arguments) + }) + } } - return n; -} -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function howMuchToRead(n, state) { - if (n <= 0 || state.length === 0 && state.ended) return 0; - if (state.objectMode) return 1; - if (n !== n) { - // Only flow one buffer at a time - if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; - } - // If we're asking for more than the current hwm, then raise the hwm. - if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); - if (n <= state.length) return n; - // Don't have enough - if (!state.ended) { - state.needReadable = true; - return 0; + function chownFixSync (orig) { + if (!orig) return orig + return function (target, uid, gid) { + try { + return orig.call(fs, target, uid, gid) + } catch (er) { + if (!chownErOk(er)) throw er + } + } } - return state.length; -} - -// you can override either this method, or the async _read(n) below. -Readable.prototype.read = function (n) { - debug('read', n); - n = parseInt(n, 10); - var state = this._readableState; - var nOrig = n; - - if (n !== 0) state.emittedReadable = false; - // if we're doing read(0) to trigger a readable event, but we - // already have a bunch of data in the buffer, then just trigger - // the 'readable' event and move on. - if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { - debug('read: emitReadable', state.length, state.ended); - if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); - return null; + function statFix (orig) { + if (!orig) return orig + // Older versions of Node erroneously returned signed integers for + // uid + gid. + return function (target, options, cb) { + if (typeof options === 'function') { + cb = options + options = null + } + function callback (er, stats) { + if (stats) { + if (stats.uid < 0) stats.uid += 0x100000000 + if (stats.gid < 0) stats.gid += 0x100000000 + } + if (cb) cb.apply(this, arguments) + } + return options ? orig.call(fs, target, options, callback) + : orig.call(fs, target, callback) + } } - n = howMuchToRead(n, state); - - // if we've ended, and we're now clear, then finish it up. 
- if (n === 0 && state.ended) { - if (state.length === 0) endReadable(this); - return null; + function statFixSync (orig) { + if (!orig) return orig + // Older versions of Node erroneously returned signed integers for + // uid + gid. + return function (target, options) { + var stats = options ? orig.call(fs, target, options) + : orig.call(fs, target) + if (stats) { + if (stats.uid < 0) stats.uid += 0x100000000 + if (stats.gid < 0) stats.gid += 0x100000000 + } + return stats; + } } - // All the actual chunk generation logic needs to be - // *below* the call to _read. The reason is that in certain - // synthetic stream cases, such as passthrough streams, _read - // may be a completely synchronous operation which may change - // the state of the read buffer, providing enough data when - // before there was *not* enough. + // ENOSYS means that the fs doesn't support the op. Just ignore + // that, because it doesn't matter. // - // So, the steps are: - // 1. Figure out what the state of things will be after we do - // a read from the buffer. + // if there's no getuid, or if getuid() is something other + // than 0, and the error is EINVAL or EPERM, then just ignore + // it. // - // 2. If that resulting state will trigger a _read, then call _read. - // Note that this may be asynchronous, or synchronous. Yes, it is - // deeply ugly to write APIs this way, but that still doesn't mean - // that the Readable class should behave improperly, as streams are - // designed to be sync/async agnostic. - // Take note if the _read call is sync or async (ie, if the read call - // has returned yet), so that we know whether or not it's safe to emit - // 'readable' etc. + // This specific case is a silent failure in cp, install, tar, + // and most other unix tools that manage permissions. // - // 3. Actually pull the requested chunks out of the buffer and return. + // When running as root, or if other types of errors are + // encountered, then it's strict. + function chownErOk (er) { + if (!er) + return true - // if we need a readable event, then we need to do some reading. - var doRead = state.needReadable; - debug('need readable', doRead); + if (er.code === "ENOSYS") + return true - // if we currently have less than the highWaterMark, then also read some - if (state.length === 0 || state.length - n < state.highWaterMark) { - doRead = true; - debug('length less than watermark', doRead); - } + var nonroot = !process.getuid || process.getuid() !== 0 + if (nonroot) { + if (er.code === "EINVAL" || er.code === "EPERM") + return true + } - // however, if we've ended, then there's no point, and if we're already - // reading, then it's unnecessary. - if (state.ended || state.reading) { - doRead = false; - debug('reading or ended', doRead); - } else if (doRead) { - debug('do read'); - state.reading = true; - state.sync = true; - // if the length is currently zero, then we *need* a readable event. - if (state.length === 0) state.needReadable = true; - // call internal read method - this._read(state.highWaterMark); - state.sync = false; - // If _read pushed data synchronously, then `reading` will be false, - // and we need to re-evaluate how much data we can return to the user. 
- if (!state.reading) n = howMuchToRead(nOrig, state); + return false } +} - var ret; - if (n > 0) ret = fromList(n, state);else ret = null; - - if (ret === null) { - state.needReadable = true; - n = 0; - } else { - state.length -= n; - } - if (state.length === 0) { - // If we have nothing in the buffer, then we want to know - // as soon as we *do* get something into the buffer. - if (!state.ended) state.needReadable = true; +/***/ }), - // If we tried to read() past the EOF, then emit end on the next tick. - if (nOrig !== n && state.ended) endReadable(this); - } +/***/ 50503: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - if (ret !== null) this.emit('data', ret); +"use strict"; - return ret; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; }; - -function onEofChunk(stream, state) { - if (state.ended) return; - if (state.decoder) { - var chunk = state.decoder.end(); - if (chunk && chunk.length) { - state.buffer.push(chunk); - state.length += state.objectMode ? 1 : chunk.length; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpProxyAgent = void 0; +const net = __importStar(__nccwpck_require__(41808)); +const tls = __importStar(__nccwpck_require__(24404)); +const debug_1 = __importDefault(__nccwpck_require__(97785)); +const events_1 = __nccwpck_require__(82361); +const agent_base_1 = __nccwpck_require__(24557); +const url_1 = __nccwpck_require__(57310); +const debug = (0, debug_1.default)('http-proxy-agent'); +/** + * The `HttpProxyAgent` implements an HTTP Agent subclass that connects + * to the specified "HTTP proxy server" in order to proxy HTTP requests. + */ +class HttpProxyAgent extends agent_base_1.Agent { + constructor(proxy, opts) { + super(opts); + this.proxy = typeof proxy === 'string' ? new url_1.URL(proxy) : proxy; + this.proxyHeaders = opts?.headers ?? {}; + debug('Creating new HttpProxyAgent instance: %o', this.proxy.href); + // Trim off the brackets from IPv6 addresses + const host = (this.proxy.hostname || this.proxy.host).replace(/^\[|\]$/g, ''); + const port = this.proxy.port + ? parseInt(this.proxy.port, 10) + : this.proxy.protocol === 'https:' + ? 443 + : 80; + this.connectOpts = { + ...(opts ? 
omit(opts, 'headers') : null), + host, + port, + }; + } + addRequest(req, opts) { + req._header = null; + this.setRequestProps(req, opts); + // @ts-expect-error `addRequest()` isn't defined in `@types/node` + super.addRequest(req, opts); + } + setRequestProps(req, opts) { + const { proxy } = this; + const protocol = opts.secureEndpoint ? 'https:' : 'http:'; + const hostname = req.getHeader('host') || 'localhost'; + const base = `${protocol}//${hostname}`; + const url = new url_1.URL(req.path, base); + if (opts.port !== 80) { + url.port = String(opts.port); + } + // Change the `http.ClientRequest` instance's "path" field + // to the absolute path of the URL that will be requested. + req.path = String(url); + // Inject the `Proxy-Authorization` header if necessary. + const headers = typeof this.proxyHeaders === 'function' + ? this.proxyHeaders() + : { ...this.proxyHeaders }; + if (proxy.username || proxy.password) { + const auth = `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`; + headers['Proxy-Authorization'] = `Basic ${Buffer.from(auth).toString('base64')}`; + } + if (!headers['Proxy-Connection']) { + headers['Proxy-Connection'] = this.keepAlive + ? 'Keep-Alive' + : 'close'; + } + for (const name of Object.keys(headers)) { + const value = headers[name]; + if (value) { + req.setHeader(name, value); + } + } + } + async connect(req, opts) { + req._header = null; + if (!req.path.includes('://')) { + this.setRequestProps(req, opts); + } + // At this point, the http ClientRequest's internal `_header` field + // might have already been set. If this is the case then we'll need + // to re-generate the string since we just changed the `req.path`. + let first; + let endOfHeaders; + debug('Regenerating stored HTTP header string for request'); + req._implicitHeader(); + if (req.outputData && req.outputData.length > 0) { + debug('Patching connection write() output buffer with updated header'); + first = req.outputData[0].data; + endOfHeaders = first.indexOf('\r\n\r\n') + 4; + req.outputData[0].data = + req._header + first.substring(endOfHeaders); + debug('Output buffer: %o', req.outputData[0].data); + } + // Create a socket connection to the proxy server. + let socket; + if (this.proxy.protocol === 'https:') { + debug('Creating `tls.Socket`: %o', this.connectOpts); + socket = tls.connect(this.connectOpts); + } + else { + debug('Creating `net.Socket`: %o', this.connectOpts); + socket = net.connect(this.connectOpts); + } + // Wait for the socket's `connect` event, so that this `callback()` + // function throws instead of the `http` request machinery. This is + // important for i.e. `PacProxyAgent` which determines a failed proxy + // connection via the `callback()` function throwing. + await (0, events_1.once)(socket, 'connect'); + return socket; } - } - state.ended = true; - - // emit 'readable' now to make sure it gets picked up. - emitReadable(stream); } - -// Don't emit readable right away in sync mode, because this can trigger -// another read() call => stack overflow. This way, it might trigger -// a nextTick recursion warning, but that's not so bad. 
-function emitReadable(stream) { - var state = stream._readableState; - state.needReadable = false; - if (!state.emittedReadable) { - debug('emitReadable', state.flowing); - state.emittedReadable = true; - if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream); - } +HttpProxyAgent.protocols = ['http', 'https']; +exports.HttpProxyAgent = HttpProxyAgent; +function omit(obj, ...keys) { + const ret = {}; + let key; + for (key in obj) { + if (!keys.includes(key)) { + ret[key] = obj[key]; + } + } + return ret; } +//# sourceMappingURL=index.js.map -function emitReadable_(stream) { - debug('emit readable'); - stream.emit('readable'); - flow(stream); -} +/***/ }), -// at this point, the user has presumably seen the 'readable' event, -// and called read() to consume some data. that may have triggered -// in turn another _read(n) call, in which case reading = true if -// it's in progress. -// However, if we're not ended, or reading, and the length < hwm, -// then go ahead and try to read some more preemptively. -function maybeReadMore(stream, state) { - if (!state.readingMore) { - state.readingMore = true; - pna.nextTick(maybeReadMore_, stream, state); - } -} +/***/ 78946: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { -function maybeReadMore_(stream, state) { - var len = state.length; - while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { - debug('maybeReadMore read 0'); - stream.read(0); - if (len === state.length) - // didn't get any data, stop spinning. - break;else len = state.length; - } - state.readingMore = false; -} +"use strict"; -// abstract method. to be overridden in specific implementation classes. -// call cb(er, data) where data is <= n in length. -// for virtual (non-string, non-buffer) streams, "length" is somewhat -// arbitrary, and perhaps not very meaningful. -Readable.prototype._read = function (n) { - this.emit('error', new Error('_read() is not implemented')); +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; }; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpsProxyAgent = void 0; +const net = __importStar(__nccwpck_require__(41808)); +const tls = __importStar(__nccwpck_require__(24404)); +const assert_1 = __importDefault(__nccwpck_require__(39491)); +const debug_1 = __importDefault(__nccwpck_require__(97785)); +const agent_base_1 = __nccwpck_require__(24557); +const url_1 = __nccwpck_require__(57310); +const parse_proxy_response_1 = __nccwpck_require__(91028); +const debug = (0, debug_1.default)('https-proxy-agent'); +/** + * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to + * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests. + * + * Outgoing HTTP requests are first tunneled through the proxy server using the + * `CONNECT` HTTP request method to establish a connection to the proxy server, + * and then the proxy server connects to the destination target and issues the + * HTTP request from the proxy server. + * + * `https:` requests have their socket connection upgraded to TLS once + * the connection to the proxy server has been established. + */ +class HttpsProxyAgent extends agent_base_1.Agent { + constructor(proxy, opts) { + super(opts); + this.options = { path: undefined }; + this.proxy = typeof proxy === 'string' ? new url_1.URL(proxy) : proxy; + this.proxyHeaders = opts?.headers ?? {}; + debug('Creating new HttpsProxyAgent instance: %o', this.proxy.href); + // Trim off the brackets from IPv6 addresses + const host = (this.proxy.hostname || this.proxy.host).replace(/^\[|\]$/g, ''); + const port = this.proxy.port + ? parseInt(this.proxy.port, 10) + : this.proxy.protocol === 'https:' + ? 443 + : 80; + this.connectOpts = { + // Attempt to negotiate http/1.1 for proxy servers that support http/2 + ALPNProtocols: ['http/1.1'], + ...(opts ? omit(opts, 'headers') : null), + host, + port, + }; + } + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. + */ + async connect(req, opts) { + const { proxy } = this; + if (!opts.host) { + throw new TypeError('No "host" provided'); + } + // Create a socket connection to the proxy server. + let socket; + if (proxy.protocol === 'https:') { + debug('Creating `tls.Socket`: %o', this.connectOpts); + const servername = this.connectOpts.servername || this.connectOpts.host; + socket = tls.connect({ + ...this.connectOpts, + servername: servername && net.isIP(servername) ? undefined : servername, + }); + } + else { + debug('Creating `net.Socket`: %o', this.connectOpts); + socket = net.connect(this.connectOpts); + } + const headers = typeof this.proxyHeaders === 'function' + ? this.proxyHeaders() + : { ...this.proxyHeaders }; + const host = net.isIPv6(opts.host) ? `[${opts.host}]` : opts.host; + let payload = `CONNECT ${host}:${opts.port} HTTP/1.1\r\n`; + // Inject the `Proxy-Authorization` header if necessary. + if (proxy.username || proxy.password) { + const auth = `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`; + headers['Proxy-Authorization'] = `Basic ${Buffer.from(auth).toString('base64')}`; + } + headers.Host = `${host}:${opts.port}`; + if (!headers['Proxy-Connection']) { + headers['Proxy-Connection'] = this.keepAlive + ? 
'Keep-Alive' + : 'close'; + } + for (const name of Object.keys(headers)) { + payload += `${name}: ${headers[name]}\r\n`; + } + const proxyResponsePromise = (0, parse_proxy_response_1.parseProxyResponse)(socket); + socket.write(`${payload}\r\n`); + const { connect, buffered } = await proxyResponsePromise; + req.emit('proxyConnect', connect); + this.emit('proxyConnect', connect, req); + if (connect.statusCode === 200) { + req.once('socket', resume); + if (opts.secureEndpoint) { + // The proxy is connecting to a TLS server, so upgrade + // this socket connection to a TLS connection. + debug('Upgrading socket connection to TLS'); + const servername = opts.servername || opts.host; + return tls.connect({ + ...omit(opts, 'host', 'path', 'port'), + socket, + servername: net.isIP(servername) ? undefined : servername, + }); + } + return socket; + } + // Some other status code that's not 200... need to re-play the HTTP + // header "data" events onto the socket once the HTTP machinery is + // attached so that the node core `http` can parse and handle the + // error status code. + // Close the original socket, and a new "fake" socket is returned + // instead, so that the proxy doesn't get the HTTP request + // written to it (which may contain `Authorization` headers or other + // sensitive data). + // + // See: https://hackerone.com/reports/541502 + socket.destroy(); + const fakeSocket = new net.Socket({ writable: false }); + fakeSocket.readable = true; + // Need to wait for the "socket" event to re-play the "data" events. + req.once('socket', (s) => { + debug('Replaying proxy buffer for failed request'); + (0, assert_1.default)(s.listenerCount('data') > 0); + // Replay the "buffered" Buffer onto the fake `socket`, since at + // this point the HTTP module machinery has been hooked up for + // the user. + s.push(buffered); + s.push(null); + }); + return fakeSocket; + } +} +HttpsProxyAgent.protocols = ['http', 'https']; +exports.HttpsProxyAgent = HttpsProxyAgent; +function resume(socket) { + socket.resume(); +} +function omit(obj, ...keys) { + const ret = {}; + let key; + for (key in obj) { + if (!keys.includes(key)) { + ret[key] = obj[key]; + } + } + return ret; +} +//# sourceMappingURL=index.js.map -Readable.prototype.pipe = function (dest, pipeOpts) { - var src = this; - var state = this._readableState; +/***/ }), - switch (state.pipesCount) { - case 0: - state.pipes = dest; - break; - case 1: - state.pipes = [state.pipes, dest]; - break; - default: - state.pipes.push(dest); - break; - } - state.pipesCount += 1; - debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); +/***/ 91028: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; +"use strict"; - var endFn = doEnd ? onend : unpipe; - if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.parseProxyResponse = void 0; +const debug_1 = __importDefault(__nccwpck_require__(97785)); +const debug = (0, debug_1.default)('https-proxy-agent:parse-proxy-response'); +function parseProxyResponse(socket) { + return new Promise((resolve, reject) => { + // we need to buffer any HTTP traffic that happens with the proxy before we get + // the CONNECT response, so that if the response is anything other than an "200" + // response code, then we can re-play the "data" events on the socket once the + // HTTP parser is hooked up... + let buffersLength = 0; + const buffers = []; + function read() { + const b = socket.read(); + if (b) + ondata(b); + else + socket.once('readable', read); + } + function cleanup() { + socket.removeListener('end', onend); + socket.removeListener('error', onerror); + socket.removeListener('readable', read); + } + function onend() { + cleanup(); + debug('onend'); + reject(new Error('Proxy connection ended before receiving CONNECT response')); + } + function onerror(err) { + cleanup(); + debug('onerror %o', err); + reject(err); + } + function ondata(b) { + buffers.push(b); + buffersLength += b.length; + const buffered = Buffer.concat(buffers, buffersLength); + const endOfHeaders = buffered.indexOf('\r\n\r\n'); + if (endOfHeaders === -1) { + // keep buffering + debug('have not received end of HTTP headers yet...'); + read(); + return; + } + const headerParts = buffered + .slice(0, endOfHeaders) + .toString('ascii') + .split('\r\n'); + const firstLine = headerParts.shift(); + if (!firstLine) { + socket.destroy(); + return reject(new Error('No header received from proxy CONNECT response')); + } + const firstLineParts = firstLine.split(' '); + const statusCode = +firstLineParts[1]; + const statusText = firstLineParts.slice(2).join(' '); + const headers = {}; + for (const header of headerParts) { + if (!header) + continue; + const firstColon = header.indexOf(':'); + if (firstColon === -1) { + socket.destroy(); + return reject(new Error(`Invalid header from proxy CONNECT response: "${header}"`)); + } + const key = header.slice(0, firstColon).toLowerCase(); + const value = header.slice(firstColon + 1).trimStart(); + const current = headers[key]; + if (typeof current === 'string') { + headers[key] = [current, value]; + } + else if (Array.isArray(current)) { + current.push(value); + } + else { + headers[key] = value; + } + } + debug('got proxy server response: %o %o', firstLine, headers); + cleanup(); + resolve({ + connect: { + statusCode, + statusText, + headers, + }, + buffered, + }); + } + socket.on('error', onerror); + socket.on('end', onend); + read(); + }); +} +exports.parseProxyResponse = parseProxyResponse; +//# sourceMappingURL=parse-proxy-response.js.map - dest.on('unpipe', onunpipe); - function onunpipe(readable, unpipeInfo) { - debug('onunpipe'); - if (readable === src) { - if (unpipeInfo && unpipeInfo.hasUnpiped === false) { - unpipeInfo.hasUnpiped = true; - cleanup(); - } - } - } +/***/ }), - function onend() { - debug('onend'); - dest.end(); - } +/***/ 92150: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // when the dest drains, it reduces the awaitDrain counter - // on the source. This would be more elegant with a .once() - // handler in flow(), but adding and removing repeatedly is - // too slow. 
- var ondrain = pipeOnDrain(src); - dest.on('drain', ondrain); +try { + var util = __nccwpck_require__(73837); + /* istanbul ignore next */ + if (typeof util.inherits !== 'function') throw ''; + module.exports = util.inherits; +} catch (e) { + /* istanbul ignore next */ + module.exports = __nccwpck_require__(88531); +} - var cleanedUp = false; - function cleanup() { - debug('cleanup'); - // cleanup event handlers once the pipe is broken - dest.removeListener('close', onclose); - dest.removeListener('finish', onfinish); - dest.removeListener('drain', ondrain); - dest.removeListener('error', onerror); - dest.removeListener('unpipe', onunpipe); - src.removeListener('end', onend); - src.removeListener('end', unpipe); - src.removeListener('data', ondata); - cleanedUp = true; +/***/ }), - // if the reader is waiting for a drain event from this - // specific writer, then it would cause it to never start - // flowing again. - // So, if this is awaiting a drain, then we just call it now. - // If we don't know, then assume that we are waiting for one. - if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); - } +/***/ 88531: +/***/ ((module) => { - // If the user pushes more data while we're writing to dest then we'll end up - // in ondata again. However, we only want to increase awaitDrain once because - // dest will only emit one 'drain' event for the multiple writes. - // => Introduce a guard on increasing awaitDrain. - var increasedAwaitDrain = false; - src.on('data', ondata); - function ondata(chunk) { - debug('ondata'); - increasedAwaitDrain = false; - var ret = dest.write(chunk); - if (false === ret && !increasedAwaitDrain) { - // If the user unpiped during `dest.write()`, it is possible - // to get stuck in a permanently paused state if that write - // also returned false. - // => Check whether `dest` is still a piping destination. - if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { - debug('false write response, pause', state.awaitDrain); - state.awaitDrain++; - increasedAwaitDrain = true; - } - src.pause(); +if (typeof Object.create === 'function') { + // implementation from standard node.js 'util' module + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + ctor.prototype = Object.create(superCtor.prototype, { + constructor: { + value: ctor, + enumerable: false, + writable: true, + configurable: true + } + }) + } + }; +} else { + // old school shim for old browsers + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + var TempCtor = function () {} + TempCtor.prototype = superCtor.prototype + ctor.prototype = new TempCtor() + ctor.prototype.constructor = ctor } } +} - // if the dest has an error, then stop piping into it. - // however, don't suppress the throwing behavior for this. - function onerror(er) { - debug('onerror', er); - unpipe(); - dest.removeListener('error', onerror); - if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); - } - // Make sure our error handler is attached before userland ones. - prependListener(dest, 'error', onerror); +/***/ }), - // Both close and finish should trigger unpipe, but only once. 
- function onclose() { - dest.removeListener('finish', onfinish); - unpipe(); - } - dest.once('close', onclose); - function onfinish() { - debug('onfinish'); - dest.removeListener('close', onclose); - unpipe(); - } - dest.once('finish', onfinish); +/***/ 63032: +/***/ ((__unused_webpack_module, exports) => { - function unpipe() { - debug('unpipe'); - src.unpipe(dest); - } +"use strict"; - // tell the dest that it's being piped to - dest.emit('pipe', src); - // start the flow if it hasn't been started already. - if (!state.flowing) { - debug('pipe resume'); - src.resume(); - } +Object.defineProperty(exports, "__esModule", ({ value: true })); - return dest; -}; +/*! + * is-plain-object + * + * Copyright (c) 2014-2017, Jon Schlinkert. + * Released under the MIT License. + */ -function pipeOnDrain(src) { - return function () { - var state = src._readableState; - debug('pipeOnDrain', state.awaitDrain); - if (state.awaitDrain) state.awaitDrain--; - if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { - state.flowing = true; - flow(src); - } - }; +function isObject(o) { + return Object.prototype.toString.call(o) === '[object Object]'; } -Readable.prototype.unpipe = function (dest) { - var state = this._readableState; - var unpipeInfo = { hasUnpiped: false }; +function isPlainObject(o) { + var ctor,prot; - // if we're not piping anywhere, then do nothing. - if (state.pipesCount === 0) return this; + if (isObject(o) === false) return false; - // just one destination. most common case. - if (state.pipesCount === 1) { - // passed in one, but it's not the right one. - if (dest && dest !== state.pipes) return this; + // If has modified constructor + ctor = o.constructor; + if (ctor === undefined) return true; - if (!dest) dest = state.pipes; + // If has modified prototype + prot = ctor.prototype; + if (isObject(prot) === false) return false; - // got a match. - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; - if (dest) dest.emit('unpipe', this, unpipeInfo); - return this; + // If constructor does not have an Object-specific method + if (prot.hasOwnProperty('isPrototypeOf') === false) { + return false; } - // slow case. multiple pipe destinations. + // Most likely a plain Object + return true; +} - if (!dest) { - // remove all. - var dests = state.pipes; - var len = state.pipesCount; - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; +exports.isPlainObject = isPlainObject; - for (var i = 0; i < len; i++) { - dests[i].emit('unpipe', this, { hasUnpiped: false }); - }return this; - } - // try to find the right one. 
- var index = indexOf(state.pipes, dest); - if (index === -1) return this; +/***/ }), - state.pipes.splice(index, 1); - state.pipesCount -= 1; - if (state.pipesCount === 1) state.pipes = state.pipes[0]; +/***/ 16425: +/***/ ((module) => { - dest.emit('unpipe', this, unpipeInfo); +"use strict"; - return this; -}; -// set up data events if they are asked for -// Ensure readable listeners eventually get something -Readable.prototype.on = function (ev, fn) { - var res = Stream.prototype.on.call(this, ev, fn); +const isStream = stream => + stream !== null && + typeof stream === 'object' && + typeof stream.pipe === 'function'; - if (ev === 'data') { - // Start flowing on next tick if stream isn't explicitly paused - if (this._readableState.flowing !== false) this.resume(); - } else if (ev === 'readable') { - var state = this._readableState; - if (!state.endEmitted && !state.readableListening) { - state.readableListening = state.needReadable = true; - state.emittedReadable = false; - if (!state.reading) { - pna.nextTick(nReadingNextTick, this); - } else if (state.length) { - emitReadable(this); - } - } - } +isStream.writable = stream => + isStream(stream) && + stream.writable !== false && + typeof stream._write === 'function' && + typeof stream._writableState === 'object'; - return res; -}; -Readable.prototype.addListener = Readable.prototype.on; +isStream.readable = stream => + isStream(stream) && + stream.readable !== false && + typeof stream._read === 'function' && + typeof stream._readableState === 'object'; -function nReadingNextTick(self) { - debug('readable nexttick read 0'); - self.read(0); -} +isStream.duplex = stream => + isStream.writable(stream) && + isStream.readable(stream); -// pause() and resume() are remnants of the legacy readable stream API -// If the user uses them, then switch into old mode. -Readable.prototype.resume = function () { - var state = this._readableState; - if (!state.flowing) { - debug('resume'); - state.flowing = true; - resume(this, state); - } - return this; -}; +isStream.transform = stream => + isStream.duplex(stream) && + typeof stream._transform === 'function'; -function resume(stream, state) { - if (!state.resumeScheduled) { - state.resumeScheduled = true; - pna.nextTick(resume_, stream, state); - } -} +module.exports = isStream; -function resume_(stream, state) { - if (!state.reading) { - debug('resume read 0'); - stream.read(0); - } - state.resumeScheduled = false; - state.awaitDrain = 0; - stream.emit('resume'); - flow(stream); - if (state.flowing && !state.reading) stream.read(0); -} +/***/ }), -Readable.prototype.pause = function () { - debug('call pause flowing=%j', this._readableState.flowing); - if (false !== this._readableState.flowing) { - debug('pause'); - this._readableState.flowing = false; - this.emit('pause'); - } - return this; +/***/ 97305: +/***/ ((module) => { + +var toString = {}.toString; + +module.exports = Array.isArray || function (arr) { + return toString.call(arr) == '[object Array]'; }; -function flow(stream) { - var state = stream._readableState; - debug('flow', state.flowing); - while (state.flowing && stream.read() !== null) {} -} -// wrap an old-style stream as the async data source. -// This is *not* part of the readable stream interface. -// It is an ugly unfortunate mess of history. 
-Readable.prototype.wrap = function (stream) { - var _this = this; +/***/ }), - var state = this._readableState; - var paused = false; +/***/ 33675: +/***/ ((module) => { - stream.on('end', function () { - debug('wrapped end'); - if (state.decoder && !state.ended) { - var chunk = state.decoder.end(); - if (chunk && chunk.length) _this.push(chunk); - } +"use strict"; +function e(e){this.message=e}e.prototype=new Error,e.prototype.name="InvalidCharacterError";var r="undefined"!=typeof window&&window.atob&&window.atob.bind(window)||function(r){var t=String(r).replace(/=+$/,"");if(t.length%4==1)throw new e("'atob' failed: The string to be decoded is not correctly encoded.");for(var n,o,a=0,i=0,c="";o=t.charAt(i++);~o&&(n=a%4?64*n+o:o,a++%4)?c+=String.fromCharCode(255&n>>(-2*a&6)):0)o="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=".indexOf(o);return c};function t(e){var t=e.replace(/-/g,"+").replace(/_/g,"/");switch(t.length%4){case 0:break;case 2:t+="==";break;case 3:t+="=";break;default:throw"Illegal base64url string!"}try{return function(e){return decodeURIComponent(r(e).replace(/(.)/g,(function(e,r){var t=r.charCodeAt(0).toString(16).toUpperCase();return t.length<2&&(t="0"+t),"%"+t})))}(t)}catch(e){return r(t)}}function n(e){this.message=e}function o(e,r){if("string"!=typeof e)throw new n("Invalid token specified");var o=!0===(r=r||{}).header?0:1;try{return JSON.parse(t(e.split(".")[o]))}catch(e){throw new n("Invalid token specified: "+e.message)}}n.prototype=new Error,n.prototype.name="InvalidTokenError";const a=o;a.default=o,a.InvalidTokenError=n,module.exports=a; +//# sourceMappingURL=jwt-decode.cjs.js.map - _this.push(null); - }); - stream.on('data', function (chunk) { - debug('wrapped data'); - if (state.decoder) chunk = state.decoder.write(chunk); +/***/ }), - // don't skip over falsy values in objectMode - if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; +/***/ 92058: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - var ret = _this.push(chunk); - if (!ret) { - paused = true; - stream.pause(); - } - }); +var util = __nccwpck_require__(73837); +var PassThrough = __nccwpck_require__(12972); - // proxy all the other methods. - // important when wrapping filters and duplexes. - for (var i in stream) { - if (this[i] === undefined && typeof stream[i] === 'function') { - this[i] = function (method) { - return function () { - return stream[method].apply(stream, arguments); - }; - }(i); - } - } +module.exports = { + Readable: Readable, + Writable: Writable +}; - // proxy certain important events. - for (var n = 0; n < kProxyEvents.length; n++) { - stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); - } +util.inherits(Readable, PassThrough); +util.inherits(Writable, PassThrough); - // when we try to consume some more bytes, simply unpause the - // underlying stream. - this._read = function (n) { - debug('wrapped _read', n); - if (paused) { - paused = false; - stream.resume(); - } +// Patch the given method of instance so that the callback +// is executed once, before the actual method is called the +// first time. 
+function beforeFirstCall(instance, method, callback) { + instance[method] = function() { + delete instance[method]; + callback.apply(this, arguments); + return this[method].apply(this, arguments); }; +} - return this; -}; +function Readable(fn, options) { + if (!(this instanceof Readable)) + return new Readable(fn, options); -Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function () { - return this._readableState.highWaterMark; - } -}); + PassThrough.call(this, options); -// exposed for testing purposes only. -Readable._fromList = fromList; + beforeFirstCall(this, '_read', function() { + var source = fn.call(this, options); + var emit = this.emit.bind(this, 'error'); + source.on('error', emit); + source.pipe(this); + }); -// Pluck off n bytes from an array of buffers. -// Length is the combined lengths of all the buffers in the list. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function fromList(n, state) { - // nothing buffered - if (state.length === 0) return null; + this.emit('readable'); +} - var ret; - if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { - // read it all, truncate the list - if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length); - state.buffer.clear(); - } else { - // read part of list - ret = fromListPartial(n, state.buffer, state.decoder); - } +function Writable(fn, options) { + if (!(this instanceof Writable)) + return new Writable(fn, options); - return ret; -} + PassThrough.call(this, options); -// Extracts only enough buffered data to satisfy the amount requested. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function fromListPartial(n, list, hasStrings) { - var ret; - if (n < list.head.data.length) { - // slice is the same for buffers and strings - ret = list.head.data.slice(0, n); - list.head.data = list.head.data.slice(n); - } else if (n === list.head.data.length) { - // first chunk is a perfect match - ret = list.shift(); - } else { - // result spans more than one buffer - ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list); - } - return ret; -} + beforeFirstCall(this, '_write', function() { + var destination = fn.call(this, options); + var emit = this.emit.bind(this, 'error'); + destination.on('error', emit); + this.pipe(destination); + }); -// Copies a specified amount of characters from the list of buffered data -// chunks. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function copyFromBufferString(n, list) { - var p = list.head; - var c = 1; - var ret = p.data; - n -= ret.length; - while (p = p.next) { - var str = p.data; - var nb = n > str.length ? str.length : n; - if (nb === str.length) ret += str;else ret += str.slice(0, n); - n -= nb; - if (n === 0) { - if (nb === str.length) { - ++c; - if (p.next) list.head = p.next;else list.head = list.tail = null; - } else { - list.head = p; - p.data = str.slice(nb); - } - break; - } - ++c; - } - list.length -= c; - return ret; + this.emit('writable'); } -// Copies a specified amount of bytes from the list of buffered data chunks. 
-// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function copyFromBuffer(n, list) { - var ret = Buffer.allocUnsafe(n); - var p = list.head; - var c = 1; - p.data.copy(ret); - n -= p.data.length; - while (p = p.next) { - var buf = p.data; - var nb = n > buf.length ? buf.length : n; - buf.copy(ret, ret.length - n, 0, nb); - n -= nb; - if (n === 0) { - if (nb === buf.length) { - ++c; - if (p.next) list.head = p.next;else list.head = list.tail = null; - } else { - list.head = p; - p.data = buf.slice(nb); - } - break; - } - ++c; - } - list.length -= c; - return ret; -} - -function endReadable(stream) { - var state = stream._readableState; - - // If we get here before consuming all the bytes, then that is a - // bug in node. Should never happen. - if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream'); - - if (!state.endEmitted) { - state.ended = true; - pna.nextTick(endReadableNT, state, stream); - } -} - -function endReadableNT(state, stream) { - // Check that we didn't get one last unshift. - if (!state.endEmitted && state.length === 0) { - state.endEmitted = true; - stream.readable = false; - stream.emit('end'); - } -} -function indexOf(xs, x) { - for (var i = 0, l = xs.length; i < l; i++) { - if (xs[i] === x) return i; - } - return -1; -} /***/ }), -/***/ 1909: +/***/ 32295: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; @@ -65977,203 +59427,174 @@ function indexOf(xs, x) { // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. -// a transform stream is a readable/writable stream where you do -// something with the data. Sometimes it's called a "filter", -// but that's not a great name for it, since that implies a thing where -// some bits pass through, and others are simply ignored. (That would -// be a valid example of a transform, of course.) -// -// While the output is causally related to the input, it's not a -// necessarily symmetric or synchronous transformation. For example, -// a zlib stream might take multiple plain-text writes(), and then -// emit a single compressed chunk some time in the future. -// -// Here's how this works: -// -// The Transform stream has all the aspects of the readable and writable -// stream classes. When you write(chunk), that calls _write(chunk,cb) -// internally, and returns false if there's a lot of pending writes -// buffered up. When you call read(), that calls _read(n) until -// there's enough pending readable data buffered up. -// -// In a transform stream, the written data is placed in a buffer. When -// _read(n) is called, it transforms the queued up data, calling the -// buffered _write cb's as it consumes chunks. If consuming a single -// written chunk would result in multiple output chunks, then the first -// outputted bit calls the readcb, and subsequent chunks just go into -// the read buffer, and will cause it to emit 'readable' if necessary. -// -// This way, back-pressure is actually determined by the reading side, -// since _read has to be called to start processing a new chunk. However, -// a pathological inflate type of transform can cause excessive buffering -// here. For example, imagine a stream where every byte of input is -// interpreted as an integer from 0-255, and then results in that many -// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in -// 1kb of data being output. 
In this case, you could write a very small -// amount of input, and end up with a very large amount of output. In -// such a pathological inflating mechanism, there'd be no way to tell -// the system to stop doing the transform. A single 4MB write could -// cause the system to run out of memory. -// -// However, even in such a pathological case, only a single written chunk -// would be consumed, and then the rest would wait (un-transformed) until -// the results of the previous transformed chunk were consumed. +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. -module.exports = Transform; +/**/ -var Duplex = __nccwpck_require__(2541); +var pna = __nccwpck_require__(22927); +/**/ /**/ -var util = Object.create(__nccwpck_require__(76844)); -util.inherits = __nccwpck_require__(81619); +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) { + keys.push(key); + }return keys; +}; /**/ -util.inherits(Transform, Duplex); - -function afterTransform(er, data) { - var ts = this._transformState; - ts.transforming = false; - - var cb = ts.writecb; - - if (!cb) { - return this.emit('error', new Error('write callback called multiple times')); - } +module.exports = Duplex; - ts.writechunk = null; - ts.writecb = null; +/**/ +var util = Object.create(__nccwpck_require__(68610)); +util.inherits = __nccwpck_require__(92150); +/**/ - if (data != null) // single equals check for both `null` and `undefined` - this.push(data); +var Readable = __nccwpck_require__(59018); +var Writable = __nccwpck_require__(42359); - cb(er); +util.inherits(Duplex, Readable); - var rs = this._readableState; - rs.reading = false; - if (rs.needReadable || rs.length < rs.highWaterMark) { - this._read(rs.highWaterMark); +{ + // avoid scope creep, the keys array can then be collected + var keys = objectKeys(Writable.prototype); + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; } } -function Transform(options) { - if (!(this instanceof Transform)) return new Transform(options); +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); - Duplex.call(this, options); + Readable.call(this, options); + Writable.call(this, options); - this._transformState = { - afterTransform: afterTransform.bind(this), - needTransform: false, - transforming: false, - writecb: null, - writechunk: null, - writeencoding: null - }; + if (options && options.readable === false) this.readable = false; - // start out asking for a readable event once data is transformed. - this._readableState.needReadable = true; + if (options && options.writable === false) this.writable = false; - // we have implemented the _read method, and done the other things - // that Readable wants before the first _read call, so unset the - // sync guard flag. 
- this._readableState.sync = false; + this.allowHalfOpen = true; + if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; - if (options) { - if (typeof options.transform === 'function') this._transform = options.transform; + this.once('end', onend); +} - if (typeof options.flush === 'function') this._flush = options.flush; +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; } +}); - // When the writable side finishes, then flush out anything remaining. - this.on('prefinish', prefinish); +// the no-half-open enforcer +function onend() { + // if we allow half-open state, or if the writable side ended, + // then we're ok. + if (this.allowHalfOpen || this._writableState.ended) return; + + // no more data can be written. + // But allow more writes to happen in this tick. + pna.nextTick(onEndNT, this); } -function prefinish() { - var _this = this; +function onEndNT(self) { + self.end(); +} - if (typeof this._flush === 'function') { - this._flush(function (er, data) { - done(_this, er, data); - }); - } else { - done(this, null, null); +Object.defineProperty(Duplex.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + this._writableState.destroyed = value; } -} +}); -Transform.prototype.push = function (chunk, encoding) { - this._transformState.needTransform = false; - return Duplex.prototype.push.call(this, chunk, encoding); +Duplex.prototype._destroy = function (err, cb) { + this.push(null); + this.end(); + + pna.nextTick(cb, err); }; -// This is the part where you do stuff! -// override this function in implementation classes. -// 'chunk' is an input chunk. +/***/ }), + +/***/ 46969: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. // -// Call `push(newChunk)` to pass along transformed output -// to the readable side. You may call 'push' zero or more times. +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: // -// Call `cb(err)` when you are done with this chunk. If you pass -// an error, then that'll put the hurt on the whole operation. If you -// never call cb(), then you'll never get another chunk. 
-Transform.prototype._transform = function (chunk, encoding, cb) { - throw new Error('_transform() is not implemented'); -}; - -Transform.prototype._write = function (chunk, encoding, cb) { - var ts = this._transformState; - ts.writecb = cb; - ts.writechunk = chunk; - ts.writeencoding = encoding; - if (!ts.transforming) { - var rs = this._readableState; - if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); - } -}; +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. -// Doesn't matter what the args are here. -// _transform does all the work. -// That we got here means that the readable side wants more data. -Transform.prototype._read = function (n) { - var ts = this._transformState; +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. - if (ts.writechunk !== null && ts.writecb && !ts.transforming) { - ts.transforming = true; - this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); - } else { - // mark that we need a transform, so that any data that comes in - // will get processed, now that we've asked for it. - ts.needTransform = true; - } -}; -Transform.prototype._destroy = function (err, cb) { - var _this2 = this; - Duplex.prototype._destroy.call(this, err, function (err2) { - cb(err2); - _this2.emit('close'); - }); -}; +module.exports = PassThrough; -function done(stream, er, data) { - if (er) return stream.emit('error', er); +var Transform = __nccwpck_require__(53183); - if (data != null) // single equals check for both `null` and `undefined` - stream.push(data); +/**/ +var util = Object.create(__nccwpck_require__(68610)); +util.inherits = __nccwpck_require__(92150); +/**/ - // if there's nothing in the write buffer, then that means - // that nothing more will ever be provided - if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0'); +util.inherits(PassThrough, Transform); - if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming'); +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); - return stream.push(null); + Transform.call(this, options); } +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; + /***/ }), -/***/ 5618: +/***/ 59018: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; @@ -66198,68 +59619,40 @@ function done(stream, er, data) { // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. -// A bit simpler than readable streams. -// Implement an async ._write(chunk, encoding, cb), and it'll handle all -// the drain event emission and buffering. 
- /**/ -var pna = __nccwpck_require__(51831); +var pna = __nccwpck_require__(22927); /**/ -module.exports = Writable; - -/* */ -function WriteReq(chunk, encoding, cb) { - this.chunk = chunk; - this.encoding = encoding; - this.callback = cb; - this.next = null; -} - -// It seems a linked list but it is not -// there will be only 2 of these for each stream -function CorkedRequest(state) { - var _this = this; - - this.next = null; - this.entry = null; - this.finish = function () { - onCorkedFinish(_this, state); - }; -} -/* */ +module.exports = Readable; /**/ -var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick; +var isArray = __nccwpck_require__(97305); /**/ /**/ var Duplex; /**/ -Writable.WritableState = WritableState; +Readable.ReadableState = ReadableState; /**/ -var util = Object.create(__nccwpck_require__(76844)); -util.inherits = __nccwpck_require__(81619); -/**/ +var EE = (__nccwpck_require__(82361).EventEmitter); -/**/ -var internalUtil = { - deprecate: __nccwpck_require__(93620) +var EElistenerCount = function (emitter, type) { + return emitter.listeners(type).length; }; /**/ /**/ -var Stream = __nccwpck_require__(84785); +var Stream = __nccwpck_require__(99737); /**/ /**/ -var Buffer = (__nccwpck_require__(4539).Buffer); +var Buffer = (__nccwpck_require__(51594).Buffer); var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; function _uint8ArrayToBuffer(chunk) { return Buffer.from(chunk); @@ -66270,14 +59663,43 @@ function _isUint8Array(obj) { /**/ -var destroyImpl = __nccwpck_require__(39859); +/**/ +var util = Object.create(__nccwpck_require__(68610)); +util.inherits = __nccwpck_require__(92150); +/**/ -util.inherits(Writable, Stream); +/**/ +var debugUtil = __nccwpck_require__(73837); +var debug = void 0; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function () {}; +} +/**/ -function nop() {} +var BufferList = __nccwpck_require__(39215); +var destroyImpl = __nccwpck_require__(45136); +var StringDecoder; -function WritableState(options, stream) { - Duplex = Duplex || __nccwpck_require__(2541); +util.inherits(Readable, Stream); + +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; + +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); + + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} + +function ReadableState(options, stream) { + Duplex = Duplex || __nccwpck_require__(32295); options = options || {}; @@ -66288,872 +59710,1139 @@ function WritableState(options, stream) { // These options can be provided separately as readableXXX and writableXXX. 
var isDuplex = stream instanceof Duplex; - // object stream flag to indicate whether or not this stream - // contains buffers or objects. + // object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away this.objectMode = !!options.objectMode; - if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; - // the point at which write() starts returning false - // Note: 0 is a valid value, means that we always return false if - // the entire buffer is not flushed immediately on write() + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" var hwm = options.highWaterMark; - var writableHwm = options.writableHighWaterMark; + var readableHwm = options.readableHighWaterMark; var defaultHwm = this.objectMode ? 16 : 16 * 1024; - if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm; + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm; // cast to ints. this.highWaterMark = Math.floor(this.highWaterMark); - // if _final has been called - this.finalCalled = false; - - // drain event flag. - this.needDrain = false; - // at the start of calling end() - this.ending = false; - // when end() has been called, and returned + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; this.ended = false; - // when 'finish' is emitted - this.finished = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; // has it been destroyed this.destroyed = false; - // should we decode strings into buffers before passing to _write? - // this is here so that some node-core streams can optimize string - // handling at a lower level. - var noDecode = options.decodeStrings === false; - this.decodeStrings = !noDecode; - // Crypto is kind of old and crusty. Historically, its default string // encoding is 'binary' so we have to make this configurable. // Everything else in the universe uses 'utf8', though. this.defaultEncoding = options.defaultEncoding || 'utf8'; - // not an actual buffer we keep track of, but a measurement - // of how much we're waiting to get pushed to some underlying - // socket or file. - this.length = 0; - - // a flag to see when we're in the middle of a write. - this.writing = false; - - // when true all writes will be buffered until .uncork() call - this.corked = 0; - - // a flag to be able to tell if the onwrite cb is called immediately, - // or on a later tick. 
We set this to true at first, because any - // actions that shouldn't happen until "later" should generally also - // not happen before the first write call. - this.sync = true; - - // a flag to know if we're processing previously buffered items, which - // may call the _write() callback in the same tick, so that we don't - // end up in an overlapped onwrite situation. - this.bufferProcessing = false; + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; - // the callback that's passed to _write(chunk,cb) - this.onwrite = function (er) { - onwrite(stream, er); - }; + // if true, a maybeReadMore has been scheduled + this.readingMore = false; - // the callback that the user supplies to write(chunk,encoding,cb) - this.writecb = null; + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = (__nccwpck_require__(37386)/* .StringDecoder */ .s); + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} - // the amount that is being written when _write is called. - this.writelen = 0; +function Readable(options) { + Duplex = Duplex || __nccwpck_require__(32295); - this.bufferedRequest = null; - this.lastBufferedRequest = null; + if (!(this instanceof Readable)) return new Readable(options); - // number of pending user-supplied write callbacks - // this must be 0 before 'finish' can be emitted - this.pendingcb = 0; + this._readableState = new ReadableState(options, this); - // emit prefinish if the only thing we're waiting for is _write cbs - // This is relevant for synchronous Transform streams - this.prefinished = false; + // legacy + this.readable = true; - // True if the error was already emitted and should not be thrown again - this.errorEmitted = false; + if (options) { + if (typeof options.read === 'function') this._read = options.read; - // count buffered requests - this.bufferedRequestCount = 0; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } - // allocate the first CorkedRequest, there is always - // one allocated and free to use, and we maintain at most two - this.corkedRequestsFree = new CorkedRequest(this); + Stream.call(this); } -WritableState.prototype.getBuffer = function getBuffer() { - var current = this.bufferedRequest; - var out = []; - while (current) { - out.push(current); - current = current.next; - } - return out; -}; - -(function () { - try { - Object.defineProperty(WritableState.prototype, 'buffer', { - get: internalUtil.deprecate(function () { - return this.getBuffer(); - }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') - }); - } catch (_) {} -})(); - -// Test _writableState for inheritance to account for Duplex streams, -// whose prototype chain only points to Readable. 
-var realHasInstance; -if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { - realHasInstance = Function.prototype[Symbol.hasInstance]; - Object.defineProperty(Writable, Symbol.hasInstance, { - value: function (object) { - if (realHasInstance.call(this, object)) return true; - if (this !== Writable) return false; - - return object && object._writableState instanceof WritableState; +Object.defineProperty(Readable.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined) { + return false; + } + return this._readableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; } - }); -} else { - realHasInstance = function (object) { - return object instanceof this; - }; -} - -function Writable(options) { - Duplex = Duplex || __nccwpck_require__(2541); - - // Writable ctor is applied to Duplexes, too. - // `realHasInstance` is necessary because using plain `instanceof` - // would return false, as no `_writableState` property is attached. - // Trying to use the custom `instanceof` for Writable here will also break the - // Node.js LazyTransform implementation, which has a non-trivial getter for - // `_writableState` that would lead to infinite recursion. - if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) { - return new Writable(options); + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; } +}); - this._writableState = new WritableState(options, this); +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; +Readable.prototype._destroy = function (err, cb) { + this.push(null); + cb(err); +}; - // legacy. - this.writable = true; +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
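+// A minimal usage sketch (hypothetical producer code, assuming the public
+// readable-stream API; not part of this bundle): keep calling push() until
+// it returns false, then wait for the next _read() call before producing
+// more.
+//
+//   var Readable = require('readable-stream').Readable;
+//   var i = 0;
+//   var r = new Readable({
+//     read: function () {
+//       // push() returns false once the buffer reaches the highWaterMark
+//       while (i < 100 && this.push(Buffer.from('chunk' + i++))) {}
+//       if (i >= 100) this.push(null); // signal EOF
+//     }
+//   });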
+Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; - if (options) { - if (typeof options.write === 'function') this._write = options.write; + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } - if (typeof options.writev === 'function') this._writev = options.writev; + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; - if (typeof options.destroy === 'function') this._destroy = options.destroy; +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; - if (typeof options.final === 'function') this._final = options.final; +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + var state = stream._readableState; + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + if (er) { + stream.emit('error', er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (addToFront) { + if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true); + } else if (state.ended) { + stream.emit('error', new Error('stream.push() after EOF')); + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + } } - Stream.call(this); + return needMoreData(state); } -// Otherwise people can pipe Writable streams, which is just wrong. -Writable.prototype.pipe = function () { - this.emit('error', new Error('Cannot pipe, not readable')); -}; +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + stream.emit('data', chunk); + stream.read(0); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); -function writeAfterEnd(stream, cb) { - var er = new Error('write after end'); - // TODO: defer error events consistently everywhere, not just the cb - stream.emit('error', er); - pna.nextTick(cb, er); + if (state.needReadable) emitReadable(stream); + } + maybeReadMore(stream, state); } -// Checks that a user-supplied chunk is valid, especially for the particular -// mode the stream is in. Currently this means that `null` is never accepted -// and undefined/non-string values are only allowed in object mode. 
-function validChunk(stream, state, chunk, cb) { - var valid = true; - var er = false; - - if (chunk === null) { - er = new TypeError('May not write null values to stream'); - } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { +function chunkInvalid(state, chunk) { + var er; + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { er = new TypeError('Invalid non-string/buffer chunk'); } - if (er) { - stream.emit('error', er); - pna.nextTick(cb, er); - valid = false; - } - return valid; + return er; } -Writable.prototype.write = function (chunk, encoding, cb) { - var state = this._writableState; - var ret = false; - var isBuf = !state.objectMode && _isUint8Array(chunk); +// if it's past the high water mark, we can push in some more. +// Also, if we have no data yet, we can stand some +// more bytes. This is to work around cases where hwm=0, +// such as the repl. Also, if the push() triggered a +// readable event, and the user called read(largeNumber) such that +// needReadable was set, then we ought to push more, so that another +// 'readable' event will be triggered. +function needMoreData(state) { + return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); +} - if (isBuf && !Buffer.isBuffer(chunk)) { - chunk = _uint8ArrayToBuffer(chunk); +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; + +// backwards compatibility. +Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = (__nccwpck_require__(37386)/* .StringDecoder */ .s); + this._readableState.decoder = new StringDecoder(enc); + this._readableState.encoding = enc; + return this; +}; + +// Don't raise the hwm > 8MB +var MAX_HWM = 0x800000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; } + return n; +} - if (typeof encoding === 'function') { - cb = encoding; - encoding = null; +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; } + return state.length; +} - if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; +// you can override either this method, or the async _read(n) below. 
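+// A minimal consumer sketch (hypothetical user code, given some readable
+// `r` such as the one sketched above): under the 'readable' event, read()
+// is called in a loop and returns null once the internal buffer is
+// drained, at which point needReadable is set again.
+//
+//   r.on('readable', function () {
+//     var chunk;
+//     while ((chunk = r.read()) !== null) {
+//       console.log('got %d bytes of data', chunk.length);
+//     }
+//   });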
+Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; - if (typeof cb !== 'function') cb = nop; + if (n !== 0) state.emittedReadable = false; - if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { - state.pendingcb++; - ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; } - return ret; -}; + n = howMuchToRead(n, state); -Writable.prototype.cork = function () { - var state = this._writableState; + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } - state.corked++; -}; + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. + // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. -Writable.prototype.uncork = function () { - var state = this._writableState; + // if we need a readable event, then we need to do some reading. + var doRead = state.needReadable; + debug('need readable', doRead); - if (state.corked) { - state.corked--; + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } - if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. 
+ if (!state.reading) n = howMuchToRead(nOrig, state); } -}; -Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { - // node::ParseEncoding() requires lower case. - if (typeof encoding === 'string') encoding = encoding.toLowerCase(); - if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); - this._writableState.defaultEncoding = encoding; - return this; -}; + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; -function decodeChunk(state, chunk, encoding) { - if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { - chunk = Buffer.from(chunk, encoding); + if (ret === null) { + state.needReadable = true; + n = 0; + } else { + state.length -= n; } - return chunk; -} -Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: function () { - return this._writableState.highWaterMark; - } -}); + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; -// if we're already writing something, then just put this -// in the queue, and wait our turn. Otherwise, call _write -// If we return false, then we need a drain event, so set that flag. -function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { - if (!isBuf) { - var newChunk = decodeChunk(state, chunk, encoding); - if (chunk !== newChunk) { - isBuf = true; - encoding = 'buffer'; - chunk = newChunk; - } + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended) endReadable(this); } - var len = state.objectMode ? 1 : chunk.length; - state.length += len; + if (ret !== null) this.emit('data', ret); - var ret = state.length < state.highWaterMark; - // we must ensure that previous needDrain will not be reset to false. - if (!ret) state.needDrain = true; + return ret; +}; - if (state.writing || state.corked) { - var last = state.lastBufferedRequest; - state.lastBufferedRequest = { - chunk: chunk, - encoding: encoding, - isBuf: isBuf, - callback: cb, - next: null - }; - if (last) { - last.next = state.lastBufferedRequest; - } else { - state.bufferedRequest = state.lastBufferedRequest; +function onEofChunk(stream, state) { + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; } - state.bufferedRequestCount += 1; - } else { - doWrite(stream, state, false, len, chunk, encoding, cb); } + state.ended = true; - return ret; + // emit 'readable' now to make sure it gets picked up. + emitReadable(stream); } -function doWrite(stream, state, writev, len, chunk, encoding, cb) { - state.writelen = len; - state.writecb = cb; - state.writing = true; - state.sync = true; - if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); - state.sync = false; +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. 
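+// (For example, a 'readable' handler that synchronously calls read(),
+// where read() triggers a synchronous _read()/push(), would otherwise
+// recurse: push -> emitReadable -> 'readable' -> read -> push -> ...)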
+function emitReadable(stream) { + var state = stream._readableState; + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream); + } } -function onwriteError(stream, state, sync, er, cb) { - --state.pendingcb; +function emitReadable_(stream) { + debug('emit readable'); + stream.emit('readable'); + flow(stream); +} - if (sync) { - // defer the callback if we are being called synchronously - // to avoid piling up things on the stack - pna.nextTick(cb, er); - // this can emit finish, and it will always happen - // after error - pna.nextTick(finishMaybe, stream, state); - stream._writableState.errorEmitted = true; - stream.emit('error', er); - } else { - // the caller expect this to happen before if - // it is async - cb(er); - stream._writableState.errorEmitted = true; - stream.emit('error', er); - // this can emit finish, but finish must - // always follow error - finishMaybe(stream, state); +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + pna.nextTick(maybeReadMore_, stream, state); } } -function onwriteStateUpdate(state) { - state.writing = false; - state.writecb = null; - state.length -= state.writelen; - state.writelen = 0; +function maybeReadMore_(stream, state) { + var len = state.length; + while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break;else len = state.length; + } + state.readingMore = false; } -function onwrite(stream, er) { - var state = stream._writableState; - var sync = state.sync; - var cb = state.writecb; +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. +// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function (n) { + this.emit('error', new Error('_read() is not implemented')); +}; - onwriteStateUpdate(state); +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; - if (er) onwriteError(stream, state, sync, er, cb);else { - // Check if we're actually ready to finish, but don't emit yet - var finished = needFinish(state); + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); - if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { - clearBuffer(stream, state); - } + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; - if (sync) { - /**/ - asyncWrite(afterWrite, stream, state, finished, cb); - /**/ - } else { - afterWrite(stream, state, finished, cb); - } - } -} + var endFn = doEnd ? 
onend : unpipe; + if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn); -function afterWrite(stream, state, finished, cb) { - if (!finished) onwriteDrain(stream, state); - state.pendingcb--; - cb(); - finishMaybe(stream, state); -} - -// Must force callback to be called on nextTick, so that we don't -// emit 'drain' before the write() consumer gets the 'false' return -// value, and has a chance to attach a 'drain' listener. -function onwriteDrain(stream, state) { - if (state.length === 0 && state.needDrain) { - state.needDrain = false; - stream.emit('drain'); + dest.on('unpipe', onunpipe); + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } } -} -// if there's something in the buffer waiting, then process it -function clearBuffer(stream, state) { - state.bufferProcessing = true; - var entry = state.bufferedRequest; + function onend() { + debug('onend'); + dest.end(); + } - if (stream._writev && entry && entry.next) { - // Fast case, write everything using _writev() - var l = state.bufferedRequestCount; - var buffer = new Array(l); - var holder = state.corkedRequestsFree; - holder.entry = entry; + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); - var count = 0; - var allBuffers = true; - while (entry) { - buffer[count] = entry; - if (!entry.isBuf) allBuffers = false; - entry = entry.next; - count += 1; - } - buffer.allBuffers = allBuffers; + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); - doWrite(stream, state, true, state.length, buffer, '', holder.finish); + cleanedUp = true; - // doWrite is almost always async, defer these to save a bit of time - // as the hot path ends with doWrite - state.pendingcb++; - state.lastBufferedRequest = null; - if (holder.next) { - state.corkedRequestsFree = holder.next; - holder.next = null; - } else { - state.corkedRequestsFree = new CorkedRequest(state); - } - state.bufferedRequestCount = 0; - } else { - // Slow case, write chunks one-by-one - while (entry) { - var chunk = entry.chunk; - var encoding = entry.encoding; - var cb = entry.callback; - var len = state.objectMode ? 1 : chunk.length; + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } - doWrite(stream, state, false, len, chunk, encoding, cb); - entry = entry.next; - state.bufferedRequestCount--; - // if we didn't call the onwrite immediately, then - // it means that we need to wait until it does. - // also, that means that the chunk and cb are currently - // being processed, so move the buffer counter past them. 
- if (state.writing) { - break; + // If the user pushes more data while we're writing to dest then we'll end up + // in ondata again. However, we only want to increase awaitDrain once because + // dest will only emit one 'drain' event for the multiple writes. + // => Introduce a guard on increasing awaitDrain. + var increasedAwaitDrain = false; + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + increasedAwaitDrain = false; + var ret = dest.write(chunk); + if (false === ret && !increasedAwaitDrain) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', state.awaitDrain); + state.awaitDrain++; + increasedAwaitDrain = true; } + src.pause(); } - - if (entry === null) state.lastBufferedRequest = null; } - state.bufferedRequest = entry; - state.bufferProcessing = false; -} - -Writable.prototype._write = function (chunk, encoding, cb) { - cb(new Error('_write() is not implemented')); -}; + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. + function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); + } -Writable.prototype._writev = null; + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); -Writable.prototype.end = function (chunk, encoding, cb) { - var state = this._writableState; + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); - if (typeof chunk === 'function') { - cb = chunk; - chunk = null; - encoding = null; - } else if (typeof encoding === 'function') { - cb = encoding; - encoding = null; + function unpipe() { + debug('unpipe'); + src.unpipe(dest); } - if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + // tell the dest that it's being piped to + dest.emit('pipe', src); - // .end() fully uncorks - if (state.corked) { - state.corked = 1; - this.uncork(); + // start the flow if it hasn't been started already. + if (!state.flowing) { + debug('pipe resume'); + src.resume(); } - // ignore unnecessary end() calls. 
- if (!state.ending) endWritable(this, state, cb); + return dest; }; -function needFinish(state) { - return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; -} -function callFinal(stream, state) { - stream._final(function (err) { - state.pendingcb--; - if (err) { - stream.emit('error', err); - } - state.prefinished = true; - stream.emit('prefinish'); - finishMaybe(stream, state); - }); -} -function prefinish(stream, state) { - if (!state.prefinished && !state.finalCalled) { - if (typeof stream._final === 'function') { - state.pendingcb++; - state.finalCalled = true; - pna.nextTick(callFinal, stream, state); - } else { - state.prefinished = true; - stream.emit('prefinish'); - } - } -} - -function finishMaybe(stream, state) { - var need = needFinish(state); - if (need) { - prefinish(stream, state); - if (state.pendingcb === 0) { - state.finished = true; - stream.emit('finish'); +function pipeOnDrain(src) { + return function () { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); } - } - return need; + }; } -function endWritable(stream, state, cb) { - state.ending = true; - finishMaybe(stream, state); - if (cb) { - if (state.finished) pna.nextTick(cb);else stream.once('finish', cb); - } - state.ended = true; - stream.writable = false; -} +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { hasUnpiped: false }; -function onCorkedFinish(corkReq, state, err) { - var entry = corkReq.entry; - corkReq.entry = null; - while (entry) { - var cb = entry.callback; - state.pendingcb--; - cb(err); - entry = entry.next; - } + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; - // reuse the free corkReq. - state.corkedRequestsFree.next = corkReq; -} + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; -Object.defineProperty(Writable.prototype, 'destroyed', { - get: function () { - if (this._writableState === undefined) { - return false; - } - return this._writableState.destroyed; - }, - set: function (value) { - // we ignore the value if the stream - // has not been initialized yet - if (!this._writableState) { - return; - } + if (!dest) dest = state.pipes; - // backward compatibility, the user is explicitly - // managing destroyed - this._writableState.destroyed = value; + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; } -}); - -Writable.prototype.destroy = destroyImpl.destroy; -Writable.prototype._undestroy = destroyImpl.undestroy; -Writable.prototype._destroy = function (err, cb) { - this.end(); - cb(err); -}; -/***/ }), + // slow case. multiple pipe destinations. -/***/ 97778: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; -"use strict"; + for (var i = 0; i < len; i++) { + dests[i].emit('unpipe', this, { hasUnpiped: false }); + }return this; + } + // try to find the right one. 
+ var index = indexOf(state.pipes, dest); + if (index === -1) return this; -function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; -var Buffer = (__nccwpck_require__(4539).Buffer); -var util = __nccwpck_require__(73837); + dest.emit('unpipe', this, unpipeInfo); -function copyBuffer(src, target, offset) { - src.copy(target, offset); -} + return this; +}; -module.exports = function () { - function BufferList() { - _classCallCheck(this, BufferList); +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); - this.head = null; - this.tail = null; - this.length = 0; + if (ev === 'data') { + // Start flowing on next tick if stream isn't explicitly paused + if (this._readableState.flowing !== false) this.resume(); + } else if (ev === 'readable') { + var state = this._readableState; + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.emittedReadable = false; + if (!state.reading) { + pna.nextTick(nReadingNextTick, this); + } else if (state.length) { + emitReadable(this); + } + } } - BufferList.prototype.push = function push(v) { - var entry = { data: v, next: null }; - if (this.length > 0) this.tail.next = entry;else this.head = entry; - this.tail = entry; - ++this.length; - }; - - BufferList.prototype.unshift = function unshift(v) { - var entry = { data: v, next: this.head }; - if (this.length === 0) this.tail = entry; - this.head = entry; - ++this.length; - }; - - BufferList.prototype.shift = function shift() { - if (this.length === 0) return; - var ret = this.head.data; - if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; - --this.length; - return ret; - }; + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; - BufferList.prototype.clear = function clear() { - this.head = this.tail = null; - this.length = 0; - }; +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} - BufferList.prototype.join = function join(s) { - if (this.length === 0) return ''; - var p = this.head; - var ret = '' + p.data; - while (p = p.next) { - ret += s + p.data; - }return ret; - }; +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. 
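+// A hypothetical sketch of that legacy mode: attaching a 'data' listener
+// switches the stream into flowing mode, and pause()/resume() toggle it.
+//
+//   r.on('data', function (chunk) {
+//     r.pause();                    // flowing = false, 'pause' is emitted
+//     setTimeout(function () {
+//       r.resume();                 // flowing = true, flow() restarts
+//     }, 1000);
+//   });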
+Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + state.flowing = true; + resume(this, state); + } + return this; +}; - BufferList.prototype.concat = function concat(n) { - if (this.length === 0) return Buffer.alloc(0); - var ret = Buffer.allocUnsafe(n >>> 0); - var p = this.head; - var i = 0; - while (p) { - copyBuffer(p.data, ret, i); - i += p.data.length; - p = p.next; - } - return ret; - }; +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + pna.nextTick(resume_, stream, state); + } +} - return BufferList; -}(); +function resume_(stream, state) { + if (!state.reading) { + debug('resume read 0'); + stream.read(0); + } -if (util && util.inspect && util.inspect.custom) { - module.exports.prototype[util.inspect.custom] = function () { - var obj = util.inspect({ length: this.length }); - return this.constructor.name + ' ' + obj; - }; + state.resumeScheduled = false; + state.awaitDrain = 0; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); } -/***/ }), +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (false !== this._readableState.flowing) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + return this; +}; -/***/ 39859: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null) {} +} -"use strict"; +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + var _this = this; + var state = this._readableState; + var paused = false; -/**/ + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } -var pna = __nccwpck_require__(51831); -/**/ + _this.push(null); + }); -// undocumented cb() API, needed for core, not for public API -function destroy(err, cb) { - var _this = this; + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); - var readableDestroyed = this._readableState && this._readableState.destroyed; - var writableDestroyed = this._writableState && this._writableState.destroyed; + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; - if (readableDestroyed || writableDestroyed) { - if (cb) { - cb(err); - } else if (err) { - if (!this._writableState) { - pna.nextTick(emitErrorNT, this, err); - } else if (!this._writableState.errorEmitted) { - this._writableState.errorEmitted = true; - pna.nextTick(emitErrorNT, this, err); - } + var ret = _this.push(chunk); + if (!ret) { + paused = true; + stream.pause(); } + }); - return this; + // proxy all the other methods. + // important when wrapping filters and duplexes. 
+ for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function (method) { + return function () { + return stream[method].apply(stream, arguments); + }; + }(i); + } } - // we set destroyed to true before firing error callbacks in order - // to make it re-entrance safe in case destroy() is called within callbacks - - if (this._readableState) { - this._readableState.destroyed = true; + // proxy certain important events. + for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); } - // if this is a duplex stream mark the writable part as destroyed as well - if (this._writableState) { - this._writableState.destroyed = true; - } + // when we try to consume some more bytes, simply unpause the + // underlying stream. + this._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; - this._destroy(err || null, function (err) { - if (!cb && err) { - if (!_this._writableState) { - pna.nextTick(emitErrorNT, _this, err); - } else if (!_this._writableState.errorEmitted) { - _this._writableState.errorEmitted = true; - pna.nextTick(emitErrorNT, _this, err); + return this; +}; + +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._readableState.highWaterMark; + } +}); + +// exposed for testing purposes only. +Readable._fromList = fromList; + +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = fromListPartial(n, state.buffer, state.decoder); + } + + return ret; +} + +// Extracts only enough buffered data to satisfy the amount requested. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromListPartial(n, list, hasStrings) { + var ret; + if (n < list.head.data.length) { + // slice is the same for buffers and strings + ret = list.head.data.slice(0, n); + list.head.data = list.head.data.slice(n); + } else if (n === list.head.data.length) { + // first chunk is a perfect match + ret = list.shift(); + } else { + // result spans more than one buffer + ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list); + } + return ret; +} + +// Copies a specified amount of characters from the list of buffered data +// chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBufferString(n, list) { + var p = list.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? 
str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = str.slice(nb); } - } else if (cb) { - cb(err); + break; } - }); - - return this; + ++c; + } + list.length -= c; + return ret; } -function undestroy() { - if (this._readableState) { - this._readableState.destroyed = false; - this._readableState.reading = false; - this._readableState.ended = false; - this._readableState.endEmitted = false; +// Copies a specified amount of bytes from the list of buffered data chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBuffer(n, list) { + var ret = Buffer.allocUnsafe(n); + var p = list.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = buf.slice(nb); + } + break; + } + ++c; } + list.length -= c; + return ret; +} - if (this._writableState) { - this._writableState.destroyed = false; - this._writableState.ended = false; - this._writableState.ending = false; - this._writableState.finalCalled = false; - this._writableState.prefinished = false; - this._writableState.finished = false; - this._writableState.errorEmitted = false; +function endReadable(stream) { + var state = stream._readableState; + + // If we get here before consuming all the bytes, then that is a + // bug in node. Should never happen. + if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream'); + + if (!state.endEmitted) { + state.ended = true; + pna.nextTick(endReadableNT, state, stream); } } -function emitErrorNT(self, err) { - self.emit('error', err); +function endReadableNT(state, stream) { + // Check that we didn't get one last unshift. + if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + } } -module.exports = { - destroy: destroy, - undestroy: undestroy -}; +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} /***/ }), -/***/ 84785: +/***/ 53183: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -module.exports = __nccwpck_require__(12781); +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. 
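+// A minimal sketch of the contract described above (hypothetical user
+// code, not part of this bundle): supply a `transform` function; the class
+// buffers written chunks, calls _transform for each one, and cb(err, data)
+// both acknowledges the written chunk and, when data != null, push()es it
+// to the readable side.
+//
+//   var Transform = require('readable-stream').Transform;
+//   var upper = new Transform({
+//     transform: function (chunk, encoding, cb) {
+//       cb(null, chunk.toString('utf8').toUpperCase());
+//     }
+//   });
+//   process.stdin.pipe(upper).pipe(process.stdout);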
-/***/ }), -/***/ 58721: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -module.exports = __nccwpck_require__(58479).PassThrough +module.exports = Transform; +var Duplex = __nccwpck_require__(32295); -/***/ }), +/**/ +var util = Object.create(__nccwpck_require__(68610)); +util.inherits = __nccwpck_require__(92150); +/**/ -/***/ 58479: -/***/ ((module, exports, __nccwpck_require__) => { +util.inherits(Transform, Duplex); -var Stream = __nccwpck_require__(12781); -if (process.env.READABLE_STREAM === 'disable' && Stream) { - module.exports = Stream; - exports = module.exports = Stream.Readable; - exports.Readable = Stream.Readable; - exports.Writable = Stream.Writable; - exports.Duplex = Stream.Duplex; - exports.Transform = Stream.Transform; - exports.PassThrough = Stream.PassThrough; - exports.Stream = Stream; -} else { - exports = module.exports = __nccwpck_require__(95952); - exports.Stream = Stream || exports; - exports.Readable = exports; - exports.Writable = __nccwpck_require__(5618); - exports.Duplex = __nccwpck_require__(2541); - exports.Transform = __nccwpck_require__(1909); - exports.PassThrough = __nccwpck_require__(73018); -} +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + var cb = ts.writecb; -/***/ }), + if (!cb) { + return this.emit('error', new Error('write callback called multiple times')); + } -/***/ 4539: -/***/ ((module, exports, __nccwpck_require__) => { + ts.writechunk = null; + ts.writecb = null; -/* eslint-disable node/no-deprecated-api */ -var buffer = __nccwpck_require__(14300) -var Buffer = buffer.Buffer + if (data != null) // single equals check for both `null` and `undefined` + this.push(data); -// alternative to using Object.keys for old browsers -function copyProps (src, dst) { - for (var key in src) { - dst[key] = src[key] + cb(er); + + var rs = this._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); } } -if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { - module.exports = buffer -} else { - // Copy properties from require('buffer') - copyProps(buffer, exports) - exports.Buffer = SafeBuffer -} -function SafeBuffer (arg, encodingOrOffset, length) { - return Buffer(arg, encodingOrOffset, length) -} +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); -// Copy static methods from Buffer -copyProps(Buffer, SafeBuffer) + Duplex.call(this, options); -SafeBuffer.from = function (arg, encodingOrOffset, length) { - if (typeof arg === 'number') { - throw new TypeError('Argument must not be a number') + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; + + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + + if (typeof options.flush === 'function') this._flush = options.flush; } - return Buffer(arg, encodingOrOffset, length) + + // When the writable side finishes, then flush out anything remaining. 
+ this.on('prefinish', prefinish); } -SafeBuffer.alloc = function (size, fill, encoding) { - if (typeof size !== 'number') { - throw new TypeError('Argument must be a number') - } - var buf = Buffer(size) - if (fill !== undefined) { - if (typeof encoding === 'string') { - buf.fill(fill, encoding) - } else { - buf.fill(fill) - } +function prefinish() { + var _this = this; + + if (typeof this._flush === 'function') { + this._flush(function (er, data) { + done(_this, er, data); + }); } else { - buf.fill(0) + done(this, null, null); } - return buf } -SafeBuffer.allocUnsafe = function (size) { - if (typeof size !== 'number') { - throw new TypeError('Argument must be a number') +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. +Transform.prototype._transform = function (chunk, encoding, cb) { + throw new Error('_transform() is not implemented'); +}; + +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); } - return Buffer(size) -} +}; -SafeBuffer.allocUnsafeSlow = function (size) { - if (typeof size !== 'number') { - throw new TypeError('Argument must be a number') +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function (n) { + var ts = this._transformState; + + if (ts.writechunk !== null && ts.writecb && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; } - return buffer.SlowBuffer(size) -} +}; + +Transform.prototype._destroy = function (err, cb) { + var _this2 = this; + + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + _this2.emit('close'); + }); +}; + +function done(stream, er, data) { + if (er) return stream.emit('error', er); + + if (data != null) // single equals check for both `null` and `undefined` + stream.push(data); + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0'); + + if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming'); + + return stream.push(null); +} /***/ }), -/***/ 79335: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 42359: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; // Copyright Joyent, Inc. and other Node contributors. 
@@ -67177,2185 +60866,2433 @@ SafeBuffer.allocUnsafeSlow = function (size) { // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. + /**/ -var Buffer = (__nccwpck_require__(4539).Buffer); +var pna = __nccwpck_require__(22927); /**/ -var isEncoding = Buffer.isEncoding || function (encoding) { - encoding = '' + encoding; - switch (encoding && encoding.toLowerCase()) { - case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': - return true; - default: - return false; - } -}; +module.exports = Writable; -function _normalizeEncoding(enc) { - if (!enc) return 'utf8'; - var retried; - while (true) { - switch (enc) { - case 'utf8': - case 'utf-8': - return 'utf8'; - case 'ucs2': - case 'ucs-2': - case 'utf16le': - case 'utf-16le': - return 'utf16le'; - case 'latin1': - case 'binary': - return 'latin1'; - case 'base64': - case 'ascii': - case 'hex': - return enc; - default: - if (retried) return; // undefined - enc = ('' + enc).toLowerCase(); - retried = true; - } - } +/* */ +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; + + this.next = null; + this.entry = null; + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ + +/**/ +var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick; +/**/ + +/**/ +var Duplex; +/**/ + +Writable.WritableState = WritableState; + +/**/ +var util = Object.create(__nccwpck_require__(68610)); +util.inherits = __nccwpck_require__(92150); +/**/ + +/**/ +var internalUtil = { + deprecate: __nccwpck_require__(15362) }; +/**/ -// Do not cache `Buffer.isEncoding` when checking encoding names as some -// modules monkey-patch it to support additional encodings -function normalizeEncoding(enc) { - var nenc = _normalizeEncoding(enc); - if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); - return nenc || enc; +/**/ +var Stream = __nccwpck_require__(99737); +/**/ + +/**/ + +var Buffer = (__nccwpck_require__(51594).Buffer); +var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; } -// StringDecoder provides an interface for efficiently splitting a series of -// buffers into a series of JS strings without breaking apart multi-byte -// characters. 
-exports.s = StringDecoder; -function StringDecoder(encoding) { - this.encoding = normalizeEncoding(encoding); - var nb; - switch (this.encoding) { - case 'utf16le': - this.text = utf16Text; - this.end = utf16End; - nb = 4; - break; - case 'utf8': - this.fillLast = utf8FillLast; - nb = 4; - break; - case 'base64': - this.text = base64Text; - this.end = base64End; - nb = 3; - break; - default: - this.write = simpleWrite; - this.end = simpleEnd; - return; - } - this.lastNeed = 0; - this.lastTotal = 0; - this.lastChar = Buffer.allocUnsafe(nb); -} +/**/ -StringDecoder.prototype.write = function (buf) { - if (buf.length === 0) return ''; - var r; - var i; - if (this.lastNeed) { - r = this.fillLast(buf); - if (r === undefined) return ''; - i = this.lastNeed; - this.lastNeed = 0; - } else { - i = 0; - } - if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); - return r || ''; -}; +var destroyImpl = __nccwpck_require__(45136); -StringDecoder.prototype.end = utf8End; +util.inherits(Writable, Stream); -// Returns only complete characters in a Buffer -StringDecoder.prototype.text = utf8Text; +function nop() {} -// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer -StringDecoder.prototype.fillLast = function (buf) { - if (this.lastNeed <= buf.length) { - buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); - return this.lastChar.toString(this.encoding, 0, this.lastTotal); - } - buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); - this.lastNeed -= buf.length; -}; +function WritableState(options, stream) { + Duplex = Duplex || __nccwpck_require__(32295); -// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a -// continuation byte. If an invalid byte is detected, -2 is returned. -function utf8CheckByte(byte) { - if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; - return byte >> 6 === 0x02 ? -1 : -2; -} + options = options || {}; -// Checks at most 3 bytes at the end of a Buffer in order to detect an -// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) -// needed to complete the UTF-8 character (if applicable) are returned. -function utf8CheckIncomplete(self, buf, i) { - var j = buf.length - 1; - if (j < i) return 0; - var nb = utf8CheckByte(buf[j]); - if (nb >= 0) { - if (nb > 0) self.lastNeed = nb - 1; - return nb; - } - if (--j < i || nb === -2) return 0; - nb = utf8CheckByte(buf[j]); - if (nb >= 0) { - if (nb > 0) self.lastNeed = nb - 2; - return nb; - } - if (--j < i || nb === -2) return 0; - nb = utf8CheckByte(buf[j]); - if (nb >= 0) { - if (nb > 0) { - if (nb === 2) nb = 0;else self.lastNeed = nb - 3; - } - return nb; - } - return 0; -} + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; -// Validates as many continuation bytes for a multi-byte UTF-8 character as -// needed or are available. If we see a non-continuation byte where we expect -// one, we "replace" the validated continuation bytes we've seen so far with -// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding -// behavior. 
The continuation byte check is included three times in the case -// where all of the continuation bytes for a character exist in the same buffer. -// It is also done this way as a slight performance increase instead of using a -// loop. -function utf8CheckExtraBytes(self, buf, p) { - if ((buf[0] & 0xC0) !== 0x80) { - self.lastNeed = 0; - return '\ufffd'; - } - if (self.lastNeed > 1 && buf.length > 1) { - if ((buf[1] & 0xC0) !== 0x80) { - self.lastNeed = 1; - return '\ufffd'; - } - if (self.lastNeed > 2 && buf.length > 2) { - if ((buf[2] & 0xC0) !== 0x80) { - self.lastNeed = 2; - return '\ufffd'; - } - } - } -} + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; -// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. -function utf8FillLast(buf) { - var p = this.lastTotal - this.lastNeed; - var r = utf8CheckExtraBytes(this, buf, p); - if (r !== undefined) return r; - if (this.lastNeed <= buf.length) { - buf.copy(this.lastChar, p, 0, this.lastNeed); - return this.lastChar.toString(this.encoding, 0, this.lastTotal); - } - buf.copy(this.lastChar, p, 0, buf.length); - this.lastNeed -= buf.length; -} + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; -// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a -// partial character, the character's bytes are buffered until the required -// number of bytes are available. -function utf8Text(buf, i) { - var total = utf8CheckIncomplete(this, buf, i); - if (!this.lastNeed) return buf.toString('utf8', i); - this.lastTotal = total; - var end = buf.length - (total - this.lastNeed); - buf.copy(this.lastChar, 0, end); - return buf.toString('utf8', i, end); -} + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + var hwm = options.highWaterMark; + var writableHwm = options.writableHighWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; -// For UTF-8, a replacement character is added when ending on a partial -// character. -function utf8End(buf) { - var r = buf && buf.length ? this.write(buf) : ''; - if (this.lastNeed) return r + '\ufffd'; - return r; -} + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm; -// UTF-16LE typically needs two bytes per character, but even if we have an even -// number of bytes available, we need to check if we end on a leading/high -// surrogate. In that case, we need to wait for the next two bytes in order to -// decode the last character properly. -function utf16Text(buf, i) { - if ((buf.length - i) % 2 === 0) { - var r = buf.toString('utf16le', i); - if (r) { - var c = r.charCodeAt(r.length - 1); - if (c >= 0xD800 && c <= 0xDBFF) { - this.lastNeed = 2; - this.lastTotal = 4; - this.lastChar[0] = buf[buf.length - 2]; - this.lastChar[1] = buf[buf.length - 1]; - return r.slice(0, -1); - } - } - return r; - } - this.lastNeed = 1; - this.lastTotal = 2; - this.lastChar[0] = buf[buf.length - 1]; - return buf.toString('utf16le', i, buf.length - 1); -} + // cast to ints. + this.highWaterMark = Math.floor(this.highWaterMark); -// For UTF-16LE we do not explicitly append special replacement characters if we -// end on a partial character, we simply let v8 handle that. 
-function utf16End(buf) { - var r = buf && buf.length ? this.write(buf) : ''; - if (this.lastNeed) { - var end = this.lastTotal - this.lastNeed; - return r + this.lastChar.toString('utf16le', 0, end); - } - return r; -} + // if _final has been called + this.finalCalled = false; -function base64Text(buf, i) { - var n = (buf.length - i) % 3; - if (n === 0) return buf.toString('base64', i); - this.lastNeed = 3 - n; - this.lastTotal = 3; - if (n === 1) { - this.lastChar[0] = buf[buf.length - 1]; - } else { - this.lastChar[0] = buf[buf.length - 2]; - this.lastChar[1] = buf[buf.length - 1]; - } - return buf.toString('base64', i, buf.length - n); -} + // drain event flag. + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; -function base64End(buf) { - var r = buf && buf.length ? this.write(buf) : ''; - if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); - return r; -} + // has it been destroyed + this.destroyed = false; -// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) -function simpleWrite(buf) { - return buf.toString(this.encoding); -} + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; -function simpleEnd(buf) { - return buf && buf.length ? this.write(buf) : ''; -} + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; -/***/ }), + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; -/***/ 44859: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // a flag to see when we're in the middle of a write. + this.writing = false; -var hashClear = __nccwpck_require__(49514), - hashDelete = __nccwpck_require__(53822), - hashGet = __nccwpck_require__(96275), - hashHas = __nccwpck_require__(26194), - hashSet = __nccwpck_require__(24116); + // when true all writes will be buffered until .uncork() call + this.corked = 0; -/** - * Creates a hash object. - * - * @private - * @constructor - * @param {Array} [entries] The key-value pairs to cache. - */ -function Hash(entries) { - var index = -1, - length = entries == null ? 0 : entries.length; + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; - this.clear(); - while (++index < length) { - var entry = entries[index]; - this.set(entry[0], entry[1]); - } -} + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; -// Add methods to `Hash`. 
-Hash.prototype.clear = hashClear; -Hash.prototype['delete'] = hashDelete; -Hash.prototype.get = hashGet; -Hash.prototype.has = hashHas; -Hash.prototype.set = hashSet; + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; -module.exports = Hash; + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + // the amount that is being written when _write is called. + this.writelen = 0; -/***/ }), + this.bufferedRequest = null; + this.lastBufferedRequest = null; -/***/ 4901: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; -var listCacheClear = __nccwpck_require__(61671), - listCacheDelete = __nccwpck_require__(93533), - listCacheGet = __nccwpck_require__(22651), - listCacheHas = __nccwpck_require__(49577), - listCacheSet = __nccwpck_require__(39980); + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; -/** - * Creates an list cache object. - * - * @private - * @constructor - * @param {Array} [entries] The key-value pairs to cache. - */ -function ListCache(entries) { - var index = -1, - length = entries == null ? 0 : entries.length; + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; - this.clear(); - while (++index < length) { - var entry = entries[index]; - this.set(entry[0], entry[1]); - } -} + // count buffered requests + this.bufferedRequestCount = 0; -// Add methods to `ListCache`. -ListCache.prototype.clear = listCacheClear; -ListCache.prototype['delete'] = listCacheDelete; -ListCache.prototype.get = listCacheGet; -ListCache.prototype.has = listCacheHas; -ListCache.prototype.set = listCacheSet; + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); +} -module.exports = ListCache; +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function () { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); -/***/ }), +// Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. 
+var realHasInstance; +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function (object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; -/***/ 92451: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function (object) { + return object instanceof this; + }; +} -var getNative = __nccwpck_require__(92682), - root = __nccwpck_require__(47491); +function Writable(options) { + Duplex = Duplex || __nccwpck_require__(32295); -/* Built-in method references that are verified to be native. */ -var Map = getNative(root, 'Map'); + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. -module.exports = Map; + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. + if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) { + return new Writable(options); + } + this._writableState = new WritableState(options, this); -/***/ }), + // legacy. + this.writable = true; -/***/ 56912: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (options) { + if (typeof options.write === 'function') this._write = options.write; -var mapCacheClear = __nccwpck_require__(50248), - mapCacheDelete = __nccwpck_require__(53775), - mapCacheGet = __nccwpck_require__(40330), - mapCacheHas = __nccwpck_require__(36592), - mapCacheSet = __nccwpck_require__(31233); + if (typeof options.writev === 'function') this._writev = options.writev; -/** - * Creates a map cache object to store key-value pairs. - * - * @private - * @constructor - * @param {Array} [entries] The key-value pairs to cache. - */ -function MapCache(entries) { - var index = -1, - length = entries == null ? 0 : entries.length; + if (typeof options.destroy === 'function') this._destroy = options.destroy; - this.clear(); - while (++index < length) { - var entry = entries[index]; - this.set(entry[0], entry[1]); + if (typeof options.final === 'function') this._final = options.final; } -} - -// Add methods to `MapCache`. -MapCache.prototype.clear = mapCacheClear; -MapCache.prototype['delete'] = mapCacheDelete; -MapCache.prototype.get = mapCacheGet; -MapCache.prototype.has = mapCacheHas; -MapCache.prototype.set = mapCacheSet; -module.exports = MapCache; - - -/***/ }), + Stream.call(this); +} -/***/ 109: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + this.emit('error', new Error('Cannot pipe, not readable')); +}; -var getNative = __nccwpck_require__(92682), - root = __nccwpck_require__(47491); +function writeAfterEnd(stream, cb) { + var er = new Error('write after end'); + // TODO: defer error events consistently everywhere, not just the cb + stream.emit('error', er); + pna.nextTick(cb, er); +} -/* Built-in method references that are verified to be native. 
*/ -var Set = getNative(root, 'Set'); +// Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. +function validChunk(stream, state, chunk, cb) { + var valid = true; + var er = false; -module.exports = Set; + if (chunk === null) { + er = new TypeError('May not write null values to stream'); + } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + if (er) { + stream.emit('error', er); + pna.nextTick(cb, er); + valid = false; + } + return valid; +} +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + var isBuf = !state.objectMode && _isUint8Array(chunk); -/***/ }), + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } -/***/ 40249: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } -var MapCache = __nccwpck_require__(56912), - setCacheAdd = __nccwpck_require__(30227), - setCacheHas = __nccwpck_require__(84707); + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; -/** - * - * Creates an array cache object to store unique values. - * - * @private - * @constructor - * @param {Array} [values] The values to cache. - */ -function SetCache(values) { - var index = -1, - length = values == null ? 0 : values.length; + if (typeof cb !== 'function') cb = nop; - this.__data__ = new MapCache; - while (++index < length) { - this.add(values[index]); + if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); } -} - -// Add methods to `SetCache`. -SetCache.prototype.add = SetCache.prototype.push = setCacheAdd; -SetCache.prototype.has = setCacheHas; -module.exports = SetCache; + return ret; +}; +Writable.prototype.cork = function () { + var state = this._writableState; -/***/ }), + state.corked++; +}; -/***/ 23207: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +Writable.prototype.uncork = function () { + var state = this._writableState; -var root = __nccwpck_require__(47491); + if (state.corked) { + state.corked--; -/** Built-in value references. */ -var Symbol = root.Symbol; - -module.exports = Symbol; + if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. 
+ if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; -/***/ }), +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + return chunk; +} -/***/ 38698: -/***/ ((module) => { +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; + } +}); -/** - * A faster alternative to `Function#apply`, this function invokes `func` - * with the `this` binding of `thisArg` and the arguments of `args`. - * - * @private - * @param {Function} func The function to invoke. - * @param {*} thisArg The `this` binding of `func`. - * @param {Array} args The arguments to invoke `func` with. - * @returns {*} Returns the result of `func`. - */ -function apply(func, thisArg, args) { - switch (args.length) { - case 0: return func.call(thisArg); - case 1: return func.call(thisArg, args[0]); - case 2: return func.call(thisArg, args[0], args[1]); - case 3: return func.call(thisArg, args[0], args[1], args[2]); +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } } - return func.apply(thisArg, args); -} + var len = state.objectMode ? 1 : chunk.length; -module.exports = apply; + state.length += len; + + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. + if (!ret) state.needDrain = true; + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } -/***/ }), + return ret; +} -/***/ 55621: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} -var baseIndexOf = __nccwpck_require__(80494); +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; -/** - * A specialized version of `_.includes` for arrays without support for - * specifying an index to search from. - * - * @private - * @param {Array} [array] The array to inspect. - * @param {*} target The value to search for. - * @returns {boolean} Returns `true` if `target` is found, else `false`. 
- */ -function arrayIncludes(array, value) { - var length = array == null ? 0 : array.length; - return !!length && baseIndexOf(array, value, 0) > -1; + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + pna.nextTick(cb, er); + // this can emit finish, and it will always happen + // after error + pna.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + // this can emit finish, but finish must + // always follow error + finishMaybe(stream, state); + } } -module.exports = arrayIncludes; +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; -/***/ }), + onwriteStateUpdate(state); -/***/ 12086: -/***/ ((module) => { + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state); -/** - * This function is like `arrayIncludes` except that it accepts a comparator. - * - * @private - * @param {Array} [array] The array to inspect. - * @param {*} target The value to search for. - * @param {Function} comparator The comparator invoked per element. - * @returns {boolean} Returns `true` if `target` is found, else `false`. - */ -function arrayIncludesWith(array, value, comparator) { - var index = -1, - length = array == null ? 0 : array.length; + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } - while (++index < length) { - if (comparator(value, array[index])) { - return true; + if (sync) { + /**/ + asyncWrite(afterWrite, stream, state, finished, cb); + /**/ + } else { + afterWrite(stream, state, finished, cb); } } - return false; } -module.exports = arrayIncludesWith; - +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} -/***/ }), +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. +function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} -/***/ 50822: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; -var baseTimes = __nccwpck_require__(69166), - isArguments = __nccwpck_require__(8893), - isArray = __nccwpck_require__(72332), - isBuffer = __nccwpck_require__(51538), - isIndex = __nccwpck_require__(10040), - isTypedArray = __nccwpck_require__(41350); + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; -/** Used for built-in method references. 
*/ -var objectProto = Object.prototype; + var count = 0; + var allBuffers = true; + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + buffer.allBuffers = allBuffers; -/** Used to check objects for own properties. */ -var hasOwnProperty = objectProto.hasOwnProperty; + doWrite(stream, state, true, state.length, buffer, '', holder.finish); -/** - * Creates an array of the enumerable property names of the array-like `value`. - * - * @private - * @param {*} value The value to query. - * @param {boolean} inherited Specify returning inherited property names. - * @returns {Array} Returns the array of property names. - */ -function arrayLikeKeys(value, inherited) { - var isArr = isArray(value), - isArg = !isArr && isArguments(value), - isBuff = !isArr && !isArg && isBuffer(value), - isType = !isArr && !isArg && !isBuff && isTypedArray(value), - skipIndexes = isArr || isArg || isBuff || isType, - result = skipIndexes ? baseTimes(value.length, String) : [], - length = result.length; + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; - for (var key in value) { - if ((inherited || hasOwnProperty.call(value, key)) && - !(skipIndexes && ( - // Safari 9 has enumerable `arguments.length` in strict mode. - key == 'length' || - // Node.js 0.10 has enumerable non-index properties on buffers. - (isBuff && (key == 'offset' || key == 'parent')) || - // PhantomJS 2 has enumerable non-index properties on typed arrays. - (isType && (key == 'buffer' || key == 'byteLength' || key == 'byteOffset')) || - // Skip index properties. - isIndex(key, length) - ))) { - result.push(key); + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + if (state.writing) { + break; + } } - } - return result; -} -module.exports = arrayLikeKeys; + if (entry === null) state.lastBufferedRequest = null; + } + state.bufferedRequest = entry; + state.bufferProcessing = false; +} -/***/ }), +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new Error('_write() is not implemented')); +}; -/***/ 43611: -/***/ ((module) => { +Writable.prototype._writev = null; -/** - * A specialized version of `_.map` for arrays without support for iteratee - * shorthands. - * - * @private - * @param {Array} [array] The array to iterate over. - * @param {Function} iteratee The function invoked per iteration. - * @returns {Array} Returns the new mapped array. - */ -function arrayMap(array, iteratee) { - var index = -1, - length = array == null ? 
0 : array.length, - result = Array(length); +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; - while (++index < length) { - result[index] = iteratee(array[index], index, array); + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; } - return result; -} - -module.exports = arrayMap; - -/***/ }), + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); -/***/ 98299: -/***/ ((module) => { + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } -/** - * Appends the elements of `values` to `array`. - * - * @private - * @param {Array} array The array to modify. - * @param {Array} values The values to append. - * @returns {Array} Returns `array`. - */ -function arrayPush(array, values) { - var index = -1, - length = values.length, - offset = array.length; + // ignore unnecessary end() calls. + if (!state.ending) endWritable(this, state, cb); +}; - while (++index < length) { - array[offset + index] = values[index]; +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + if (err) { + stream.emit('error', err); + } + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function') { + state.pendingcb++; + state.finalCalled = true; + pna.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } } - return array; } -module.exports = arrayPush; - +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + prefinish(stream, state); + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + } + } + return need; +} -/***/ }), +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) pna.nextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; +} -/***/ 96310: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } -var eq = __nccwpck_require__(13426); + // reuse the free corkReq. + state.corkedRequestsFree.next = corkReq; +} -/** - * Gets the index at which the `key` is found in `array` of key-value pairs. - * - * @private - * @param {Array} array The array to inspect. - * @param {*} key The key to search for. - * @returns {number} Returns the index of the matched value, else `-1`. 
- */ -function assocIndexOf(array, key) { - var length = array.length; - while (length--) { - if (eq(array[length][0], key)) { - return length; +Object.defineProperty(Writable.prototype, 'destroyed', { + get: function () { + if (this._writableState === undefined) { + return false; + } + return this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; } - } - return -1; -} -module.exports = assocIndexOf; + // backward compatibility, the user is explicitly + // managing destroyed + this._writableState.destroyed = value; + } +}); +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; +Writable.prototype._destroy = function (err, cb) { + this.end(); + cb(err); +}; /***/ }), -/***/ 38139: +/***/ 39215: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var SetCache = __nccwpck_require__(40249), - arrayIncludes = __nccwpck_require__(55621), - arrayIncludesWith = __nccwpck_require__(12086), - arrayMap = __nccwpck_require__(43611), - baseUnary = __nccwpck_require__(65541), - cacheHas = __nccwpck_require__(74182); +"use strict"; -/** Used as the size to enable large array optimizations. */ -var LARGE_ARRAY_SIZE = 200; -/** - * The base implementation of methods like `_.difference` without support - * for excluding multiple arrays or iteratee shorthands. - * - * @private - * @param {Array} array The array to inspect. - * @param {Array} values The values to exclude. - * @param {Function} [iteratee] The iteratee invoked per element. - * @param {Function} [comparator] The comparator invoked per element. - * @returns {Array} Returns the new array of filtered values. - */ -function baseDifference(array, values, iteratee, comparator) { - var index = -1, - includes = arrayIncludes, - isCommon = true, - length = array.length, - result = [], - valuesLength = values.length; +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } - if (!length) { - return result; - } - if (iteratee) { - values = arrayMap(values, baseUnary(iteratee)); - } - if (comparator) { - includes = arrayIncludesWith; - isCommon = false; - } - else if (values.length >= LARGE_ARRAY_SIZE) { - includes = cacheHas; - isCommon = false; - values = new SetCache(values); - } - outer: - while (++index < length) { - var value = array[index], - computed = iteratee == null ? value : iteratee(value); +var Buffer = (__nccwpck_require__(51594).Buffer); +var util = __nccwpck_require__(73837); - value = (comparator || value !== 0) ? 
value : 0; - if (isCommon && computed === computed) { - var valuesIndex = valuesLength; - while (valuesIndex--) { - if (values[valuesIndex] === computed) { - continue outer; - } - } - result.push(value); - } - else if (!includes(values, computed, comparator)) { - result.push(value); - } - } - return result; +function copyBuffer(src, target, offset) { + src.copy(target, offset); } -module.exports = baseDifference; - +module.exports = function () { + function BufferList() { + _classCallCheck(this, BufferList); -/***/ }), + this.head = null; + this.tail = null; + this.length = 0; + } -/***/ 23789: -/***/ ((module) => { + BufferList.prototype.push = function push(v) { + var entry = { data: v, next: null }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + }; -/** - * The base implementation of `_.findIndex` and `_.findLastIndex` without - * support for iteratee shorthands. - * - * @private - * @param {Array} array The array to inspect. - * @param {Function} predicate The function invoked per iteration. - * @param {number} fromIndex The index to search from. - * @param {boolean} [fromRight] Specify iterating from right to left. - * @returns {number} Returns the index of the matched value, else `-1`. - */ -function baseFindIndex(array, predicate, fromIndex, fromRight) { - var length = array.length, - index = fromIndex + (fromRight ? 1 : -1); + BufferList.prototype.unshift = function unshift(v) { + var entry = { data: v, next: this.head }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + }; - while ((fromRight ? index-- : ++index < length)) { - if (predicate(array[index], index, array)) { - return index; + BufferList.prototype.shift = function shift() { + if (this.length === 0) return; + var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + }; + + BufferList.prototype.clear = function clear() { + this.head = this.tail = null; + this.length = 0; + }; + + BufferList.prototype.join = function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) { + ret += s + p.data; + }return ret; + }; + + BufferList.prototype.concat = function concat(n) { + if (this.length === 0) return Buffer.alloc(0); + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; } - } - return -1; -} + return ret; + }; -module.exports = baseFindIndex; + return BufferList; +}(); +if (util && util.inspect && util.inspect.custom) { + module.exports.prototype[util.inspect.custom] = function () { + var obj = util.inspect({ length: this.length }); + return this.constructor.name + ' ' + obj; + }; +} /***/ }), -/***/ 65209: +/***/ 45136: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var arrayPush = __nccwpck_require__(98299), - isFlattenable = __nccwpck_require__(60441); +"use strict"; -/** - * The base implementation of `_.flatten` with support for restricting flattening. - * - * @private - * @param {Array} array The array to flatten. - * @param {number} depth The maximum recursion depth. - * @param {boolean} [predicate=isFlattenable] The function invoked per iteration. - * @param {boolean} [isStrict] Restrict to values that pass `predicate` checks. - * @param {Array} [result=[]] The initial result value. - * @returns {Array} Returns the new flattened array. 
- */ -function baseFlatten(array, depth, predicate, isStrict, result) { - var index = -1, - length = array.length; - predicate || (predicate = isFlattenable); - result || (result = []); +/**/ - while (++index < length) { - var value = array[index]; - if (depth > 0 && predicate(value)) { - if (depth > 1) { - // Recursively flatten arrays (susceptible to call stack limits). - baseFlatten(value, depth - 1, predicate, isStrict, result); - } else { - arrayPush(result, value); +var pna = __nccwpck_require__(22927); +/**/ + +// undocumented cb() API, needed for core, not for public API +function destroy(err, cb) { + var _this = this; + + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; + + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err) { + if (!this._writableState) { + pna.nextTick(emitErrorNT, this, err); + } else if (!this._writableState.errorEmitted) { + this._writableState.errorEmitted = true; + pna.nextTick(emitErrorNT, this, err); } - } else if (!isStrict) { - result[result.length] = value; } - } - return result; -} -module.exports = baseFlatten; + return this; + } + // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks -/***/ }), + if (this._readableState) { + this._readableState.destroyed = true; + } -/***/ 9535: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // if this is a duplex stream mark the writable part as destroyed as well + if (this._writableState) { + this._writableState.destroyed = true; + } -var Symbol = __nccwpck_require__(23207), - getRawTag = __nccwpck_require__(31299), - objectToString = __nccwpck_require__(3945); + this._destroy(err || null, function (err) { + if (!cb && err) { + if (!_this._writableState) { + pna.nextTick(emitErrorNT, _this, err); + } else if (!_this._writableState.errorEmitted) { + _this._writableState.errorEmitted = true; + pna.nextTick(emitErrorNT, _this, err); + } + } else if (cb) { + cb(err); + } + }); -/** `Object#toString` result references. */ -var nullTag = '[object Null]', - undefinedTag = '[object Undefined]'; + return this; +} -/** Built-in value references. */ -var symToStringTag = Symbol ? Symbol.toStringTag : undefined; +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } -/** - * The base implementation of `getTag` without fallbacks for buggy environments. - * - * @private - * @param {*} value The value to query. - * @returns {string} Returns the `toStringTag`. - */ -function baseGetTag(value) { - if (value == null) { - return value === undefined ? undefinedTag : nullTag; + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finalCalled = false; + this._writableState.prefinished = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; } - return (symToStringTag && symToStringTag in Object(value)) - ? 
getRawTag(value) - : objectToString(value); } -module.exports = baseGetTag; +function emitErrorNT(self, err) { + self.emit('error', err); +} +module.exports = { + destroy: destroy, + undestroy: undestroy +}; /***/ }), -/***/ 80494: +/***/ 99737: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var baseFindIndex = __nccwpck_require__(23789), - baseIsNaN = __nccwpck_require__(66653), - strictIndexOf = __nccwpck_require__(8802); - -/** - * The base implementation of `_.indexOf` without `fromIndex` bounds checks. - * - * @private - * @param {Array} array The array to inspect. - * @param {*} value The value to search for. - * @param {number} fromIndex The index to search from. - * @returns {number} Returns the index of the matched value, else `-1`. - */ -function baseIndexOf(array, value, fromIndex) { - return value === value - ? strictIndexOf(array, value, fromIndex) - : baseFindIndex(array, baseIsNaN, fromIndex); -} - -module.exports = baseIndexOf; +module.exports = __nccwpck_require__(12781); /***/ }), -/***/ 67701: +/***/ 12972: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var baseGetTag = __nccwpck_require__(9535), - isObjectLike = __nccwpck_require__(42981); - -/** `Object#toString` result references. */ -var argsTag = '[object Arguments]'; - -/** - * The base implementation of `_.isArguments`. - * - * @private - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is an `arguments` object, - */ -function baseIsArguments(value) { - return isObjectLike(value) && baseGetTag(value) == argsTag; -} - -module.exports = baseIsArguments; +module.exports = __nccwpck_require__(52024).PassThrough /***/ }), -/***/ 66653: -/***/ ((module) => { +/***/ 52024: +/***/ ((module, exports, __nccwpck_require__) => { -/** - * The base implementation of `_.isNaN` without support for number objects. - * - * @private - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is `NaN`, else `false`. - */ -function baseIsNaN(value) { - return value !== value; +var Stream = __nccwpck_require__(12781); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream; + exports = module.exports = Stream.Readable; + exports.Readable = Stream.Readable; + exports.Writable = Stream.Writable; + exports.Duplex = Stream.Duplex; + exports.Transform = Stream.Transform; + exports.PassThrough = Stream.PassThrough; + exports.Stream = Stream; +} else { + exports = module.exports = __nccwpck_require__(59018); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = __nccwpck_require__(42359); + exports.Duplex = __nccwpck_require__(32295); + exports.Transform = __nccwpck_require__(53183); + exports.PassThrough = __nccwpck_require__(46969); } -module.exports = baseIsNaN; - /***/ }), -/***/ 45065: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var isFunction = __nccwpck_require__(88286), - isMasked = __nccwpck_require__(11598), - isObject = __nccwpck_require__(69438), - toSource = __nccwpck_require__(12640); - -/** - * Used to match `RegExp` - * [syntax characters](http://ecma-international.org/ecma-262/7.0/#sec-patterns). - */ -var reRegExpChar = /[\\^$.*+?()[\]{}|]/g; - -/** Used to detect host constructors (Safari). */ -var reIsHostCtor = /^\[object .+?Constructor\]$/; - -/** Used for built-in method references. */ -var funcProto = Function.prototype, - objectProto = Object.prototype; - -/** Used to resolve the decompiled source of functions. 
*/ -var funcToString = funcProto.toString; - -/** Used to check objects for own properties. */ -var hasOwnProperty = objectProto.hasOwnProperty; +/***/ 51594: +/***/ ((module, exports, __nccwpck_require__) => { -/** Used to detect if a method is native. */ -var reIsNative = RegExp('^' + - funcToString.call(hasOwnProperty).replace(reRegExpChar, '\\$&') - .replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, '$1.*?') + '$' -); +/* eslint-disable node/no-deprecated-api */ +var buffer = __nccwpck_require__(14300) +var Buffer = buffer.Buffer -/** - * The base implementation of `_.isNative` without bad shim checks. - * - * @private - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a native function, - * else `false`. - */ -function baseIsNative(value) { - if (!isObject(value) || isMasked(value)) { - return false; +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] } - var pattern = isFunction(value) ? reIsNative : reIsHostCtor; - return pattern.test(toSource(value)); } - -module.exports = baseIsNative; - - -/***/ }), - -/***/ 97458: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var baseGetTag = __nccwpck_require__(9535), - isLength = __nccwpck_require__(20383), - isObjectLike = __nccwpck_require__(42981); - -/** `Object#toString` result references. */ -var argsTag = '[object Arguments]', - arrayTag = '[object Array]', - boolTag = '[object Boolean]', - dateTag = '[object Date]', - errorTag = '[object Error]', - funcTag = '[object Function]', - mapTag = '[object Map]', - numberTag = '[object Number]', - objectTag = '[object Object]', - regexpTag = '[object RegExp]', - setTag = '[object Set]', - stringTag = '[object String]', - weakMapTag = '[object WeakMap]'; - -var arrayBufferTag = '[object ArrayBuffer]', - dataViewTag = '[object DataView]', - float32Tag = '[object Float32Array]', - float64Tag = '[object Float64Array]', - int8Tag = '[object Int8Array]', - int16Tag = '[object Int16Array]', - int32Tag = '[object Int32Array]', - uint8Tag = '[object Uint8Array]', - uint8ClampedTag = '[object Uint8ClampedArray]', - uint16Tag = '[object Uint16Array]', - uint32Tag = '[object Uint32Array]'; - -/** Used to identify `toStringTag` values of typed arrays. */ -var typedArrayTags = {}; -typedArrayTags[float32Tag] = typedArrayTags[float64Tag] = -typedArrayTags[int8Tag] = typedArrayTags[int16Tag] = -typedArrayTags[int32Tag] = typedArrayTags[uint8Tag] = -typedArrayTags[uint8ClampedTag] = typedArrayTags[uint16Tag] = -typedArrayTags[uint32Tag] = true; -typedArrayTags[argsTag] = typedArrayTags[arrayTag] = -typedArrayTags[arrayBufferTag] = typedArrayTags[boolTag] = -typedArrayTags[dataViewTag] = typedArrayTags[dateTag] = -typedArrayTags[errorTag] = typedArrayTags[funcTag] = -typedArrayTags[mapTag] = typedArrayTags[numberTag] = -typedArrayTags[objectTag] = typedArrayTags[regexpTag] = -typedArrayTags[setTag] = typedArrayTags[stringTag] = -typedArrayTags[weakMapTag] = false; - -/** - * The base implementation of `_.isTypedArray` without Node.js optimizations. - * - * @private - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a typed array, else `false`. 
- */ -function baseIsTypedArray(value) { - return isObjectLike(value) && - isLength(value.length) && !!typedArrayTags[baseGetTag(value)]; +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer } -module.exports = baseIsTypedArray; - - -/***/ }), - -/***/ 43011: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var isObject = __nccwpck_require__(69438), - isPrototype = __nccwpck_require__(30084), - nativeKeysIn = __nccwpck_require__(94330); - -/** Used for built-in method references. */ -var objectProto = Object.prototype; +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} -/** Used to check objects for own properties. */ -var hasOwnProperty = objectProto.hasOwnProperty; +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) -/** - * The base implementation of `_.keysIn` which doesn't treat sparse arrays as dense. - * - * @private - * @param {Object} object The object to query. - * @returns {Array} Returns the array of property names. - */ -function baseKeysIn(object) { - if (!isObject(object)) { - return nativeKeysIn(object); +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') } - var isProto = isPrototype(object), - result = []; + return Buffer(arg, encodingOrOffset, length) +} - for (var key in object) { - if (!(key == 'constructor' && (isProto || !hasOwnProperty.call(object, key)))) { - result.push(key); +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) } + } else { + buf.fill(0) } - return result; + return buf } -module.exports = baseKeysIn; - - -/***/ }), - -/***/ 35899: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var identity = __nccwpck_require__(15108), - overRest = __nccwpck_require__(70533), - setToString = __nccwpck_require__(56882); - -/** - * The base implementation of `_.rest` which doesn't validate or coerce arguments. - * - * @private - * @param {Function} func The function to apply a rest parameter to. - * @param {number} [start=func.length-1] The start position of the rest parameter. - * @returns {Function} Returns the new function. - */ -function baseRest(func, start) { - return setToString(overRest(func, start, identity), func + ''); +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) } -module.exports = baseRest; +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} /***/ }), -/***/ 44088: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 37386: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -var constant = __nccwpck_require__(68297), - defineProperty = __nccwpck_require__(97322), - identity = __nccwpck_require__(15108); +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. -/** - * The base implementation of `setToString` without support for hot loop shorting. - * - * @private - * @param {Function} func The function to modify. - * @param {Function} string The `toString` result. - * @returns {Function} Returns `func`. - */ -var baseSetToString = !defineProperty ? identity : function(func, string) { - return defineProperty(func, 'toString', { - 'configurable': true, - 'enumerable': false, - 'value': constant(string), - 'writable': true - }); -}; -module.exports = baseSetToString; +/**/ -/***/ }), +var Buffer = (__nccwpck_require__(51594).Buffer); +/**/ -/***/ 69166: -/***/ ((module) => { +var isEncoding = Buffer.isEncoding || function (encoding) { + encoding = '' + encoding; + switch (encoding && encoding.toLowerCase()) { + case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': + return true; + default: + return false; + } +}; -/** - * The base implementation of `_.times` without support for iteratee shorthands - * or max array length checks. - * - * @private - * @param {number} n The number of times to invoke `iteratee`. - * @param {Function} iteratee The function invoked per iteration. - * @returns {Array} Returns the array of results. - */ -function baseTimes(n, iteratee) { - var index = -1, - result = Array(n); +function _normalizeEncoding(enc) { + if (!enc) return 'utf8'; + var retried; + while (true) { + switch (enc) { + case 'utf8': + case 'utf-8': + return 'utf8'; + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return 'utf16le'; + case 'latin1': + case 'binary': + return 'latin1'; + case 'base64': + case 'ascii': + case 'hex': + return enc; + default: + if (retried) return; // undefined + enc = ('' + enc).toLowerCase(); + retried = true; + } + } +}; - while (++index < n) { - result[index] = iteratee(index); +// Do not cache `Buffer.isEncoding` when checking encoding names as some +// modules monkey-patch it to support additional encodings +function normalizeEncoding(enc) { + var nenc = _normalizeEncoding(enc); + if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); + return nenc || enc; +} + +// StringDecoder provides an interface for efficiently splitting a series of +// buffers into a series of JS strings without breaking apart multi-byte +// characters. 
+exports.s = StringDecoder; +function StringDecoder(encoding) { + this.encoding = normalizeEncoding(encoding); + var nb; + switch (this.encoding) { + case 'utf16le': + this.text = utf16Text; + this.end = utf16End; + nb = 4; + break; + case 'utf8': + this.fillLast = utf8FillLast; + nb = 4; + break; + case 'base64': + this.text = base64Text; + this.end = base64End; + nb = 3; + break; + default: + this.write = simpleWrite; + this.end = simpleEnd; + return; } - return result; + this.lastNeed = 0; + this.lastTotal = 0; + this.lastChar = Buffer.allocUnsafe(nb); } -module.exports = baseTimes; +StringDecoder.prototype.write = function (buf) { + if (buf.length === 0) return ''; + var r; + var i; + if (this.lastNeed) { + r = this.fillLast(buf); + if (r === undefined) return ''; + i = this.lastNeed; + this.lastNeed = 0; + } else { + i = 0; + } + if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); + return r || ''; +}; +StringDecoder.prototype.end = utf8End; -/***/ }), +// Returns only complete characters in a Buffer +StringDecoder.prototype.text = utf8Text; -/***/ 65541: -/***/ ((module) => { +// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer +StringDecoder.prototype.fillLast = function (buf) { + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); + this.lastNeed -= buf.length; +}; -/** - * The base implementation of `_.unary` without support for storing metadata. - * - * @private - * @param {Function} func The function to cap arguments for. - * @returns {Function} Returns the new capped function. - */ -function baseUnary(func) { - return function(value) { - return func(value); - }; +// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a +// continuation byte. If an invalid byte is detected, -2 is returned. +function utf8CheckByte(byte) { + if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; + return byte >> 6 === 0x02 ? -1 : -2; } -module.exports = baseUnary; +// Checks at most 3 bytes at the end of a Buffer in order to detect an +// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) +// needed to complete the UTF-8 character (if applicable) are returned. +function utf8CheckIncomplete(self, buf, i) { + var j = buf.length - 1; + if (j < i) return 0; + var nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 1; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 2; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) { + if (nb === 2) nb = 0;else self.lastNeed = nb - 3; + } + return nb; + } + return 0; +} + +// Validates as many continuation bytes for a multi-byte UTF-8 character as +// needed or are available. If we see a non-continuation byte where we expect +// one, we "replace" the validated continuation bytes we've seen so far with +// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding +// behavior. The continuation byte check is included three times in the case +// where all of the continuation bytes for a character exist in the same buffer. 
+// It is also done this way as a slight performance increase instead of using a +// loop. +function utf8CheckExtraBytes(self, buf, p) { + if ((buf[0] & 0xC0) !== 0x80) { + self.lastNeed = 0; + return '\ufffd'; + } + if (self.lastNeed > 1 && buf.length > 1) { + if ((buf[1] & 0xC0) !== 0x80) { + self.lastNeed = 1; + return '\ufffd'; + } + if (self.lastNeed > 2 && buf.length > 2) { + if ((buf[2] & 0xC0) !== 0x80) { + self.lastNeed = 2; + return '\ufffd'; + } + } + } +} + +// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. +function utf8FillLast(buf) { + var p = this.lastTotal - this.lastNeed; + var r = utf8CheckExtraBytes(this, buf, p); + if (r !== undefined) return r; + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, p, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, p, 0, buf.length); + this.lastNeed -= buf.length; +} + +// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a +// partial character, the character's bytes are buffered until the required +// number of bytes are available. +function utf8Text(buf, i) { + var total = utf8CheckIncomplete(this, buf, i); + if (!this.lastNeed) return buf.toString('utf8', i); + this.lastTotal = total; + var end = buf.length - (total - this.lastNeed); + buf.copy(this.lastChar, 0, end); + return buf.toString('utf8', i, end); +} + +// For UTF-8, a replacement character is added when ending on a partial +// character. +function utf8End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + '\ufffd'; + return r; +} + +// UTF-16LE typically needs two bytes per character, but even if we have an even +// number of bytes available, we need to check if we end on a leading/high +// surrogate. In that case, we need to wait for the next two bytes in order to +// decode the last character properly. +function utf16Text(buf, i) { + if ((buf.length - i) % 2 === 0) { + var r = buf.toString('utf16le', i); + if (r) { + var c = r.charCodeAt(r.length - 1); + if (c >= 0xD800 && c <= 0xDBFF) { + this.lastNeed = 2; + this.lastTotal = 4; + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + return r.slice(0, -1); + } + } + return r; + } + this.lastNeed = 1; + this.lastTotal = 2; + this.lastChar[0] = buf[buf.length - 1]; + return buf.toString('utf16le', i, buf.length - 1); +} + +// For UTF-16LE we do not explicitly append special replacement characters if we +// end on a partial character, we simply let v8 handle that. +function utf16End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) { + var end = this.lastTotal - this.lastNeed; + return r + this.lastChar.toString('utf16le', 0, end); + } + return r; +} + +function base64Text(buf, i) { + var n = (buf.length - i) % 3; + if (n === 0) return buf.toString('base64', i); + this.lastNeed = 3 - n; + this.lastTotal = 3; + if (n === 1) { + this.lastChar[0] = buf[buf.length - 1]; + } else { + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + } + return buf.toString('base64', i, buf.length - n); +} + +function base64End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); + return r; +} + +// Pass bytes on through for single-byte encodings (e.g. 
ascii, latin1, hex) +function simpleWrite(buf) { + return buf.toString(this.encoding); +} +function simpleEnd(buf) { + return buf && buf.length ? this.write(buf) : ''; +} /***/ }), -/***/ 16947: +/***/ 43872: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var SetCache = __nccwpck_require__(40249), - arrayIncludes = __nccwpck_require__(55621), - arrayIncludesWith = __nccwpck_require__(12086), - cacheHas = __nccwpck_require__(74182), - createSet = __nccwpck_require__(27552), - setToArray = __nccwpck_require__(350); - -/** Used as the size to enable large array optimizations. */ -var LARGE_ARRAY_SIZE = 200; +var hashClear = __nccwpck_require__(40925), + hashDelete = __nccwpck_require__(54404), + hashGet = __nccwpck_require__(20742), + hashHas = __nccwpck_require__(23701), + hashSet = __nccwpck_require__(58081); /** - * The base implementation of `_.uniqBy` without support for iteratee shorthands. + * Creates a hash object. * * @private - * @param {Array} array The array to inspect. - * @param {Function} [iteratee] The iteratee invoked per element. - * @param {Function} [comparator] The comparator invoked per element. - * @returns {Array} Returns the new duplicate free array. + * @constructor + * @param {Array} [entries] The key-value pairs to cache. */ -function baseUniq(array, iteratee, comparator) { +function Hash(entries) { var index = -1, - includes = arrayIncludes, - length = array.length, - isCommon = true, - result = [], - seen = result; + length = entries == null ? 0 : entries.length; - if (comparator) { - isCommon = false; - includes = arrayIncludesWith; - } - else if (length >= LARGE_ARRAY_SIZE) { - var set = iteratee ? null : createSet(array); - if (set) { - return setToArray(set); - } - isCommon = false; - includes = cacheHas; - seen = new SetCache; - } - else { - seen = iteratee ? [] : result; - } - outer: + this.clear(); while (++index < length) { - var value = array[index], - computed = iteratee ? iteratee(value) : value; - - value = (comparator || value !== 0) ? value : 0; - if (isCommon && computed === computed) { - var seenIndex = seen.length; - while (seenIndex--) { - if (seen[seenIndex] === computed) { - continue outer; - } - } - if (iteratee) { - seen.push(computed); - } - result.push(value); - } - else if (!includes(seen, computed, comparator)) { - if (seen !== result) { - seen.push(computed); - } - result.push(value); - } + var entry = entries[index]; + this.set(entry[0], entry[1]); } - return result; } -module.exports = baseUniq; +// Add methods to `Hash`. +Hash.prototype.clear = hashClear; +Hash.prototype['delete'] = hashDelete; +Hash.prototype.get = hashGet; +Hash.prototype.has = hashHas; +Hash.prototype.set = hashSet; + +module.exports = Hash; /***/ }), -/***/ 74182: -/***/ ((module) => { +/***/ 59059: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var listCacheClear = __nccwpck_require__(45317), + listCacheDelete = __nccwpck_require__(89205), + listCacheGet = __nccwpck_require__(22440), + listCacheHas = __nccwpck_require__(76144), + listCacheSet = __nccwpck_require__(67567); /** - * Checks if a `cache` value for `key` exists. + * Creates an list cache object. * * @private - * @param {Object} cache The cache to query. - * @param {string} key The key of the entry to check. - * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. + * @constructor + * @param {Array} [entries] The key-value pairs to cache. 
*/ -function cacheHas(cache, key) { - return cache.has(key); +function ListCache(entries) { + var index = -1, + length = entries == null ? 0 : entries.length; + + this.clear(); + while (++index < length) { + var entry = entries[index]; + this.set(entry[0], entry[1]); + } } -module.exports = cacheHas; +// Add methods to `ListCache`. +ListCache.prototype.clear = listCacheClear; +ListCache.prototype['delete'] = listCacheDelete; +ListCache.prototype.get = listCacheGet; +ListCache.prototype.has = listCacheHas; +ListCache.prototype.set = listCacheSet; + +module.exports = ListCache; /***/ }), -/***/ 77591: +/***/ 17445: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var root = __nccwpck_require__(47491); +var getNative = __nccwpck_require__(9596), + root = __nccwpck_require__(28748); -/** Used to detect overreaching core-js shims. */ -var coreJsData = root['__core-js_shared__']; +/* Built-in method references that are verified to be native. */ +var Map = getNative(root, 'Map'); -module.exports = coreJsData; +module.exports = Map; /***/ }), -/***/ 27552: +/***/ 10992: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var Set = __nccwpck_require__(109), - noop = __nccwpck_require__(51592), - setToArray = __nccwpck_require__(350); - -/** Used as references for various `Number` constants. */ -var INFINITY = 1 / 0; +var mapCacheClear = __nccwpck_require__(2503), + mapCacheDelete = __nccwpck_require__(18855), + mapCacheGet = __nccwpck_require__(68372), + mapCacheHas = __nccwpck_require__(56559), + mapCacheSet = __nccwpck_require__(38710); /** - * Creates a set object of `values`. + * Creates a map cache object to store key-value pairs. * * @private - * @param {Array} values The values to add to the set. - * @returns {Object} Returns the new set. + * @constructor + * @param {Array} [entries] The key-value pairs to cache. */ -var createSet = !(Set && (1 / setToArray(new Set([,-0]))[1]) == INFINITY) ? noop : function(values) { - return new Set(values); -}; - -module.exports = createSet; - - -/***/ }), - -/***/ 97322: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +function MapCache(entries) { + var index = -1, + length = entries == null ? 0 : entries.length; -var getNative = __nccwpck_require__(92682); + this.clear(); + while (++index < length) { + var entry = entries[index]; + this.set(entry[0], entry[1]); + } +} -var defineProperty = (function() { - try { - var func = getNative(Object, 'defineProperty'); - func({}, '', {}); - return func; - } catch (e) {} -}()); +// Add methods to `MapCache`. +MapCache.prototype.clear = mapCacheClear; +MapCache.prototype['delete'] = mapCacheDelete; +MapCache.prototype.get = mapCacheGet; +MapCache.prototype.has = mapCacheHas; +MapCache.prototype.set = mapCacheSet; -module.exports = defineProperty; +module.exports = MapCache; /***/ }), -/***/ 49312: -/***/ ((module) => { +/***/ 32684: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** Detect free variable `global` from Node.js. */ -var freeGlobal = typeof global == 'object' && global && global.Object === Object && global; +var getNative = __nccwpck_require__(9596), + root = __nccwpck_require__(28748); -module.exports = freeGlobal; +/* Built-in method references that are verified to be native. 
*/ +var Set = getNative(root, 'Set'); + +module.exports = Set; /***/ }), -/***/ 5497: +/***/ 41850: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var isKeyable = __nccwpck_require__(39524); +var MapCache = __nccwpck_require__(10992), + setCacheAdd = __nccwpck_require__(30223), + setCacheHas = __nccwpck_require__(69785); /** - * Gets the data for `map`. + * + * Creates an array cache object to store unique values. * * @private - * @param {Object} map The map to query. - * @param {string} key The reference key. - * @returns {*} Returns the map data. + * @constructor + * @param {Array} [values] The values to cache. */ -function getMapData(map, key) { - var data = map.__data__; - return isKeyable(key) - ? data[typeof key == 'string' ? 'string' : 'hash'] - : data.map; +function SetCache(values) { + var index = -1, + length = values == null ? 0 : values.length; + + this.__data__ = new MapCache; + while (++index < length) { + this.add(values[index]); + } } -module.exports = getMapData; +// Add methods to `SetCache`. +SetCache.prototype.add = SetCache.prototype.push = setCacheAdd; +SetCache.prototype.has = setCacheHas; + +module.exports = SetCache; /***/ }), -/***/ 92682: +/***/ 69707: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var baseIsNative = __nccwpck_require__(45065), - getValue = __nccwpck_require__(80328); +var root = __nccwpck_require__(28748); + +/** Built-in value references. */ +var Symbol = root.Symbol; + +module.exports = Symbol; + + +/***/ }), + +/***/ 92341: +/***/ ((module) => { /** - * Gets the native function at `key` of `object`. + * A faster alternative to `Function#apply`, this function invokes `func` + * with the `this` binding of `thisArg` and the arguments of `args`. * * @private - * @param {Object} object The object to query. - * @param {string} key The key of the method to get. - * @returns {*} Returns the function if it's native, else `undefined`. + * @param {Function} func The function to invoke. + * @param {*} thisArg The `this` binding of `func`. + * @param {Array} args The arguments to invoke `func` with. + * @returns {*} Returns the result of `func`. */ -function getNative(object, key) { - var value = getValue(object, key); - return baseIsNative(value) ? value : undefined; +function apply(func, thisArg, args) { + switch (args.length) { + case 0: return func.call(thisArg); + case 1: return func.call(thisArg, args[0]); + case 2: return func.call(thisArg, args[0], args[1]); + case 3: return func.call(thisArg, args[0], args[1], args[2]); + } + return func.apply(thisArg, args); } -module.exports = getNative; +module.exports = apply; /***/ }), -/***/ 77947: +/***/ 54118: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var overArg = __nccwpck_require__(85436); +var baseIndexOf = __nccwpck_require__(98450); -/** Built-in value references. */ -var getPrototype = overArg(Object.getPrototypeOf, Object); +/** + * A specialized version of `_.includes` for arrays without support for + * specifying an index to search from. + * + * @private + * @param {Array} [array] The array to inspect. + * @param {*} target The value to search for. + * @returns {boolean} Returns `true` if `target` is found, else `false`. + */ +function arrayIncludes(array, value) { + var length = array == null ? 
0 : array.length; + return !!length && baseIndexOf(array, value, 0) > -1; +} -module.exports = getPrototype; +module.exports = arrayIncludes; /***/ }), -/***/ 31299: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var Symbol = __nccwpck_require__(23207); - -/** Used for built-in method references. */ -var objectProto = Object.prototype; - -/** Used to check objects for own properties. */ -var hasOwnProperty = objectProto.hasOwnProperty; +/***/ 80804: +/***/ ((module) => { /** - * Used to resolve the - * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) - * of values. + * This function is like `arrayIncludes` except that it accepts a comparator. + * + * @private + * @param {Array} [array] The array to inspect. + * @param {*} target The value to search for. + * @param {Function} comparator The comparator invoked per element. + * @returns {boolean} Returns `true` if `target` is found, else `false`. */ -var nativeObjectToString = objectProto.toString; +function arrayIncludesWith(array, value, comparator) { + var index = -1, + length = array == null ? 0 : array.length; -/** Built-in value references. */ -var symToStringTag = Symbol ? Symbol.toStringTag : undefined; + while (++index < length) { + if (comparator(value, array[index])) { + return true; + } + } + return false; +} + +module.exports = arrayIncludesWith; + + +/***/ }), + +/***/ 16341: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var baseTimes = __nccwpck_require__(4374), + isArguments = __nccwpck_require__(470), + isArray = __nccwpck_require__(39261), + isBuffer = __nccwpck_require__(10648), + isIndex = __nccwpck_require__(77677), + isTypedArray = __nccwpck_require__(9056); + +/** Used for built-in method references. */ +var objectProto = Object.prototype; + +/** Used to check objects for own properties. */ +var hasOwnProperty = objectProto.hasOwnProperty; /** - * A specialized version of `baseGetTag` which ignores `Symbol.toStringTag` values. + * Creates an array of the enumerable property names of the array-like `value`. * * @private * @param {*} value The value to query. - * @returns {string} Returns the raw `toStringTag`. + * @param {boolean} inherited Specify returning inherited property names. + * @returns {Array} Returns the array of property names. */ -function getRawTag(value) { - var isOwn = hasOwnProperty.call(value, symToStringTag), - tag = value[symToStringTag]; - - try { - value[symToStringTag] = undefined; - var unmasked = true; - } catch (e) {} +function arrayLikeKeys(value, inherited) { + var isArr = isArray(value), + isArg = !isArr && isArguments(value), + isBuff = !isArr && !isArg && isBuffer(value), + isType = !isArr && !isArg && !isBuff && isTypedArray(value), + skipIndexes = isArr || isArg || isBuff || isType, + result = skipIndexes ? baseTimes(value.length, String) : [], + length = result.length; - var result = nativeObjectToString.call(value); - if (unmasked) { - if (isOwn) { - value[symToStringTag] = tag; - } else { - delete value[symToStringTag]; + for (var key in value) { + if ((inherited || hasOwnProperty.call(value, key)) && + !(skipIndexes && ( + // Safari 9 has enumerable `arguments.length` in strict mode. + key == 'length' || + // Node.js 0.10 has enumerable non-index properties on buffers. + (isBuff && (key == 'offset' || key == 'parent')) || + // PhantomJS 2 has enumerable non-index properties on typed arrays. 
+ (isType && (key == 'buffer' || key == 'byteLength' || key == 'byteOffset')) || + // Skip index properties. + isIndex(key, length) + ))) { + result.push(key); } } return result; } -module.exports = getRawTag; +module.exports = arrayLikeKeys; /***/ }), -/***/ 80328: +/***/ 2107: /***/ ((module) => { /** - * Gets the value at `key` of `object`. + * A specialized version of `_.map` for arrays without support for iteratee + * shorthands. * * @private - * @param {Object} [object] The object to query. - * @param {string} key The key of the property to get. - * @returns {*} Returns the property value. + * @param {Array} [array] The array to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Array} Returns the new mapped array. */ -function getValue(object, key) { - return object == null ? undefined : object[key]; +function arrayMap(array, iteratee) { + var index = -1, + length = array == null ? 0 : array.length, + result = Array(length); + + while (++index < length) { + result[index] = iteratee(array[index], index, array); + } + return result; } -module.exports = getValue; +module.exports = arrayMap; /***/ }), -/***/ 49514: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var nativeCreate = __nccwpck_require__(68503); +/***/ 74590: +/***/ ((module) => { /** - * Removes all key-value entries from the hash. + * Appends the elements of `values` to `array`. * * @private - * @name clear - * @memberOf Hash + * @param {Array} array The array to modify. + * @param {Array} values The values to append. + * @returns {Array} Returns `array`. */ -function hashClear() { - this.__data__ = nativeCreate ? nativeCreate(null) : {}; - this.size = 0; +function arrayPush(array, values) { + var index = -1, + length = values.length, + offset = array.length; + + while (++index < length) { + array[offset + index] = values[index]; + } + return array; } -module.exports = hashClear; +module.exports = arrayPush; /***/ }), -/***/ 53822: -/***/ ((module) => { +/***/ 89334: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var eq = __nccwpck_require__(36107); /** - * Removes `key` and its value from the hash. + * Gets the index at which the `key` is found in `array` of key-value pairs. * * @private - * @name delete - * @memberOf Hash - * @param {Object} hash The hash to modify. - * @param {string} key The key of the value to remove. - * @returns {boolean} Returns `true` if the entry was removed, else `false`. + * @param {Array} array The array to inspect. + * @param {*} key The key to search for. + * @returns {number} Returns the index of the matched value, else `-1`. */ -function hashDelete(key) { - var result = this.has(key) && delete this.__data__[key]; - this.size -= result ? 1 : 0; - return result; +function assocIndexOf(array, key) { + var length = array.length; + while (length--) { + if (eq(array[length][0], key)) { + return length; + } + } + return -1; } -module.exports = hashDelete; +module.exports = assocIndexOf; /***/ }), -/***/ 96275: +/***/ 64335: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var nativeCreate = __nccwpck_require__(68503); - -/** Used to stand-in for `undefined` hash values. */ -var HASH_UNDEFINED = '__lodash_hash_undefined__'; - -/** Used for built-in method references. 
*/ -var objectProto = Object.prototype; +var SetCache = __nccwpck_require__(41850), + arrayIncludes = __nccwpck_require__(54118), + arrayIncludesWith = __nccwpck_require__(80804), + arrayMap = __nccwpck_require__(2107), + baseUnary = __nccwpck_require__(76067), + cacheHas = __nccwpck_require__(6450); -/** Used to check objects for own properties. */ -var hasOwnProperty = objectProto.hasOwnProperty; +/** Used as the size to enable large array optimizations. */ +var LARGE_ARRAY_SIZE = 200; /** - * Gets the hash value for `key`. + * The base implementation of methods like `_.difference` without support + * for excluding multiple arrays or iteratee shorthands. * * @private - * @name get - * @memberOf Hash - * @param {string} key The key of the value to get. - * @returns {*} Returns the entry value. + * @param {Array} array The array to inspect. + * @param {Array} values The values to exclude. + * @param {Function} [iteratee] The iteratee invoked per element. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new array of filtered values. */ -function hashGet(key) { - var data = this.__data__; - if (nativeCreate) { - var result = data[key]; - return result === HASH_UNDEFINED ? undefined : result; +function baseDifference(array, values, iteratee, comparator) { + var index = -1, + includes = arrayIncludes, + isCommon = true, + length = array.length, + result = [], + valuesLength = values.length; + + if (!length) { + return result; } - return hasOwnProperty.call(data, key) ? data[key] : undefined; + if (iteratee) { + values = arrayMap(values, baseUnary(iteratee)); + } + if (comparator) { + includes = arrayIncludesWith; + isCommon = false; + } + else if (values.length >= LARGE_ARRAY_SIZE) { + includes = cacheHas; + isCommon = false; + values = new SetCache(values); + } + outer: + while (++index < length) { + var value = array[index], + computed = iteratee == null ? value : iteratee(value); + + value = (comparator || value !== 0) ? value : 0; + if (isCommon && computed === computed) { + var valuesIndex = valuesLength; + while (valuesIndex--) { + if (values[valuesIndex] === computed) { + continue outer; + } + } + result.push(value); + } + else if (!includes(values, computed, comparator)) { + result.push(value); + } + } + return result; } -module.exports = hashGet; +module.exports = baseDifference; /***/ }), -/***/ 26194: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var nativeCreate = __nccwpck_require__(68503); - -/** Used for built-in method references. */ -var objectProto = Object.prototype; - -/** Used to check objects for own properties. */ -var hasOwnProperty = objectProto.hasOwnProperty; +/***/ 33866: +/***/ ((module) => { /** - * Checks if a hash value for `key` exists. + * The base implementation of `_.findIndex` and `_.findLastIndex` without + * support for iteratee shorthands. * * @private - * @name has - * @memberOf Hash - * @param {string} key The key of the entry to check. - * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. + * @param {Array} array The array to inspect. + * @param {Function} predicate The function invoked per iteration. + * @param {number} fromIndex The index to search from. + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {number} Returns the index of the matched value, else `-1`. */ -function hashHas(key) { - var data = this.__data__; - return nativeCreate ? 
(data[key] !== undefined) : hasOwnProperty.call(data, key); +function baseFindIndex(array, predicate, fromIndex, fromRight) { + var length = array.length, + index = fromIndex + (fromRight ? 1 : -1); + + while ((fromRight ? index-- : ++index < length)) { + if (predicate(array[index], index, array)) { + return index; + } + } + return -1; } -module.exports = hashHas; +module.exports = baseFindIndex; /***/ }), -/***/ 24116: +/***/ 83985: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var nativeCreate = __nccwpck_require__(68503); - -/** Used to stand-in for `undefined` hash values. */ -var HASH_UNDEFINED = '__lodash_hash_undefined__'; +var arrayPush = __nccwpck_require__(74590), + isFlattenable = __nccwpck_require__(48637); /** - * Sets the hash `key` to `value`. + * The base implementation of `_.flatten` with support for restricting flattening. * * @private - * @name set - * @memberOf Hash - * @param {string} key The key of the value to set. - * @param {*} value The value to set. - * @returns {Object} Returns the hash instance. + * @param {Array} array The array to flatten. + * @param {number} depth The maximum recursion depth. + * @param {boolean} [predicate=isFlattenable] The function invoked per iteration. + * @param {boolean} [isStrict] Restrict to values that pass `predicate` checks. + * @param {Array} [result=[]] The initial result value. + * @returns {Array} Returns the new flattened array. */ -function hashSet(key, value) { - var data = this.__data__; - this.size += this.has(key) ? 0 : 1; - data[key] = (nativeCreate && value === undefined) ? HASH_UNDEFINED : value; - return this; +function baseFlatten(array, depth, predicate, isStrict, result) { + var index = -1, + length = array.length; + + predicate || (predicate = isFlattenable); + result || (result = []); + + while (++index < length) { + var value = array[index]; + if (depth > 0 && predicate(value)) { + if (depth > 1) { + // Recursively flatten arrays (susceptible to call stack limits). + baseFlatten(value, depth - 1, predicate, isStrict, result); + } else { + arrayPush(result, value); + } + } else if (!isStrict) { + result[result.length] = value; + } + } + return result; } -module.exports = hashSet; +module.exports = baseFlatten; /***/ }), -/***/ 60441: +/***/ 83958: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var Symbol = __nccwpck_require__(23207), - isArguments = __nccwpck_require__(8893), - isArray = __nccwpck_require__(72332); +var Symbol = __nccwpck_require__(69707), + getRawTag = __nccwpck_require__(55994), + objectToString = __nccwpck_require__(56163); + +/** `Object#toString` result references. */ +var nullTag = '[object Null]', + undefinedTag = '[object Undefined]'; /** Built-in value references. */ -var spreadableSymbol = Symbol ? Symbol.isConcatSpreadable : undefined; +var symToStringTag = Symbol ? Symbol.toStringTag : undefined; /** - * Checks if `value` is a flattenable `arguments` object or array. + * The base implementation of `getTag` without fallbacks for buggy environments. * * @private - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is flattenable, else `false`. + * @param {*} value The value to query. + * @returns {string} Returns the `toStringTag`. */ -function isFlattenable(value) { - return isArray(value) || isArguments(value) || - !!(spreadableSymbol && value && value[spreadableSymbol]); +function baseGetTag(value) { + if (value == null) { + return value === undefined ? 
undefinedTag : nullTag; + } + return (symToStringTag && symToStringTag in Object(value)) + ? getRawTag(value) + : objectToString(value); } -module.exports = isFlattenable; +module.exports = baseGetTag; /***/ }), -/***/ 10040: -/***/ ((module) => { - -/** Used as references for various `Number` constants. */ -var MAX_SAFE_INTEGER = 9007199254740991; +/***/ 98450: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** Used to detect unsigned integer values. */ -var reIsUint = /^(?:0|[1-9]\d*)$/; +var baseFindIndex = __nccwpck_require__(33866), + baseIsNaN = __nccwpck_require__(35316), + strictIndexOf = __nccwpck_require__(26764); /** - * Checks if `value` is a valid array-like index. + * The base implementation of `_.indexOf` without `fromIndex` bounds checks. * * @private - * @param {*} value The value to check. - * @param {number} [length=MAX_SAFE_INTEGER] The upper bounds of a valid index. - * @returns {boolean} Returns `true` if `value` is a valid index, else `false`. + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @param {number} fromIndex The index to search from. + * @returns {number} Returns the index of the matched value, else `-1`. */ -function isIndex(value, length) { - var type = typeof value; - length = length == null ? MAX_SAFE_INTEGER : length; - - return !!length && - (type == 'number' || - (type != 'symbol' && reIsUint.test(value))) && - (value > -1 && value % 1 == 0 && value < length); +function baseIndexOf(array, value, fromIndex) { + return value === value + ? strictIndexOf(array, value, fromIndex) + : baseFindIndex(array, baseIsNaN, fromIndex); } -module.exports = isIndex; +module.exports = baseIndexOf; /***/ }), -/***/ 14458: +/***/ 9228: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var eq = __nccwpck_require__(13426), - isArrayLike = __nccwpck_require__(90462), - isIndex = __nccwpck_require__(10040), - isObject = __nccwpck_require__(69438); +var baseGetTag = __nccwpck_require__(83958), + isObjectLike = __nccwpck_require__(50350); + +/** `Object#toString` result references. */ +var argsTag = '[object Arguments]'; /** - * Checks if the given arguments are from an iteratee call. + * The base implementation of `_.isArguments`. * * @private - * @param {*} value The potential iteratee value argument. - * @param {*} index The potential iteratee index or key argument. - * @param {*} object The potential iteratee object argument. - * @returns {boolean} Returns `true` if the arguments are from an iteratee call, - * else `false`. + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an `arguments` object, */ -function isIterateeCall(value, index, object) { - if (!isObject(object)) { - return false; - } - var type = typeof index; - if (type == 'number' - ? (isArrayLike(object) && isIndex(index, object.length)) - : (type == 'string' && index in object) - ) { - return eq(object[index], value); - } - return false; +function baseIsArguments(value) { + return isObjectLike(value) && baseGetTag(value) == argsTag; } -module.exports = isIterateeCall; +module.exports = baseIsArguments; /***/ }), -/***/ 39524: +/***/ 35316: /***/ ((module) => { /** - * Checks if `value` is suitable for use as unique object key. + * The base implementation of `_.isNaN` without support for number objects. * * @private * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is suitable, else `false`. 
+ * @returns {boolean} Returns `true` if `value` is `NaN`, else `false`. */ -function isKeyable(value) { - var type = typeof value; - return (type == 'string' || type == 'number' || type == 'symbol' || type == 'boolean') - ? (value !== '__proto__') - : (value === null); +function baseIsNaN(value) { + return value !== value; } -module.exports = isKeyable; +module.exports = baseIsNaN; /***/ }), -/***/ 11598: +/***/ 36681: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var coreJsData = __nccwpck_require__(77591); - -/** Used to detect methods masquerading as native. */ -var maskSrcKey = (function() { - var uid = /[^.]+$/.exec(coreJsData && coreJsData.keys && coreJsData.keys.IE_PROTO || ''); - return uid ? ('Symbol(src)_1.' + uid) : ''; -}()); +var isFunction = __nccwpck_require__(13033), + isMasked = __nccwpck_require__(10730), + isObject = __nccwpck_require__(21079), + toSource = __nccwpck_require__(32225); /** - * Checks if `func` has its source masked. - * - * @private - * @param {Function} func The function to check. - * @returns {boolean} Returns `true` if `func` is masked, else `false`. + * Used to match `RegExp` + * [syntax characters](http://ecma-international.org/ecma-262/7.0/#sec-patterns). */ -function isMasked(func) { - return !!maskSrcKey && (maskSrcKey in func); -} +var reRegExpChar = /[\\^$.*+?()[\]{}|]/g; -module.exports = isMasked; +/** Used to detect host constructors (Safari). */ +var reIsHostCtor = /^\[object .+?Constructor\]$/; +/** Used for built-in method references. */ +var funcProto = Function.prototype, + objectProto = Object.prototype; -/***/ }), +/** Used to resolve the decompiled source of functions. */ +var funcToString = funcProto.toString; -/***/ 30084: -/***/ ((module) => { +/** Used to check objects for own properties. */ +var hasOwnProperty = objectProto.hasOwnProperty; -/** Used for built-in method references. */ -var objectProto = Object.prototype; +/** Used to detect if a method is native. */ +var reIsNative = RegExp('^' + + funcToString.call(hasOwnProperty).replace(reRegExpChar, '\\$&') + .replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, '$1.*?') + '$' +); /** - * Checks if `value` is likely a prototype object. + * The base implementation of `_.isNative` without bad shim checks. * * @private * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a prototype, else `false`. + * @returns {boolean} Returns `true` if `value` is a native function, + * else `false`. */ -function isPrototype(value) { - var Ctor = value && value.constructor, - proto = (typeof Ctor == 'function' && Ctor.prototype) || objectProto; - - return value === proto; +function baseIsNative(value) { + if (!isObject(value) || isMasked(value)) { + return false; + } + var pattern = isFunction(value) ? reIsNative : reIsHostCtor; + return pattern.test(toSource(value)); } -module.exports = isPrototype; +module.exports = baseIsNative; /***/ }), -/***/ 61671: -/***/ ((module) => { +/***/ 38332: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var baseGetTag = __nccwpck_require__(83958), + isLength = __nccwpck_require__(45007), + isObjectLike = __nccwpck_require__(50350); + +/** `Object#toString` result references. 
*/ +var argsTag = '[object Arguments]', + arrayTag = '[object Array]', + boolTag = '[object Boolean]', + dateTag = '[object Date]', + errorTag = '[object Error]', + funcTag = '[object Function]', + mapTag = '[object Map]', + numberTag = '[object Number]', + objectTag = '[object Object]', + regexpTag = '[object RegExp]', + setTag = '[object Set]', + stringTag = '[object String]', + weakMapTag = '[object WeakMap]'; + +var arrayBufferTag = '[object ArrayBuffer]', + dataViewTag = '[object DataView]', + float32Tag = '[object Float32Array]', + float64Tag = '[object Float64Array]', + int8Tag = '[object Int8Array]', + int16Tag = '[object Int16Array]', + int32Tag = '[object Int32Array]', + uint8Tag = '[object Uint8Array]', + uint8ClampedTag = '[object Uint8ClampedArray]', + uint16Tag = '[object Uint16Array]', + uint32Tag = '[object Uint32Array]'; + +/** Used to identify `toStringTag` values of typed arrays. */ +var typedArrayTags = {}; +typedArrayTags[float32Tag] = typedArrayTags[float64Tag] = +typedArrayTags[int8Tag] = typedArrayTags[int16Tag] = +typedArrayTags[int32Tag] = typedArrayTags[uint8Tag] = +typedArrayTags[uint8ClampedTag] = typedArrayTags[uint16Tag] = +typedArrayTags[uint32Tag] = true; +typedArrayTags[argsTag] = typedArrayTags[arrayTag] = +typedArrayTags[arrayBufferTag] = typedArrayTags[boolTag] = +typedArrayTags[dataViewTag] = typedArrayTags[dateTag] = +typedArrayTags[errorTag] = typedArrayTags[funcTag] = +typedArrayTags[mapTag] = typedArrayTags[numberTag] = +typedArrayTags[objectTag] = typedArrayTags[regexpTag] = +typedArrayTags[setTag] = typedArrayTags[stringTag] = +typedArrayTags[weakMapTag] = false; /** - * Removes all key-value entries from the list cache. + * The base implementation of `_.isTypedArray` without Node.js optimizations. * * @private - * @name clear - * @memberOf ListCache + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a typed array, else `false`. */ -function listCacheClear() { - this.__data__ = []; - this.size = 0; +function baseIsTypedArray(value) { + return isObjectLike(value) && + isLength(value.length) && !!typedArrayTags[baseGetTag(value)]; } -module.exports = listCacheClear; +module.exports = baseIsTypedArray; /***/ }), -/***/ 93533: +/***/ 75027: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var assocIndexOf = __nccwpck_require__(96310); +var isObject = __nccwpck_require__(21079), + isPrototype = __nccwpck_require__(8430), + nativeKeysIn = __nccwpck_require__(44607); /** Used for built-in method references. */ -var arrayProto = Array.prototype; +var objectProto = Object.prototype; -/** Built-in value references. */ -var splice = arrayProto.splice; +/** Used to check objects for own properties. */ +var hasOwnProperty = objectProto.hasOwnProperty; /** - * Removes `key` and its value from the list cache. + * The base implementation of `_.keysIn` which doesn't treat sparse arrays as dense. * * @private - * @name delete - * @memberOf ListCache - * @param {string} key The key of the value to remove. - * @returns {boolean} Returns `true` if the entry was removed, else `false`. + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names. 
*/ -function listCacheDelete(key) { - var data = this.__data__, - index = assocIndexOf(data, key); - - if (index < 0) { - return false; +function baseKeysIn(object) { + if (!isObject(object)) { + return nativeKeysIn(object); } - var lastIndex = data.length - 1; - if (index == lastIndex) { - data.pop(); - } else { - splice.call(data, index, 1); + var isProto = isPrototype(object), + result = []; + + for (var key in object) { + if (!(key == 'constructor' && (isProto || !hasOwnProperty.call(object, key)))) { + result.push(key); + } } - --this.size; - return true; + return result; } -module.exports = listCacheDelete; +module.exports = baseKeysIn; /***/ }), -/***/ 22651: +/***/ 77912: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var assocIndexOf = __nccwpck_require__(96310); +var identity = __nccwpck_require__(25249), + overRest = __nccwpck_require__(1112), + setToString = __nccwpck_require__(34022); /** - * Gets the list cache value for `key`. + * The base implementation of `_.rest` which doesn't validate or coerce arguments. * * @private - * @name get - * @memberOf ListCache - * @param {string} key The key of the value to get. - * @returns {*} Returns the entry value. + * @param {Function} func The function to apply a rest parameter to. + * @param {number} [start=func.length-1] The start position of the rest parameter. + * @returns {Function} Returns the new function. */ -function listCacheGet(key) { - var data = this.__data__, - index = assocIndexOf(data, key); - - return index < 0 ? undefined : data[index][1]; +function baseRest(func, start) { + return setToString(overRest(func, start, identity), func + ''); } -module.exports = listCacheGet; +module.exports = baseRest; /***/ }), -/***/ 49577: +/***/ 89906: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var assocIndexOf = __nccwpck_require__(96310); +var constant = __nccwpck_require__(22324), + defineProperty = __nccwpck_require__(27199), + identity = __nccwpck_require__(25249); /** - * Checks if a list cache value for `key` exists. + * The base implementation of `setToString` without support for hot loop shorting. * * @private - * @name has - * @memberOf ListCache - * @param {string} key The key of the entry to check. - * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. + * @param {Function} func The function to modify. + * @param {Function} string The `toString` result. + * @returns {Function} Returns `func`. */ -function listCacheHas(key) { - return assocIndexOf(this.__data__, key) > -1; -} +var baseSetToString = !defineProperty ? identity : function(func, string) { + return defineProperty(func, 'toString', { + 'configurable': true, + 'enumerable': false, + 'value': constant(string), + 'writable': true + }); +}; -module.exports = listCacheHas; +module.exports = baseSetToString; /***/ }), -/***/ 39980: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var assocIndexOf = __nccwpck_require__(96310); +/***/ 4374: +/***/ ((module) => { /** - * Sets the list cache `key` to `value`. + * The base implementation of `_.times` without support for iteratee shorthands + * or max array length checks. * * @private - * @name set - * @memberOf ListCache - * @param {string} key The key of the value to set. - * @param {*} value The value to set. - * @returns {Object} Returns the list cache instance. + * @param {number} n The number of times to invoke `iteratee`. + * @param {Function} iteratee The function invoked per iteration. 
+ * @returns {Array} Returns the array of results. */ -function listCacheSet(key, value) { - var data = this.__data__, - index = assocIndexOf(data, key); +function baseTimes(n, iteratee) { + var index = -1, + result = Array(n); - if (index < 0) { - ++this.size; - data.push([key, value]); - } else { - data[index][1] = value; + while (++index < n) { + result[index] = iteratee(index); } - return this; + return result; } -module.exports = listCacheSet; +module.exports = baseTimes; /***/ }), -/***/ 50248: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var Hash = __nccwpck_require__(44859), - ListCache = __nccwpck_require__(4901), - Map = __nccwpck_require__(92451); +/***/ 76067: +/***/ ((module) => { /** - * Removes all key-value entries from the map. + * The base implementation of `_.unary` without support for storing metadata. * * @private - * @name clear - * @memberOf MapCache + * @param {Function} func The function to cap arguments for. + * @returns {Function} Returns the new capped function. */ -function mapCacheClear() { - this.size = 0; - this.__data__ = { - 'hash': new Hash, - 'map': new (Map || ListCache), - 'string': new Hash +function baseUnary(func) { + return function(value) { + return func(value); }; } -module.exports = mapCacheClear; +module.exports = baseUnary; /***/ }), -/***/ 53775: +/***/ 43519: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var getMapData = __nccwpck_require__(5497); +var SetCache = __nccwpck_require__(41850), + arrayIncludes = __nccwpck_require__(54118), + arrayIncludesWith = __nccwpck_require__(80804), + cacheHas = __nccwpck_require__(6450), + createSet = __nccwpck_require__(42772), + setToArray = __nccwpck_require__(65326); + +/** Used as the size to enable large array optimizations. */ +var LARGE_ARRAY_SIZE = 200; /** - * Removes `key` and its value from the map. + * The base implementation of `_.uniqBy` without support for iteratee shorthands. * * @private - * @name delete - * @memberOf MapCache - * @param {string} key The key of the value to remove. - * @returns {boolean} Returns `true` if the entry was removed, else `false`. + * @param {Array} array The array to inspect. + * @param {Function} [iteratee] The iteratee invoked per element. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new duplicate free array. */ -function mapCacheDelete(key) { - var result = getMapData(this, key)['delete'](key); - this.size -= result ? 1 : 0; +function baseUniq(array, iteratee, comparator) { + var index = -1, + includes = arrayIncludes, + length = array.length, + isCommon = true, + result = [], + seen = result; + + if (comparator) { + isCommon = false; + includes = arrayIncludesWith; + } + else if (length >= LARGE_ARRAY_SIZE) { + var set = iteratee ? null : createSet(array); + if (set) { + return setToArray(set); + } + isCommon = false; + includes = cacheHas; + seen = new SetCache; + } + else { + seen = iteratee ? [] : result; + } + outer: + while (++index < length) { + var value = array[index], + computed = iteratee ? iteratee(value) : value; + + value = (comparator || value !== 0) ? 
value : 0; + if (isCommon && computed === computed) { + var seenIndex = seen.length; + while (seenIndex--) { + if (seen[seenIndex] === computed) { + continue outer; + } + } + if (iteratee) { + seen.push(computed); + } + result.push(value); + } + else if (!includes(seen, computed, comparator)) { + if (seen !== result) { + seen.push(computed); + } + result.push(value); + } + } return result; } -module.exports = mapCacheDelete; +module.exports = baseUniq; /***/ }), -/***/ 40330: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var getMapData = __nccwpck_require__(5497); +/***/ 6450: +/***/ ((module) => { /** - * Gets the map value for `key`. + * Checks if a `cache` value for `key` exists. * * @private - * @name get - * @memberOf MapCache - * @param {string} key The key of the value to get. - * @returns {*} Returns the entry value. + * @param {Object} cache The cache to query. + * @param {string} key The key of the entry to check. + * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. */ -function mapCacheGet(key) { - return getMapData(this, key).get(key); +function cacheHas(cache, key) { + return cache.has(key); } -module.exports = mapCacheGet; +module.exports = cacheHas; /***/ }), -/***/ 36592: +/***/ 77650: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var getMapData = __nccwpck_require__(5497); +var root = __nccwpck_require__(28748); -/** - * Checks if a map value for `key` exists. - * - * @private - * @name has - * @memberOf MapCache - * @param {string} key The key of the entry to check. - * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. - */ -function mapCacheHas(key) { - return getMapData(this, key).has(key); -} +/** Used to detect overreaching core-js shims. */ +var coreJsData = root['__core-js_shared__']; -module.exports = mapCacheHas; +module.exports = coreJsData; /***/ }), -/***/ 31233: +/***/ 42772: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var getMapData = __nccwpck_require__(5497); +var Set = __nccwpck_require__(32684), + noop = __nccwpck_require__(67811), + setToArray = __nccwpck_require__(65326); + +/** Used as references for various `Number` constants. */ +var INFINITY = 1 / 0; /** - * Sets the map `key` to `value`. + * Creates a set object of `values`. * * @private - * @name set - * @memberOf MapCache - * @param {string} key The key of the value to set. - * @param {*} value The value to set. - * @returns {Object} Returns the map cache instance. + * @param {Array} values The values to add to the set. + * @returns {Object} Returns the new set. */ -function mapCacheSet(key, value) { - var data = getMapData(this, key), - size = data.size; - - data.set(key, value); - this.size += data.size == size ? 0 : 1; - return this; -} +var createSet = !(Set && (1 / setToArray(new Set([,-0]))[1]) == INFINITY) ? noop : function(values) { + return new Set(values); +}; -module.exports = mapCacheSet; +module.exports = createSet; /***/ }), -/***/ 68503: +/***/ 27199: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var getNative = __nccwpck_require__(92682); +var getNative = __nccwpck_require__(9596); -/* Built-in method references that are verified to be native. 
*/ -var nativeCreate = getNative(Object, 'create'); +var defineProperty = (function() { + try { + var func = getNative(Object, 'defineProperty'); + func({}, '', {}); + return func; + } catch (e) {} +}()); -module.exports = nativeCreate; +module.exports = defineProperty; /***/ }), -/***/ 94330: +/***/ 29824: /***/ ((module) => { +/** Detect free variable `global` from Node.js. */ +var freeGlobal = typeof global == 'object' && global && global.Object === Object && global; + +module.exports = freeGlobal; + + +/***/ }), + +/***/ 88191: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var isKeyable = __nccwpck_require__(45627); + /** - * This function is like - * [`Object.keys`](http://ecma-international.org/ecma-262/7.0/#sec-object.keys) - * except that it includes inherited enumerable properties. + * Gets the data for `map`. * * @private - * @param {Object} object The object to query. - * @returns {Array} Returns the array of property names. + * @param {Object} map The map to query. + * @param {string} key The reference key. + * @returns {*} Returns the map data. */ -function nativeKeysIn(object) { - var result = []; - if (object != null) { - for (var key in Object(object)) { - result.push(key); - } - } - return result; +function getMapData(map, key) { + var data = map.__data__; + return isKeyable(key) + ? data[typeof key == 'string' ? 'string' : 'hash'] + : data.map; } -module.exports = nativeKeysIn; +module.exports = getMapData; /***/ }), -/***/ 60101: -/***/ ((module, exports, __nccwpck_require__) => { +/***/ 9596: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/* module decorator */ module = __nccwpck_require__.nmd(module); -var freeGlobal = __nccwpck_require__(49312); +var baseIsNative = __nccwpck_require__(36681), + getValue = __nccwpck_require__(43427); -/** Detect free variable `exports`. */ -var freeExports = true && exports && !exports.nodeType && exports; +/** + * Gets the native function at `key` of `object`. + * + * @private + * @param {Object} object The object to query. + * @param {string} key The key of the method to get. + * @returns {*} Returns the function if it's native, else `undefined`. + */ +function getNative(object, key) { + var value = getValue(object, key); + return baseIsNative(value) ? value : undefined; +} -/** Detect free variable `module`. */ -var freeModule = freeExports && "object" == 'object' && module && !module.nodeType && module; +module.exports = getNative; -/** Detect the popular CommonJS extension `module.exports`. */ -var moduleExports = freeModule && freeModule.exports === freeExports; -/** Detect free variable `process` from Node.js. */ -var freeProcess = moduleExports && freeGlobal.process; +/***/ }), -/** Used to access faster Node.js helpers. */ -var nodeUtil = (function() { - try { - // Use `util.types` for Node.js 10+. - var types = freeModule && freeModule.require && freeModule.require('util').types; +/***/ 12707: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (types) { - return types; - } +var overArg = __nccwpck_require__(91700); - // Legacy `process.binding('util')` for Node.js < 10. - return freeProcess && freeProcess.binding && freeProcess.binding('util'); - } catch (e) {} -}()); +/** Built-in value references. 
*/ +var getPrototype = overArg(Object.getPrototypeOf, Object); -module.exports = nodeUtil; +module.exports = getPrototype; /***/ }), -/***/ 3945: -/***/ ((module) => { +/***/ 55994: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var Symbol = __nccwpck_require__(69707); /** Used for built-in method references. */ var objectProto = Object.prototype; +/** Used to check objects for own properties. */ +var hasOwnProperty = objectProto.hasOwnProperty; + /** * Used to resolve the * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) @@ -69363,7456 +63300,5292 @@ var objectProto = Object.prototype; */ var nativeObjectToString = objectProto.toString; +/** Built-in value references. */ +var symToStringTag = Symbol ? Symbol.toStringTag : undefined; + /** - * Converts `value` to a string using `Object.prototype.toString`. + * A specialized version of `baseGetTag` which ignores `Symbol.toStringTag` values. * * @private - * @param {*} value The value to convert. - * @returns {string} Returns the converted string. + * @param {*} value The value to query. + * @returns {string} Returns the raw `toStringTag`. */ -function objectToString(value) { - return nativeObjectToString.call(value); +function getRawTag(value) { + var isOwn = hasOwnProperty.call(value, symToStringTag), + tag = value[symToStringTag]; + + try { + value[symToStringTag] = undefined; + var unmasked = true; + } catch (e) {} + + var result = nativeObjectToString.call(value); + if (unmasked) { + if (isOwn) { + value[symToStringTag] = tag; + } else { + delete value[symToStringTag]; + } + } + return result; } -module.exports = objectToString; +module.exports = getRawTag; /***/ }), -/***/ 85436: +/***/ 43427: /***/ ((module) => { /** - * Creates a unary function that invokes `func` with its argument transformed. + * Gets the value at `key` of `object`. * * @private - * @param {Function} func The function to wrap. - * @param {Function} transform The argument transform. - * @returns {Function} Returns the new function. + * @param {Object} [object] The object to query. + * @param {string} key The key of the property to get. + * @returns {*} Returns the property value. */ -function overArg(func, transform) { - return function(arg) { - return func(transform(arg)); - }; +function getValue(object, key) { + return object == null ? undefined : object[key]; } -module.exports = overArg; +module.exports = getValue; /***/ }), -/***/ 70533: +/***/ 40925: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var apply = __nccwpck_require__(38698); - -/* Built-in method references for those with the same name as other `lodash` methods. */ -var nativeMax = Math.max; +var nativeCreate = __nccwpck_require__(35178); /** - * A specialized version of `baseRest` which transforms the rest array. + * Removes all key-value entries from the hash. * * @private - * @param {Function} func The function to apply a rest parameter to. - * @param {number} [start=func.length-1] The start position of the rest parameter. - * @param {Function} transform The rest array transform. - * @returns {Function} Returns the new function. + * @name clear + * @memberOf Hash */ -function overRest(func, start, transform) { - start = nativeMax(start === undefined ? 
(func.length - 1) : start, 0); - return function() { - var args = arguments, - index = -1, - length = nativeMax(args.length - start, 0), - array = Array(length); - - while (++index < length) { - array[index] = args[start + index]; - } - index = -1; - var otherArgs = Array(start + 1); - while (++index < start) { - otherArgs[index] = args[index]; - } - otherArgs[start] = transform(array); - return apply(func, this, otherArgs); - }; +function hashClear() { + this.__data__ = nativeCreate ? nativeCreate(null) : {}; + this.size = 0; } -module.exports = overRest; +module.exports = hashClear; /***/ }), -/***/ 47491: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var freeGlobal = __nccwpck_require__(49312); - -/** Detect free variable `self`. */ -var freeSelf = typeof self == 'object' && self && self.Object === Object && self; +/***/ 54404: +/***/ ((module) => { -/** Used as a reference to the global object. */ -var root = freeGlobal || freeSelf || Function('return this')(); +/** + * Removes `key` and its value from the hash. + * + * @private + * @name delete + * @memberOf Hash + * @param {Object} hash The hash to modify. + * @param {string} key The key of the value to remove. + * @returns {boolean} Returns `true` if the entry was removed, else `false`. + */ +function hashDelete(key) { + var result = this.has(key) && delete this.__data__[key]; + this.size -= result ? 1 : 0; + return result; +} -module.exports = root; +module.exports = hashDelete; /***/ }), -/***/ 30227: -/***/ ((module) => { +/***/ 20742: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var nativeCreate = __nccwpck_require__(35178); /** Used to stand-in for `undefined` hash values. */ var HASH_UNDEFINED = '__lodash_hash_undefined__'; +/** Used for built-in method references. */ +var objectProto = Object.prototype; + +/** Used to check objects for own properties. */ +var hasOwnProperty = objectProto.hasOwnProperty; + /** - * Adds `value` to the array cache. + * Gets the hash value for `key`. * * @private - * @name add - * @memberOf SetCache - * @alias push - * @param {*} value The value to cache. - * @returns {Object} Returns the cache instance. + * @name get + * @memberOf Hash + * @param {string} key The key of the value to get. + * @returns {*} Returns the entry value. */ -function setCacheAdd(value) { - this.__data__.set(value, HASH_UNDEFINED); - return this; +function hashGet(key) { + var data = this.__data__; + if (nativeCreate) { + var result = data[key]; + return result === HASH_UNDEFINED ? undefined : result; + } + return hasOwnProperty.call(data, key) ? data[key] : undefined; } -module.exports = setCacheAdd; +module.exports = hashGet; /***/ }), -/***/ 84707: -/***/ ((module) => { +/***/ 23701: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var nativeCreate = __nccwpck_require__(35178); + +/** Used for built-in method references. */ +var objectProto = Object.prototype; + +/** Used to check objects for own properties. */ +var hasOwnProperty = objectProto.hasOwnProperty; /** - * Checks if `value` is in the array cache. + * Checks if a hash value for `key` exists. * * @private * @name has - * @memberOf SetCache - * @param {*} value The value to search for. - * @returns {number} Returns `true` if `value` is found, else `false`. + * @memberOf Hash + * @param {string} key The key of the entry to check. + * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. 
*/ -function setCacheHas(value) { - return this.__data__.has(value); +function hashHas(key) { + var data = this.__data__; + return nativeCreate ? (data[key] !== undefined) : hasOwnProperty.call(data, key); } -module.exports = setCacheHas; +module.exports = hashHas; /***/ }), -/***/ 350: -/***/ ((module) => { +/***/ 58081: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var nativeCreate = __nccwpck_require__(35178); + +/** Used to stand-in for `undefined` hash values. */ +var HASH_UNDEFINED = '__lodash_hash_undefined__'; /** - * Converts `set` to an array of its values. + * Sets the hash `key` to `value`. * * @private - * @param {Object} set The set to convert. - * @returns {Array} Returns the values. + * @name set + * @memberOf Hash + * @param {string} key The key of the value to set. + * @param {*} value The value to set. + * @returns {Object} Returns the hash instance. */ -function setToArray(set) { - var index = -1, - result = Array(set.size); - - set.forEach(function(value) { - result[++index] = value; - }); - return result; +function hashSet(key, value) { + var data = this.__data__; + this.size += this.has(key) ? 0 : 1; + data[key] = (nativeCreate && value === undefined) ? HASH_UNDEFINED : value; + return this; } -module.exports = setToArray; +module.exports = hashSet; /***/ }), -/***/ 56882: +/***/ 48637: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var baseSetToString = __nccwpck_require__(44088), - shortOut = __nccwpck_require__(94980); +var Symbol = __nccwpck_require__(69707), + isArguments = __nccwpck_require__(470), + isArray = __nccwpck_require__(39261); + +/** Built-in value references. */ +var spreadableSymbol = Symbol ? Symbol.isConcatSpreadable : undefined; /** - * Sets the `toString` method of `func` to return `string`. + * Checks if `value` is a flattenable `arguments` object or array. * * @private - * @param {Function} func The function to modify. - * @param {Function} string The `toString` result. - * @returns {Function} Returns `func`. + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is flattenable, else `false`. */ -var setToString = shortOut(baseSetToString); +function isFlattenable(value) { + return isArray(value) || isArguments(value) || + !!(spreadableSymbol && value && value[spreadableSymbol]); +} -module.exports = setToString; +module.exports = isFlattenable; /***/ }), -/***/ 94980: +/***/ 77677: /***/ ((module) => { -/** Used to detect hot functions by number of calls within a span of milliseconds. */ -var HOT_COUNT = 800, - HOT_SPAN = 16; +/** Used as references for various `Number` constants. */ +var MAX_SAFE_INTEGER = 9007199254740991; -/* Built-in method references for those with the same name as other `lodash` methods. */ -var nativeNow = Date.now; +/** Used to detect unsigned integer values. */ +var reIsUint = /^(?:0|[1-9]\d*)$/; /** - * Creates a function that'll short out and invoke `identity` instead - * of `func` when it's called `HOT_COUNT` or more times in `HOT_SPAN` - * milliseconds. + * Checks if `value` is a valid array-like index. * * @private - * @param {Function} func The function to restrict. - * @returns {Function} Returns the new shortable function. + * @param {*} value The value to check. + * @param {number} [length=MAX_SAFE_INTEGER] The upper bounds of a valid index. + * @returns {boolean} Returns `true` if `value` is a valid index, else `false`. 
*/ -function shortOut(func) { - var count = 0, - lastCalled = 0; - - return function() { - var stamp = nativeNow(), - remaining = HOT_SPAN - (stamp - lastCalled); +function isIndex(value, length) { + var type = typeof value; + length = length == null ? MAX_SAFE_INTEGER : length; - lastCalled = stamp; - if (remaining > 0) { - if (++count >= HOT_COUNT) { - return arguments[0]; - } - } else { - count = 0; - } - return func.apply(undefined, arguments); - }; + return !!length && + (type == 'number' || + (type != 'symbol' && reIsUint.test(value))) && + (value > -1 && value % 1 == 0 && value < length); } -module.exports = shortOut; +module.exports = isIndex; /***/ }), -/***/ 8802: -/***/ ((module) => { +/***/ 8271: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var eq = __nccwpck_require__(36107), + isArrayLike = __nccwpck_require__(60759), + isIndex = __nccwpck_require__(77677), + isObject = __nccwpck_require__(21079); /** - * A specialized version of `_.indexOf` which performs strict equality - * comparisons of values, i.e. `===`. + * Checks if the given arguments are from an iteratee call. * * @private - * @param {Array} array The array to inspect. - * @param {*} value The value to search for. - * @param {number} fromIndex The index to search from. - * @returns {number} Returns the index of the matched value, else `-1`. + * @param {*} value The potential iteratee value argument. + * @param {*} index The potential iteratee index or key argument. + * @param {*} object The potential iteratee object argument. + * @returns {boolean} Returns `true` if the arguments are from an iteratee call, + * else `false`. */ -function strictIndexOf(array, value, fromIndex) { - var index = fromIndex - 1, - length = array.length; - - while (++index < length) { - if (array[index] === value) { - return index; - } +function isIterateeCall(value, index, object) { + if (!isObject(object)) { + return false; } - return -1; + var type = typeof index; + if (type == 'number' + ? (isArrayLike(object) && isIndex(index, object.length)) + : (type == 'string' && index in object) + ) { + return eq(object[index], value); + } + return false; } -module.exports = strictIndexOf; +module.exports = isIterateeCall; /***/ }), -/***/ 12640: +/***/ 45627: /***/ ((module) => { -/** Used for built-in method references. */ -var funcProto = Function.prototype; - -/** Used to resolve the decompiled source of functions. */ -var funcToString = funcProto.toString; - /** - * Converts `func` to its source code. + * Checks if `value` is suitable for use as unique object key. * * @private - * @param {Function} func The function to convert. - * @returns {string} Returns the source code. + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is suitable, else `false`. */ -function toSource(func) { - if (func != null) { - try { - return funcToString.call(func); - } catch (e) {} - try { - return (func + ''); - } catch (e) {} - } - return ''; +function isKeyable(value) { + var type = typeof value; + return (type == 'string' || type == 'number' || type == 'symbol' || type == 'boolean') + ? (value !== '__proto__') + : (value === null); } -module.exports = toSource; +module.exports = isKeyable; /***/ }), -/***/ 68297: -/***/ ((module) => { +/***/ 10730: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var coreJsData = __nccwpck_require__(77650); + +/** Used to detect methods masquerading as native. 
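// Given the uint regex and bounds check in isIndex above, direct calls
// (assuming the module-local function were exposed for testing) behave so:
console.log(isIndex(0));       // => true  (integer within default bounds)
console.log(isIndex('1', 3));  // => true  ('1' matches /^(?:0|[1-9]\d*)$/ and 1 < 3)
console.log(isIndex('01'));    // => false (leading zero fails the uint regex)
console.log(isIndex(-1));      // => false (must be > -1)
console.log(isIndex(1.5));     // => false (value % 1 == 0 fails)
console.log(isIndex(3, 3));    // => false (index must be strictly < length)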
*/ +var maskSrcKey = (function() { + var uid = /[^.]+$/.exec(coreJsData && coreJsData.keys && coreJsData.keys.IE_PROTO || ''); + return uid ? ('Symbol(src)_1.' + uid) : ''; +}()); /** - * Creates a function that returns `value`. - * - * @static - * @memberOf _ - * @since 2.4.0 - * @category Util - * @param {*} value The value to return from the new function. - * @returns {Function} Returns the new constant function. - * @example - * - * var objects = _.times(2, _.constant({ 'a': 1 })); - * - * console.log(objects); - * // => [{ 'a': 1 }, { 'a': 1 }] + * Checks if `func` has its source masked. * - * console.log(objects[0] === objects[1]); - * // => true + * @private + * @param {Function} func The function to check. + * @returns {boolean} Returns `true` if `func` is masked, else `false`. */ -function constant(value) { - return function() { - return value; - }; +function isMasked(func) { + return !!maskSrcKey && (maskSrcKey in func); } -module.exports = constant; +module.exports = isMasked; /***/ }), -/***/ 30484: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var baseRest = __nccwpck_require__(35899), - eq = __nccwpck_require__(13426), - isIterateeCall = __nccwpck_require__(14458), - keysIn = __nccwpck_require__(7942); +/***/ 8430: +/***/ ((module) => { /** Used for built-in method references. */ var objectProto = Object.prototype; -/** Used to check objects for own properties. */ -var hasOwnProperty = objectProto.hasOwnProperty; - /** - * Assigns own and inherited enumerable string keyed properties of source - * objects to the destination object for all destination properties that - * resolve to `undefined`. Source objects are applied from left to right. - * Once a property is set, additional values of the same property are ignored. - * - * **Note:** This method mutates `object`. - * - * @static - * @since 0.1.0 - * @memberOf _ - * @category Object - * @param {Object} object The destination object. - * @param {...Object} [sources] The source objects. - * @returns {Object} Returns `object`. - * @see _.defaultsDeep - * @example + * Checks if `value` is likely a prototype object. * - * _.defaults({ 'a': 1 }, { 'b': 2 }, { 'a': 3 }); - * // => { 'a': 1, 'b': 2 } + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a prototype, else `false`. */ -var defaults = baseRest(function(object, sources) { - object = Object(object); +function isPrototype(value) { + var Ctor = value && value.constructor, + proto = (typeof Ctor == 'function' && Ctor.prototype) || objectProto; - var index = -1; - var length = sources.length; - var guard = length > 2 ? sources[2] : undefined; + return value === proto; +} - if (guard && isIterateeCall(sources[0], sources[1], guard)) { - length = 1; - } +module.exports = isPrototype; - while (++index < length) { - var source = sources[index]; - var props = keysIn(source); - var propsIndex = -1; - var propsLength = props.length; - while (++propsIndex < propsLength) { - var key = props[propsIndex]; - var value = object[key]; +/***/ }), - if (value === undefined || - (eq(value, objectProto[key]) && !hasOwnProperty.call(object, key))) { - object[key] = source[key]; - } - } - } +/***/ 45317: +/***/ ((module) => { - return object; -}); +/** + * Removes all key-value entries from the list cache. 
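// isPrototype above compares a value against its own constructor's prototype;
// only the prototype object itself passes (`Foo` is a hypothetical constructor):
function Foo() {}
console.log(isPrototype(Foo.prototype));    // => true  (value === Ctor.prototype)
console.log(isPrototype(new Foo()));        // => false (an instance, not the prototype)
console.log(isPrototype(Object.prototype)); // => true
console.log(isPrototype({}));               // => false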
+ * + * @private + * @name clear + * @memberOf ListCache + */ +function listCacheClear() { + this.__data__ = []; + this.size = 0; +} -module.exports = defaults; +module.exports = listCacheClear; /***/ }), -/***/ 46585: +/***/ 89205: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var baseDifference = __nccwpck_require__(38139), - baseFlatten = __nccwpck_require__(65209), - baseRest = __nccwpck_require__(35899), - isArrayLikeObject = __nccwpck_require__(65478); +var assocIndexOf = __nccwpck_require__(89334); + +/** Used for built-in method references. */ +var arrayProto = Array.prototype; + +/** Built-in value references. */ +var splice = arrayProto.splice; /** - * Creates an array of `array` values not included in the other given arrays - * using [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) - * for equality comparisons. The order and references of result values are - * determined by the first array. - * - * **Note:** Unlike `_.pullAll`, this method returns a new array. - * - * @static - * @memberOf _ - * @since 0.1.0 - * @category Array - * @param {Array} array The array to inspect. - * @param {...Array} [values] The values to exclude. - * @returns {Array} Returns the new array of filtered values. - * @see _.without, _.xor - * @example + * Removes `key` and its value from the list cache. * - * _.difference([2, 1], [2, 3]); - * // => [1] + * @private + * @name delete + * @memberOf ListCache + * @param {string} key The key of the value to remove. + * @returns {boolean} Returns `true` if the entry was removed, else `false`. */ -var difference = baseRest(function(array, values) { - return isArrayLikeObject(array) - ? baseDifference(array, baseFlatten(values, 1, isArrayLikeObject, true)) - : []; -}); +function listCacheDelete(key) { + var data = this.__data__, + index = assocIndexOf(data, key); -module.exports = difference; + if (index < 0) { + return false; + } + var lastIndex = data.length - 1; + if (index == lastIndex) { + data.pop(); + } else { + splice.call(data, index, 1); + } + --this.size; + return true; +} + +module.exports = listCacheDelete; /***/ }), -/***/ 13426: -/***/ ((module) => { +/***/ 22440: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var assocIndexOf = __nccwpck_require__(89334); /** - * Performs a - * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) - * comparison between two values to determine if they are equivalent. - * - * @static - * @memberOf _ - * @since 4.0.0 - * @category Lang - * @param {*} value The value to compare. - * @param {*} other The other value to compare. - * @returns {boolean} Returns `true` if the values are equivalent, else `false`. - * @example - * - * var object = { 'a': 1 }; - * var other = { 'a': 1 }; - * - * _.eq(object, object); - * // => true - * - * _.eq(object, other); - * // => false - * - * _.eq('a', 'a'); - * // => true - * - * _.eq('a', Object('a')); - * // => false + * Gets the list cache value for `key`. * - * _.eq(NaN, NaN); - * // => true + * @private + * @name get + * @memberOf ListCache + * @param {string} key The key of the value to get. + * @returns {*} Returns the entry value. */ -function eq(value, other) { - return value === other || (value !== value && other !== other); +function listCacheGet(key) { + var data = this.__data__, + index = assocIndexOf(data, key); + + return index < 0 ? 
undefined : data[index][1]; } -module.exports = eq; +module.exports = listCacheGet; /***/ }), -/***/ 95350: +/***/ 76144: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var baseFlatten = __nccwpck_require__(65209); +var assocIndexOf = __nccwpck_require__(89334); /** - * Flattens `array` a single level deep. - * - * @static - * @memberOf _ - * @since 0.1.0 - * @category Array - * @param {Array} array The array to flatten. - * @returns {Array} Returns the new flattened array. - * @example + * Checks if a list cache value for `key` exists. * - * _.flatten([1, [2, [3, [4]], 5]]); - * // => [1, 2, [3, [4]], 5] + * @private + * @name has + * @memberOf ListCache + * @param {string} key The key of the entry to check. + * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. */ -function flatten(array) { - var length = array == null ? 0 : array.length; - return length ? baseFlatten(array, 1) : []; +function listCacheHas(key) { + return assocIndexOf(this.__data__, key) > -1; } -module.exports = flatten; +module.exports = listCacheHas; /***/ }), -/***/ 15108: -/***/ ((module) => { +/***/ 67567: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var assocIndexOf = __nccwpck_require__(89334); /** - * This method returns the first argument it receives. - * - * @static - * @since 0.1.0 - * @memberOf _ - * @category Util - * @param {*} value Any value. - * @returns {*} Returns `value`. - * @example - * - * var object = { 'a': 1 }; + * Sets the list cache `key` to `value`. * - * console.log(_.identity(object) === object); - * // => true + * @private + * @name set + * @memberOf ListCache + * @param {string} key The key of the value to set. + * @param {*} value The value to set. + * @returns {Object} Returns the list cache instance. */ -function identity(value) { - return value; +function listCacheSet(key, value) { + var data = this.__data__, + index = assocIndexOf(data, key); + + if (index < 0) { + ++this.size; + data.push([key, value]); + } else { + data[index][1] = value; + } + return this; } -module.exports = identity; +module.exports = listCacheSet; /***/ }), -/***/ 8893: +/***/ 2503: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var baseIsArguments = __nccwpck_require__(67701), - isObjectLike = __nccwpck_require__(42981); - -/** Used for built-in method references. */ -var objectProto = Object.prototype; - -/** Used to check objects for own properties. */ -var hasOwnProperty = objectProto.hasOwnProperty; - -/** Built-in value references. */ -var propertyIsEnumerable = objectProto.propertyIsEnumerable; +var Hash = __nccwpck_require__(43872), + ListCache = __nccwpck_require__(59059), + Map = __nccwpck_require__(17445); /** - * Checks if `value` is likely an `arguments` object. - * - * @static - * @memberOf _ - * @since 0.1.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is an `arguments` object, - * else `false`. - * @example - * - * _.isArguments(function() { return arguments; }()); - * // => true + * Removes all key-value entries from the map. * - * _.isArguments([1, 2, 3]); - * // => false + * @private + * @name clear + * @memberOf MapCache */ -var isArguments = baseIsArguments(function() { return arguments; }()) ? 
baseIsArguments : function(value) { - return isObjectLike(value) && hasOwnProperty.call(value, 'callee') && - !propertyIsEnumerable.call(value, 'callee'); -}; +function mapCacheClear() { + this.size = 0; + this.__data__ = { + 'hash': new Hash, + 'map': new (Map || ListCache), + 'string': new Hash + }; +} -module.exports = isArguments; +module.exports = mapCacheClear; /***/ }), -/***/ 72332: -/***/ ((module) => { +/***/ 18855: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var getMapData = __nccwpck_require__(88191); /** - * Checks if `value` is classified as an `Array` object. - * - * @static - * @memberOf _ - * @since 0.1.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is an array, else `false`. - * @example - * - * _.isArray([1, 2, 3]); - * // => true - * - * _.isArray(document.body.children); - * // => false - * - * _.isArray('abc'); - * // => false + * Removes `key` and its value from the map. * - * _.isArray(_.noop); - * // => false + * @private + * @name delete + * @memberOf MapCache + * @param {string} key The key of the value to remove. + * @returns {boolean} Returns `true` if the entry was removed, else `false`. */ -var isArray = Array.isArray; +function mapCacheDelete(key) { + var result = getMapData(this, key)['delete'](key); + this.size -= result ? 1 : 0; + return result; +} -module.exports = isArray; +module.exports = mapCacheDelete; /***/ }), -/***/ 90462: +/***/ 68372: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var isFunction = __nccwpck_require__(88286), - isLength = __nccwpck_require__(20383); +var getMapData = __nccwpck_require__(88191); /** - * Checks if `value` is array-like. A value is considered array-like if it's - * not a function and has a `value.length` that's an integer greater than or - * equal to `0` and less than or equal to `Number.MAX_SAFE_INTEGER`. - * - * @static - * @memberOf _ - * @since 4.0.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is array-like, else `false`. - * @example - * - * _.isArrayLike([1, 2, 3]); - * // => true - * - * _.isArrayLike(document.body.children); - * // => true - * - * _.isArrayLike('abc'); - * // => true + * Gets the map value for `key`. * - * _.isArrayLike(_.noop); - * // => false + * @private + * @name get + * @memberOf MapCache + * @param {string} key The key of the value to get. + * @returns {*} Returns the entry value. */ -function isArrayLike(value) { - return value != null && isLength(value.length) && !isFunction(value); +function mapCacheGet(key) { + return getMapData(this, key).get(key); } -module.exports = isArrayLike; +module.exports = mapCacheGet; /***/ }), -/***/ 65478: +/***/ 56559: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var isArrayLike = __nccwpck_require__(90462), - isObjectLike = __nccwpck_require__(42981); +var getMapData = __nccwpck_require__(88191); /** - * This method is like `_.isArrayLike` except that it also checks if `value` - * is an object. - * - * @static - * @memberOf _ - * @since 4.0.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is an array-like object, - * else `false`. - * @example - * - * _.isArrayLikeObject([1, 2, 3]); - * // => true - * - * _.isArrayLikeObject(document.body.children); - * // => true - * - * _.isArrayLikeObject('abc'); - * // => false + * Checks if a map value for `key` exists. 
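// mapCacheClear above provisions three backing stores; together with the
// isKeyable predicate earlier in this hunk, lookups route by key type.
// A sketch of that dispatch (not the bundled getMapData itself):
function bucketFor(mapCache, key) {
  var type = typeof key;
  var keyable = (type == 'string' || type == 'number' || type == 'symbol' || type == 'boolean')
    ? (key !== '__proto__')
    : (key === null);
  if (!keyable) {
    return mapCache.__data__.map; // objects/functions need real Map semantics
  }
  return mapCache.__data__[type == 'string' ? 'string' : 'hash'];
}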
* - * _.isArrayLikeObject(_.noop); - * // => false + * @private + * @name has + * @memberOf MapCache + * @param {string} key The key of the entry to check. + * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. */ -function isArrayLikeObject(value) { - return isObjectLike(value) && isArrayLike(value); +function mapCacheHas(key) { + return getMapData(this, key).has(key); } -module.exports = isArrayLikeObject; +module.exports = mapCacheHas; /***/ }), -/***/ 51538: -/***/ ((module, exports, __nccwpck_require__) => { - -/* module decorator */ module = __nccwpck_require__.nmd(module); -var root = __nccwpck_require__(47491), - stubFalse = __nccwpck_require__(30919); - -/** Detect free variable `exports`. */ -var freeExports = true && exports && !exports.nodeType && exports; - -/** Detect free variable `module`. */ -var freeModule = freeExports && "object" == 'object' && module && !module.nodeType && module; - -/** Detect the popular CommonJS extension `module.exports`. */ -var moduleExports = freeModule && freeModule.exports === freeExports; - -/** Built-in value references. */ -var Buffer = moduleExports ? root.Buffer : undefined; +/***/ 38710: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/* Built-in method references for those with the same name as other `lodash` methods. */ -var nativeIsBuffer = Buffer ? Buffer.isBuffer : undefined; +var getMapData = __nccwpck_require__(88191); /** - * Checks if `value` is a buffer. - * - * @static - * @memberOf _ - * @since 4.3.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a buffer, else `false`. - * @example - * - * _.isBuffer(new Buffer(2)); - * // => true + * Sets the map `key` to `value`. * - * _.isBuffer(new Uint8Array(2)); - * // => false + * @private + * @name set + * @memberOf MapCache + * @param {string} key The key of the value to set. + * @param {*} value The value to set. + * @returns {Object} Returns the map cache instance. */ -var isBuffer = nativeIsBuffer || stubFalse; +function mapCacheSet(key, value) { + var data = getMapData(this, key), + size = data.size; -module.exports = isBuffer; + data.set(key, value); + this.size += data.size == size ? 0 : 1; + return this; +} + +module.exports = mapCacheSet; /***/ }), -/***/ 88286: +/***/ 35178: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var baseGetTag = __nccwpck_require__(9535), - isObject = __nccwpck_require__(69438); +var getNative = __nccwpck_require__(9596); -/** `Object#toString` result references. */ -var asyncTag = '[object AsyncFunction]', - funcTag = '[object Function]', - genTag = '[object GeneratorFunction]', - proxyTag = '[object Proxy]'; +/* Built-in method references that are verified to be native. */ +var nativeCreate = getNative(Object, 'create'); + +module.exports = nativeCreate; + + +/***/ }), + +/***/ 44607: +/***/ ((module) => { /** - * Checks if `value` is classified as a `Function` object. - * - * @static - * @memberOf _ - * @since 0.1.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a function, else `false`. - * @example - * - * _.isFunction(_); - * // => true + * This function is like + * [`Object.keys`](http://ecma-international.org/ecma-262/7.0/#sec-object.keys) + * except that it includes inherited enumerable properties. * - * _.isFunction(/abc/); - * // => false + * @private + * @param {Object} object The object to query. 
+ * @returns {Array} Returns the array of property names. */ -function isFunction(value) { - if (!isObject(value)) { - return false; +function nativeKeysIn(object) { + var result = []; + if (object != null) { + for (var key in Object(object)) { + result.push(key); + } } - // The use of `Object#toString` avoids issues with the `typeof` operator - // in Safari 9 which returns 'object' for typed arrays and other constructors. - var tag = baseGetTag(value); - return tag == funcTag || tag == genTag || tag == asyncTag || tag == proxyTag; + return result; } -module.exports = isFunction; +module.exports = nativeKeysIn; /***/ }), -/***/ 20383: -/***/ ((module) => { +/***/ 46315: +/***/ ((module, exports, __nccwpck_require__) => { -/** Used as references for various `Number` constants. */ -var MAX_SAFE_INTEGER = 9007199254740991; +/* module decorator */ module = __nccwpck_require__.nmd(module); +var freeGlobal = __nccwpck_require__(29824); -/** - * Checks if `value` is a valid array-like length. - * - * **Note:** This method is loosely based on - * [`ToLength`](http://ecma-international.org/ecma-262/7.0/#sec-tolength). - * - * @static - * @memberOf _ - * @since 4.0.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a valid length, else `false`. - * @example - * - * _.isLength(3); - * // => true - * - * _.isLength(Number.MIN_VALUE); - * // => false - * - * _.isLength(Infinity); - * // => false - * - * _.isLength('3'); - * // => false - */ -function isLength(value) { - return typeof value == 'number' && - value > -1 && value % 1 == 0 && value <= MAX_SAFE_INTEGER; -} +/** Detect free variable `exports`. */ +var freeExports = true && exports && !exports.nodeType && exports; -module.exports = isLength; +/** Detect free variable `module`. */ +var freeModule = freeExports && "object" == 'object' && module && !module.nodeType && module; + +/** Detect the popular CommonJS extension `module.exports`. */ +var moduleExports = freeModule && freeModule.exports === freeExports; + +/** Detect free variable `process` from Node.js. */ +var freeProcess = moduleExports && freeGlobal.process; + +/** Used to access faster Node.js helpers. */ +var nodeUtil = (function() { + try { + // Use `util.types` for Node.js 10+. + var types = freeModule && freeModule.require && freeModule.require('util').types; + + if (types) { + return types; + } + + // Legacy `process.binding('util')` for Node.js < 10. + return freeProcess && freeProcess.binding && freeProcess.binding('util'); + } catch (e) {} +}()); + +module.exports = nodeUtil; /***/ }), -/***/ 69438: +/***/ 56163: /***/ ((module) => { +/** Used for built-in method references. */ +var objectProto = Object.prototype; + /** - * Checks if `value` is the - * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types) - * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`) - * - * @static - * @memberOf _ - * @since 0.1.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is an object, else `false`. - * @example - * - * _.isObject({}); - * // => true - * - * _.isObject([1, 2, 3]); - * // => true - * - * _.isObject(_.noop); - * // => true + * Used to resolve the + * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) + * of values. 
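// nativeKeysIn above iterates with for...in, so unlike Object.keys it also
// reports inherited enumerable properties (`Foo` is hypothetical):
function Foo() { this.a = 1; }
Foo.prototype.b = 2;
console.log(nativeKeysIn(new Foo())); // => ['a', 'b'] (order not guaranteed)
console.log(Object.keys(new Foo()));  // => ['a']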
+ */ +var nativeObjectToString = objectProto.toString; + +/** + * Converts `value` to a string using `Object.prototype.toString`. * - * _.isObject(null); - * // => false + * @private + * @param {*} value The value to convert. + * @returns {string} Returns the converted string. */ -function isObject(value) { - var type = typeof value; - return value != null && (type == 'object' || type == 'function'); +function objectToString(value) { + return nativeObjectToString.call(value); } -module.exports = isObject; +module.exports = objectToString; /***/ }), -/***/ 42981: +/***/ 91700: /***/ ((module) => { /** - * Checks if `value` is object-like. A value is object-like if it's not `null` - * and has a `typeof` result of "object". - * - * @static - * @memberOf _ - * @since 4.0.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is object-like, else `false`. - * @example - * - * _.isObjectLike({}); - * // => true - * - * _.isObjectLike([1, 2, 3]); - * // => true - * - * _.isObjectLike(_.noop); - * // => false + * Creates a unary function that invokes `func` with its argument transformed. * - * _.isObjectLike(null); - * // => false + * @private + * @param {Function} func The function to wrap. + * @param {Function} transform The argument transform. + * @returns {Function} Returns the new function. */ -function isObjectLike(value) { - return value != null && typeof value == 'object'; +function overArg(func, transform) { + return function(arg) { + return func(transform(arg)); + }; } -module.exports = isObjectLike; +module.exports = overArg; /***/ }), -/***/ 94280: +/***/ 1112: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var baseGetTag = __nccwpck_require__(9535), - getPrototype = __nccwpck_require__(77947), - isObjectLike = __nccwpck_require__(42981); - -/** `Object#toString` result references. */ -var objectTag = '[object Object]'; - -/** Used for built-in method references. */ -var funcProto = Function.prototype, - objectProto = Object.prototype; - -/** Used to resolve the decompiled source of functions. */ -var funcToString = funcProto.toString; - -/** Used to check objects for own properties. */ -var hasOwnProperty = objectProto.hasOwnProperty; +var apply = __nccwpck_require__(92341); -/** Used to infer the `Object` constructor. */ -var objectCtorString = funcToString.call(Object); +/* Built-in method references for those with the same name as other `lodash` methods. */ +var nativeMax = Math.max; /** - * Checks if `value` is a plain object, that is, an object created by the - * `Object` constructor or one with a `[[Prototype]]` of `null`. - * - * @static - * @memberOf _ - * @since 0.8.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a plain object, else `false`. - * @example - * - * function Foo() { - * this.a = 1; - * } - * - * _.isPlainObject(new Foo); - * // => false - * - * _.isPlainObject([1, 2, 3]); - * // => false - * - * _.isPlainObject({ 'x': 0, 'y': 0 }); - * // => true + * A specialized version of `baseRest` which transforms the rest array. * - * _.isPlainObject(Object.create(null)); - * // => true + * @private + * @param {Function} func The function to apply a rest parameter to. + * @param {number} [start=func.length-1] The start position of the rest parameter. + * @param {Function} transform The rest array transform. + * @returns {Function} Returns the new function. 
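// overArg above is the building block lodash uses for helpers such as
// getPrototype, which coerces its argument to an Object before delegating:
var getPrototype = overArg(Object.getPrototypeOf, Object);
console.log(getPrototype('abc') === String.prototype); // => true (coerced first)
console.log(getPrototype({}) === Object.prototype);    // => true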
*/ -function isPlainObject(value) { - if (!isObjectLike(value) || baseGetTag(value) != objectTag) { - return false; - } - var proto = getPrototype(value); - if (proto === null) { - return true; - } - var Ctor = hasOwnProperty.call(proto, 'constructor') && proto.constructor; - return typeof Ctor == 'function' && Ctor instanceof Ctor && - funcToString.call(Ctor) == objectCtorString; +function overRest(func, start, transform) { + start = nativeMax(start === undefined ? (func.length - 1) : start, 0); + return function() { + var args = arguments, + index = -1, + length = nativeMax(args.length - start, 0), + array = Array(length); + + while (++index < length) { + array[index] = args[start + index]; + } + index = -1; + var otherArgs = Array(start + 1); + while (++index < start) { + otherArgs[index] = args[index]; + } + otherArgs[start] = transform(array); + return apply(func, this, otherArgs); + }; } -module.exports = isPlainObject; +module.exports = overRest; /***/ }), -/***/ 41350: +/***/ 28748: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var baseIsTypedArray = __nccwpck_require__(97458), - baseUnary = __nccwpck_require__(65541), - nodeUtil = __nccwpck_require__(60101); +var freeGlobal = __nccwpck_require__(29824); -/* Node.js helper references. */ -var nodeIsTypedArray = nodeUtil && nodeUtil.isTypedArray; +/** Detect free variable `self`. */ +var freeSelf = typeof self == 'object' && self && self.Object === Object && self; -/** - * Checks if `value` is classified as a typed array. - * - * @static - * @memberOf _ - * @since 3.0.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a typed array, else `false`. - * @example - * - * _.isTypedArray(new Uint8Array); - * // => true - * - * _.isTypedArray([]); - * // => false - */ -var isTypedArray = nodeIsTypedArray ? baseUnary(nodeIsTypedArray) : baseIsTypedArray; +/** Used as a reference to the global object. */ +var root = freeGlobal || freeSelf || Function('return this')(); -module.exports = isTypedArray; +module.exports = root; /***/ }), -/***/ 7942: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 30223: +/***/ ((module) => { -var arrayLikeKeys = __nccwpck_require__(50822), - baseKeysIn = __nccwpck_require__(43011), - isArrayLike = __nccwpck_require__(90462); +/** Used to stand-in for `undefined` hash values. */ +var HASH_UNDEFINED = '__lodash_hash_undefined__'; /** - * Creates an array of the own and inherited enumerable property names of `object`. - * - * **Note:** Non-object values are coerced to objects. - * - * @static - * @memberOf _ - * @since 3.0.0 - * @category Object - * @param {Object} object The object to query. - * @returns {Array} Returns the array of property names. - * @example - * - * function Foo() { - * this.a = 1; - * this.b = 2; - * } - * - * Foo.prototype.c = 3; + * Adds `value` to the array cache. * - * _.keysIn(new Foo); - * // => ['a', 'b', 'c'] (iteration order is not guaranteed) + * @private + * @name add + * @memberOf SetCache + * @alias push + * @param {*} value The value to cache. + * @returns {Object} Returns the cache instance. */ -function keysIn(object) { - return isArrayLike(object) ? arrayLikeKeys(object, true) : baseKeysIn(object); +function setCacheAdd(value) { + this.__data__.set(value, HASH_UNDEFINED); + return this; } -module.exports = keysIn; +module.exports = setCacheAdd; /***/ }), -/***/ 51592: +/***/ 69785: /***/ ((module) => { /** - * This method returns `undefined`. 
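// The rebuilt overRest above collects trailing arguments into an array and
// passes it through `transform`; with an identity transform it acts like a
// rest parameter (a sketch, using a plain identity function):
var restPair = overRest(function(first, rest) {
  return [first, rest];
}, undefined, function(value) { return value; });

console.log(restPair(1, 2, 3)); // => [1, [2, 3]] (start defaults to func.length - 1)
console.log(restPair(1));       // => [1, []]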
- * - * @static - * @memberOf _ - * @since 2.3.0 - * @category Util - * @example + * Checks if `value` is in the array cache. * - * _.times(2, _.noop); - * // => [undefined, undefined] + * @private + * @name has + * @memberOf SetCache + * @param {*} value The value to search for. + * @returns {number} Returns `true` if `value` is found, else `false`. */ -function noop() { - // No operation performed. +function setCacheHas(value) { + return this.__data__.has(value); } -module.exports = noop; +module.exports = setCacheHas; /***/ }), -/***/ 30919: +/***/ 65326: /***/ ((module) => { /** - * This method returns `false`. - * - * @static - * @memberOf _ - * @since 4.13.0 - * @category Util - * @returns {boolean} Returns `false`. - * @example + * Converts `set` to an array of its values. * - * _.times(2, _.stubFalse); - * // => [false, false] + * @private + * @param {Object} set The set to convert. + * @returns {Array} Returns the values. */ -function stubFalse() { - return false; +function setToArray(set) { + var index = -1, + result = Array(set.size); + + set.forEach(function(value) { + result[++index] = value; + }); + return result; } -module.exports = stubFalse; +module.exports = setToArray; /***/ }), -/***/ 42513: +/***/ 34022: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var baseFlatten = __nccwpck_require__(65209), - baseRest = __nccwpck_require__(35899), - baseUniq = __nccwpck_require__(16947), - isArrayLikeObject = __nccwpck_require__(65478); +var baseSetToString = __nccwpck_require__(89906), + shortOut = __nccwpck_require__(12976); /** - * Creates an array of unique values, in order, from all given arrays using - * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) - * for equality comparisons. - * - * @static - * @memberOf _ - * @since 0.1.0 - * @category Array - * @param {...Array} [arrays] The arrays to inspect. - * @returns {Array} Returns the new array of combined values. - * @example + * Sets the `toString` method of `func` to return `string`. * - * _.union([2], [1, 2]); - * // => [2, 1] + * @private + * @param {Function} func The function to modify. + * @param {Function} string The `toString` result. + * @returns {Function} Returns `func`. */ -var union = baseRest(function(arrays) { - return baseUniq(baseFlatten(arrays, 1, isArrayLikeObject, true)); -}); +var setToString = shortOut(baseSetToString); -module.exports = union; +module.exports = setToString; /***/ }), -/***/ 52311: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 12976: +/***/ ((module) => { -/*! - * mime-db - * Copyright(c) 2014 Jonathan Ong - * Copyright(c) 2015-2022 Douglas Christopher Wilson - * MIT Licensed - */ +/** Used to detect hot functions by number of calls within a span of milliseconds. */ +var HOT_COUNT = 800, + HOT_SPAN = 16; + +/* Built-in method references for those with the same name as other `lodash` methods. */ +var nativeNow = Date.now; /** - * Module exports. + * Creates a function that'll short out and invoke `identity` instead + * of `func` when it's called `HOT_COUNT` or more times in `HOT_SPAN` + * milliseconds. + * + * @private + * @param {Function} func The function to restrict. + * @returns {Function} Returns the new shortable function. 
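// shortOut above is a hot-path circuit breaker: once the wrapped function has
// been called HOT_COUNT (800) times without a 16ms quiet gap, it degrades to
// returning its first argument untouched. That is why setToString wraps
// baseSetToString with it: redefining func.toString via defineProperty is
// comparatively expensive, and a pathological loop would otherwise pay that
// cost on every iteration.
var guarded = shortOut(function(value) { return 'expensive(' + value + ')'; });
console.log(guarded('x')); // => 'expensive(x)' on the normal, cool path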
*/ +function shortOut(func) { + var count = 0, + lastCalled = 0; -module.exports = __nccwpck_require__(53765) - + return function() { + var stamp = nativeNow(), + remaining = HOT_SPAN - (stamp - lastCalled); -/***/ }), + lastCalled = stamp; + if (remaining > 0) { + if (++count >= HOT_COUNT) { + return arguments[0]; + } + } else { + count = 0; + } + return func.apply(undefined, arguments); + }; +} -/***/ 24651: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +module.exports = shortOut; -"use strict"; -/*! - * mime-types - * Copyright(c) 2014 Jonathan Ong - * Copyright(c) 2015 Douglas Christopher Wilson - * MIT Licensed - */ +/***/ }), +/***/ 26764: +/***/ ((module) => { /** - * Module dependencies. + * A specialized version of `_.indexOf` which performs strict equality + * comparisons of values, i.e. `===`. + * * @private + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @param {number} fromIndex The index to search from. + * @returns {number} Returns the index of the matched value, else `-1`. */ +function strictIndexOf(array, value, fromIndex) { + var index = fromIndex - 1, + length = array.length; -var db = __nccwpck_require__(52311) -var extname = (__nccwpck_require__(71017).extname) + while (++index < length) { + if (array[index] === value) { + return index; + } + } + return -1; +} -/** - * Module variables. - * @private - */ +module.exports = strictIndexOf; -var EXTRACT_TYPE_REGEXP = /^\s*([^;\s]*)(?:;|\s|$)/ -var TEXT_TYPE_REGEXP = /^text\//i -/** - * Module exports. - * @public - */ +/***/ }), -exports.charset = charset -exports.charsets = { lookup: charset } -exports.contentType = contentType -exports.extension = extension -exports.extensions = Object.create(null) -exports.lookup = lookup -exports.types = Object.create(null) +/***/ 32225: +/***/ ((module) => { -// Populate the extensions/types maps -populateMaps(exports.extensions, exports.types) +/** Used for built-in method references. */ +var funcProto = Function.prototype; + +/** Used to resolve the decompiled source of functions. */ +var funcToString = funcProto.toString; /** - * Get the default charset for a MIME type. + * Converts `func` to its source code. * - * @param {string} type - * @return {boolean|string} + * @private + * @param {Function} func The function to convert. + * @returns {string} Returns the source code. */ - -function charset (type) { - if (!type || typeof type !== 'string') { - return false +function toSource(func) { + if (func != null) { + try { + return funcToString.call(func); + } catch (e) {} + try { + return (func + ''); + } catch (e) {} } + return ''; +} - // TODO: use media-typer - var match = EXTRACT_TYPE_REGEXP.exec(type) - var mime = match && db[match[1].toLowerCase()] +module.exports = toSource; - if (mime && mime.charset) { - return mime.charset - } - // default text/* to utf-8 - if (match && TEXT_TYPE_REGEXP.test(match[1])) { - return 'UTF-8' - } +/***/ }), - return false -} +/***/ 22324: +/***/ ((module) => { /** - * Create a full Content-Type header given a MIME type or extension. + * Creates a function that returns `value`. * - * @param {string} str - * @return {boolean|string} + * @static + * @memberOf _ + * @since 2.4.0 + * @category Util + * @param {*} value The value to return from the new function. + * @returns {Function} Returns the new constant function. 
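// toSource above stringifies functions defensively (falling back to string
// coercion, then ''); getNative pairs it with the maskSrcKey check from
// earlier in this hunk to tell genuine built-ins from core-js shims:
console.log(toSource(Object.create).indexOf('[native code]') > -1); // => true
console.log(toSource(function add(a, b) { return a + b; }));        // full source text
console.log(toSource(null));                                        // => ''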
+ * @example + * + * var objects = _.times(2, _.constant({ 'a': 1 })); + * + * console.log(objects); + * // => [{ 'a': 1 }, { 'a': 1 }] + * + * console.log(objects[0] === objects[1]); + * // => true */ +function constant(value) { + return function() { + return value; + }; +} -function contentType (str) { - // TODO: should this even be in this module? - if (!str || typeof str !== 'string') { - return false - } +module.exports = constant; - var mime = str.indexOf('/') === -1 - ? exports.lookup(str) - : str - if (!mime) { - return false - } +/***/ }), - // TODO: use content-type or other module - if (mime.indexOf('charset') === -1) { - var charset = exports.charset(mime) - if (charset) mime += '; charset=' + charset.toLowerCase() - } +/***/ 41871: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - return mime -} +var baseRest = __nccwpck_require__(77912), + eq = __nccwpck_require__(36107), + isIterateeCall = __nccwpck_require__(8271), + keysIn = __nccwpck_require__(13812); + +/** Used for built-in method references. */ +var objectProto = Object.prototype; + +/** Used to check objects for own properties. */ +var hasOwnProperty = objectProto.hasOwnProperty; /** - * Get the default extension for a MIME type. + * Assigns own and inherited enumerable string keyed properties of source + * objects to the destination object for all destination properties that + * resolve to `undefined`. Source objects are applied from left to right. + * Once a property is set, additional values of the same property are ignored. * - * @param {string} type - * @return {boolean|string} + * **Note:** This method mutates `object`. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The destination object. + * @param {...Object} [sources] The source objects. + * @returns {Object} Returns `object`. + * @see _.defaultsDeep + * @example + * + * _.defaults({ 'a': 1 }, { 'b': 2 }, { 'a': 3 }); + * // => { 'a': 1, 'b': 2 } */ +var defaults = baseRest(function(object, sources) { + object = Object(object); -function extension (type) { - if (!type || typeof type !== 'string') { - return false + var index = -1; + var length = sources.length; + var guard = length > 2 ? sources[2] : undefined; + + if (guard && isIterateeCall(sources[0], sources[1], guard)) { + length = 1; } - // TODO: use media-typer - var match = EXTRACT_TYPE_REGEXP.exec(type) + while (++index < length) { + var source = sources[index]; + var props = keysIn(source); + var propsIndex = -1; + var propsLength = props.length; - // get extensions - var exts = match && exports.extensions[match[1].toLowerCase()] + while (++propsIndex < propsLength) { + var key = props[propsIndex]; + var value = object[key]; - if (!exts || !exts.length) { - return false + if (value === undefined || + (eq(value, objectProto[key]) && !hasOwnProperty.call(object, key))) { + object[key] = source[key]; + } + } } - return exts[0] -} + return object; +}); -/** - * Lookup the MIME type for a file path/extension. - * - * @param {string} path - * @return {boolean|string} - */ +module.exports = defaults; -function lookup (path) { - if (!path || typeof path !== 'string') { - return false - } - // get the extension ("ext" or ".ext" or full path) - var extension = extname('x.' 
+ path) - .toLowerCase() - .substr(1) +/***/ }), - if (!extension) { - return false - } +/***/ 67022: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - return exports.types[extension] || false -} +var baseDifference = __nccwpck_require__(64335), + baseFlatten = __nccwpck_require__(83985), + baseRest = __nccwpck_require__(77912), + isArrayLikeObject = __nccwpck_require__(82799); /** - * Populate the extensions and types maps. - * @private + * Creates an array of `array` values not included in the other given arrays + * using [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons. The order and references of result values are + * determined by the first array. + * + * **Note:** Unlike `_.pullAll`, this method returns a new array. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {...Array} [values] The values to exclude. + * @returns {Array} Returns the new array of filtered values. + * @see _.without, _.xor + * @example + * + * _.difference([2, 1], [2, 3]); + * // => [1] */ +var difference = baseRest(function(array, values) { + return isArrayLikeObject(array) + ? baseDifference(array, baseFlatten(values, 1, isArrayLikeObject, true)) + : []; +}); -function populateMaps (extensions, types) { - // source preference (least -> most) - var preference = ['nginx', 'apache', undefined, 'iana'] - - Object.keys(db).forEach(function forEachMimeType (type) { - var mime = db[type] - var exts = mime.extensions - - if (!exts || !exts.length) { - return - } - - // mime -> extensions - extensions[type] = exts +module.exports = difference; - // extension -> mime - for (var i = 0; i < exts.length; i++) { - var extension = exts[i] - if (types[extension]) { - var from = preference.indexOf(db[types[extension]].source) - var to = preference.indexOf(mime.source) +/***/ }), - if (types[extension] !== 'application/octet-stream' && - (from > to || (from === to && types[extension].substr(0, 12) === 'application/'))) { - // skip the remapping - continue - } - } +/***/ 36107: +/***/ ((module) => { - // set the extension -> mime - types[extension] = type - } - }) +/** + * Performs a + * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * comparison between two values to determine if they are equivalent. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if the values are equivalent, else `false`. 
+ * @example + * + * var object = { 'a': 1 }; + * var other = { 'a': 1 }; + * + * _.eq(object, object); + * // => true + * + * _.eq(object, other); + * // => false + * + * _.eq('a', 'a'); + * // => true + * + * _.eq('a', Object('a')); + * // => false + * + * _.eq(NaN, NaN); + * // => true + */ +function eq(value, other) { + return value === other || (value !== value && other !== other); } +module.exports = eq; + /***/ }), -/***/ 7530: +/***/ 15055: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var path = __nccwpck_require__(71017); -var fs = __nccwpck_require__(57147); -var _0777 = parseInt('0777', 8); - -module.exports = mkdirP.mkdirp = mkdirP.mkdirP = mkdirP; - -function mkdirP (p, opts, f, made) { - if (typeof opts === 'function') { - f = opts; - opts = {}; - } - else if (!opts || typeof opts !== 'object') { - opts = { mode: opts }; - } - - var mode = opts.mode; - var xfs = opts.fs || fs; - - if (mode === undefined) { - mode = _0777 - } - if (!made) made = null; - - var cb = f || /* istanbul ignore next */ function () {}; - p = path.resolve(p); - - xfs.mkdir(p, mode, function (er) { - if (!er) { - made = made || p; - return cb(null, made); - } - switch (er.code) { - case 'ENOENT': - /* istanbul ignore if */ - if (path.dirname(p) === p) return cb(er); - mkdirP(path.dirname(p), opts, function (er, made) { - /* istanbul ignore if */ - if (er) cb(er, made); - else mkdirP(p, opts, cb, made); - }); - break; +var baseFlatten = __nccwpck_require__(83985); - // In the case of any other error, just see if there's a dir - // there already. If so, then hooray! If not, then something - // is borked. - default: - xfs.stat(p, function (er2, stat) { - // if the stat fails, then that's super weird. - // let the original error be the failure reason. - if (er2 || !stat.isDirectory()) cb(er, made) - else cb(null, made); - }); - break; - } - }); +/** + * Flattens `array` a single level deep. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to flatten. + * @returns {Array} Returns the new flattened array. + * @example + * + * _.flatten([1, [2, [3, [4]], 5]]); + * // => [1, 2, [3, [4]], 5] + */ +function flatten(array) { + var length = array == null ? 0 : array.length; + return length ? baseFlatten(array, 1) : []; } -mkdirP.sync = function sync (p, opts, made) { - if (!opts || typeof opts !== 'object') { - opts = { mode: opts }; - } - - var mode = opts.mode; - var xfs = opts.fs || fs; - - if (mode === undefined) { - mode = _0777 - } - if (!made) made = null; +module.exports = flatten; - p = path.resolve(p); - try { - xfs.mkdirSync(p, mode); - made = made || p; - } - catch (err0) { - switch (err0.code) { - case 'ENOENT' : - made = sync(path.dirname(p), opts, made); - sync(p, opts, made); - break; +/***/ }), - // In the case of any other error, just see if there's a dir - // there already. If so, then hooray! If not, then something - // is borked. - default: - var stat; - try { - stat = xfs.statSync(p); - } - catch (err1) /* istanbul ignore next */ { - throw err0; - } - /* istanbul ignore if */ - if (!stat.isDirectory()) throw err0; - break; - } - } +/***/ 25249: +/***/ ((module) => { - return made; -}; +/** + * This method returns the first argument it receives. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Util + * @param {*} value Any value. + * @returns {*} Returns `value`. 
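// eq above implements SameValueZero: identical to === except that NaN is
// equal to itself, and unlike Object.is it treats +0 and -0 as equivalent:
console.log(eq(NaN, NaN));     // => true  (value !== value only for NaN)
console.log(NaN === NaN);      // => false
console.log(eq(0, -0));        // => true
console.log(Object.is(0, -0)); // => false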
+ * @example + * + * var object = { 'a': 1 }; + * + * console.log(_.identity(object) === object); + * // => true + */ +function identity(value) { + return value; +} + +module.exports = identity; /***/ }), -/***/ 26006: -/***/ ((module, exports, __nccwpck_require__) => { +/***/ 470: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +var baseIsArguments = __nccwpck_require__(9228), + isObjectLike = __nccwpck_require__(50350); +/** Used for built-in method references. */ +var objectProto = Object.prototype; -Object.defineProperty(exports, "__esModule", ({ value: true })); +/** Used to check objects for own properties. */ +var hasOwnProperty = objectProto.hasOwnProperty; -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } - -var Stream = _interopDefault(__nccwpck_require__(12781)); -var http = _interopDefault(__nccwpck_require__(13685)); -var Url = _interopDefault(__nccwpck_require__(57310)); -var whatwgUrl = _interopDefault(__nccwpck_require__(94971)); -var https = _interopDefault(__nccwpck_require__(95687)); -var zlib = _interopDefault(__nccwpck_require__(59796)); - -// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js - -// fix for "Readable" isn't a named export issue -const Readable = Stream.Readable; +/** Built-in value references. */ +var propertyIsEnumerable = objectProto.propertyIsEnumerable; -const BUFFER = Symbol('buffer'); -const TYPE = Symbol('type'); +/** + * Checks if `value` is likely an `arguments` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an `arguments` object, + * else `false`. + * @example + * + * _.isArguments(function() { return arguments; }()); + * // => true + * + * _.isArguments([1, 2, 3]); + * // => false + */ +var isArguments = baseIsArguments(function() { return arguments; }()) ? baseIsArguments : function(value) { + return isObjectLike(value) && hasOwnProperty.call(value, 'callee') && + !propertyIsEnumerable.call(value, 'callee'); +}; -class Blob { - constructor() { - this[TYPE] = ''; +module.exports = isArguments; - const blobParts = arguments[0]; - const options = arguments[1]; - const buffers = []; - let size = 0; +/***/ }), - if (blobParts) { - const a = blobParts; - const length = Number(a.length); - for (let i = 0; i < length; i++) { - const element = a[i]; - let buffer; - if (element instanceof Buffer) { - buffer = element; - } else if (ArrayBuffer.isView(element)) { - buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); - } else if (element instanceof ArrayBuffer) { - buffer = Buffer.from(element); - } else if (element instanceof Blob) { - buffer = element[BUFFER]; - } else { - buffer = Buffer.from(typeof element === 'string' ? element : String(element)); - } - size += buffer.length; - buffers.push(buffer); - } - } +/***/ 39261: +/***/ ((module) => { - this[BUFFER] = Buffer.concat(buffers); +/** + * Checks if `value` is classified as an `Array` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an array, else `false`. 
+ * @example + * + * _.isArray([1, 2, 3]); + * // => true + * + * _.isArray(document.body.children); + * // => false + * + * _.isArray('abc'); + * // => false + * + * _.isArray(_.noop); + * // => false + */ +var isArray = Array.isArray; - let type = options && options.type !== undefined && String(options.type).toLowerCase(); - if (type && !/[^\u0020-\u007E]/.test(type)) { - this[TYPE] = type; - } - } - get size() { - return this[BUFFER].length; - } - get type() { - return this[TYPE]; - } - text() { - return Promise.resolve(this[BUFFER].toString()); - } - arrayBuffer() { - const buf = this[BUFFER]; - const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - return Promise.resolve(ab); - } - stream() { - const readable = new Readable(); - readable._read = function () {}; - readable.push(this[BUFFER]); - readable.push(null); - return readable; - } - toString() { - return '[object Blob]'; - } - slice() { - const size = this.size; +module.exports = isArray; - const start = arguments[0]; - const end = arguments[1]; - let relativeStart, relativeEnd; - if (start === undefined) { - relativeStart = 0; - } else if (start < 0) { - relativeStart = Math.max(size + start, 0); - } else { - relativeStart = Math.min(start, size); - } - if (end === undefined) { - relativeEnd = size; - } else if (end < 0) { - relativeEnd = Math.max(size + end, 0); - } else { - relativeEnd = Math.min(end, size); - } - const span = Math.max(relativeEnd - relativeStart, 0); - const buffer = this[BUFFER]; - const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); - const blob = new Blob([], { type: arguments[2] }); - blob[BUFFER] = slicedBuffer; - return blob; - } -} +/***/ }), -Object.defineProperties(Blob.prototype, { - size: { enumerable: true }, - type: { enumerable: true }, - slice: { enumerable: true } -}); +/***/ 60759: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -Object.defineProperty(Blob.prototype, Symbol.toStringTag, { - value: 'Blob', - writable: false, - enumerable: false, - configurable: true -}); +var isFunction = __nccwpck_require__(13033), + isLength = __nccwpck_require__(45007); /** - * fetch-error.js + * Checks if `value` is array-like. A value is considered array-like if it's + * not a function and has a `value.length` that's an integer greater than or + * equal to `0` and less than or equal to `Number.MAX_SAFE_INTEGER`. * - * FetchError interface for operational errors - */ - -/** - * Create FetchError instance + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is array-like, else `false`. 
+ * @example * - * @param String message Error message for human - * @param String type Error type for machine - * @param String systemError For Node.js system error - * @return FetchError + * _.isArrayLike([1, 2, 3]); + * // => true + * + * _.isArrayLike(document.body.children); + * // => true + * + * _.isArrayLike('abc'); + * // => true + * + * _.isArrayLike(_.noop); + * // => false */ -function FetchError(message, type, systemError) { - Error.call(this, message); - - this.message = message; - this.type = type; - - // when err.type is `system`, err.code contains system error code - if (systemError) { - this.code = this.errno = systemError.code; - } - - // hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); +function isArrayLike(value) { + return value != null && isLength(value.length) && !isFunction(value); } -FetchError.prototype = Object.create(Error.prototype); -FetchError.prototype.constructor = FetchError; -FetchError.prototype.name = 'FetchError'; +module.exports = isArrayLike; -let convert; -try { - convert = (__nccwpck_require__(74241).convert); -} catch (e) {} -const INTERNALS = Symbol('Body internals'); +/***/ }), -// fix an issue where "PassThrough" isn't a named export for node <10 -const PassThrough = Stream.PassThrough; +/***/ 82799: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var isArrayLike = __nccwpck_require__(60759), + isObjectLike = __nccwpck_require__(50350); /** - * Body mixin + * This method is like `_.isArrayLike` except that it also checks if `value` + * is an object. * - * Ref: https://fetch.spec.whatwg.org/#body + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an array-like object, + * else `false`. + * @example * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void + * _.isArrayLikeObject([1, 2, 3]); + * // => true + * + * _.isArrayLikeObject(document.body.children); + * // => true + * + * _.isArrayLikeObject('abc'); + * // => false + * + * _.isArrayLikeObject(_.noop); + * // => false */ -function Body(body) { - var _this = this; +function isArrayLikeObject(value) { + return isObjectLike(value) && isArrayLike(value); +} - var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, - _ref$size = _ref.size; +module.exports = isArrayLikeObject; - let size = _ref$size === undefined ? 0 : _ref$size; - var _ref$timeout = _ref.timeout; - let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; - if (body == null) { - // body is undefined or null - body = null; - } else if (isURLSearchParams(body)) { - // body is a URLSearchParams - body = Buffer.from(body.toString()); - } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { - // body is ArrayBuffer - body = Buffer.from(body); - } else if (ArrayBuffer.isView(body)) { - // body is ArrayBufferView - body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); - } else if (body instanceof Stream) ; else { - // none of the above - // coerce to string then buffer - body = Buffer.from(String(body)); - } - this[INTERNALS] = { - body, - disturbed: false, - error: null - }; - this.size = size; - this.timeout = timeout; +/***/ }), - if (body instanceof Stream) { - body.on('error', function (err) { - const error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); - _this[INTERNALS].error = error; - }); - } -} +/***/ 10648: +/***/ ((module, exports, __nccwpck_require__) => { -Body.prototype = { - get body() { - return this[INTERNALS].body; - }, +/* module decorator */ module = __nccwpck_require__.nmd(module); +var root = __nccwpck_require__(28748), + stubFalse = __nccwpck_require__(98755); - get bodyUsed() { - return this[INTERNALS].disturbed; - }, +/** Detect free variable `exports`. */ +var freeExports = true && exports && !exports.nodeType && exports; - /** - * Decode response as ArrayBuffer - * - * @return Promise - */ - arrayBuffer() { - return consumeBody.call(this).then(function (buf) { - return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - }); - }, +/** Detect free variable `module`. */ +var freeModule = freeExports && "object" == 'object' && module && !module.nodeType && module; - /** - * Return raw response as Blob - * - * @return Promise - */ - blob() { - let ct = this.headers && this.headers.get('content-type') || ''; - return consumeBody.call(this).then(function (buf) { - return Object.assign( - // Prevent copying - new Blob([], { - type: ct.toLowerCase() - }), { - [BUFFER]: buf - }); - }); - }, +/** Detect the popular CommonJS extension `module.exports`. */ +var moduleExports = freeModule && freeModule.exports === freeExports; - /** - * Decode response as json - * - * @return Promise - */ - json() { - var _this2 = this; +/** Built-in value references. */ +var Buffer = moduleExports ? root.Buffer : undefined; - return consumeBody.call(this).then(function (buffer) { - try { - return JSON.parse(buffer.toString()); - } catch (err) { - return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); - } - }); - }, +/* Built-in method references for those with the same name as other `lodash` methods. */ +var nativeIsBuffer = Buffer ? Buffer.isBuffer : undefined; - /** - * Decode response as text - * - * @return Promise - */ - text() { - return consumeBody.call(this).then(function (buffer) { - return buffer.toString(); - }); - }, +/** + * Checks if `value` is a buffer. + * + * @static + * @memberOf _ + * @since 4.3.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a buffer, else `false`. + * @example + * + * _.isBuffer(new Buffer(2)); + * // => true + * + * _.isBuffer(new Uint8Array(2)); + * // => false + */ +var isBuffer = nativeIsBuffer || stubFalse; - /** - * Decode response as buffer (non-spec api) - * - * @return Promise - */ - buffer() { - return consumeBody.call(this); - }, +module.exports = isBuffer; - /** - * Decode response as text, while automatically detecting the encoding and - * trying to decode to UTF-8 (non-spec api) - * - * @return Promise - */ - textConverted() { - var _this3 = this; - return consumeBody.call(this).then(function (buffer) { - return convertBody(buffer, _this3.headers); - }); - } -}; +/***/ }), -// In browsers, all properties are enumerable. 
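// The Body mixin in this hunk flags the stream as disturbed on first read,
// which is why a node-fetch response body can be consumed exactly once.
// A usage sketch against the public node-fetch v2 API (not this bundle directly):
const fetch = require('node-fetch');

async function readOnce(url) {
  const res = await fetch(url);
  const data = await res.json(); // consumeBody runs and sets disturbed = true
  console.log(res.bodyUsed);     // => true
  // A second read, e.g. res.text(), now rejects: "body used already for: <url>"
  return data;
}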
-Object.defineProperties(Body.prototype, { - body: { enumerable: true }, - bodyUsed: { enumerable: true }, - arrayBuffer: { enumerable: true }, - blob: { enumerable: true }, - json: { enumerable: true }, - text: { enumerable: true } -}); +/***/ 13033: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -Body.mixIn = function (proto) { - for (const name of Object.getOwnPropertyNames(Body.prototype)) { - // istanbul ignore else: future proof - if (!(name in proto)) { - const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); - Object.defineProperty(proto, name, desc); - } - } -}; +var baseGetTag = __nccwpck_require__(83958), + isObject = __nccwpck_require__(21079); + +/** `Object#toString` result references. */ +var asyncTag = '[object AsyncFunction]', + funcTag = '[object Function]', + genTag = '[object GeneratorFunction]', + proxyTag = '[object Proxy]'; /** - * Consume and convert an entire Body to a Buffer. + * Checks if `value` is classified as a `Function` object. * - * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a function, else `false`. + * @example * - * @return Promise + * _.isFunction(_); + * // => true + * + * _.isFunction(/abc/); + * // => false */ -function consumeBody() { - var _this4 = this; +function isFunction(value) { + if (!isObject(value)) { + return false; + } + // The use of `Object#toString` avoids issues with the `typeof` operator + // in Safari 9 which returns 'object' for typed arrays and other constructors. + var tag = baseGetTag(value); + return tag == funcTag || tag == genTag || tag == asyncTag || tag == proxyTag; +} - if (this[INTERNALS].disturbed) { - return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); - } +module.exports = isFunction; - this[INTERNALS].disturbed = true; - if (this[INTERNALS].error) { - return Body.Promise.reject(this[INTERNALS].error); - } +/***/ }), - let body = this.body; +/***/ 45007: +/***/ ((module) => { - // body is null - if (body === null) { - return Body.Promise.resolve(Buffer.alloc(0)); - } +/** Used as references for various `Number` constants. */ +var MAX_SAFE_INTEGER = 9007199254740991; - // body is blob - if (isBlob(body)) { - body = body.stream(); - } +/** + * Checks if `value` is a valid array-like length. + * + * **Note:** This method is loosely based on + * [`ToLength`](http://ecma-international.org/ecma-262/7.0/#sec-tolength). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a valid length, else `false`. 
+ * @example + * + * _.isLength(3); + * // => true + * + * _.isLength(Number.MIN_VALUE); + * // => false + * + * _.isLength(Infinity); + * // => false + * + * _.isLength('3'); + * // => false + */ +function isLength(value) { + return typeof value == 'number' && + value > -1 && value % 1 == 0 && value <= MAX_SAFE_INTEGER; +} - // body is buffer - if (Buffer.isBuffer(body)) { - return Body.Promise.resolve(body); - } +module.exports = isLength; - // istanbul ignore if: should never happen - if (!(body instanceof Stream)) { - return Body.Promise.resolve(Buffer.alloc(0)); - } - - // body is stream - // get ready to actually consume the body - let accum = []; - let accumBytes = 0; - let abort = false; - - return new Body.Promise(function (resolve, reject) { - let resTimeout; - - // allow timeout on slow response body - if (_this4.timeout) { - resTimeout = setTimeout(function () { - abort = true; - reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); - }, _this4.timeout); - } - // handle stream errors - body.on('error', function (err) { - if (err.name === 'AbortError') { - // if the request was aborted, reject with this Error - abort = true; - reject(err); - } else { - // other errors, such as incorrect content-encoding - reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); - } - }); +/***/ }), - body.on('data', function (chunk) { - if (abort || chunk === null) { - return; - } +/***/ 21079: +/***/ ((module) => { - if (_this4.size && accumBytes + chunk.length > _this4.size) { - abort = true; - reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); - return; - } +/** + * Checks if `value` is the + * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types) + * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`) + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an object, else `false`. + * @example + * + * _.isObject({}); + * // => true + * + * _.isObject([1, 2, 3]); + * // => true + * + * _.isObject(_.noop); + * // => true + * + * _.isObject(null); + * // => false + */ +function isObject(value) { + var type = typeof value; + return value != null && (type == 'object' || type == 'function'); +} - accumBytes += chunk.length; - accum.push(chunk); - }); +module.exports = isObject; - body.on('end', function () { - if (abort) { - return; - } - clearTimeout(resTimeout); +/***/ }), - try { - resolve(Buffer.concat(accum, accumBytes)); - } catch (err) { - // handle streams that have accumulated too much data (issue #414) - reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); - } - }); - }); -} +/***/ 50350: +/***/ ((module) => { /** - * Detect buffer encoding and convert to target encoding - * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding + * Checks if `value` is object-like. A value is object-like if it's not `null` + * and has a `typeof` result of "object". * - * @param Buffer buffer Incoming buffer - * @param String encoding Target encoding - * @return String + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. 
+ * @returns {boolean} Returns `true` if `value` is object-like, else `false`.
+ * @example
+ *
+ * _.isObjectLike({});
+ * // => true
+ *
+ * _.isObjectLike([1, 2, 3]);
+ * // => true
+ *
+ * _.isObjectLike(_.noop);
+ * // => false
+ *
+ * _.isObjectLike(null);
+ * // => false
 */
-function convertBody(buffer, headers) {
- if (typeof convert !== 'function') {
- throw new Error('The package `encoding` must be installed to use the textConverted() function');
- }
+function isObjectLike(value) {
+ return value != null && typeof value == 'object';
+}

- const ct = headers.get('content-type');
- let charset = 'utf-8';
- let res, str;
+module.exports = isObjectLike;

- // header
- if (ct) {
- res = /charset=([^;]*)/i.exec(ct);
- }

- // no charset in content type, peek at response body for at most 1024 bytes
- str = buffer.slice(0, 1024).toString();
+/***/ }),

- // html5
- if (!res && str) {
- res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str);
- }

- // html4
- if (!res && str) {
- res = /<meta[\s]+?http-equiv=(['"])content-type\1[\s]+?content=(['"])(.+?)\2/i.exec(str);
- if (res) {
- res = /charset=(.*)/i.exec(res.pop());
- }
- }

- // xml
- if (!res && str) {
- res = /<\?xml.+?encoding=(['"])(.+?)\1/.exec(str);
- }

- // found charset
- if (res) {
- charset = res.pop();

- // prevent decode issues when sites use incorrect encoding
- // ref: https://hsivonen.fi/encoding-menu/
- if (charset === 'gb2312' || charset === 'gbk') {
- charset = 'gb18030';
- }
- }

- // turn raw buffers into a single utf-8 buffer
- return convert(buffer, 'UTF-8', charset).toString();
-}

-/**
- * Detect a URLSearchParams object
- * ref: https://github.com/bitinn/node-fetch/issues/296#issuecomment-307598143
- *
- * @param Object obj Object to detect by type or brand
- * @return String
- */
+ * _.isPlainObject(new Foo);
+ * // => false
+ *
+ * _.isPlainObject([1, 2, 3]);
+ * // => false
+ *
+ * _.isPlainObject({ 'x': 0, 'y': 0 });
+ * // => true
+ *
+ * _.isPlainObject(Object.create(null));
+ * // => true
 */
-function isURLSearchParams(obj) {
- // Duck-typing as a necessary condition.
- if (typeof obj !== 'object' || typeof obj.append !== 'function' || typeof obj.delete !== 'function' || typeof obj.get !== 'function' || typeof obj.getAll !== 'function' || typeof obj.has !== 'function' || typeof obj.set !== 'function') {
- return false;
- }
-
- // Brand-checking and more duck-typing as optional condition.
- return obj.constructor.name === 'URLSearchParams' || Object.prototype.toString.call(obj) === '[object URLSearchParams]' || typeof obj.sort === 'function';
+function isPlainObject(value) {
+ if (!isObjectLike(value) || baseGetTag(value) != objectTag) {
+ return false;
+ }
+ var proto = getPrototype(value);
+ if (proto === null) {
+ return true;
+ }
+ var Ctor = hasOwnProperty.call(proto, 'constructor') && proto.constructor;
+ return typeof Ctor == 'function' && Ctor instanceof Ctor &&
+ funcToString.call(Ctor) == objectCtorString;
}

-/**
- * Check if `obj` is a W3C `Blob` object (which `File` inherits from)
- * @param {*} obj
- * @return {boolean}
- */
-function isBlob(obj) {
- return typeof obj === 'object' && typeof obj.arrayBuffer === 'function' && typeof obj.type === 'string' && typeof obj.stream === 'function' && typeof obj.constructor === 'function' && typeof obj.constructor.name === 'string' && /^(Blob|File)$/.test(obj.constructor.name) && /^(Blob|File)$/.test(obj[Symbol.toStringTag]);
-}
+module.exports = isPlainObject;

-/**
- * Clone body given Res/Req instance
- *
- * @param Mixed instance Response or Request instance
- * @return Mixed
- */
-function clone(instance) {
- let p1, p2;
- let body = instance.body;

- // don't allow cloning a used body
- if (instance.bodyUsed) {
- throw new Error('cannot clone body after it is used');
- }
+/***/ }),

- // check that body is a stream and not form-data object
- // note: we can't clone the form-data object without having it as a dependency
- if (body instanceof Stream && typeof body.getBoundary !== 'function') {
- // tee instance body
- p1 = new PassThrough();
- p2 = new PassThrough();
- body.pipe(p1);
- body.pipe(p2);
- // set instance body to teed body and return the other teed body
- instance[INTERNALS].body = p1;
- body = p2;
- }
+/***/ 9056:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

- return body;
-}
+var baseIsTypedArray = __nccwpck_require__(38332),
+ baseUnary = __nccwpck_require__(76067),
+ nodeUtil 
= __nccwpck_require__(46315); + +/* Node.js helper references. */ +var nodeIsTypedArray = nodeUtil && nodeUtil.isTypedArray; /** - * Performs the operation "extract a `Content-Type` value from |object|" as - * specified in the specification: - * https://fetch.spec.whatwg.org/#concept-bodyinit-extract + * Checks if `value` is classified as a typed array. * - * This function assumes that instance.body is present. + * @static + * @memberOf _ + * @since 3.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a typed array, else `false`. + * @example * - * @param Mixed instance Any options.body input + * _.isTypedArray(new Uint8Array); + * // => true + * + * _.isTypedArray([]); + * // => false */ -function extractContentType(body) { - if (body === null) { - // body is null - return null; - } else if (typeof body === 'string') { - // body is string - return 'text/plain;charset=UTF-8'; - } else if (isURLSearchParams(body)) { - // body is a URLSearchParams - return 'application/x-www-form-urlencoded;charset=UTF-8'; - } else if (isBlob(body)) { - // body is blob - return body.type || null; - } else if (Buffer.isBuffer(body)) { - // body is buffer - return null; - } else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { - // body is ArrayBuffer - return null; - } else if (ArrayBuffer.isView(body)) { - // body is ArrayBufferView - return null; - } else if (typeof body.getBoundary === 'function') { - // detect form data input from form-data module - return `multipart/form-data;boundary=${body.getBoundary()}`; - } else if (body instanceof Stream) { - // body is stream - // can't really do much about this - return null; - } else { - // Body constructor defaults other things to string - return 'text/plain;charset=UTF-8'; - } -} +var isTypedArray = nodeIsTypedArray ? baseUnary(nodeIsTypedArray) : baseIsTypedArray; + +module.exports = isTypedArray; + + +/***/ }), + +/***/ 13812: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var arrayLikeKeys = __nccwpck_require__(16341), + baseKeysIn = __nccwpck_require__(75027), + isArrayLike = __nccwpck_require__(60759); /** - * The Fetch Standard treats this as if "total bytes" is a property on the body. - * For us, we have to explicitly get it with a function. + * Creates an array of the own and inherited enumerable property names of `object`. * - * ref: https://fetch.spec.whatwg.org/#concept-body-total-bytes + * **Note:** Non-object values are coerced to objects. * - * @param Body instance Instance of Body - * @return Number? Number of bytes, or null if not possible + * @static + * @memberOf _ + * @since 3.0.0 + * @category Object + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names. + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.keysIn(new Foo); + * // => ['a', 'b', 'c'] (iteration order is not guaranteed) */ -function getTotalBytes(instance) { - const body = instance.body; +function keysIn(object) { + return isArrayLike(object) ? 
arrayLikeKeys(object, true) : baseKeysIn(object); +} +module.exports = keysIn; - if (body === null) { - // body is null - return 0; - } else if (isBlob(body)) { - return body.size; - } else if (Buffer.isBuffer(body)) { - // body is buffer - return body.length; - } else if (body && typeof body.getLengthSync === 'function') { - // detect form data input from form-data module - if (body._lengthRetrievers && body._lengthRetrievers.length == 0 || // 1.x - body.hasKnownLength && body.hasKnownLength()) { - // 2.x - return body.getLengthSync(); - } - return null; - } else { - // body is stream - return null; - } -} + +/***/ }), + +/***/ 67811: +/***/ ((module) => { /** - * Write a Body to a Node.js WritableStream (e.g. http.Request) object. + * This method returns `undefined`. * - * @param Body instance Instance of Body - * @return Void + * @static + * @memberOf _ + * @since 2.3.0 + * @category Util + * @example + * + * _.times(2, _.noop); + * // => [undefined, undefined] */ -function writeToStream(dest, instance) { - const body = instance.body; +function noop() { + // No operation performed. +} +module.exports = noop; - if (body === null) { - // body is null - dest.end(); - } else if (isBlob(body)) { - body.stream().pipe(dest); - } else if (Buffer.isBuffer(body)) { - // body is buffer - dest.write(body); - dest.end(); - } else { - // body is stream - body.pipe(dest); - } -} -// expose Promise -Body.Promise = global.Promise; +/***/ }), + +/***/ 98755: +/***/ ((module) => { /** - * headers.js + * This method returns `false`. * - * Headers class offers convenient helpers + * @static + * @memberOf _ + * @since 4.13.0 + * @category Util + * @returns {boolean} Returns `false`. + * @example + * + * _.times(2, _.stubFalse); + * // => [false, false] */ +function stubFalse() { + return false; +} -const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/; -const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/; +module.exports = stubFalse; -function validateName(name) { - name = `${name}`; - if (invalidTokenRegex.test(name) || name === '') { - throw new TypeError(`${name} is not a legal HTTP header name`); - } -} -function validateValue(value) { - value = `${value}`; - if (invalidHeaderCharRegex.test(value)) { - throw new TypeError(`${value} is not a legal HTTP header value`); - } -} +/***/ }), + +/***/ 1214: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var baseFlatten = __nccwpck_require__(83985), + baseRest = __nccwpck_require__(77912), + baseUniq = __nccwpck_require__(43519), + isArrayLikeObject = __nccwpck_require__(82799); /** - * Find the key in the map object given a header name. + * Creates an array of unique values, in order, from all given arrays using + * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons. * - * Returns undefined if not found. + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. + * @returns {Array} Returns the new array of combined values. 
+ * @example
 *
- * @param String name Header name
- * @return String|Undefined
+ * _.union([2], [1, 2]);
+ * // => [2, 1]
 */
-function find(map, name) {
- name = name.toLowerCase();
- for (const key in map) {
- if (key.toLowerCase() === name) {
- return key;
- }
- }
- return undefined;
-}
-
-const MAP = Symbol('map');
-class Headers {
- /**
- * Headers class
- *
- * @param Object headers Response headers
- * @return Void
- */
- constructor() {
- let init = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : undefined;
+var union = baseRest(function(arrays) {
+ return baseUniq(baseFlatten(arrays, 1, isArrayLikeObject, true));
+});

- this[MAP] = Object.create(null);
+module.exports = union;

- if (init instanceof Headers) {
- const rawHeaders = init.raw();
- const headerNames = Object.keys(rawHeaders);
- for (const headerName of headerNames) {
- for (const value of rawHeaders[headerName]) {
- this.append(headerName, value);
- }
- }
+/***/ }),

- return;
- }
+/***/ 64454:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

- // We don't worry about converting prop to ByteString here as append()
- // will handle it.
- if (init == null) ; else if (typeof init === 'object') {
- const method = init[Symbol.iterator];
- if (method != null) {
- if (typeof method !== 'function') {
- throw new TypeError('Header pairs must be iterable');
- }
+/*!
+ * mime-db
+ * Copyright(c) 2014 Jonathan Ong
+ * Copyright(c) 2015-2022 Douglas Christopher Wilson
+ * MIT Licensed
+ */

- // sequence<sequence<ByteString>>
- // Note: per spec we have to first exhaust the lists then process them
- const pairs = [];
- for (const pair of init) {
- if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {
- throw new TypeError('Each header pair must be iterable');
- }
- pairs.push(Array.from(pair));
- }
+/**
+ * Module exports.
+ */

- for (const pair of pairs) {
- if (pair.length !== 2) {
- throw new TypeError('Each header pair must be a name/value tuple');
- }
- this.append(pair[0], pair[1]);
- }
- } else {
- // record<ByteString, ByteString>
- for (const key of Object.keys(init)) {
- const value = init[key];
- this.append(key, value);
- }
- }
- } else {
- throw new TypeError('Provided initializer must be an object');
- }
- }
+module.exports = __nccwpck_require__(53765)

- /**
- * Return combined header value given name
- *
- * @param String name Header name
- * @return Mixed
- */
- get(name) {
- name = `${name}`;
- validateName(name);
- const key = find(this[MAP], name);
- if (key === undefined) {
- return null;
- }
- return this[MAP][key].join(', ');
- }
+/***/ }),

- /**
- * Iterate over all headers
- *
- * @param Function callback Executed for each item with parameters (value, name, thisArg)
- * @param Boolean thisArg `this` context for callback function
- * @return Void
- */
- forEach(callback) {
- let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;
+/***/ 36032:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

- let pairs = getHeaders(this);
- let i = 0;
- while (i < pairs.length) {
- var _pairs$i = pairs[i];
- const name = _pairs$i[0],
- value = _pairs$i[1];
+"use strict";
+/*!
+ * mime-types + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ - callback.call(thisArg, value, name, this); - pairs = getHeaders(this); - i++; - } - } - /** - * Overwrite header values given name - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - set(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - this[MAP][key !== undefined ? key : name] = [value]; - } - /** - * Append a value onto existing header - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - append(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - if (key !== undefined) { - this[MAP][key].push(value); - } else { - this[MAP][name] = [value]; - } - } +/** + * Module dependencies. + * @private + */ - /** - * Check for header name existence - * - * @param String name Header name - * @return Boolean - */ - has(name) { - name = `${name}`; - validateName(name); - return find(this[MAP], name) !== undefined; - } +var db = __nccwpck_require__(64454) +var extname = (__nccwpck_require__(71017).extname) - /** - * Delete all header values given name - * - * @param String name Header name - * @return Void - */ - delete(name) { - name = `${name}`; - validateName(name); - const key = find(this[MAP], name); - if (key !== undefined) { - delete this[MAP][key]; - } - } +/** + * Module variables. + * @private + */ - /** - * Return raw headers (non-spec api) - * - * @return Object - */ - raw() { - return this[MAP]; - } +var EXTRACT_TYPE_REGEXP = /^\s*([^;\s]*)(?:;|\s|$)/ +var TEXT_TYPE_REGEXP = /^text\//i - /** - * Get an iterator on keys. - * - * @return Iterator - */ - keys() { - return createHeadersIterator(this, 'key'); - } +/** + * Module exports. + * @public + */ - /** - * Get an iterator on values. - * - * @return Iterator - */ - values() { - return createHeadersIterator(this, 'value'); - } +exports.charset = charset +exports.charsets = { lookup: charset } +exports.contentType = contentType +exports.extension = extension +exports.extensions = Object.create(null) +exports.lookup = lookup +exports.types = Object.create(null) - /** - * Get an iterator on entries. - * - * This is the default iterator of the Headers object. - * - * @return Iterator - */ - [Symbol.iterator]() { - return createHeadersIterator(this, 'key+value'); - } -} -Headers.prototype.entries = Headers.prototype[Symbol.iterator]; +// Populate the extensions/types maps +populateMaps(exports.extensions, exports.types) -Object.defineProperty(Headers.prototype, Symbol.toStringTag, { - value: 'Headers', - writable: false, - enumerable: false, - configurable: true -}); +/** + * Get the default charset for a MIME type. + * + * @param {string} type + * @return {boolean|string} + */ -Object.defineProperties(Headers.prototype, { - get: { enumerable: true }, - forEach: { enumerable: true }, - set: { enumerable: true }, - append: { enumerable: true }, - has: { enumerable: true }, - delete: { enumerable: true }, - keys: { enumerable: true }, - values: { enumerable: true }, - entries: { enumerable: true } -}); +function charset (type) { + if (!type || typeof type !== 'string') { + return false + } -function getHeaders(headers) { - let kind = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : 'key+value'; + // TODO: use media-typer + var match = EXTRACT_TYPE_REGEXP.exec(type) + var mime = match && db[match[1].toLowerCase()] - const keys = Object.keys(headers[MAP]).sort(); - return keys.map(kind === 'key' ? function (k) { - return k.toLowerCase(); - } : kind === 'value' ? function (k) { - return headers[MAP][k].join(', '); - } : function (k) { - return [k.toLowerCase(), headers[MAP][k].join(', ')]; - }); -} + if (mime && mime.charset) { + return mime.charset + } -const INTERNAL = Symbol('internal'); + // default text/* to utf-8 + if (match && TEXT_TYPE_REGEXP.test(match[1])) { + return 'UTF-8' + } -function createHeadersIterator(target, kind) { - const iterator = Object.create(HeadersIteratorPrototype); - iterator[INTERNAL] = { - target, - kind, - index: 0 - }; - return iterator; + return false } -const HeadersIteratorPrototype = Object.setPrototypeOf({ - next() { - // istanbul ignore if - if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { - throw new TypeError('Value of `this` is not a HeadersIterator'); - } +/** + * Create a full Content-Type header given a MIME type or extension. + * + * @param {string} str + * @return {boolean|string} + */ - var _INTERNAL = this[INTERNAL]; - const target = _INTERNAL.target, - kind = _INTERNAL.kind, - index = _INTERNAL.index; +function contentType (str) { + // TODO: should this even be in this module? + if (!str || typeof str !== 'string') { + return false + } - const values = getHeaders(target, kind); - const len = values.length; - if (index >= len) { - return { - value: undefined, - done: true - }; - } + var mime = str.indexOf('/') === -1 + ? exports.lookup(str) + : str - this[INTERNAL].index = index + 1; + if (!mime) { + return false + } - return { - value: values[index], - done: false - }; - } -}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); + // TODO: use content-type or other module + if (mime.indexOf('charset') === -1) { + var charset = exports.charset(mime) + if (charset) mime += '; charset=' + charset.toLowerCase() + } -Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { - value: 'HeadersIterator', - writable: false, - enumerable: false, - configurable: true -}); + return mime +} /** - * Export the Headers object in a form that Node.js can consume. + * Get the default extension for a MIME type. * - * @param Headers headers - * @return Object + * @param {string} type + * @return {boolean|string} */ -function exportNodeCompatibleHeaders(headers) { - const obj = Object.assign({ __proto__: null }, headers[MAP]); - // http.request() only supports string as Host header. This hack makes - // specifying custom Host header possible. - const hostHeaderKey = find(headers[MAP], 'Host'); - if (hostHeaderKey !== undefined) { - obj[hostHeaderKey] = obj[hostHeaderKey][0]; - } +function extension (type) { + if (!type || typeof type !== 'string') { + return false + } - return obj; + // TODO: use media-typer + var match = EXTRACT_TYPE_REGEXP.exec(type) + + // get extensions + var exts = match && exports.extensions[match[1].toLowerCase()] + + if (!exts || !exts.length) { + return false + } + + return exts[0] } /** - * Create a Headers object from an object of headers, ignoring those that do - * not conform to HTTP grammar productions. + * Lookup the MIME type for a file path/extension. 
* - * @param Object obj Object of headers - * @return Headers + * @param {string} path + * @return {boolean|string} */ -function createHeadersLenient(obj) { - const headers = new Headers(); - for (const name of Object.keys(obj)) { - if (invalidTokenRegex.test(name)) { - continue; - } - if (Array.isArray(obj[name])) { - for (const val of obj[name]) { - if (invalidHeaderCharRegex.test(val)) { - continue; - } - if (headers[MAP][name] === undefined) { - headers[MAP][name] = [val]; - } else { - headers[MAP][name].push(val); - } - } - } else if (!invalidHeaderCharRegex.test(obj[name])) { - headers[MAP][name] = [obj[name]]; - } - } - return headers; -} -const INTERNALS$1 = Symbol('Response internals'); +function lookup (path) { + if (!path || typeof path !== 'string') { + return false + } -// fix an issue where "STATUS_CODES" aren't a named export for node <10 -const STATUS_CODES = http.STATUS_CODES; + // get the extension ("ext" or ".ext" or full path) + var extension = extname('x.' + path) + .toLowerCase() + .substr(1) + + if (!extension) { + return false + } + + return exports.types[extension] || false +} /** - * Response class - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void + * Populate the extensions and types maps. + * @private */ -class Response { - constructor() { - let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; - let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; - - Body.call(this, body, opts); - const status = opts.status || 200; - const headers = new Headers(opts.headers); +function populateMaps (extensions, types) { + // source preference (least -> most) + var preference = ['nginx', 'apache', undefined, 'iana'] - if (body != null && !headers.has('Content-Type')) { - const contentType = extractContentType(body); - if (contentType) { - headers.append('Content-Type', contentType); - } - } + Object.keys(db).forEach(function forEachMimeType (type) { + var mime = db[type] + var exts = mime.extensions - this[INTERNALS$1] = { - url: opts.url, - status, - statusText: opts.statusText || STATUS_CODES[status], - headers, - counter: opts.counter - }; - } + if (!exts || !exts.length) { + return + } - get url() { - return this[INTERNALS$1].url || ''; - } + // mime -> extensions + extensions[type] = exts - get status() { - return this[INTERNALS$1].status; - } + // extension -> mime + for (var i = 0; i < exts.length; i++) { + var extension = exts[i] - /** - * Convenience property representing if the request ended normally - */ - get ok() { - return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; - } + if (types[extension]) { + var from = preference.indexOf(db[types[extension]].source) + var to = preference.indexOf(mime.source) - get redirected() { - return this[INTERNALS$1].counter > 0; - } + if (types[extension] !== 'application/octet-stream' && + (from > to || (from === to && types[extension].substr(0, 12) === 'application/'))) { + // skip the remapping + continue + } + } - get statusText() { - return this[INTERNALS$1].statusText; - } + // set the extension -> mime + types[extension] = type + } + }) +} - get headers() { - return this[INTERNALS$1].headers; - } - /** - * Clone this response - * - * @return Response - */ - clone() { - return new Response(clone(this), { - url: this.url, - status: this.status, - statusText: this.statusText, - headers: this.headers, - ok: this.ok, - redirected: this.redirected - }); - } -} +/***/ }), -Body.mixIn(Response.prototype); 
+/***/ 94041: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -Object.defineProperties(Response.prototype, { - url: { enumerable: true }, - status: { enumerable: true }, - ok: { enumerable: true }, - redirected: { enumerable: true }, - statusText: { enumerable: true }, - headers: { enumerable: true }, - clone: { enumerable: true } -}); +var path = __nccwpck_require__(71017); +var fs = __nccwpck_require__(57147); +var _0777 = parseInt('0777', 8); -Object.defineProperty(Response.prototype, Symbol.toStringTag, { - value: 'Response', - writable: false, - enumerable: false, - configurable: true -}); +module.exports = mkdirP.mkdirp = mkdirP.mkdirP = mkdirP; -const INTERNALS$2 = Symbol('Request internals'); -const URL = Url.URL || whatwgUrl.URL; +function mkdirP (p, opts, f, made) { + if (typeof opts === 'function') { + f = opts; + opts = {}; + } + else if (!opts || typeof opts !== 'object') { + opts = { mode: opts }; + } + + var mode = opts.mode; + var xfs = opts.fs || fs; + + if (mode === undefined) { + mode = _0777 + } + if (!made) made = null; + + var cb = f || /* istanbul ignore next */ function () {}; + p = path.resolve(p); + + xfs.mkdir(p, mode, function (er) { + if (!er) { + made = made || p; + return cb(null, made); + } + switch (er.code) { + case 'ENOENT': + /* istanbul ignore if */ + if (path.dirname(p) === p) return cb(er); + mkdirP(path.dirname(p), opts, function (er, made) { + /* istanbul ignore if */ + if (er) cb(er, made); + else mkdirP(p, opts, cb, made); + }); + break; -// fix an issue where "format", "parse" aren't a named export for node <10 -const parse_url = Url.parse; -const format_url = Url.format; + // In the case of any other error, just see if there's a dir + // there already. If so, then hooray! If not, then something + // is borked. + default: + xfs.stat(p, function (er2, stat) { + // if the stat fails, then that's super weird. + // let the original error be the failure reason. + if (er2 || !stat.isDirectory()) cb(er, made) + else cb(null, made); + }); + break; + } + }); +} + +mkdirP.sync = function sync (p, opts, made) { + if (!opts || typeof opts !== 'object') { + opts = { mode: opts }; + } + + var mode = opts.mode; + var xfs = opts.fs || fs; + + if (mode === undefined) { + mode = _0777 + } + if (!made) made = null; + + p = path.resolve(p); + + try { + xfs.mkdirSync(p, mode); + made = made || p; + } + catch (err0) { + switch (err0.code) { + case 'ENOENT' : + made = sync(path.dirname(p), opts, made); + sync(p, opts, made); + break; + + // In the case of any other error, just see if there's a dir + // there already. If so, then hooray! If not, then something + // is borked. + default: + var stat; + try { + stat = xfs.statSync(p); + } + catch (err1) /* istanbul ignore next */ { + throw err0; + } + /* istanbul ignore if */ + if (!stat.isDirectory()) throw err0; + break; + } + } + + return made; +}; + + +/***/ }), + +/***/ 10154: +/***/ ((module) => { /** - * Wrapper around `new URL` to handle arbitrary URLs - * - * @param {string} urlStr - * @return {void} - */ -function parseURL(urlStr) { - /* - Check whether the URL is absolute or not - Scheme: https://tools.ietf.org/html/rfc3986#section-3.1 - Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3 + * Helpers. 
*/ - if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) { - urlStr = new URL(urlStr).toString(); - } - - // Fallback to old implementation for arbitrary URLs - return parse_url(urlStr); -} -const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; +var s = 1000; +var m = s * 60; +var h = m * 60; +var d = h * 24; +var w = d * 7; +var y = d * 365.25; /** - * Check if a value is an instance of Request. + * Parse or format the given `val`. * - * @param Mixed input - * @return Boolean + * Options: + * + * - `long` verbose formatting [false] + * + * @param {String|Number} val + * @param {Object} [options] + * @throws {Error} throw an error if val is not a non-empty string or a number + * @return {String|Number} + * @api public */ -function isRequest(input) { - return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; -} -function isAbortSignal(signal) { - const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); - return !!(proto && proto.constructor.name === 'AbortSignal'); -} +module.exports = function(val, options) { + options = options || {}; + var type = typeof val; + if (type === 'string' && val.length > 0) { + return parse(val); + } else if (type === 'number' && isFinite(val)) { + return options.long ? fmtLong(val) : fmtShort(val); + } + throw new Error( + 'val is not a non-empty string or a valid number. val=' + + JSON.stringify(val) + ); +}; /** - * Request class + * Parse the given `str` and return milliseconds. * - * @param Mixed input Url or Request instance - * @param Object init Custom options - * @return Void + * @param {String} str + * @return {Number} + * @api private */ -class Request { - constructor(input) { - let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; - let parsedURL; - - // normalize input - if (!isRequest(input)) { - if (input && input.href) { - // in order to support Node.js' Url objects; though WHATWG's URL objects - // will fall into this branch also (since their `toString()` will return - // `href` property anyway) - parsedURL = parseURL(input.href); - } else { - // coerce input to a string before attempting to parse - parsedURL = parseURL(`${input}`); - } - input = {}; - } else { - parsedURL = parseURL(input.url); - } +function parse(str) { + str = String(str); + if (str.length > 100) { + return; + } + var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( + str + ); + if (!match) { + return; + } + var n = parseFloat(match[1]); + var type = (match[2] || 'ms').toLowerCase(); + switch (type) { + case 'years': + case 'year': + case 'yrs': + case 'yr': + case 'y': + return n * y; + case 'weeks': + case 'week': + case 'w': + return n * w; + case 'days': + case 'day': + case 'd': + return n * d; + case 'hours': + case 'hour': + case 'hrs': + case 'hr': + case 'h': + return n * h; + case 'minutes': + case 'minute': + case 'mins': + case 'min': + case 'm': + return n * m; + case 'seconds': + case 'second': + case 'secs': + case 'sec': + case 's': + return n * s; + case 'milliseconds': + case 'millisecond': + case 'msecs': + case 'msec': + case 'ms': + return n; + default: + return undefined; + } +} - let method = init.method || input.method || 'GET'; - method = method.toUpperCase(); +/** + * Short format for `ms`. 
+ * + * @param {Number} ms + * @return {String} + * @api private + */ - if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { - throw new TypeError('Request with GET/HEAD method cannot have body'); - } +function fmtShort(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return Math.round(ms / d) + 'd'; + } + if (msAbs >= h) { + return Math.round(ms / h) + 'h'; + } + if (msAbs >= m) { + return Math.round(ms / m) + 'm'; + } + if (msAbs >= s) { + return Math.round(ms / s) + 's'; + } + return ms + 'ms'; +} - let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; +/** + * Long format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ - Body.call(this, inputBody, { - timeout: init.timeout || input.timeout || 0, - size: init.size || input.size || 0 - }); +function fmtLong(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return plural(ms, msAbs, d, 'day'); + } + if (msAbs >= h) { + return plural(ms, msAbs, h, 'hour'); + } + if (msAbs >= m) { + return plural(ms, msAbs, m, 'minute'); + } + if (msAbs >= s) { + return plural(ms, msAbs, s, 'second'); + } + return ms + ' ms'; +} - const headers = new Headers(init.headers || input.headers || {}); +/** + * Pluralization helper. + */ - if (inputBody != null && !headers.has('Content-Type')) { - const contentType = extractContentType(inputBody); - if (contentType) { - headers.append('Content-Type', contentType); - } - } +function plural(ms, msAbs, n, name) { + var isPlural = msAbs >= n * 1.5; + return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : ''); +} - let signal = isRequest(input) ? input.signal : null; - if ('signal' in init) signal = init.signal; - if (signal != null && !isAbortSignal(signal)) { - throw new TypeError('Expected signal to be an instanceof AbortSignal'); - } +/***/ }), - this[INTERNALS$2] = { - method, - redirect: init.redirect || input.redirect || 'follow', - headers, - parsedURL, - signal - }; +/***/ 38534: +/***/ ((module, exports, __nccwpck_require__) => { - // node-fetch-only options - this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; - this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; - this.counter = init.counter || input.counter || 0; - this.agent = init.agent || input.agent; - } +"use strict"; - get method() { - return this[INTERNALS$2].method; - } - get url() { - return format_url(this[INTERNALS$2].parsedURL); - } +Object.defineProperty(exports, "__esModule", ({ value: true })); - get headers() { - return this[INTERNALS$2].headers; - } +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } - get redirect() { - return this[INTERNALS$2].redirect; - } +var Stream = _interopDefault(__nccwpck_require__(12781)); +var http = _interopDefault(__nccwpck_require__(13685)); +var Url = _interopDefault(__nccwpck_require__(57310)); +var whatwgUrl = _interopDefault(__nccwpck_require__(52871)); +var https = _interopDefault(__nccwpck_require__(95687)); +var zlib = _interopDefault(__nccwpck_require__(59796)); - get signal() { - return this[INTERNALS$2].signal; - } +// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js - /** - * Clone this request - * - * @return Request - */ - clone() { - return new Request(this); - } -} +// fix for "Readable" isn't a named export issue +const Readable = Stream.Readable; -Body.mixIn(Request.prototype); +const BUFFER = Symbol('buffer'); +const TYPE = Symbol('type'); -Object.defineProperty(Request.prototype, Symbol.toStringTag, { - value: 'Request', - writable: false, - enumerable: false, - configurable: true -}); +class Blob { + constructor() { + this[TYPE] = ''; -Object.defineProperties(Request.prototype, { - method: { enumerable: true }, - url: { enumerable: true }, - headers: { enumerable: true }, - redirect: { enumerable: true }, - clone: { enumerable: true }, - signal: { enumerable: true } -}); + const blobParts = arguments[0]; + const options = arguments[1]; -/** - * Convert a Request to Node.js http request options. - * - * @param Request A Request instance - * @return Object The options object to be passed to http.request - */ -function getNodeRequestOptions(request) { - const parsedURL = request[INTERNALS$2].parsedURL; - const headers = new Headers(request[INTERNALS$2].headers); + const buffers = []; + let size = 0; - // fetch step 1.3 - if (!headers.has('Accept')) { - headers.set('Accept', '*/*'); - } + if (blobParts) { + const a = blobParts; + const length = Number(a.length); + for (let i = 0; i < length; i++) { + const element = a[i]; + let buffer; + if (element instanceof Buffer) { + buffer = element; + } else if (ArrayBuffer.isView(element)) { + buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); + } else if (element instanceof ArrayBuffer) { + buffer = Buffer.from(element); + } else if (element instanceof Blob) { + buffer = element[BUFFER]; + } else { + buffer = Buffer.from(typeof element === 'string' ? 
element : String(element)); + } + size += buffer.length; + buffers.push(buffer); + } + } - // Basic fetch - if (!parsedURL.protocol || !parsedURL.hostname) { - throw new TypeError('Only absolute URLs are supported'); - } + this[BUFFER] = Buffer.concat(buffers); - if (!/^https?:$/.test(parsedURL.protocol)) { - throw new TypeError('Only HTTP(S) protocols are supported'); + let type = options && options.type !== undefined && String(options.type).toLowerCase(); + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type; + } } - - if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { - throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); + get size() { + return this[BUFFER].length; } - - // HTTP-network-or-cache fetch steps 2.4-2.7 - let contentLengthValue = null; - if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { - contentLengthValue = '0'; + get type() { + return this[TYPE]; } - if (request.body != null) { - const totalBytes = getTotalBytes(request); - if (typeof totalBytes === 'number') { - contentLengthValue = String(totalBytes); - } + text() { + return Promise.resolve(this[BUFFER].toString()); } - if (contentLengthValue) { - headers.set('Content-Length', contentLengthValue); + arrayBuffer() { + const buf = this[BUFFER]; + const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + return Promise.resolve(ab); } - - // HTTP-network-or-cache fetch step 2.11 - if (!headers.has('User-Agent')) { - headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); + stream() { + const readable = new Readable(); + readable._read = function () {}; + readable.push(this[BUFFER]); + readable.push(null); + return readable; } - - // HTTP-network-or-cache fetch step 2.15 - if (request.compress && !headers.has('Accept-Encoding')) { - headers.set('Accept-Encoding', 'gzip,deflate'); + toString() { + return '[object Blob]'; } + slice() { + const size = this.size; - let agent = request.agent; - if (typeof agent === 'function') { - agent = agent(parsedURL); - } + const start = arguments[0]; + const end = arguments[1]; + let relativeStart, relativeEnd; + if (start === undefined) { + relativeStart = 0; + } else if (start < 0) { + relativeStart = Math.max(size + start, 0); + } else { + relativeStart = Math.min(start, size); + } + if (end === undefined) { + relativeEnd = size; + } else if (end < 0) { + relativeEnd = Math.max(size + end, 0); + } else { + relativeEnd = Math.min(end, size); + } + const span = Math.max(relativeEnd - relativeStart, 0); - if (!headers.has('Connection') && !agent) { - headers.set('Connection', 'close'); + const buffer = this[BUFFER]; + const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); + const blob = new Blob([], { type: arguments[2] }); + blob[BUFFER] = slicedBuffer; + return blob; } +} - // HTTP-network fetch step 4.2 - // chunked encoding is handled by Node.js +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, + slice: { enumerable: true } +}); - return Object.assign({}, parsedURL, { - method: request.method, - headers: exportNodeCompatibleHeaders(headers), - agent - }); -} +Object.defineProperty(Blob.prototype, Symbol.toStringTag, { + value: 'Blob', + writable: false, + enumerable: false, + configurable: true +}); /** - * abort-error.js + * fetch-error.js * - * AbortError interface for cancelled requests + * FetchError interface for operational errors */ /** - 
* Create AbortError instance + * Create FetchError instance * * @param String message Error message for human - * @return AbortError + * @param String type Error type for machine + * @param String systemError For Node.js system error + * @return FetchError */ -function AbortError(message) { +function FetchError(message, type, systemError) { Error.call(this, message); - this.type = 'aborted'; this.message = message; + this.type = type; + + // when err.type is `system`, err.code contains system error code + if (systemError) { + this.code = this.errno = systemError.code; + } // hide custom error implementation details from end-users Error.captureStackTrace(this, this.constructor); } -AbortError.prototype = Object.create(Error.prototype); -AbortError.prototype.constructor = AbortError; -AbortError.prototype.name = 'AbortError'; - -const URL$1 = Url.URL || whatwgUrl.URL; +FetchError.prototype = Object.create(Error.prototype); +FetchError.prototype.constructor = FetchError; +FetchError.prototype.name = 'FetchError'; -// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 -const PassThrough$1 = Stream.PassThrough; +let convert; +try { + convert = (__nccwpck_require__(32431).convert); +} catch (e) {} -const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) { - const orig = new URL$1(original).hostname; - const dest = new URL$1(destination).hostname; +const INTERNALS = Symbol('Body internals'); - return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest); -}; +// fix an issue where "PassThrough" isn't a named export for node <10 +const PassThrough = Stream.PassThrough; /** - * Fetch function + * Body mixin * - * @param Mixed url Absolute url or Request instance - * @param Object opts Fetch options - * @return Promise + * Ref: https://fetch.spec.whatwg.org/#body + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void */ -function fetch(url, opts) { - - // allow custom promise - if (!fetch.Promise) { - throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); - } - - Body.Promise = fetch.Promise; - - // wrap http.request into fetch - return new fetch.Promise(function (resolve, reject) { - // build request object - const request = new Request(url, opts); - const options = getNodeRequestOptions(request); - - const send = (options.protocol === 'https:' ? https : http).request; - const signal = request.signal; +function Body(body) { + var _this = this; - let response = null; + var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, + _ref$size = _ref.size; - const abort = function abort() { - let error = new AbortError('The user aborted a request.'); - reject(error); - if (request.body && request.body instanceof Stream.Readable) { - request.body.destroy(error); - } - if (!response || !response.body) return; - response.body.emit('error', error); - }; + let size = _ref$size === undefined ? 0 : _ref$size; + var _ref$timeout = _ref.timeout; + let timeout = _ref$timeout === undefined ? 
0 : _ref$timeout; - if (signal && signal.aborted) { - abort(); - return; - } + if (body == null) { + // body is undefined or null + body = null; + } else if (isURLSearchParams(body)) { + // body is a URLSearchParams + body = Buffer.from(body.toString()); + } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { + // body is ArrayBuffer + body = Buffer.from(body); + } else if (ArrayBuffer.isView(body)) { + // body is ArrayBufferView + body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); + } else if (body instanceof Stream) ; else { + // none of the above + // coerce to string then buffer + body = Buffer.from(String(body)); + } + this[INTERNALS] = { + body, + disturbed: false, + error: null + }; + this.size = size; + this.timeout = timeout; - const abortAndFinalize = function abortAndFinalize() { - abort(); - finalize(); - }; + if (body instanceof Stream) { + body.on('error', function (err) { + const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); + _this[INTERNALS].error = error; + }); + } +} - // send request - const req = send(options); - let reqTimeout; +Body.prototype = { + get body() { + return this[INTERNALS].body; + }, - if (signal) { - signal.addEventListener('abort', abortAndFinalize); - } + get bodyUsed() { + return this[INTERNALS].disturbed; + }, - function finalize() { - req.abort(); - if (signal) signal.removeEventListener('abort', abortAndFinalize); - clearTimeout(reqTimeout); - } + /** + * Decode response as ArrayBuffer + * + * @return Promise + */ + arrayBuffer() { + return consumeBody.call(this).then(function (buf) { + return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + }); + }, - if (request.timeout) { - req.once('socket', function (socket) { - reqTimeout = setTimeout(function () { - reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); - finalize(); - }, request.timeout); + /** + * Return raw response as Blob + * + * @return Promise + */ + blob() { + let ct = this.headers && this.headers.get('content-type') || ''; + return consumeBody.call(this).then(function (buf) { + return Object.assign( + // Prevent copying + new Blob([], { + type: ct.toLowerCase() + }), { + [BUFFER]: buf }); - } + }); + }, - req.on('error', function (err) { - reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); - finalize(); + /** + * Decode response as json + * + * @return Promise + */ + json() { + var _this2 = this; + + return consumeBody.call(this).then(function (buffer) { + try { + return JSON.parse(buffer.toString()); + } catch (err) { + return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); + } }); + }, - req.on('response', function (res) { - clearTimeout(reqTimeout); + /** + * Decode response as text + * + * @return Promise + */ + text() { + return consumeBody.call(this).then(function (buffer) { + return buffer.toString(); + }); + }, - const headers = createHeadersLenient(res.headers); + /** + * Decode response as buffer (non-spec api) + * + * @return Promise + */ + buffer() { + return consumeBody.call(this); + }, - // HTTP fetch step 5 - if (fetch.isRedirect(res.statusCode)) { - // HTTP fetch step 5.2 - const location = headers.get('Location'); + /** + * Decode response as text, while automatically detecting the encoding and + * 
trying to decode to UTF-8 (non-spec api) + * + * @return Promise + */ + textConverted() { + var _this3 = this; - // HTTP fetch step 5.3 - let locationURL = null; - try { - locationURL = location === null ? null : new URL$1(location, request.url).toString(); - } catch (err) { - // error here can only be invalid URL in Location: header - // do not throw when options.redirect == manual - // let the user extract the errorneous redirect URL - if (request.redirect !== 'manual') { - reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')); - finalize(); - return; - } - } + return consumeBody.call(this).then(function (buffer) { + return convertBody(buffer, _this3.headers); + }); + } +}; - // HTTP fetch step 5.5 - switch (request.redirect) { - case 'error': - reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); - finalize(); - return; - case 'manual': - // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. - if (locationURL !== null) { - // handle corrupted header - try { - headers.set('Location', locationURL); - } catch (err) { - // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request - reject(err); - } - } - break; - case 'follow': - // HTTP-redirect fetch step 2 - if (locationURL === null) { - break; - } +// In browsers, all properties are enumerable. +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true } +}); - // HTTP-redirect fetch step 5 - if (request.counter >= request.follow) { - reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); - finalize(); - return; - } +Body.mixIn = function (proto) { + for (const name of Object.getOwnPropertyNames(Body.prototype)) { + // istanbul ignore else: future proof + if (!(name in proto)) { + const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); + Object.defineProperty(proto, name, desc); + } + } +}; - // HTTP-redirect fetch step 6 (counter increment) - // Create a new Request object. - const requestOpts = { - headers: new Headers(request.headers), - follow: request.follow, - counter: request.counter + 1, - agent: request.agent, - compress: request.compress, - method: request.method, - body: request.body, - signal: request.signal, - timeout: request.timeout, - size: request.size - }; +/** + * Consume and convert an entire Body to a Buffer. 
+ * + * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body + * + * @return Promise + */ +function consumeBody() { + var _this4 = this; - if (!isDomainOrSubdomain(request.url, locationURL)) { - for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { - requestOpts.headers.delete(name); - } - } + if (this[INTERNALS].disturbed) { + return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); + } - // HTTP-redirect fetch step 9 - if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { - reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); - finalize(); - return; - } + this[INTERNALS].disturbed = true; - // HTTP-redirect fetch step 11 - if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { - requestOpts.method = 'GET'; - requestOpts.body = undefined; - requestOpts.headers.delete('content-length'); - } + if (this[INTERNALS].error) { + return Body.Promise.reject(this[INTERNALS].error); + } - // HTTP-redirect fetch step 15 - resolve(fetch(new Request(locationURL, requestOpts))); - finalize(); - return; - } - } + let body = this.body; - // prepare response - res.once('end', function () { - if (signal) signal.removeEventListener('abort', abortAndFinalize); - }); - let body = res.pipe(new PassThrough$1()); + // body is null + if (body === null) { + return Body.Promise.resolve(Buffer.alloc(0)); + } - const response_options = { - url: request.url, - status: res.statusCode, - statusText: res.statusMessage, - headers: headers, - size: request.size, - timeout: request.timeout, - counter: request.counter - }; + // body is blob + if (isBlob(body)) { + body = body.stream(); + } - // HTTP-network fetch step 12.1.1.3 - const codings = headers.get('Content-Encoding'); + // body is buffer + if (Buffer.isBuffer(body)) { + return Body.Promise.resolve(body); + } - // HTTP-network fetch step 12.1.1.4: handle content codings + // istanbul ignore if: should never happen + if (!(body instanceof Stream)) { + return Body.Promise.resolve(Buffer.alloc(0)); + } - // in following scenarios we ignore compression support - // 1. compression support is disabled - // 2. HEAD request - // 3. no Content-Encoding header - // 4. no content response (204) - // 5. content not modified response (304) - if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { - response = new Response(body, response_options); - resolve(response); - return; - } + // body is stream + // get ready to actually consume the body + let accum = []; + let accumBytes = 0; + let abort = false; - // For Node v6+ - // Be less strict when decoding compressed responses, since sometimes - // servers send slightly invalid responses that are still accepted - // by common browsers. - // Always using Z_SYNC_FLUSH is what cURL does. 
- const zlibOptions = { - flush: zlib.Z_SYNC_FLUSH, - finishFlush: zlib.Z_SYNC_FLUSH - }; + return new Body.Promise(function (resolve, reject) { + let resTimeout; - // for gzip - if (codings == 'gzip' || codings == 'x-gzip') { - body = body.pipe(zlib.createGunzip(zlibOptions)); - response = new Response(body, response_options); - resolve(response); - return; + // allow timeout on slow response body + if (_this4.timeout) { + resTimeout = setTimeout(function () { + abort = true; + reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); + }, _this4.timeout); + } + + // handle stream errors + body.on('error', function (err) { + if (err.name === 'AbortError') { + // if the request was aborted, reject with this Error + abort = true; + reject(err); + } else { + // other errors, such as incorrect content-encoding + reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); } + }); - // for deflate - if (codings == 'deflate' || codings == 'x-deflate') { - // handle the infamous raw deflate response from old servers - // a hack for old IIS and Apache servers - const raw = res.pipe(new PassThrough$1()); - raw.once('data', function (chunk) { - // see http://stackoverflow.com/questions/37519828 - if ((chunk[0] & 0x0F) === 0x08) { - body = body.pipe(zlib.createInflate()); - } else { - body = body.pipe(zlib.createInflateRaw()); - } - response = new Response(body, response_options); - resolve(response); - }); + body.on('data', function (chunk) { + if (abort || chunk === null) { return; } - // for br - if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { - body = body.pipe(zlib.createBrotliDecompress()); - response = new Response(body, response_options); - resolve(response); + if (_this4.size && accumBytes + chunk.length > _this4.size) { + abort = true; + reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); return; } - // otherwise, use response as-is - response = new Response(body, response_options); - resolve(response); + accumBytes += chunk.length; + accum.push(chunk); }); - writeToStream(req, request); + body.on('end', function () { + if (abort) { + return; + } + + clearTimeout(resTimeout); + + try { + resolve(Buffer.concat(accum, accumBytes)); + } catch (err) { + // handle streams that have accumulated too much data (issue #414) + reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); + } + }); }); } + /** - * Redirect code matching + * Detect buffer encoding and convert to target encoding + * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding * - * @param Number code Status code - * @return Boolean + * @param Buffer buffer Incoming buffer + * @param String encoding Target encoding + * @return String */ -fetch.isRedirect = function (code) { - return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; -}; - -// expose Promise -fetch.Promise = global.Promise; +function convertBody(buffer, headers) { + if (typeof convert !== 'function') { + throw new Error('The package `encoding` must be installed to use the textConverted() function'); + } -module.exports = exports = fetch; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports["default"] = exports; -exports.Headers = Headers; -exports.Request = Request; -exports.Response = Response; -exports.FetchError = 
FetchError; + const ct = headers.get('content-type'); + let charset = 'utf-8'; + let res, str; + // header + if (ct) { + res = /charset=([^;]*)/i.exec(ct); + } -/***/ }), + // no charset in content type, peek at response body for at most 1024 bytes + str = buffer.slice(0, 1024).toString(); -/***/ 51587: -/***/ ((module) => { + // html5 + if (!res && str) { + res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str); + } -/*! - * normalize-path <https://github.com/jonschlinkert/normalize-path> - * - * Copyright (c) 2014-2018, Jon Schlinkert. - * Released under the MIT License. - */ + // html4 + if (!res && str) { + res = /<meta[\s]+?http-equiv=(['"])content-type\1[\s]+?content=(['"])(.+?)\2/i.exec(str); + if (res) { + res = /charset=(.+)/i.exec(res.pop()); + } + } + // xml + if (!res && str) { + res = /<\?xml.+?encoding=(['"])(.+?)\1/i.exec(str); + } + // found charset + if (res) { + charset = res.pop(); -module.exports = function(path, stripTrailing) { - if (typeof path !== 'string') { - throw new TypeError('expected path to be a string'); - } - if (path === '\\' || path === '/') return '/'; - var len = path.length; - if (len <= 1) return path; - var prefix = ''; - if (len > 4 && path[3] === '\\') { - var ch = path[2]; - if ((ch === '?' || ch === '.') && path.slice(0, 2) === '\\\\') { - path = path.slice(2); - prefix = '//'; - } - } + // prevent decode issues when sites use incorrect encoding + // ref: https://hsivonen.fi/encoding-menu/ + if (charset === 'gb2312' || charset === 'gbk') { + charset = 'gb18030'; + } + } - var segs = path.split(/[/\\]+/); - if (stripTrailing !== false && segs[segs.length - 1] === '') { - segs.pop(); - } - return prefix + segs.join('/'); -}; + // turn raw buffers into a single utf-8 buffer + return convert(buffer, 'UTF-8', charset).toString(); +} +/** + * Detect a URLSearchParams object + * ref: https://github.com/bitinn/node-fetch/issues/296#issuecomment-307598143 + * + * @param Object obj Object to detect by type or brand + * @return String + */ +function isURLSearchParams(obj) { + // Duck-typing as a necessary condition. + if (typeof obj !== 'object' || typeof obj.append !== 'function' || typeof obj.delete !== 'function' || typeof obj.get !== 'function' || typeof obj.getAll !== 'function' || typeof obj.has !== 'function' || typeof obj.set !== 'function') { + return false; + } -/***/ }), + // Brand-checking and more duck-typing as optional condition. + return obj.constructor.name === 'URLSearchParams' || Object.prototype.toString.call(obj) === '[object URLSearchParams]' || typeof obj.sort === 'function'; +} -/***/ 53183: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/** + * Check if `obj` is a W3C `Blob` object (which `File` inherits from) + * @param {*} obj + * @return {boolean} + */ +function isBlob(obj) { + return typeof obj === 'object' && typeof obj.arrayBuffer === 'function' && typeof obj.type === 'string' && typeof obj.stream === 'function' && typeof obj.constructor === 'function' && typeof obj.constructor.name === 'string' && /^(Blob|File)$/.test(obj.constructor.name) && /^(Blob|File)$/.test(obj[Symbol.toStringTag]); +} -var wrappy = __nccwpck_require__(31289) -module.exports = wrappy(once) -module.exports.strict = wrappy(onceStrict) +/** + * Clone body given Res/Req instance + * + * @param Mixed instance Response or Request instance + * @return Mixed + */ +function clone(instance) { + let p1, p2; + let body = instance.body; -once.proto = once(function () { - Object.defineProperty(Function.prototype, 'once', { - value: function () { - return once(this) - }, - configurable: true - }) + // don't allow cloning a used body + if (instance.bodyUsed) { + throw new Error('cannot clone body after it is used'); + } - Object.defineProperty(Function.prototype, 'onceStrict', { - value: function () { - return onceStrict(this) - }, - configurable: true - }) -}) + // check that body is a stream and not form-data object + // note: we can't clone the form-data object without having it as a dependency + if (body instanceof Stream && typeof body.getBoundary !== 'function') { + // tee instance body + p1 = new PassThrough(); + p2 = new PassThrough(); + body.pipe(p1); + body.pipe(p2); + // set instance body to teed body and
return the other teed body + instance[INTERNALS].body = p1; + body = p2; + } -function once (fn) { - var f = function () { - if (f.called) return f.value - f.called = true - return f.value = fn.apply(this, arguments) - } - f.called = false - return f + return body; } -function onceStrict (fn) { - var f = function () { - if (f.called) - throw new Error(f.onceError) - f.called = true - return f.value = fn.apply(this, arguments) - } - var name = fn.name || 'Function wrapped with `once`' - f.onceError = name + " shouldn't be called more than once" - f.called = false - return f +/** + * Performs the operation "extract a `Content-Type` value from |object|" as + * specified in the specification: + * https://fetch.spec.whatwg.org/#concept-bodyinit-extract + * + * This function assumes that instance.body is present. + * + * @param Mixed instance Any options.body input + */ +function extractContentType(body) { + if (body === null) { + // body is null + return null; + } else if (typeof body === 'string') { + // body is string + return 'text/plain;charset=UTF-8'; + } else if (isURLSearchParams(body)) { + // body is a URLSearchParams + return 'application/x-www-form-urlencoded;charset=UTF-8'; + } else if (isBlob(body)) { + // body is blob + return body.type || null; + } else if (Buffer.isBuffer(body)) { + // body is buffer + return null; + } else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { + // body is ArrayBuffer + return null; + } else if (ArrayBuffer.isView(body)) { + // body is ArrayBufferView + return null; + } else if (typeof body.getBoundary === 'function') { + // detect form data input from form-data module + return `multipart/form-data;boundary=${body.getBoundary()}`; + } else if (body instanceof Stream) { + // body is stream + // can't really do much about this + return null; + } else { + // Body constructor defaults other things to string + return 'text/plain;charset=UTF-8'; + } } +/** + * The Fetch Standard treats this as if "total bytes" is a property on the body. + * For us, we have to explicitly get it with a function. + * + * ref: https://fetch.spec.whatwg.org/#concept-body-total-bytes + * + * @param Body instance Instance of Body + * @return Number? 
Number of bytes, or null if not possible + */ +function getTotalBytes(instance) { + const body = instance.body; -/***/ }), - -/***/ 51831: -/***/ ((module) => { - -"use strict"; - - -if (typeof process === 'undefined' || - !process.version || - process.version.indexOf('v0.') === 0 || - process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) { - module.exports = { nextTick: nextTick }; -} else { - module.exports = process -} -function nextTick(fn, arg1, arg2, arg3) { - if (typeof fn !== 'function') { - throw new TypeError('"callback" argument must be a function'); - } - var len = arguments.length; - var args, i; - switch (len) { - case 0: - case 1: - return process.nextTick(fn); - case 2: - return process.nextTick(function afterTickOne() { - fn.call(null, arg1); - }); - case 3: - return process.nextTick(function afterTickTwo() { - fn.call(null, arg1, arg2); - }); - case 4: - return process.nextTick(function afterTickThree() { - fn.call(null, arg1, arg2, arg3); - }); - default: - args = new Array(len - 1); - i = 0; - while (i < args.length) { - args[i++] = arguments[i]; - } - return process.nextTick(function afterTick() { - fn.apply(null, args); - }); - } + if (body === null) { + // body is null + return 0; + } else if (isBlob(body)) { + return body.size; + } else if (Buffer.isBuffer(body)) { + // body is buffer + return body.length; + } else if (body && typeof body.getLengthSync === 'function') { + // detect form data input from form-data module + if (body._lengthRetrievers && body._lengthRetrievers.length == 0 || // 1.x + body.hasKnownLength && body.hasKnownLength()) { + // 2.x + return body.getLengthSync(); + } + return null; + } else { + // body is stream + return null; + } } - - -/***/ }), - -/***/ 54340: -/***/ ((module) => { - -// for now just expose the builtin process global from node.js -module.exports = global.process; - - -/***/ }), - -/***/ 20606: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -var parseUrl = (__nccwpck_require__(57310).parse); - -var DEFAULT_PORTS = { - ftp: 21, - gopher: 70, - http: 80, - https: 443, - ws: 80, - wss: 443, -}; - -var stringEndsWith = String.prototype.endsWith || function(s) { - return s.length <= this.length && - this.indexOf(s, this.length - s.length) !== -1; -}; - /** - * @param {string|object} url - The URL, or the result from url.parse. - * @return {string} The URL of the proxy that should handle the request to the - * given URL. If no proxy is set, this will be an empty string. + * Write a Body to a Node.js WritableStream (e.g. http.Request) object. + * + * @param Body instance Instance of Body + * @return Void */ -function getProxyForUrl(url) { - var parsedUrl = typeof url === 'string' ? parseUrl(url) : url || {}; - var proto = parsedUrl.protocol; - var hostname = parsedUrl.host; - var port = parsedUrl.port; - if (typeof hostname !== 'string' || !hostname || typeof proto !== 'string') { - return ''; // Don't proxy URLs without a valid scheme or host. - } +function writeToStream(dest, instance) { + const body = instance.body; - proto = proto.split(':', 1)[0]; - // Stripping ports in this way instead of using parsedUrl.hostname to make - // sure that the brackets around IPv6 addresses are kept. - hostname = hostname.replace(/:\d*$/, ''); - port = parseInt(port) || DEFAULT_PORTS[proto] || 0; - if (!shouldProxy(hostname, port)) { - return ''; // Don't proxy URLs that match NO_PROXY. 
- } - var proxy = - getEnv('npm_config_' + proto + '_proxy') || - getEnv(proto + '_proxy') || - getEnv('npm_config_proxy') || - getEnv('all_proxy'); - if (proxy && proxy.indexOf('://') === -1) { - // Missing scheme in proxy, default to the requested URL's scheme. - proxy = proto + '://' + proxy; - } - return proxy; + if (body === null) { + // body is null + dest.end(); + } else if (isBlob(body)) { + body.stream().pipe(dest); + } else if (Buffer.isBuffer(body)) { + // body is buffer + dest.write(body); + dest.end(); + } else { + // body is stream + body.pipe(dest); + } } +// expose Promise +Body.Promise = global.Promise; + /** - * Determines whether a given URL should be proxied. + * headers.js * - * @param {string} hostname - The host name of the URL. - * @param {number} port - The effective port of the URL. - * @returns {boolean} Whether the given URL should be proxied. - * @private + * Headers class offers convenient helpers */ -function shouldProxy(hostname, port) { - var NO_PROXY = - (getEnv('npm_config_no_proxy') || getEnv('no_proxy')).toLowerCase(); - if (!NO_PROXY) { - return true; // Always proxy if NO_PROXY is not set. - } - if (NO_PROXY === '*') { - return false; // Never proxy if wildcard is set. - } - return NO_PROXY.split(/[,\s]/).every(function(proxy) { - if (!proxy) { - return true; // Skip zero-length hosts. - } - var parsedProxy = proxy.match(/^(.+):(\d+)$/); - var parsedProxyHostname = parsedProxy ? parsedProxy[1] : proxy; - var parsedProxyPort = parsedProxy ? parseInt(parsedProxy[2]) : 0; - if (parsedProxyPort && parsedProxyPort !== port) { - return true; // Skip if ports don't match. - } +const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/; +const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/; - if (!/^[.*]/.test(parsedProxyHostname)) { - // No wildcards, so stop proxying if there is an exact match. - return hostname !== parsedProxyHostname; - } +function validateName(name) { + name = `${name}`; + if (invalidTokenRegex.test(name) || name === '') { + throw new TypeError(`${name} is not a legal HTTP header name`); + } +} - if (parsedProxyHostname.charAt(0) === '*') { - // Remove leading wildcard. - parsedProxyHostname = parsedProxyHostname.slice(1); - } - // Stop proxying if the hostname ends with the no_proxy host. - return !stringEndsWith.call(hostname, parsedProxyHostname); - }); +function validateValue(value) { + value = `${value}`; + if (invalidHeaderCharRegex.test(value)) { + throw new TypeError(`${value} is not a legal HTTP header value`); + } } /** - * Get the value for an environment variable. + * Find the key in the map object given a header name. * - * @param {string} key - The name of the environment variable. - * @return {string} The value of the environment variable. - * @private + * Returns undefined if not found. + * + * @param String name Header name + * @return String|Undefined */ -function getEnv(key) { - return process.env[key.toLowerCase()] || process.env[key.toUpperCase()] || ''; +function find(map, name) { + name = name.toLowerCase(); + for (const key in map) { + if (key.toLowerCase() === name) { + return key; + } + } + return undefined; } -exports.getProxyForUrl = getProxyForUrl; +const MAP = Symbol('map'); +class Headers { + /** + * Headers class + * + * @param Object headers Response headers + * @return Void + */ + constructor() { + let init = arguments.length > 0 && arguments[0] !== undefined ? 
arguments[0] : undefined; + this[MAP] = Object.create(null); -/***/ }), + if (init instanceof Headers) { + const rawHeaders = init.raw(); + const headerNames = Object.keys(rawHeaders); -/***/ 15034: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + for (const headerName of headerNames) { + for (const value of rawHeaders[headerName]) { + this.append(headerName, value); + } + } -module.exports = (typeof process !== 'undefined' && typeof process.nextTick === 'function') - ? process.nextTick.bind(process) - : __nccwpck_require__(61737) + return; + } + // We don't worry about converting prop to ByteString here as append() + // will handle it. + if (init == null) ; else if (typeof init === 'object') { + const method = init[Symbol.iterator]; + if (method != null) { + if (typeof method !== 'function') { + throw new TypeError('Header pairs must be iterable'); + } -/***/ }), + // sequence<sequence<ByteString>> + // Note: per spec we have to first exhaust the lists then process them + const pairs = []; + for (const pair of init) { + if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { + throw new TypeError('Each header pair must be iterable'); + } + pairs.push(Array.from(pair)); + } -/***/ 61737: -/***/ ((module) => { + for (const pair of pairs) { + if (pair.length !== 2) { + throw new TypeError('Each header pair must be a name/value tuple'); + } + this.append(pair[0], pair[1]); + } + } else { + // record<ByteString, ByteString> + for (const key of Object.keys(init)) { + const value = init[key]; + this.append(key, value); + } + } + } else { + throw new TypeError('Provided initializer must be an object'); + } + } -module.exports = typeof queueMicrotask === 'function' ? queueMicrotask : (fn) => Promise.resolve().then(fn) + /** + * Return combined header value given name + * + * @param String name Header name + * @return Mixed + */ + get(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key === undefined) { + return null; + } + return this[MAP][key].join(', '); + } -/***/ }), + /** + * Iterate over all headers + * + * @param Function callback Executed for each item with parameters (value, name, thisArg) + * @param Boolean thisArg `this` context for callback function + * @return Void + */ + forEach(callback) { + let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; -/***/ 50275: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + let pairs = getHeaders(this); + let i = 0; + while (i < pairs.length) { + var _pairs$i = pairs[i]; + const name = _pairs$i[0], + value = _pairs$i[1]; "use strict"; + callback.call(thisArg, value, name, this); + pairs = getHeaders(this); + i++; + } + } + /** + * Overwrite header values given name + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + set(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + this[MAP][key !== undefined ?
key : name] = [value]; + } -const { SymbolDispose } = __nccwpck_require__(62141) -const { AbortError, codes } = __nccwpck_require__(49939) -const { isNodeStream, isWebStream, kControllerErrorFunction } = __nccwpck_require__(46995) -const eos = __nccwpck_require__(89217) -const { ERR_INVALID_ARG_TYPE } = codes -let addAbortListener - -// This method is inlined here for readable-stream -// It also does not allow for signal to not exist on the stream -// https://github.com/nodejs/node/pull/36061#discussion_r533718029 -const validateAbortSignal = (signal, name) => { - if (typeof signal !== 'object' || !('aborted' in signal)) { - throw new ERR_INVALID_ARG_TYPE(name, 'AbortSignal', signal) - } -} -module.exports.addAbortSignal = function addAbortSignal(signal, stream) { - validateAbortSignal(signal, 'signal') - if (!isNodeStream(stream) && !isWebStream(stream)) { - throw new ERR_INVALID_ARG_TYPE('stream', ['ReadableStream', 'WritableStream', 'Stream'], stream) - } - return module.exports.addAbortSignalNoValidate(signal, stream) -} -module.exports.addAbortSignalNoValidate = function (signal, stream) { - if (typeof signal !== 'object' || !('aborted' in signal)) { - return stream - } - const onAbort = isNodeStream(stream) - ? () => { - stream.destroy( - new AbortError(undefined, { - cause: signal.reason - }) - ) - } - : () => { - stream[kControllerErrorFunction]( - new AbortError(undefined, { - cause: signal.reason - }) - ) - } - if (signal.aborted) { - onAbort() - } else { - addAbortListener = addAbortListener || (__nccwpck_require__(32504).addAbortListener) - const disposable = addAbortListener(signal, onAbort) - eos(stream, disposable[SymbolDispose]) - } - return stream -} - - -/***/ }), - -/***/ 75501: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; + /** + * Append a value onto existing header + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + append(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + if (key !== undefined) { + this[MAP][key].push(value); + } else { + this[MAP][name] = [value]; + } + } + /** + * Check for header name existence + * + * @param String name Header name + * @return Boolean + */ + has(name) { + name = `${name}`; + validateName(name); + return find(this[MAP], name) !== undefined; + } -const { StringPrototypeSlice, SymbolIterator, TypedArrayPrototypeSet, Uint8Array } = __nccwpck_require__(62141) -const { Buffer } = __nccwpck_require__(14300) -const { inspect } = __nccwpck_require__(32504) -module.exports = class BufferList { - constructor() { - this.head = null - this.tail = null - this.length = 0 - } - push(v) { - const entry = { - data: v, - next: null - } - if (this.length > 0) this.tail.next = entry - else this.head = entry - this.tail = entry - ++this.length - } - unshift(v) { - const entry = { - data: v, - next: this.head - } - if (this.length === 0) this.tail = entry - this.head = entry - ++this.length - } - shift() { - if (this.length === 0) return - const ret = this.head.data - if (this.length === 1) this.head = this.tail = null - else this.head = this.head.next - --this.length - return ret - } - clear() { - this.head = this.tail = null - this.length = 0 - } - join(s) { - if (this.length === 0) return '' - let p = this.head - let ret = '' + p.data - while ((p = p.next) !== null) ret += s + p.data - return ret - } - concat(n) { - if (this.length === 0) return Buffer.alloc(0) - const ret 
= Buffer.allocUnsafe(n >>> 0) - let p = this.head - let i = 0 - while (p) { - TypedArrayPrototypeSet(ret, p.data, i) - i += p.data.length - p = p.next - } - return ret - } + /** + * Delete all header values given name + * + * @param String name Header name + * @return Void + */ + delete(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key !== undefined) { + delete this[MAP][key]; + } + } - // Consumes a specified amount of bytes or characters from the buffered data. - consume(n, hasStrings) { - const data = this.head.data - if (n < data.length) { - // `slice` is the same for buffers and strings. - const slice = data.slice(0, n) - this.head.data = data.slice(n) - return slice - } - if (n === data.length) { - // First chunk is a perfect match. - return this.shift() - } - // Result spans more than one buffer. - return hasStrings ? this._getString(n) : this._getBuffer(n) - } - first() { - return this.head.data - } - *[SymbolIterator]() { - for (let p = this.head; p; p = p.next) { - yield p.data - } - } + /** + * Return raw headers (non-spec api) + * + * @return Object + */ + raw() { + return this[MAP]; + } - // Consumes a specified amount of characters from the buffered data. - _getString(n) { - let ret = '' - let p = this.head - let c = 0 - do { - const str = p.data - if (n > str.length) { - ret += str - n -= str.length - } else { - if (n === str.length) { - ret += str - ++c - if (p.next) this.head = p.next - else this.head = this.tail = null - } else { - ret += StringPrototypeSlice(str, 0, n) - this.head = p - p.data = StringPrototypeSlice(str, n) - } - break - } - ++c - } while ((p = p.next) !== null) - this.length -= c - return ret - } + /** + * Get an iterator on keys. + * + * @return Iterator + */ + keys() { + return createHeadersIterator(this, 'key'); + } - // Consumes a specified amount of bytes from the buffered data. - _getBuffer(n) { - const ret = Buffer.allocUnsafe(n) - const retLen = n - let p = this.head - let c = 0 - do { - const buf = p.data - if (n > buf.length) { - TypedArrayPrototypeSet(ret, buf, retLen - n) - n -= buf.length - } else { - if (n === buf.length) { - TypedArrayPrototypeSet(ret, buf, retLen - n) - ++c - if (p.next) this.head = p.next - else this.head = this.tail = null - } else { - TypedArrayPrototypeSet(ret, new Uint8Array(buf.buffer, buf.byteOffset, n), retLen - n) - this.head = p - p.data = buf.slice(n) - } - break - } - ++c - } while ((p = p.next) !== null) - this.length -= c - return ret - } + /** + * Get an iterator on values. + * + * @return Iterator + */ + values() { + return createHeadersIterator(this, 'value'); + } - // Make sure the linked list only shows the minimal necessary information. - [Symbol.for('nodejs.util.inspect.custom')](_, options) { - return inspect(this, { - ...options, - // Only inspect one level. - depth: 0, - // It should not recurse. - customInspect: false - }) - } + /** + * Get an iterator on entries. + * + * This is the default iterator of the Headers object. 
+ * + * @return Iterator + */ + [Symbol.iterator]() { + return createHeadersIterator(this, 'key+value'); + } } +Headers.prototype.entries = Headers.prototype[Symbol.iterator]; +Object.defineProperty(Headers.prototype, Symbol.toStringTag, { + value: 'Headers', + writable: false, + enumerable: false, + configurable: true +}); -/***/ }), - -/***/ 83913: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +Object.defineProperties(Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true } +}); -"use strict"; +function getHeaders(headers) { + let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; + const keys = Object.keys(headers[MAP]).sort(); + return keys.map(kind === 'key' ? function (k) { + return k.toLowerCase(); + } : kind === 'value' ? function (k) { + return headers[MAP][k].join(', '); + } : function (k) { + return [k.toLowerCase(), headers[MAP][k].join(', ')]; + }); +} -const { pipeline } = __nccwpck_require__(32361) -const Duplex = __nccwpck_require__(7280) -const { destroyer } = __nccwpck_require__(47493) -const { - isNodeStream, - isReadable, - isWritable, - isWebStream, - isTransformStream, - isWritableStream, - isReadableStream -} = __nccwpck_require__(46995) -const { - AbortError, - codes: { ERR_INVALID_ARG_VALUE, ERR_MISSING_ARGS } -} = __nccwpck_require__(49939) -const eos = __nccwpck_require__(89217) -module.exports = function compose(...streams) { - if (streams.length === 0) { - throw new ERR_MISSING_ARGS('streams') - } - if (streams.length === 1) { - return Duplex.from(streams[0]) - } - const orgStreams = [...streams] - if (typeof streams[0] === 'function') { - streams[0] = Duplex.from(streams[0]) - } - if (typeof streams[streams.length - 1] === 'function') { - const idx = streams.length - 1 - streams[idx] = Duplex.from(streams[idx]) - } - for (let n = 0; n < streams.length; ++n) { - if (!isNodeStream(streams[n]) && !isWebStream(streams[n])) { - // TODO(ronag): Add checks for non streams. - continue - } - if ( - n < streams.length - 1 && - !(isReadable(streams[n]) || isReadableStream(streams[n]) || isTransformStream(streams[n])) - ) { - throw new ERR_INVALID_ARG_VALUE(`streams[${n}]`, orgStreams[n], 'must be readable') - } - if (n > 0 && !(isWritable(streams[n]) || isWritableStream(streams[n]) || isTransformStream(streams[n]))) { - throw new ERR_INVALID_ARG_VALUE(`streams[${n}]`, orgStreams[n], 'must be writable') - } - } - let ondrain - let onfinish - let onreadable - let onclose - let d - function onfinished(err) { - const cb = onclose - onclose = null - if (cb) { - cb(err) - } else if (err) { - d.destroy(err) - } else if (!readable && !writable) { - d.destroy() - } - } - const head = streams[0] - const tail = pipeline(streams, onfinished) - const writable = !!(isWritable(head) || isWritableStream(head) || isTransformStream(head)) - const readable = !!(isReadable(tail) || isReadableStream(tail) || isTransformStream(tail)) +const INTERNAL = Symbol('internal'); - // TODO(ronag): Avoid double buffering. - // Implement Writable/Readable/Duplex traits. - // See, https://github.com/nodejs/node/pull/33515. - d = new Duplex({ - // TODO (ronag): highWaterMark? 
- writableObjectMode: !!(head !== null && head !== undefined && head.writableObjectMode), - readableObjectMode: !!(tail !== null && tail !== undefined && tail.readableObjectMode), - writable, - readable - }) - if (writable) { - if (isNodeStream(head)) { - d._write = function (chunk, encoding, callback) { - if (head.write(chunk, encoding)) { - callback() - } else { - ondrain = callback - } - } - d._final = function (callback) { - head.end() - onfinish = callback - } - head.on('drain', function () { - if (ondrain) { - const cb = ondrain - ondrain = null - cb() - } - }) - } else if (isWebStream(head)) { - const writable = isTransformStream(head) ? head.writable : head - const writer = writable.getWriter() - d._write = async function (chunk, encoding, callback) { - try { - await writer.ready - writer.write(chunk).catch(() => {}) - callback() - } catch (err) { - callback(err) - } - } - d._final = async function (callback) { - try { - await writer.ready - writer.close().catch(() => {}) - onfinish = callback - } catch (err) { - callback(err) - } - } - } - const toRead = isTransformStream(tail) ? tail.readable : tail - eos(toRead, () => { - if (onfinish) { - const cb = onfinish - onfinish = null - cb() - } - }) - } - if (readable) { - if (isNodeStream(tail)) { - tail.on('readable', function () { - if (onreadable) { - const cb = onreadable - onreadable = null - cb() - } - }) - tail.on('end', function () { - d.push(null) - }) - d._read = function () { - while (true) { - const buf = tail.read() - if (buf === null) { - onreadable = d._read - return - } - if (!d.push(buf)) { - return - } - } - } - } else if (isWebStream(tail)) { - const readable = isTransformStream(tail) ? tail.readable : tail - const reader = readable.getReader() - d._read = async function () { - while (true) { - try { - const { value, done } = await reader.read() - if (!d.push(value)) { - return - } - if (done) { - d.push(null) - return - } - } catch { - return - } - } - } - } - } - d._destroy = function (err, callback) { - if (!err && onclose !== null) { - err = new AbortError() - } - onreadable = null - ondrain = null - onfinish = null - if (onclose === null) { - callback(err) - } else { - onclose = callback - if (isNodeStream(tail)) { - destroyer(tail, err) - } - } - } - return d +function createHeadersIterator(target, kind) { + const iterator = Object.create(HeadersIteratorPrototype); + iterator[INTERNAL] = { + target, + kind, + index: 0 + }; + return iterator; } +const HeadersIteratorPrototype = Object.setPrototypeOf({ + next() { + // istanbul ignore if + if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { + throw new TypeError('Value of `this` is not a HeadersIterator'); + } -/***/ }), - -/***/ 47493: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + var _INTERNAL = this[INTERNAL]; + const target = _INTERNAL.target, + kind = _INTERNAL.kind, + index = _INTERNAL.index; -"use strict"; + const values = getHeaders(target, kind); + const len = values.length; + if (index >= len) { + return { + value: undefined, + done: true + }; + } + this[INTERNAL].index = index + 1; -/* replacement start */ + return { + value: values[index], + done: false + }; + } +}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); -const process = __nccwpck_require__(54340) +Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { + value: 'HeadersIterator', + writable: false, + enumerable: false, + configurable: true +}); -/* replacement end */ +/** + * Export the Headers object in a form 
that Node.js can consume. + * + * @param Headers headers + * @return Object + */ +function exportNodeCompatibleHeaders(headers) { + const obj = Object.assign({ __proto__: null }, headers[MAP]); -const { - aggregateTwoErrors, - codes: { ERR_MULTIPLE_CALLBACK }, - AbortError -} = __nccwpck_require__(49939) -const { Symbol } = __nccwpck_require__(62141) -const { kIsDestroyed, isDestroyed, isFinished, isServerRequest } = __nccwpck_require__(46995) -const kDestroy = Symbol('kDestroy') -const kConstruct = Symbol('kConstruct') -function checkError(err, w, r) { - if (err) { - // Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364 - err.stack // eslint-disable-line no-unused-expressions + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. + const hostHeaderKey = find(headers[MAP], 'Host'); + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0]; + } - if (w && !w.errored) { - w.errored = err - } - if (r && !r.errored) { - r.errored = err - } - } + return obj; } -// Backwards compat. cb() is undocumented and unused in core but -// unfortunately might be used by modules. -function destroy(err, cb) { - const r = this._readableState - const w = this._writableState - // With duplex streams we use the writable side for state. - const s = w || r - if ((w !== null && w !== undefined && w.destroyed) || (r !== null && r !== undefined && r.destroyed)) { - if (typeof cb === 'function') { - cb() - } - return this - } - - // We set destroyed to true before firing error callbacks in order - // to make it re-entrance safe in case destroy() is called within callbacks - checkError(err, w, r) - if (w) { - w.destroyed = true - } - if (r) { - r.destroyed = true - } - - // If still constructing then defer calling _destroy. 
- if (!s.constructed) { - this.once(kDestroy, function (er) { - _destroy(this, aggregateTwoErrors(er, err), cb) - }) - } else { - _destroy(this, err, cb) - } - return this -} -function _destroy(self, err, cb) { - let called = false - function onDestroy(err) { - if (called) { - return - } - called = true - const r = self._readableState - const w = self._writableState - checkError(err, w, r) - if (w) { - w.closed = true - } - if (r) { - r.closed = true - } - if (typeof cb === 'function') { - cb(err) - } - if (err) { - process.nextTick(emitErrorCloseNT, self, err) - } else { - process.nextTick(emitCloseNT, self) - } - } - try { - self._destroy(err || null, onDestroy) - } catch (err) { - onDestroy(err) - } -} -function emitErrorCloseNT(self, err) { - emitErrorNT(self, err) - emitCloseNT(self) -} -function emitCloseNT(self) { - const r = self._readableState - const w = self._writableState - if (w) { - w.closeEmitted = true - } - if (r) { - r.closeEmitted = true - } - if ((w !== null && w !== undefined && w.emitClose) || (r !== null && r !== undefined && r.emitClose)) { - self.emit('close') - } -} -function emitErrorNT(self, err) { - const r = self._readableState - const w = self._writableState - if ((w !== null && w !== undefined && w.errorEmitted) || (r !== null && r !== undefined && r.errorEmitted)) { - return - } - if (w) { - w.errorEmitted = true - } - if (r) { - r.errorEmitted = true - } - self.emit('error', err) -} -function undestroy() { - const r = this._readableState - const w = this._writableState - if (r) { - r.constructed = true - r.closed = false - r.closeEmitted = false - r.destroyed = false - r.errored = null - r.errorEmitted = false - r.reading = false - r.ended = r.readable === false - r.endEmitted = r.readable === false - } - if (w) { - w.constructed = true - w.destroyed = false - w.closed = false - w.closeEmitted = false - w.errored = null - w.errorEmitted = false - w.finalCalled = false - w.prefinished = false - w.ended = w.writable === false - w.ending = w.writable === false - w.finished = w.writable === false - } +/** + * Create a Headers object from an object of headers, ignoring those that do + * not conform to HTTP grammar productions. + * + * @param Object obj Object of headers + * @return Headers + */ +function createHeadersLenient(obj) { + const headers = new Headers(); + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue; + } + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue; + } + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val]; + } else { + headers[MAP][name].push(val); + } + } + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]]; + } + } + return headers; } -function errorOrDestroy(stream, err, sync) { - // We have tests that rely on errors being emitted - // in the same tick, so changing this is semver major. - // For now when you opt-in to autoDestroy we allow - // the error to be emitted nextTick. In a future - // semver major update we should change the default to this. 
- const r = stream._readableState - const w = stream._writableState - if ((w !== null && w !== undefined && w.destroyed) || (r !== null && r !== undefined && r.destroyed)) { - return this - } - if ((r !== null && r !== undefined && r.autoDestroy) || (w !== null && w !== undefined && w.autoDestroy)) - stream.destroy(err) - else if (err) { - // Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364 - err.stack // eslint-disable-line no-unused-expressions +const INTERNALS$1 = Symbol('Response internals'); - if (w && !w.errored) { - w.errored = err - } - if (r && !r.errored) { - r.errored = err - } - if (sync) { - process.nextTick(emitErrorNT, stream, err) - } else { - emitErrorNT(stream, err) - } - } -} -function construct(stream, cb) { - if (typeof stream._construct !== 'function') { - return - } - const r = stream._readableState - const w = stream._writableState - if (r) { - r.constructed = false - } - if (w) { - w.constructed = false - } - stream.once(kConstruct, cb) - if (stream.listenerCount(kConstruct) > 1) { - // Duplex - return - } - process.nextTick(constructNT, stream) -} -function constructNT(stream) { - let called = false - function onConstruct(err) { - if (called) { - errorOrDestroy(stream, err !== null && err !== undefined ? err : new ERR_MULTIPLE_CALLBACK()) - return - } - called = true - const r = stream._readableState - const w = stream._writableState - const s = w || r - if (r) { - r.constructed = true - } - if (w) { - w.constructed = true - } - if (s.destroyed) { - stream.emit(kDestroy, err) - } else if (err) { - errorOrDestroy(stream, err, true) - } else { - process.nextTick(emitConstructNT, stream) - } - } - try { - stream._construct((err) => { - process.nextTick(onConstruct, err) - }) - } catch (err) { - process.nextTick(onConstruct, err) - } -} -function emitConstructNT(stream) { - stream.emit(kConstruct) -} -function isRequest(stream) { - return (stream === null || stream === undefined ? undefined : stream.setHeader) && typeof stream.abort === 'function' -} -function emitCloseLegacy(stream) { - stream.emit('close') -} -function emitErrorCloseLegacy(stream, err) { - stream.emit('error', err) - process.nextTick(emitCloseLegacy, stream) -} +// fix an issue where "STATUS_CODES" aren't a named export for node <10 +const STATUS_CODES = http.STATUS_CODES; -// Normalize destroy for legacy. -function destroyer(stream, err) { - if (!stream || isDestroyed(stream)) { - return - } - if (!err && !isFinished(stream)) { - err = new AbortError() - } +/** + * Response class + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +class Response { + constructor() { + let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; + let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; - // TODO: Remove isRequest branches. - if (isServerRequest(stream)) { - stream.socket = null - stream.destroy(err) - } else if (isRequest(stream)) { - stream.abort() - } else if (isRequest(stream.req)) { - stream.req.abort() - } else if (typeof stream.destroy === 'function') { - stream.destroy(err) - } else if (typeof stream.close === 'function') { - // TODO: Don't lose err? 
- stream.close() - } else if (err) { - process.nextTick(emitErrorCloseLegacy, stream, err) - } else { - process.nextTick(emitCloseLegacy, stream) - } - if (!stream.destroyed) { - stream[kIsDestroyed] = true - } -} -module.exports = { - construct, - destroyer, - destroy, - undestroy, - errorOrDestroy -} + Body.call(this, body, opts); + const status = opts.status || 200; + const headers = new Headers(opts.headers); -/***/ }), + if (body != null && !headers.has('Content-Type')) { + const contentType = extractContentType(body); + if (contentType) { + headers.append('Content-Type', contentType); + } + } -/***/ 7280: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + this[INTERNALS$1] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter + }; + } -"use strict"; -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. + get url() { + return this[INTERNALS$1].url || ''; + } -// a duplex stream is just a stream that is both readable and writable. -// Since JS doesn't have multiple prototype inheritance, this class -// prototypically inherits from Readable, and then parasitically from -// Writable. + get status() { + return this[INTERNALS$1].status; + } + /** + * Convenience property representing if the request ended normally + */ + get ok() { + return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; + } + get redirected() { + return this[INTERNALS$1].counter > 0; + } -const { - ObjectDefineProperties, - ObjectGetOwnPropertyDescriptor, - ObjectKeys, - ObjectSetPrototypeOf -} = __nccwpck_require__(62141) -module.exports = Duplex -const Readable = __nccwpck_require__(14252) -const Writable = __nccwpck_require__(99601) -ObjectSetPrototypeOf(Duplex.prototype, Readable.prototype) -ObjectSetPrototypeOf(Duplex, Readable) -{ - const keys = ObjectKeys(Writable.prototype) - // Allow the keys array to be GC'ed. 
- for (let i = 0; i < keys.length; i++) { - const method = keys[i] - if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method] - } -} -function Duplex(options) { - if (!(this instanceof Duplex)) return new Duplex(options) - Readable.call(this, options) - Writable.call(this, options) - if (options) { - this.allowHalfOpen = options.allowHalfOpen !== false - if (options.readable === false) { - this._readableState.readable = false - this._readableState.ended = true - this._readableState.endEmitted = true - } - if (options.writable === false) { - this._writableState.writable = false - this._writableState.ending = true - this._writableState.ended = true - this._writableState.finished = true - } - } else { - this.allowHalfOpen = true - } -} -ObjectDefineProperties(Duplex.prototype, { - writable: { - __proto__: null, - ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writable') - }, - writableHighWaterMark: { - __proto__: null, - ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableHighWaterMark') - }, - writableObjectMode: { - __proto__: null, - ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableObjectMode') - }, - writableBuffer: { - __proto__: null, - ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableBuffer') - }, - writableLength: { - __proto__: null, - ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableLength') - }, - writableFinished: { - __proto__: null, - ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableFinished') - }, - writableCorked: { - __proto__: null, - ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableCorked') - }, - writableEnded: { - __proto__: null, - ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableEnded') - }, - writableNeedDrain: { - __proto__: null, - ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableNeedDrain') - }, - destroyed: { - __proto__: null, - get() { - if (this._readableState === undefined || this._writableState === undefined) { - return false - } - return this._readableState.destroyed && this._writableState.destroyed - }, - set(value) { - // Backward compatibility, the user is explicitly - // managing destroyed. 
- if (this._readableState && this._writableState) { - this._readableState.destroyed = value - this._writableState.destroyed = value - } - } - } -}) -let webStreamsAdapters + get statusText() { + return this[INTERNALS$1].statusText; + } -// Lazy to avoid circular references -function lazyWebStreams() { - if (webStreamsAdapters === undefined) webStreamsAdapters = {} - return webStreamsAdapters -} -Duplex.fromWeb = function (pair, options) { - return lazyWebStreams().newStreamDuplexFromReadableWritablePair(pair, options) -} -Duplex.toWeb = function (duplex) { - return lazyWebStreams().newReadableWritablePairFromDuplex(duplex) -} -let duplexify -Duplex.from = function (body) { - if (!duplexify) { - duplexify = __nccwpck_require__(4997) - } - return duplexify(body, 'body') -} + get headers() { + return this[INTERNALS$1].headers; + } + /** + * Clone this response + * + * @return Response + */ + clone() { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected + }); + } +} -/***/ }), +Body.mixIn(Response.prototype); -/***/ 4997: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true } +}); -/* replacement start */ +Object.defineProperty(Response.prototype, Symbol.toStringTag, { + value: 'Response', + writable: false, + enumerable: false, + configurable: true +}); -const process = __nccwpck_require__(54340) +const INTERNALS$2 = Symbol('Request internals'); +const URL = Url.URL || whatwgUrl.URL; -/* replacement end */ +// fix an issue where "format", "parse" aren't a named export for node <10 +const parse_url = Url.parse; +const format_url = Url.format; -;('use strict') -const bufferModule = __nccwpck_require__(14300) -const { - isReadable, - isWritable, - isIterable, - isNodeStream, - isReadableNodeStream, - isWritableNodeStream, - isDuplexNodeStream, - isReadableStream, - isWritableStream -} = __nccwpck_require__(46995) -const eos = __nccwpck_require__(89217) -const { - AbortError, - codes: { ERR_INVALID_ARG_TYPE, ERR_INVALID_RETURN_VALUE } -} = __nccwpck_require__(49939) -const { destroyer } = __nccwpck_require__(47493) -const Duplex = __nccwpck_require__(7280) -const Readable = __nccwpck_require__(14252) -const Writable = __nccwpck_require__(99601) -const { createDeferredPromise } = __nccwpck_require__(32504) -const from = __nccwpck_require__(42725) -const Blob = globalThis.Blob || bufferModule.Blob -const isBlob = - typeof Blob !== 'undefined' - ? function isBlob(b) { - return b instanceof Blob - } - : function isBlob(b) { - return false - } -const AbortController = globalThis.AbortController || (__nccwpck_require__(43747).AbortController) -const { FunctionPrototypeCall } = __nccwpck_require__(62141) +/** + * Wrapper around `new URL` to handle arbitrary URLs + * + * @param {string} urlStr + * @return {void} + */ +function parseURL(urlStr) { + /* + Check whether the URL is absolute or not + Scheme: https://tools.ietf.org/html/rfc3986#section-3.1 + Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3 + */ + if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) { + urlStr = new URL(urlStr).toString(); + } -// This is needed for pre node 17. 
-class Duplexify extends Duplex { - constructor(options) { - super(options) + // Fallback to old implementation for arbitrary URLs + return parse_url(urlStr); +} - // https://github.com/nodejs/node/pull/34385 +const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; - if ((options === null || options === undefined ? undefined : options.readable) === false) { - this._readableState.readable = false - this._readableState.ended = true - this._readableState.endEmitted = true - } - if ((options === null || options === undefined ? undefined : options.writable) === false) { - this._writableState.writable = false - this._writableState.ending = true - this._writableState.ended = true - this._writableState.finished = true - } - } -} -module.exports = function duplexify(body, name) { - if (isDuplexNodeStream(body)) { - return body - } - if (isReadableNodeStream(body)) { - return _duplexify({ - readable: body - }) - } - if (isWritableNodeStream(body)) { - return _duplexify({ - writable: body - }) - } - if (isNodeStream(body)) { - return _duplexify({ - writable: false, - readable: false - }) - } - if (isReadableStream(body)) { - return _duplexify({ - readable: Readable.fromWeb(body) - }) - } - if (isWritableStream(body)) { - return _duplexify({ - writable: Writable.fromWeb(body) - }) - } - if (typeof body === 'function') { - const { value, write, final, destroy } = fromAsyncGen(body) - if (isIterable(value)) { - return from(Duplexify, value, { - // TODO (ronag): highWaterMark? - objectMode: true, - write, - final, - destroy - }) - } - const then = value === null || value === undefined ? undefined : value.then - if (typeof then === 'function') { - let d - const promise = FunctionPrototypeCall( - then, - value, - (val) => { - if (val != null) { - throw new ERR_INVALID_RETURN_VALUE('nully', 'body', val) - } - }, - (err) => { - destroyer(d, err) - } - ) - return (d = new Duplexify({ - // TODO (ronag): highWaterMark? - objectMode: true, - readable: false, - write, - final(cb) { - final(async () => { - try { - await promise - process.nextTick(cb, null) - } catch (err) { - process.nextTick(cb, err) - } - }) - }, - destroy - })) - } - throw new ERR_INVALID_RETURN_VALUE('Iterable, AsyncIterable or AsyncFunction', name, value) - } - if (isBlob(body)) { - return duplexify(body.arrayBuffer()) - } - if (isIterable(body)) { - return from(Duplexify, body, { - // TODO (ronag): highWaterMark? - objectMode: true, - writable: false - }) - } - if ( - isReadableStream(body === null || body === undefined ? undefined : body.readable) && - isWritableStream(body === null || body === undefined ? undefined : body.writable) - ) { - return Duplexify.fromWeb(body) - } - if ( - typeof (body === null || body === undefined ? undefined : body.writable) === 'object' || - typeof (body === null || body === undefined ? undefined : body.readable) === 'object' - ) { - const readable = - body !== null && body !== undefined && body.readable - ? isReadableNodeStream(body === null || body === undefined ? undefined : body.readable) - ? body === null || body === undefined - ? undefined - : body.readable - : duplexify(body.readable) - : undefined - const writable = - body !== null && body !== undefined && body.writable - ? isWritableNodeStream(body === null || body === undefined ? undefined : body.writable) - ? body === null || body === undefined - ? undefined - : body.writable - : duplexify(body.writable) - : undefined - return _duplexify({ - readable, - writable - }) - } - const then = body === null || body === undefined ? 
undefined : body.then - if (typeof then === 'function') { - let d - FunctionPrototypeCall( - then, - body, - (val) => { - if (val != null) { - d.push(val) - } - d.push(null) - }, - (err) => { - destroyer(d, err) - } - ) - return (d = new Duplexify({ - objectMode: true, - writable: false, - read() {} - })) - } - throw new ERR_INVALID_ARG_TYPE( - name, - [ - 'Blob', - 'ReadableStream', - 'WritableStream', - 'Stream', - 'Iterable', - 'AsyncIterable', - 'Function', - '{ readable, writable } pair', - 'Promise' - ], - body - ) -} -function fromAsyncGen(fn) { - let { promise, resolve } = createDeferredPromise() - const ac = new AbortController() - const signal = ac.signal - const value = fn( - (async function* () { - while (true) { - const _promise = promise - promise = null - const { chunk, done, cb } = await _promise - process.nextTick(cb) - if (done) return - if (signal.aborted) - throw new AbortError(undefined, { - cause: signal.reason - }) - ;({ promise, resolve } = createDeferredPromise()) - yield chunk - } - })(), - { - signal - } - ) - return { - value, - write(chunk, encoding, cb) { - const _resolve = resolve - resolve = null - _resolve({ - chunk, - done: false, - cb - }) - }, - final(cb) { - const _resolve = resolve - resolve = null - _resolve({ - done: true, - cb - }) - }, - destroy(err, cb) { - ac.abort() - cb(err) - } - } +/** + * Check if a value is an instance of Request. + * + * @param Mixed input + * @return Boolean + */ +function isRequest(input) { + return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; } -function _duplexify(pair) { - const r = pair.readable && typeof pair.readable.read !== 'function' ? Readable.wrap(pair.readable) : pair.readable - const w = pair.writable - let readable = !!isReadable(r) - let writable = !!isWritable(w) - let ondrain - let onfinish - let onreadable - let onclose - let d - function onfinished(err) { - const cb = onclose - onclose = null - if (cb) { - cb(err) - } else if (err) { - d.destroy(err) - } - } - // TODO(ronag): Avoid double buffering. - // Implement Writable/Readable/Duplex traits. - // See, https://github.com/nodejs/node/pull/33515. - d = new Duplexify({ - // TODO (ronag): highWaterMark? 
- readableObjectMode: !!(r !== null && r !== undefined && r.readableObjectMode), - writableObjectMode: !!(w !== null && w !== undefined && w.writableObjectMode), - readable, - writable - }) - if (writable) { - eos(w, (err) => { - writable = false - if (err) { - destroyer(r, err) - } - onfinished(err) - }) - d._write = function (chunk, encoding, callback) { - if (w.write(chunk, encoding)) { - callback() - } else { - ondrain = callback - } - } - d._final = function (callback) { - w.end() - onfinish = callback - } - w.on('drain', function () { - if (ondrain) { - const cb = ondrain - ondrain = null - cb() - } - }) - w.on('finish', function () { - if (onfinish) { - const cb = onfinish - onfinish = null - cb() - } - }) - } - if (readable) { - eos(r, (err) => { - readable = false - if (err) { - destroyer(r, err) - } - onfinished(err) - }) - r.on('readable', function () { - if (onreadable) { - const cb = onreadable - onreadable = null - cb() - } - }) - r.on('end', function () { - d.push(null) - }) - d._read = function () { - while (true) { - const buf = r.read() - if (buf === null) { - onreadable = d._read - return - } - if (!d.push(buf)) { - return - } - } - } - } - d._destroy = function (err, callback) { - if (!err && onclose !== null) { - err = new AbortError() - } - onreadable = null - ondrain = null - onfinish = null - if (onclose === null) { - callback(err) - } else { - onclose = callback - destroyer(w, err) - destroyer(r, err) - } - } - return d +function isAbortSignal(signal) { + const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); + return !!(proto && proto.constructor.name === 'AbortSignal'); } +/** + * Request class + * + * @param Mixed input Url or Request instance + * @param Object init Custom options + * @return Void + */ +class Request { + constructor(input) { + let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; -/***/ }), + let parsedURL; -/***/ 89217: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // normalize input + if (!isRequest(input)) { + if (input && input.href) { + // in order to support Node.js' Url objects; though WHATWG's URL objects + // will fall into this branch also (since their `toString()` will return + // `href` property anyway) + parsedURL = parseURL(input.href); + } else { + // coerce input to a string before attempting to parse + parsedURL = parseURL(`${input}`); + } + input = {}; + } else { + parsedURL = parseURL(input.url); + } -/* replacement start */ + let method = init.method || input.method || 'GET'; + method = method.toUpperCase(); -const process = __nccwpck_require__(54340) + if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { + throw new TypeError('Request with GET/HEAD method cannot have body'); + } -/* replacement end */ -// Ported from https://github.com/mafintosh/end-of-stream with -// permission from the author, Mathias Buus (@mafintosh). + let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? 
clone(input) : null; -;('use strict') -const { AbortError, codes } = __nccwpck_require__(49939) -const { ERR_INVALID_ARG_TYPE, ERR_STREAM_PREMATURE_CLOSE } = codes -const { kEmptyObject, once } = __nccwpck_require__(32504) -const { validateAbortSignal, validateFunction, validateObject, validateBoolean } = __nccwpck_require__(95529) -const { Promise, PromisePrototypeThen, SymbolDispose } = __nccwpck_require__(62141) -const { - isClosed, - isReadable, - isReadableNodeStream, - isReadableStream, - isReadableFinished, - isReadableErrored, - isWritable, - isWritableNodeStream, - isWritableStream, - isWritableFinished, - isWritableErrored, - isNodeStream, - willEmitClose: _willEmitClose, - kIsClosedPromise -} = __nccwpck_require__(46995) -let addAbortListener -function isRequest(stream) { - return stream.setHeader && typeof stream.abort === 'function' -} -const nop = () => {} -function eos(stream, options, callback) { - var _options$readable, _options$writable - if (arguments.length === 2) { - callback = options - options = kEmptyObject - } else if (options == null) { - options = kEmptyObject - } else { - validateObject(options, 'options') - } - validateFunction(callback, 'callback') - validateAbortSignal(options.signal, 'options.signal') - callback = once(callback) - if (isReadableStream(stream) || isWritableStream(stream)) { - return eosWeb(stream, options, callback) - } - if (!isNodeStream(stream)) { - throw new ERR_INVALID_ARG_TYPE('stream', ['ReadableStream', 'WritableStream', 'Stream'], stream) - } - const readable = - (_options$readable = options.readable) !== null && _options$readable !== undefined - ? _options$readable - : isReadableNodeStream(stream) - const writable = - (_options$writable = options.writable) !== null && _options$writable !== undefined - ? _options$writable - : isWritableNodeStream(stream) - const wState = stream._writableState - const rState = stream._readableState - const onlegacyfinish = () => { - if (!stream.writable) { - onfinish() - } - } + Body.call(this, inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0 + }); - // TODO (ronag): Improve soft detection to include core modules and - // common ecosystem modules that do properly emit 'close' but fail - // this generic check. - let willEmitClose = - _willEmitClose(stream) && isReadableNodeStream(stream) === readable && isWritableNodeStream(stream) === writable - let writableFinished = isWritableFinished(stream, false) - const onfinish = () => { - writableFinished = true - // Stream should not be destroyed here. If it is that - // means that user space is doing something differently and - // we cannot trust willEmitClose. - if (stream.destroyed) { - willEmitClose = false - } - if (willEmitClose && (!stream.readable || readable)) { - return - } - if (!readable || readableFinished) { - callback.call(stream) - } - } - let readableFinished = isReadableFinished(stream, false) - const onend = () => { - readableFinished = true - // Stream should not be destroyed here. If it is that - // means that user space is doing something differently and - // we cannot trust willEmitClose. 
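
// A minimal sketch (not part of the bundled diff) exercising the Request rules
// shown above, assuming the same node-fetch v2 API that this bundle vendors.
const { Request } = require('node-fetch');

// A body on a GET/HEAD request is rejected, mirroring the constructor check above.
try {
  new Request('https://example.com/', { method: 'GET', body: 'nope' });
} catch (err) {
  console.log(err instanceof TypeError); // true
}

// clone() simply re-runs the constructor with `this` as input, so method,
// headers, and body-derived options carry over to the copy.
const req = new Request('https://example.com/', { method: 'POST', body: 'hi' });
console.log(req.clone().method); // 'POST'
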
- if (stream.destroyed) { - willEmitClose = false - } - if (willEmitClose && (!stream.writable || writable)) { - return - } - if (!writable || writableFinished) { - callback.call(stream) - } - } - const onerror = (err) => { - callback.call(stream, err) - } - let closed = isClosed(stream) - const onclose = () => { - closed = true - const errored = isWritableErrored(stream) || isReadableErrored(stream) - if (errored && typeof errored !== 'boolean') { - return callback.call(stream, errored) - } - if (readable && !readableFinished && isReadableNodeStream(stream, true)) { - if (!isReadableFinished(stream, false)) return callback.call(stream, new ERR_STREAM_PREMATURE_CLOSE()) - } - if (writable && !writableFinished) { - if (!isWritableFinished(stream, false)) return callback.call(stream, new ERR_STREAM_PREMATURE_CLOSE()) - } - callback.call(stream) - } - const onclosed = () => { - closed = true - const errored = isWritableErrored(stream) || isReadableErrored(stream) - if (errored && typeof errored !== 'boolean') { - return callback.call(stream, errored) - } - callback.call(stream) - } - const onrequest = () => { - stream.req.on('finish', onfinish) - } - if (isRequest(stream)) { - stream.on('complete', onfinish) - if (!willEmitClose) { - stream.on('abort', onclose) - } - if (stream.req) { - onrequest() - } else { - stream.on('request', onrequest) - } - } else if (writable && !wState) { - // legacy streams - stream.on('end', onlegacyfinish) - stream.on('close', onlegacyfinish) - } + const headers = new Headers(init.headers || input.headers || {}); - // Not all streams will emit 'close' after 'aborted'. - if (!willEmitClose && typeof stream.aborted === 'boolean') { - stream.on('aborted', onclose) - } - stream.on('end', onend) - stream.on('finish', onfinish) - if (options.error !== false) { - stream.on('error', onerror) - } - stream.on('close', onclose) - if (closed) { - process.nextTick(onclose) - } else if ( - (wState !== null && wState !== undefined && wState.errorEmitted) || - (rState !== null && rState !== undefined && rState.errorEmitted) - ) { - if (!willEmitClose) { - process.nextTick(onclosed) - } - } else if ( - !readable && - (!willEmitClose || isReadable(stream)) && - (writableFinished || isWritable(stream) === false) - ) { - process.nextTick(onclosed) - } else if ( - !writable && - (!willEmitClose || isWritable(stream)) && - (readableFinished || isReadable(stream) === false) - ) { - process.nextTick(onclosed) - } else if (rState && stream.req && stream.aborted) { - process.nextTick(onclosed) - } - const cleanup = () => { - callback = nop - stream.removeListener('aborted', onclose) - stream.removeListener('complete', onfinish) - stream.removeListener('abort', onclose) - stream.removeListener('request', onrequest) - if (stream.req) stream.req.removeListener('finish', onfinish) - stream.removeListener('end', onlegacyfinish) - stream.removeListener('close', onlegacyfinish) - stream.removeListener('finish', onfinish) - stream.removeListener('end', onend) - stream.removeListener('error', onerror) - stream.removeListener('close', onclose) - } - if (options.signal && !closed) { - const abort = () => { - // Keep it because cleanup removes it. 
- const endCallback = callback - cleanup() - endCallback.call( - stream, - new AbortError(undefined, { - cause: options.signal.reason - }) - ) - } - if (options.signal.aborted) { - process.nextTick(abort) - } else { - addAbortListener = addAbortListener || (__nccwpck_require__(32504).addAbortListener) - const disposable = addAbortListener(options.signal, abort) - const originalCallback = callback - callback = once((...args) => { - disposable[SymbolDispose]() - originalCallback.apply(stream, args) - }) - } - } - return cleanup -} -function eosWeb(stream, options, callback) { - let isAborted = false - let abort = nop - if (options.signal) { - abort = () => { - isAborted = true - callback.call( - stream, - new AbortError(undefined, { - cause: options.signal.reason - }) - ) - } - if (options.signal.aborted) { - process.nextTick(abort) - } else { - addAbortListener = addAbortListener || (__nccwpck_require__(32504).addAbortListener) - const disposable = addAbortListener(options.signal, abort) - const originalCallback = callback - callback = once((...args) => { - disposable[SymbolDispose]() - originalCallback.apply(stream, args) - }) - } - } - const resolverFn = (...args) => { - if (!isAborted) { - process.nextTick(() => callback.apply(stream, args)) - } - } - PromisePrototypeThen(stream[kIsClosedPromise].promise, resolverFn, resolverFn) - return nop -} -function finished(stream, opts) { - var _opts - let autoCleanup = false - if (opts === null) { - opts = kEmptyObject - } - if ((_opts = opts) !== null && _opts !== undefined && _opts.cleanup) { - validateBoolean(opts.cleanup, 'cleanup') - autoCleanup = opts.cleanup - } - return new Promise((resolve, reject) => { - const cleanup = eos(stream, opts, (err) => { - if (autoCleanup) { - cleanup() - } - if (err) { - reject(err) - } else { - resolve() - } - }) - }) -} -module.exports = eos -module.exports.finished = finished + if (inputBody != null && !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + let signal = isRequest(input) ? input.signal : null; + if ('signal' in init) signal = init.signal; -/***/ }), + if (signal != null && !isAbortSignal(signal)) { + throw new TypeError('Expected signal to be an instanceof AbortSignal'); + } -/***/ 42725: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + this[INTERNALS$2] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal + }; -"use strict"; + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; + this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? 
input.compress : true; + this.counter = init.counter || input.counter || 0; + this.agent = init.agent || input.agent; + } + get method() { + return this[INTERNALS$2].method; + } -/* replacement start */ + get url() { + return format_url(this[INTERNALS$2].parsedURL); + } -const process = __nccwpck_require__(54340) + get headers() { + return this[INTERNALS$2].headers; + } -/* replacement end */ + get redirect() { + return this[INTERNALS$2].redirect; + } -const { PromisePrototypeThen, SymbolAsyncIterator, SymbolIterator } = __nccwpck_require__(62141) -const { Buffer } = __nccwpck_require__(14300) -const { ERR_INVALID_ARG_TYPE, ERR_STREAM_NULL_VALUES } = (__nccwpck_require__(49939).codes) -function from(Readable, iterable, opts) { - let iterator - if (typeof iterable === 'string' || iterable instanceof Buffer) { - return new Readable({ - objectMode: true, - ...opts, - read() { - this.push(iterable) - this.push(null) - } - }) - } - let isAsync - if (iterable && iterable[SymbolAsyncIterator]) { - isAsync = true - iterator = iterable[SymbolAsyncIterator]() - } else if (iterable && iterable[SymbolIterator]) { - isAsync = false - iterator = iterable[SymbolIterator]() - } else { - throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable) - } - const readable = new Readable({ - objectMode: true, - highWaterMark: 1, - // TODO(ronag): What options should be allowed? - ...opts - }) + get signal() { + return this[INTERNALS$2].signal; + } - // Flag to protect against _read - // being called before last iteration completion. - let reading = false - readable._read = function () { - if (!reading) { - reading = true - next() - } - } - readable._destroy = function (error, cb) { - PromisePrototypeThen( - close(error), - () => process.nextTick(cb, error), - // nextTick is here in case cb throws - (e) => process.nextTick(cb, e || error) - ) - } - async function close(error) { - const hadError = error !== undefined && error !== null - const hasThrow = typeof iterator.throw === 'function' - if (hadError && hasThrow) { - const { value, done } = await iterator.throw(error) - await value - if (done) { - return - } - } - if (typeof iterator.return === 'function') { - const { value } = await iterator.return() - await value - } - } - async function next() { - for (;;) { - try { - const { value, done } = isAsync ? await iterator.next() : iterator.next() - if (done) { - readable.push(null) - } else { - const res = value && typeof value.then === 'function' ? await value : value - if (res === null) { - reading = false - throw new ERR_STREAM_NULL_VALUES() - } else if (readable.push(res)) { - continue - } else { - reading = false - } - } - } catch (err) { - readable.destroy(err) - } - break - } - } - return readable + /** + * Clone this request + * + * @return Request + */ + clone() { + return new Request(this); + } } -module.exports = from +Body.mixIn(Request.prototype); -/***/ }), +Object.defineProperty(Request.prototype, Symbol.toStringTag, { + value: 'Request', + writable: false, + enumerable: false, + configurable: true +}); -/***/ 26501: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true } +}); -"use strict"; +/** + * Convert a Request to Node.js http request options. 
+ * + * @param Request A Request instance + * @return Object The options object to be passed to http.request + */ +function getNodeRequestOptions(request) { + const parsedURL = request[INTERNALS$2].parsedURL; + const headers = new Headers(request[INTERNALS$2].headers); + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*'); + } -const { ArrayIsArray, ObjectSetPrototypeOf } = __nccwpck_require__(62141) -const { EventEmitter: EE } = __nccwpck_require__(82361) -function Stream(opts) { - EE.call(this, opts) -} -ObjectSetPrototypeOf(Stream.prototype, EE.prototype) -ObjectSetPrototypeOf(Stream, EE) -Stream.prototype.pipe = function (dest, options) { - const source = this - function ondata(chunk) { - if (dest.writable && dest.write(chunk) === false && source.pause) { - source.pause() - } - } - source.on('data', ondata) - function ondrain() { - if (source.readable && source.resume) { - source.resume() - } - } - dest.on('drain', ondrain) + // Basic fetch + if (!parsedURL.protocol || !parsedURL.hostname) { + throw new TypeError('Only absolute URLs are supported'); + } - // If the 'end' option is not supplied, dest.end() will be called when - // source gets the 'end' or 'close' events. Only dest.end() once. - if (!dest._isStdio && (!options || options.end !== false)) { - source.on('end', onend) - source.on('close', onclose) - } - let didOnEnd = false - function onend() { - if (didOnEnd) return - didOnEnd = true - dest.end() - } - function onclose() { - if (didOnEnd) return - didOnEnd = true - if (typeof dest.destroy === 'function') dest.destroy() - } + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported'); + } - // Don't leave dangling pipes when there are errors. - function onerror(er) { - cleanup() - if (EE.listenerCount(this, 'error') === 0) { - this.emit('error', er) - } - } - prependListener(source, 'error', onerror) - prependListener(dest, 'error', onerror) + if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { + throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); + } - // Remove all the event listeners that were added. - function cleanup() { - source.removeListener('data', ondata) - dest.removeListener('drain', ondrain) - source.removeListener('end', onend) - source.removeListener('close', onclose) - source.removeListener('error', onerror) - dest.removeListener('error', onerror) - source.removeListener('end', cleanup) - source.removeListener('close', cleanup) - dest.removeListener('close', cleanup) - } - source.on('end', cleanup) - source.on('close', cleanup) - dest.on('close', cleanup) - dest.emit('pipe', source) + // HTTP-network-or-cache fetch steps 2.4-2.7 + let contentLengthValue = null; + if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { + contentLengthValue = '0'; + } + if (request.body != null) { + const totalBytes = getTotalBytes(request); + if (typeof totalBytes === 'number') { + contentLengthValue = String(totalBytes); + } + } + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue); + } - // Allow for unix-like usage: A.pipe(B).pipe(C) - return dest -} -function prependListener(emitter, event, fn) { - // Sadly this is not cacheable as some libraries bundle their own - // event emitter implementation with them. 
- if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn) + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); + } - // This is a hack to make sure that our error handler is attached before any - // userland ones. NEVER DO THIS. This is here only because this code needs - // to continue to work with older versions of Node.js that do not include - // the prependListener() method. The goal is to eventually remove this hack. - if (!emitter._events || !emitter._events[event]) emitter.on(event, fn) - else if (ArrayIsArray(emitter._events[event])) emitter._events[event].unshift(fn) - else emitter._events[event] = [fn, emitter._events[event]] + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate'); + } + + let agent = request.agent; + if (typeof agent === 'function') { + agent = agent(parsedURL); + } + + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js + + return Object.assign({}, parsedURL, { + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent + }); } -module.exports = { - Stream, - prependListener + +/** + * abort-error.js + * + * AbortError interface for cancelled requests + */ + +/** + * Create AbortError instance + * + * @param String message Error message for human + * @return AbortError + */ +function AbortError(message) { + Error.call(this, message); + + this.type = 'aborted'; + this.message = message; + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); } +AbortError.prototype = Object.create(Error.prototype); +AbortError.prototype.constructor = AbortError; +AbortError.prototype.name = 'AbortError'; -/***/ }), +const URL$1 = Url.URL || whatwgUrl.URL; -/***/ 71563: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 +const PassThrough$1 = Stream.PassThrough; -"use strict"; +const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) { + const orig = new URL$1(original).hostname; + const dest = new URL$1(destination).hostname; + return orig === dest || orig[orig.length - dest.length - 1] === '.' 
&& orig.endsWith(dest); +}; -const AbortController = globalThis.AbortController || (__nccwpck_require__(43747).AbortController) -const { - codes: { ERR_INVALID_ARG_VALUE, ERR_INVALID_ARG_TYPE, ERR_MISSING_ARGS, ERR_OUT_OF_RANGE }, - AbortError -} = __nccwpck_require__(49939) -const { validateAbortSignal, validateInteger, validateObject } = __nccwpck_require__(95529) -const kWeakHandler = (__nccwpck_require__(62141).Symbol)('kWeak') -const kResistStopPropagation = (__nccwpck_require__(62141).Symbol)('kResistStopPropagation') -const { finished } = __nccwpck_require__(89217) -const staticCompose = __nccwpck_require__(83913) -const { addAbortSignalNoValidate } = __nccwpck_require__(50275) -const { isWritable, isNodeStream } = __nccwpck_require__(46995) -const { deprecate } = __nccwpck_require__(32504) -const { - ArrayPrototypePush, - Boolean, - MathFloor, - Number, - NumberIsNaN, - Promise, - PromiseReject, - PromiseResolve, - PromisePrototypeThen, - Symbol -} = __nccwpck_require__(62141) -const kEmpty = Symbol('kEmpty') -const kEof = Symbol('kEof') -function compose(stream, options) { - if (options != null) { - validateObject(options, 'options') - } - if ((options === null || options === undefined ? undefined : options.signal) != null) { - validateAbortSignal(options.signal, 'options.signal') - } - if (isNodeStream(stream) && !isWritable(stream)) { - throw new ERR_INVALID_ARG_VALUE('stream', stream, 'must be writable') - } - const composedStream = staticCompose(this, stream) - if (options !== null && options !== undefined && options.signal) { - // Not validating as we already validated before - addAbortSignalNoValidate(options.signal, composedStream) - } - return composedStream -} -function map(fn, options) { - if (typeof fn !== 'function') { - throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn) - } - if (options != null) { - validateObject(options, 'options') - } - if ((options === null || options === undefined ? undefined : options.signal) != null) { - validateAbortSignal(options.signal, 'options.signal') - } - let concurrency = 1 - if ((options === null || options === undefined ? undefined : options.concurrency) != null) { - concurrency = MathFloor(options.concurrency) - } - let highWaterMark = concurrency - 1 - if ((options === null || options === undefined ? undefined : options.highWaterMark) != null) { - highWaterMark = MathFloor(options.highWaterMark) - } - validateInteger(concurrency, 'options.concurrency', 1) - validateInteger(highWaterMark, 'options.highWaterMark', 0) - highWaterMark += concurrency - return async function* map() { - const signal = (__nccwpck_require__(32504).AbortSignalAny)( - [options === null || options === undefined ? 
undefined : options.signal].filter(Boolean) - ) - const stream = this - const queue = [] - const signalOpt = { - signal - } - let next - let resume - let done = false - let cnt = 0 - function onCatch() { - done = true - afterItemProcessed() - } - function afterItemProcessed() { - cnt -= 1 - maybeResume() - } - function maybeResume() { - if (resume && !done && cnt < concurrency && queue.length < highWaterMark) { - resume() - resume = null - } - } - async function pump() { - try { - for await (let val of stream) { - if (done) { - return - } - if (signal.aborted) { - throw new AbortError() - } - try { - val = fn(val, signalOpt) - if (val === kEmpty) { - continue - } - val = PromiseResolve(val) - } catch (err) { - val = PromiseReject(err) - } - cnt += 1 - PromisePrototypeThen(val, afterItemProcessed, onCatch) - queue.push(val) - if (next) { - next() - next = null - } - if (!done && (queue.length >= highWaterMark || cnt >= concurrency)) { - await new Promise((resolve) => { - resume = resolve - }) - } - } - queue.push(kEof) - } catch (err) { - const val = PromiseReject(err) - PromisePrototypeThen(val, afterItemProcessed, onCatch) - queue.push(val) - } finally { - done = true - if (next) { - next() - next = null - } - } - } - pump() - try { - while (true) { - while (queue.length > 0) { - const val = await queue[0] - if (val === kEof) { - return - } - if (signal.aborted) { - throw new AbortError() - } - if (val !== kEmpty) { - yield val - } - queue.shift() - maybeResume() - } - await new Promise((resolve) => { - next = resolve - }) - } - } finally { - done = true - if (resume) { - resume() - resume = null - } - } - }.call(this) -} -function asIndexedPairs(options = undefined) { - if (options != null) { - validateObject(options, 'options') - } - if ((options === null || options === undefined ? 
undefined : options.signal) != null) { - validateAbortSignal(options.signal, 'options.signal') - } - return async function* asIndexedPairs() { - let index = 0 - for await (const val of this) { - var _options$signal - if ( - options !== null && - options !== undefined && - (_options$signal = options.signal) !== null && - _options$signal !== undefined && - _options$signal.aborted - ) { - throw new AbortError({ - cause: options.signal.reason - }) - } - yield [index++, val] - } - }.call(this) -} -async function some(fn, options = undefined) { - for await (const unused of filter.call(this, fn, options)) { - return true - } - return false -} -async function every(fn, options = undefined) { - if (typeof fn !== 'function') { - throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn) - } - // https://en.wikipedia.org/wiki/De_Morgan%27s_laws - return !(await some.call( - this, - async (...args) => { - return !(await fn(...args)) - }, - options - )) -} -async function find(fn, options) { - for await (const result of filter.call(this, fn, options)) { - return result - } - return undefined -} -async function forEach(fn, options) { - if (typeof fn !== 'function') { - throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn) - } - async function forEachFn(value, options) { - await fn(value, options) - return kEmpty - } - // eslint-disable-next-line no-unused-vars - for await (const unused of map.call(this, forEachFn, options)); -} -function filter(fn, options) { - if (typeof fn !== 'function') { - throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn) - } - async function filterFn(value, options) { - if (await fn(value, options)) { - return value - } - return kEmpty - } - return map.call(this, filterFn, options) -} +/** + * isSameProtocol reports whether the two provided URLs use the same protocol. + * + * Both domains must already be in canonical form. + * @param {string|URL} original + * @param {string|URL} destination + */ +const isSameProtocol = function isSameProtocol(destination, original) { + const orig = new URL$1(original).protocol; + const dest = new URL$1(destination).protocol; -// Specific to provide better error to reduce since the argument is only -// missing if the stream has no items in it - but the code is still appropriate -class ReduceAwareErrMissingArgs extends ERR_MISSING_ARGS { - constructor() { - super('reduce') - this.message = 'Reduce of an empty stream requires an initial value' - } -} -async function reduce(reducer, initialValue, options) { - var _options$signal2 - if (typeof reducer !== 'function') { - throw new ERR_INVALID_ARG_TYPE('reducer', ['Function', 'AsyncFunction'], reducer) - } - if (options != null) { - validateObject(options, 'options') - } - if ((options === null || options === undefined ? 
undefined : options.signal) != null) { - validateAbortSignal(options.signal, 'options.signal') - } - let hasInitialValue = arguments.length > 1 - if ( - options !== null && - options !== undefined && - (_options$signal2 = options.signal) !== null && - _options$signal2 !== undefined && - _options$signal2.aborted - ) { - const err = new AbortError(undefined, { - cause: options.signal.reason - }) - this.once('error', () => {}) // The error is already propagated - await finished(this.destroy(err)) - throw err - } - const ac = new AbortController() - const signal = ac.signal - if (options !== null && options !== undefined && options.signal) { - const opts = { - once: true, - [kWeakHandler]: this, - [kResistStopPropagation]: true - } - options.signal.addEventListener('abort', () => ac.abort(), opts) - } - let gotAnyItemFromStream = false - try { - for await (const value of this) { - var _options$signal3 - gotAnyItemFromStream = true - if ( - options !== null && - options !== undefined && - (_options$signal3 = options.signal) !== null && - _options$signal3 !== undefined && - _options$signal3.aborted - ) { - throw new AbortError() - } - if (!hasInitialValue) { - initialValue = value - hasInitialValue = true - } else { - initialValue = await reducer(initialValue, value, { - signal - }) - } - } - if (!gotAnyItemFromStream && !hasInitialValue) { - throw new ReduceAwareErrMissingArgs() - } - } finally { - ac.abort() - } - return initialValue -} -async function toArray(options) { - if (options != null) { - validateObject(options, 'options') - } - if ((options === null || options === undefined ? undefined : options.signal) != null) { - validateAbortSignal(options.signal, 'options.signal') - } - const result = [] - for await (const val of this) { - var _options$signal4 - if ( - options !== null && - options !== undefined && - (_options$signal4 = options.signal) !== null && - _options$signal4 !== undefined && - _options$signal4.aborted - ) { - throw new AbortError(undefined, { - cause: options.signal.reason - }) - } - ArrayPrototypePush(result, val) - } - return result -} -function flatMap(fn, options) { - const values = map.call(this, fn, options) - return async function* flatMap() { - for await (const val of values) { - yield* val - } - }.call(this) -} -function toIntegerOrInfinity(number) { - // We coerce here to align with the spec - // https://github.com/tc39/proposal-iterator-helpers/issues/169 - number = Number(number) - if (NumberIsNaN(number)) { - return 0 - } - if (number < 0) { - throw new ERR_OUT_OF_RANGE('number', '>= 0', number) - } - return number -} -function drop(number, options = undefined) { - if (options != null) { - validateObject(options, 'options') - } - if ((options === null || options === undefined ? 
undefined : options.signal) != null) { - validateAbortSignal(options.signal, 'options.signal') - } - number = toIntegerOrInfinity(number) - return async function* drop() { - var _options$signal5 - if ( - options !== null && - options !== undefined && - (_options$signal5 = options.signal) !== null && - _options$signal5 !== undefined && - _options$signal5.aborted - ) { - throw new AbortError() - } - for await (const val of this) { - var _options$signal6 - if ( - options !== null && - options !== undefined && - (_options$signal6 = options.signal) !== null && - _options$signal6 !== undefined && - _options$signal6.aborted - ) { - throw new AbortError() - } - if (number-- <= 0) { - yield val - } - } - }.call(this) -} -function take(number, options = undefined) { - if (options != null) { - validateObject(options, 'options') - } - if ((options === null || options === undefined ? undefined : options.signal) != null) { - validateAbortSignal(options.signal, 'options.signal') - } - number = toIntegerOrInfinity(number) - return async function* take() { - var _options$signal7 - if ( - options !== null && - options !== undefined && - (_options$signal7 = options.signal) !== null && - _options$signal7 !== undefined && - _options$signal7.aborted - ) { - throw new AbortError() - } - for await (const val of this) { - var _options$signal8 - if ( - options !== null && - options !== undefined && - (_options$signal8 = options.signal) !== null && - _options$signal8 !== undefined && - _options$signal8.aborted - ) { - throw new AbortError() - } - if (number-- > 0) { - yield val - } + return orig === dest; +}; - // Don't get another item from iterator in case we reached the end - if (number <= 0) { - return - } - } - }.call(this) -} -module.exports.streamReturningOperators = { - asIndexedPairs: deprecate(asIndexedPairs, 'readable.asIndexedPairs will be removed in a future version.'), - drop, - filter, - flatMap, - map, - take, - compose -} -module.exports.promiseReturningOperators = { - every, - forEach, - reduce, - toArray, - some, - find -} +/** + * Fetch function + * + * @param Mixed url Absolute url or Request instance + * @param Object opts Fetch options + * @return Promise + */ +function fetch(url, opts) { + // allow custom promise + if (!fetch.Promise) { + throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); + } -/***/ }), + Body.Promise = fetch.Promise; -/***/ 23046: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // wrap http.request into fetch + return new fetch.Promise(function (resolve, reject) { + // build request object + const request = new Request(url, opts); + const options = getNodeRequestOptions(request); -"use strict"; -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. 
-// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. + const send = (options.protocol === 'https:' ? https : http).request; + const signal = request.signal; -// a passthrough stream. -// basically just the most minimal sort of Transform stream. -// Every written chunk gets output as-is. + let response = null; + const abort = function abort() { + let error = new AbortError('The user aborted a request.'); + reject(error); + if (request.body && request.body instanceof Stream.Readable) { + destroyStream(request.body, error); + } + if (!response || !response.body) return; + response.body.emit('error', error); + }; + if (signal && signal.aborted) { + abort(); + return; + } -const { ObjectSetPrototypeOf } = __nccwpck_require__(62141) -module.exports = PassThrough -const Transform = __nccwpck_require__(84186) -ObjectSetPrototypeOf(PassThrough.prototype, Transform.prototype) -ObjectSetPrototypeOf(PassThrough, Transform) -function PassThrough(options) { - if (!(this instanceof PassThrough)) return new PassThrough(options) - Transform.call(this, options) -} -PassThrough.prototype._transform = function (chunk, encoding, cb) { - cb(null, chunk) -} + const abortAndFinalize = function abortAndFinalize() { + abort(); + finalize(); + }; + // send request + const req = send(options); + let reqTimeout; -/***/ }), + if (signal) { + signal.addEventListener('abort', abortAndFinalize); + } -/***/ 32361: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + function finalize() { + req.abort(); + if (signal) signal.removeEventListener('abort', abortAndFinalize); + clearTimeout(reqTimeout); + } -/* replacement start */ + if (request.timeout) { + req.once('socket', function (socket) { + reqTimeout = setTimeout(function () { + reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); + finalize(); + }, request.timeout); + }); + } -const process = __nccwpck_require__(54340) + req.on('error', function (err) { + reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); -/* replacement end */ -// Ported from https://github.com/mafintosh/pump with -// permission from the author, Mathias Buus (@mafintosh). 
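
// A sketch of the PassThrough module above: it is just a Transform whose
// _transform hands every chunk straight back. The same behaviour, using
// Node's public stream API instead of the bundled internals:
const { Transform } = require('stream');

const identity = new Transform({
  transform(chunk, encoding, callback) {
    callback(null, chunk); // forward the chunk unchanged
  }
});

identity.on('data', (chunk) => console.log(chunk.toString()));
identity.write('hello');
identity.end();
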
+ if (response && response.body) { + destroyStream(response.body, err); + } -;('use strict') -const { ArrayIsArray, Promise, SymbolAsyncIterator, SymbolDispose } = __nccwpck_require__(62141) -const eos = __nccwpck_require__(89217) -const { once } = __nccwpck_require__(32504) -const destroyImpl = __nccwpck_require__(47493) -const Duplex = __nccwpck_require__(7280) -const { - aggregateTwoErrors, - codes: { - ERR_INVALID_ARG_TYPE, - ERR_INVALID_RETURN_VALUE, - ERR_MISSING_ARGS, - ERR_STREAM_DESTROYED, - ERR_STREAM_PREMATURE_CLOSE - }, - AbortError -} = __nccwpck_require__(49939) -const { validateFunction, validateAbortSignal } = __nccwpck_require__(95529) -const { - isIterable, - isReadable, - isReadableNodeStream, - isNodeStream, - isTransformStream, - isWebStream, - isReadableStream, - isReadableFinished -} = __nccwpck_require__(46995) -const AbortController = globalThis.AbortController || (__nccwpck_require__(43747).AbortController) -let PassThrough -let Readable -let addAbortListener -function destroyer(stream, reading, writing) { - let finished = false - stream.on('close', () => { - finished = true - }) - const cleanup = eos( - stream, - { - readable: reading, - writable: writing - }, - (err) => { - finished = !err - } - ) - return { - destroy: (err) => { - if (finished) return - finished = true - destroyImpl.destroyer(stream, err || new ERR_STREAM_DESTROYED('pipe')) - }, - cleanup - } + finalize(); + }); + + fixResponseChunkedTransferBadEnding(req, function (err) { + if (signal && signal.aborted) { + return; + } + + if (response && response.body) { + destroyStream(response.body, err); + } + }); + + /* c8 ignore next 18 */ + if (parseInt(process.version.substring(1)) < 14) { + // Before Node.js 14, pipeline() does not fully support async iterators and does not always + // properly handle when the socket close/end events are out of order. + req.on('socket', function (s) { + s.addListener('close', function (hadError) { + // if a data listener is still present we didn't end cleanly + const hasDataListener = s.listenerCount('data') > 0; + + // if end happened before close but the socket didn't emit an error, do it now + if (response && hasDataListener && !hadError && !(signal && signal.aborted)) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + response.body.emit('error', err); + } + }); + }); + } + + req.on('response', function (res) { + clearTimeout(reqTimeout); + + const headers = createHeadersLenient(res.headers); + + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location'); + + // HTTP fetch step 5.3 + let locationURL = null; + try { + locationURL = location === null ? null : new URL$1(location, request.url).toString(); + } catch (err) { + // error here can only be invalid URL in Location: header + // do not throw when options.redirect == manual + // let the user extract the errorneous redirect URL + if (request.redirect !== 'manual') { + reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')); + finalize(); + return; + } + } + + // HTTP fetch step 5.5 + switch (request.redirect) { + case 'error': + reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); + finalize(); + return; + case 'manual': + // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. 
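
// A sketch of the three redirect modes handled in the switch above, assuming
// the node-fetch v2 API and a server that answers with a 3xx:
const fetch = require('node-fetch');

// 'manual' resolves with the 3xx response itself; as the comment above notes,
// node-fetch rewrites the Location header to the resolved absolute URL.
fetch('http://example.com/old', { redirect: 'manual' })
  .then((res) => console.log(res.status, res.headers.get('location')));

// 'error' rejects with a FetchError of type 'no-redirect', and 'follow'
// (the default) chases up to `follow` redirects (20 unless overridden).
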
+ if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL); + } catch (err) { + // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request + reject(err); + } + } + break; + case 'follow': + // HTTP-redirect fetch step 2 + if (locationURL === null) { + break; + } + + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. + const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout, + size: request.size + }; + + if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) { + for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { + requestOpts.headers.delete(name); + } + } + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { + reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { + requestOpts.method = 'GET'; + requestOpts.body = undefined; + requestOpts.headers.delete('content-length'); + } + + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))); + finalize(); + return; + } + } + + // prepare response + res.once('end', function () { + if (signal) signal.removeEventListener('abort', abortAndFinalize); + }); + let body = res.pipe(new PassThrough$1()); + + const response_options = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter + }; + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding'); + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. no content response (204) + // 5. content not modified response (304) + if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { + response = new Response(body, response_options); + resolve(response); + return; + } + + // For Node v6+ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. 
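
// The lenient inflate settings described above, via Node's public zlib API
// (the bundle reads the same constant through the legacy zlib.Z_SYNC_FLUSH alias):
const zlib = require('zlib');

const lenientGunzip = zlib.createGunzip({
  flush: zlib.constants.Z_SYNC_FLUSH,
  finishFlush: zlib.constants.Z_SYNC_FLUSH
});
// e.g. res.pipe(lenientGunzip).pipe(process.stdout) keeps decoding slightly
// malformed or truncated gzip bodies instead of erroring out.
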
+ const zlibOptions = { + flush: zlib.Z_SYNC_FLUSH, + finishFlush: zlib.Z_SYNC_FLUSH + }; + + // for gzip + if (codings == 'gzip' || codings == 'x-gzip') { + body = body.pipe(zlib.createGunzip(zlibOptions)); + response = new Response(body, response_options); + resolve(response); + return; + } + + // for deflate + if (codings == 'deflate' || codings == 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new PassThrough$1()); + raw.once('data', function (chunk) { + // see http://stackoverflow.com/questions/37519828 + if ((chunk[0] & 0x0F) === 0x08) { + body = body.pipe(zlib.createInflate()); + } else { + body = body.pipe(zlib.createInflateRaw()); + } + response = new Response(body, response_options); + resolve(response); + }); + raw.on('end', function () { + // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted. + if (!response) { + response = new Response(body, response_options); + resolve(response); + } + }); + return; + } + + // for br + if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { + body = body.pipe(zlib.createBrotliDecompress()); + response = new Response(body, response_options); + resolve(response); + return; + } + + // otherwise, use response as-is + response = new Response(body, response_options); + resolve(response); + }); + + writeToStream(req, request); + }); } -function popCallback(streams) { - // Streams should never be an empty array. It should always contain at least - // a single stream. Therefore optimize for the average case instead of - // checking for length === 0 as well. - validateFunction(streams[streams.length - 1], 'streams[stream.length - 1]') - return streams.pop() +function fixResponseChunkedTransferBadEnding(request, errorCallback) { + let socket; + + request.on('socket', function (s) { + socket = s; + }); + + request.on('response', function (response) { + const headers = response.headers; + + if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) { + response.once('close', function (hadError) { + // tests for socket presence, as in some situations the + // the 'socket' event is not triggered for the request + // (happens in deno), avoids `TypeError` + // if a data listener is still present we didn't end cleanly + const hasDataListener = socket && socket.listenerCount('data') > 0; + + if (hasDataListener && !hadError) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + errorCallback(err); + } + }); + } + }); } -function makeAsyncIterable(val) { - if (isIterable(val)) { - return val - } else if (isReadableNodeStream(val)) { - // Legacy streams are not Iterable. 
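
// A sketch of the first-byte sniff used above for 'deflate' bodies: a
// zlib-wrapped stream starts with a CMF byte whose low nibble is 8 (the
// deflate method), while old IIS/Apache servers send headerless raw deflate.
// `inflaterFor` is a hypothetical helper name, not part of the bundle.
const zlib = require('zlib');

function inflaterFor(firstChunk) {
  return (firstChunk[0] & 0x0f) === 0x08
    ? zlib.createInflate()      // zlib-wrapped deflate
    : zlib.createInflateRaw();  // raw deflate, no zlib header
}
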
- return fromReadable(val) - } - throw new ERR_INVALID_ARG_TYPE('val', ['Readable', 'Iterable', 'AsyncIterable'], val) + +function destroyStream(stream, err) { + if (stream.destroy) { + stream.destroy(err); + } else { + // node < 8 + stream.emit('error', err); + stream.end(); + } } -async function* fromReadable(val) { - if (!Readable) { - Readable = __nccwpck_require__(14252) + +/** + * Redirect code matching + * + * @param Number code Status code + * @return Boolean + */ +fetch.isRedirect = function (code) { + return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; +}; + +// expose Promise +fetch.Promise = global.Promise; + +module.exports = exports = fetch; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports["default"] = exports; +exports.Headers = Headers; +exports.Request = Request; +exports.Response = Response; +exports.FetchError = FetchError; +exports.AbortError = AbortError; + + +/***/ }), + +/***/ 54882: +/***/ ((module) => { + +/*! + * normalize-path + * + * Copyright (c) 2014-2018, Jon Schlinkert. + * Released under the MIT License. + */ + +module.exports = function(path, stripTrailing) { + if (typeof path !== 'string') { + throw new TypeError('expected path to be a string'); } - yield* Readable.prototype[SymbolAsyncIterator].call(val) -} -async function pumpToNode(iterable, writable, finish, { end }) { - let error - let onresolve = null - const resume = (err) => { - if (err) { - error = err - } - if (onresolve) { - const callback = onresolve - onresolve = null - callback() + + if (path === '\\' || path === '/') return '/'; + + var len = path.length; + if (len <= 1) return path; + + // ensure that win32 namespaces has two leading slashes, so that the path is + // handled properly by the win32 version of path.parse() after being normalized + // https://msdn.microsoft.com/library/windows/desktop/aa365247(v=vs.85).aspx#namespaces + var prefix = ''; + if (len > 4 && path[3] === '\\') { + var ch = path[2]; + if ((ch === '?' || ch === '.') && path.slice(0, 2) === '\\\\') { + path = path.slice(2); + prefix = '//'; } } - const wait = () => - new Promise((resolve, reject) => { - if (error) { - reject(error) - } else { - onresolve = () => { - if (error) { - reject(error) - } else { - resolve() - } - } - } - }) - writable.on('drain', resume) - const cleanup = eos( - writable, - { - readable: false + + var segs = path.split(/[/\\]+/); + if (stripTrailing !== false && segs[segs.length - 1] === '') { + segs.pop(); + } + return prefix + segs.join('/'); +}; + + +/***/ }), + +/***/ 88666: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var wrappy = __nccwpck_require__(99002) +module.exports = wrappy(once) +module.exports.strict = wrappy(onceStrict) + +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) }, - resume - ) - try { - if (writable.writableNeedDrain) { - await wait() - } - for await (const chunk of iterable) { - if (!writable.write(chunk)) { - await wait() - } - } - if (end) { - writable.end() - await wait() - } - finish() - } catch (err) { - finish(error !== err ? 
aggregateTwoErrors(error, err) : err) - } finally { - cleanup() - writable.off('drain', resume) + configurable: true + }) + + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true + }) +}) + +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) } + f.called = false + return f } -async function pumpToWeb(readable, writable, finish, { end }) { - if (isTransformStream(writable)) { - writable = writable.writable - } - // https://streams.spec.whatwg.org/#example-manual-write-with-backpressure - const writer = writable.getWriter() - try { - for await (const chunk of readable) { - await writer.ready - writer.write(chunk).catch(() => {}) - } - await writer.ready - if (end) { - await writer.close() - } - finish() - } catch (err) { - try { - await writer.abort(err) - finish(err) - } catch (err) { - finish(err) - } + +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) } + var name = fn.name || 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f } -function pipeline(...streams) { - return pipelineImpl(streams, once(popCallback(streams))) + + +/***/ }), + +/***/ 22927: +/***/ ((module) => { + +"use strict"; + + +if (typeof process === 'undefined' || + !process.version || + process.version.indexOf('v0.') === 0 || + process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) { + module.exports = { nextTick: nextTick }; +} else { + module.exports = process } -function pipelineImpl(streams, callback, opts) { - if (streams.length === 1 && ArrayIsArray(streams[0])) { - streams = streams[0] - } - if (streams.length < 2) { - throw new ERR_MISSING_ARGS('streams') - } - const ac = new AbortController() - const signal = ac.signal - const outerSignal = opts === null || opts === undefined ? undefined : opts.signal - // Need to cleanup event listeners if last stream is readable - // https://github.com/nodejs/node/issues/35452 - const lastStreamCleanup = [] - validateAbortSignal(outerSignal, 'options.signal') - function abort() { - finishImpl(new AbortError()) - } - addAbortListener = addAbortListener || (__nccwpck_require__(32504).addAbortListener) - let disposable - if (outerSignal) { - disposable = addAbortListener(outerSignal, abort) - } - let error - let value - const destroys = [] - let finishCount = 0 - function finish(err) { - finishImpl(err, --finishCount === 0) +function nextTick(fn, arg1, arg2, arg3) { + if (typeof fn !== 'function') { + throw new TypeError('"callback" argument must be a function'); } - function finishImpl(err, final) { - var _disposable - if (err && (!error || error.code === 'ERR_STREAM_PREMATURE_CLOSE')) { - error = err - } - if (!error && !final) { - return - } - while (destroys.length) { - destroys.shift()(error) - } - ;(_disposable = disposable) === null || _disposable === undefined ? 
undefined : _disposable[SymbolDispose]() - ac.abort() - if (final) { - if (!error) { - lastStreamCleanup.forEach((fn) => fn()) - } - process.nextTick(callback, error, value) + var len = arguments.length; + var args, i; + switch (len) { + case 0: + case 1: + return process.nextTick(fn); + case 2: + return process.nextTick(function afterTickOne() { + fn.call(null, arg1); + }); + case 3: + return process.nextTick(function afterTickTwo() { + fn.call(null, arg1, arg2); + }); + case 4: + return process.nextTick(function afterTickThree() { + fn.call(null, arg1, arg2, arg3); + }); + default: + args = new Array(len - 1); + i = 0; + while (i < args.length) { + args[i++] = arguments[i]; } + return process.nextTick(function afterTick() { + fn.apply(null, args); + }); } - let ret - for (let i = 0; i < streams.length; i++) { - const stream = streams[i] - const reading = i < streams.length - 1 - const writing = i > 0 - const end = reading || (opts === null || opts === undefined ? undefined : opts.end) !== false - const isLastStream = i === streams.length - 1 - if (isNodeStream(stream)) { - if (end) { - const { destroy, cleanup } = destroyer(stream, reading, writing) - destroys.push(destroy) - if (isReadable(stream) && isLastStream) { - lastStreamCleanup.push(cleanup) - } - } +} - // Catch stream errors that occur after pipe/pump has completed. - function onError(err) { - if (err && err.name !== 'AbortError' && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { - finish(err) - } - } - stream.on('error', onError) - if (isReadable(stream) && isLastStream) { - lastStreamCleanup.push(() => { - stream.removeListener('error', onError) - }) - } - } - if (i === 0) { - if (typeof stream === 'function') { - ret = stream({ - signal - }) - if (!isIterable(ret)) { - throw new ERR_INVALID_RETURN_VALUE('Iterable, AsyncIterable or Stream', 'source', ret) - } - } else if (isIterable(stream) || isReadableNodeStream(stream) || isTransformStream(stream)) { - ret = stream - } else { - ret = Duplex.from(stream) - } - } else if (typeof stream === 'function') { - if (isTransformStream(ret)) { - var _ret - ret = makeAsyncIterable((_ret = ret) === null || _ret === undefined ? undefined : _ret.readable) - } else { - ret = makeAsyncIterable(ret) - } - ret = stream(ret, { - signal - }) - if (reading) { - if (!isIterable(ret, true)) { - throw new ERR_INVALID_RETURN_VALUE('AsyncIterable', `transform[${i - 1}]`, ret) - } - } else { - var _ret2 - if (!PassThrough) { - PassThrough = __nccwpck_require__(23046) - } - // If the last argument to pipeline is not a stream - // we must create a proxy stream so that pipeline(...) - // always returns a stream which can be further - // composed through `.pipe(stream)`. - const pt = new PassThrough({ - objectMode: true - }) +/***/ }), - // Handle Promises/A+ spec, `then` could be a getter that throws on - // second use. - const then = (_ret2 = ret) === null || _ret2 === undefined ? 
undefined : _ret2.then - if (typeof then === 'function') { - finishCount++ - then.call( - ret, - (val) => { - value = val - if (val != null) { - pt.write(val) - } - if (end) { - pt.end() - } - process.nextTick(finish) - }, - (err) => { - pt.destroy(err) - process.nextTick(finish, err) - } - ) - } else if (isIterable(ret, true)) { - finishCount++ - pumpToNode(ret, pt, finish, { - end - }) - } else if (isReadableStream(ret) || isTransformStream(ret)) { - const toRead = ret.readable || ret - finishCount++ - pumpToNode(toRead, pt, finish, { - end - }) - } else { - throw new ERR_INVALID_RETURN_VALUE('AsyncIterable or Promise', 'destination', ret) - } - ret = pt - const { destroy, cleanup } = destroyer(ret, false, true) - destroys.push(destroy) - if (isLastStream) { - lastStreamCleanup.push(cleanup) - } - } - } else if (isNodeStream(stream)) { - if (isReadableNodeStream(ret)) { - finishCount += 2 - const cleanup = pipe(ret, stream, finish, { - end - }) - if (isReadable(stream) && isLastStream) { - lastStreamCleanup.push(cleanup) - } - } else if (isTransformStream(ret) || isReadableStream(ret)) { - const toRead = ret.readable || ret - finishCount++ - pumpToNode(toRead, stream, finish, { - end - }) - } else if (isIterable(ret)) { - finishCount++ - pumpToNode(ret, stream, finish, { - end - }) - } else { - throw new ERR_INVALID_ARG_TYPE( - 'val', - ['Readable', 'Iterable', 'AsyncIterable', 'ReadableStream', 'TransformStream'], - ret - ) - } - ret = stream - } else if (isWebStream(stream)) { - if (isReadableNodeStream(ret)) { - finishCount++ - pumpToWeb(makeAsyncIterable(ret), stream, finish, { - end - }) - } else if (isReadableStream(ret) || isIterable(ret)) { - finishCount++ - pumpToWeb(ret, stream, finish, { - end - }) - } else if (isTransformStream(ret)) { - finishCount++ - pumpToWeb(ret.readable, stream, finish, { - end - }) - } else { - throw new ERR_INVALID_ARG_TYPE( - 'val', - ['Readable', 'Iterable', 'AsyncIterable', 'ReadableStream', 'TransformStream'], - ret - ) - } - ret = stream - } else { - ret = Duplex.from(stream) - } +/***/ 76341: +/***/ ((module) => { + +// for now just expose the builtin process global from node.js +module.exports = global.process; + + +/***/ }), + +/***/ 27229: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var parseUrl = (__nccwpck_require__(57310).parse); + +var DEFAULT_PORTS = { + ftp: 21, + gopher: 70, + http: 80, + https: 443, + ws: 80, + wss: 443, +}; + +var stringEndsWith = String.prototype.endsWith || function(s) { + return s.length <= this.length && + this.indexOf(s, this.length - s.length) !== -1; +}; + +/** + * @param {string|object} url - The URL, or the result from url.parse. + * @return {string} The URL of the proxy that should handle the request to the + * given URL. If no proxy is set, this will be an empty string. + */ +function getProxyForUrl(url) { + var parsedUrl = typeof url === 'string' ? parseUrl(url) : url || {}; + var proto = parsedUrl.protocol; + var hostname = parsedUrl.host; + var port = parsedUrl.port; + if (typeof hostname !== 'string' || !hostname || typeof proto !== 'string') { + return ''; // Don't proxy URLs without a valid scheme or host. 
} - if ( - (signal !== null && signal !== undefined && signal.aborted) || - (outerSignal !== null && outerSignal !== undefined && outerSignal.aborted) - ) { - process.nextTick(abort) + + proto = proto.split(':', 1)[0]; + // Stripping ports in this way instead of using parsedUrl.hostname to make + // sure that the brackets around IPv6 addresses are kept. + hostname = hostname.replace(/:\d*$/, ''); + port = parseInt(port) || DEFAULT_PORTS[proto] || 0; + if (!shouldProxy(hostname, port)) { + return ''; // Don't proxy URLs that match NO_PROXY. } - return ret + + var proxy = + getEnv('npm_config_' + proto + '_proxy') || + getEnv(proto + '_proxy') || + getEnv('npm_config_proxy') || + getEnv('all_proxy'); + if (proxy && proxy.indexOf('://') === -1) { + // Missing scheme in proxy, default to the requested URL's scheme. + proxy = proto + '://' + proxy; + } + return proxy; } -function pipe(src, dst, finish, { end }) { - let ended = false - dst.on('close', () => { - if (!ended) { - // Finish if the destination closes before the source has completed. - finish(new ERR_STREAM_PREMATURE_CLOSE()) - } - }) - src.pipe(dst, { - end: false - }) // If end is true we already will have a listener to end dst. - if (end) { - // Compat. Before node v10.12.0 stdio used to throw an error so - // pipe() did/does not end() stdio destinations. - // Now they allow it but "secretly" don't close the underlying fd. +/** + * Determines whether a given URL should be proxied. + * + * @param {string} hostname - The host name of the URL. + * @param {number} port - The effective port of the URL. + * @returns {boolean} Whether the given URL should be proxied. + * @private + */ +function shouldProxy(hostname, port) { + var NO_PROXY = + (getEnv('npm_config_no_proxy') || getEnv('no_proxy')).toLowerCase(); + if (!NO_PROXY) { + return true; // Always proxy if NO_PROXY is not set. + } + if (NO_PROXY === '*') { + return false; // Never proxy if wildcard is set. + } - function endFn() { - ended = true - dst.end() + return NO_PROXY.split(/[,\s]/).every(function(proxy) { + if (!proxy) { + return true; // Skip zero-length hosts. } - if (isReadableFinished(src)) { - // End the destination if the source has already ended. - process.nextTick(endFn) - } else { - src.once('end', endFn) + var parsedProxy = proxy.match(/^(.+):(\d+)$/); + var parsedProxyHostname = parsedProxy ? parsedProxy[1] : proxy; + var parsedProxyPort = parsedProxy ? parseInt(parsedProxy[2]) : 0; + if (parsedProxyPort && parsedProxyPort !== port) { + return true; // Skip if ports don't match. } - } else { - finish() - } - eos( - src, - { - readable: true, - writable: false - }, - (err) => { - const rState = src._readableState - if ( - err && - err.code === 'ERR_STREAM_PREMATURE_CLOSE' && - rState && - rState.ended && - !rState.errored && - !rState.errorEmitted - ) { - // Some readable streams will emit 'close' before 'end'. However, since - // this is on the readable side 'end' should still be emitted if the - // stream has been ended and no error emitted. This should be allowed in - // favor of backwards compatibility. Since the stream is piped to a - // destination this should not result in any observable difference. - // We don't need to check if this is a writable premature close since - // eos will only fail with premature close on the reading side for - // duplex streams. - src.once('end', finish).once('error', finish) - } else { - finish(err) - } + + if (!/^[.*]/.test(parsedProxyHostname)) { + // No wildcards, so stop proxying if there is an exact match. 
+ return hostname !== parsedProxyHostname; } - ) - return eos( - dst, - { - readable: false, - writable: true - }, - finish - ) + + if (parsedProxyHostname.charAt(0) === '*') { + // Remove leading wildcard. + parsedProxyHostname = parsedProxyHostname.slice(1); + } + // Stop proxying if the hostname ends with the no_proxy host. + return !stringEndsWith.call(hostname, parsedProxyHostname); + }); } -module.exports = { - pipelineImpl, - pipeline + +/** + * Get the value for an environment variable. + * + * @param {string} key - The name of the environment variable. + * @return {string} The value of the environment variable. + * @private + */ +function getEnv(key) { + return process.env[key.toLowerCase()] || process.env[key.toUpperCase()] || ''; } +exports.getProxyForUrl = getProxyForUrl; + /***/ }), -/***/ 14252: +/***/ 99702: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/* replacement start */ +module.exports = (typeof process !== 'undefined' && typeof process.nextTick === 'function') + ? process.nextTick.bind(process) + : __nccwpck_require__(74799) -const process = __nccwpck_require__(54340) -/* replacement end */ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. 
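// --- Editorial sketch (not part of the generated bundle) --------------------
// The module above is the `proxy-from-env` package: it resolves an outbound
// proxy for a given URL purely from environment variables, honoring NO_PROXY.
// A minimal usage sketch, assuming the package is required directly rather
// than through this bundle; the proxy host and NO_PROXY values are
// hypothetical.
const { getProxyForUrl } = require('proxy-from-env');

process.env.https_proxy = 'https://proxy.example.com:8443'; // hypothetical proxy
process.env.no_proxy = 'localhost,.internal.example';       // hosts to bypass

// Lookup order per protocol: npm_config_https_proxy, https_proxy,
// npm_config_proxy, all_proxy; a proxy value without '://' inherits the
// requested URL's scheme.
console.log(getProxyForUrl('https://api.github.com/')); // 'https://proxy.example.com:8443'
console.log(getProxyForUrl('http://localhost:3000/'));  // '' -> NO_PROXY match, not proxied
// -----------------------------------------------------------------------------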
+/***/ }), -;('use strict') -const { - ArrayPrototypeIndexOf, - NumberIsInteger, - NumberIsNaN, - NumberParseInt, - ObjectDefineProperties, - ObjectKeys, - ObjectSetPrototypeOf, - Promise, - SafeSet, - SymbolAsyncDispose, - SymbolAsyncIterator, - Symbol -} = __nccwpck_require__(62141) -module.exports = Readable -Readable.ReadableState = ReadableState -const { EventEmitter: EE } = __nccwpck_require__(82361) -const { Stream, prependListener } = __nccwpck_require__(26501) -const { Buffer } = __nccwpck_require__(14300) -const { addAbortSignal } = __nccwpck_require__(50275) -const eos = __nccwpck_require__(89217) -let debug = (__nccwpck_require__(32504).debuglog)('stream', (fn) => { - debug = fn -}) -const BufferList = __nccwpck_require__(75501) -const destroyImpl = __nccwpck_require__(47493) -const { getHighWaterMark, getDefaultHighWaterMark } = __nccwpck_require__(89025) -const { - aggregateTwoErrors, - codes: { - ERR_INVALID_ARG_TYPE, - ERR_METHOD_NOT_IMPLEMENTED, - ERR_OUT_OF_RANGE, - ERR_STREAM_PUSH_AFTER_EOF, - ERR_STREAM_UNSHIFT_AFTER_END_EVENT - }, - AbortError -} = __nccwpck_require__(49939) -const { validateObject } = __nccwpck_require__(95529) -const kPaused = Symbol('kPaused') -const { StringDecoder } = __nccwpck_require__(71576) -const from = __nccwpck_require__(42725) -ObjectSetPrototypeOf(Readable.prototype, Stream.prototype) -ObjectSetPrototypeOf(Readable, Stream) -const nop = () => {} -const { errorOrDestroy } = destroyImpl -const kObjectMode = 1 << 0 -const kEnded = 1 << 1 -const kEndEmitted = 1 << 2 -const kReading = 1 << 3 -const kConstructed = 1 << 4 -const kSync = 1 << 5 -const kNeedReadable = 1 << 6 -const kEmittedReadable = 1 << 7 -const kReadableListening = 1 << 8 -const kResumeScheduled = 1 << 9 -const kErrorEmitted = 1 << 10 -const kEmitClose = 1 << 11 -const kAutoDestroy = 1 << 12 -const kDestroyed = 1 << 13 -const kClosed = 1 << 14 -const kCloseEmitted = 1 << 15 -const kMultiAwaitDrain = 1 << 16 -const kReadingMore = 1 << 17 -const kDataEmitted = 1 << 18 +/***/ 74799: +/***/ ((module) => { -// TODO(benjamingr) it is likely slower to do it this way than with free functions -function makeBitMapDescriptor(bit) { - return { - enumerable: false, - get() { - return (this.state & bit) !== 0 - }, - set(value) { - if (value) this.state |= bit - else this.state &= ~bit - } - } -} -ObjectDefineProperties(ReadableState.prototype, { - objectMode: makeBitMapDescriptor(kObjectMode), - ended: makeBitMapDescriptor(kEnded), - endEmitted: makeBitMapDescriptor(kEndEmitted), - reading: makeBitMapDescriptor(kReading), - // Stream is still being constructed and cannot be - // destroyed until construction finished or failed. - // Async construction is opt in, therefore we start as - // constructed. - constructed: makeBitMapDescriptor(kConstructed), - // A flag to be able to tell if the event 'readable'/'data' is emitted - // immediately, or on a later tick. We set this to true at first, because - // any actions that shouldn't happen until "later" should generally also - // not happen before the first read call. - sync: makeBitMapDescriptor(kSync), - // Whenever we return null, then we set a flag to say - // that we're awaiting a 'readable' event emission. - needReadable: makeBitMapDescriptor(kNeedReadable), - emittedReadable: makeBitMapDescriptor(kEmittedReadable), - readableListening: makeBitMapDescriptor(kReadableListening), - resumeScheduled: makeBitMapDescriptor(kResumeScheduled), - // True if the error was already emitted and should not be thrown again. 
- errorEmitted: makeBitMapDescriptor(kErrorEmitted), - emitClose: makeBitMapDescriptor(kEmitClose), - autoDestroy: makeBitMapDescriptor(kAutoDestroy), - // Has it been destroyed. - destroyed: makeBitMapDescriptor(kDestroyed), - // Indicates whether the stream has finished destroying. - closed: makeBitMapDescriptor(kClosed), - // True if close has been emitted or would have been emitted - // depending on emitClose. - closeEmitted: makeBitMapDescriptor(kCloseEmitted), - multiAwaitDrain: makeBitMapDescriptor(kMultiAwaitDrain), - // If true, a maybeReadMore has been scheduled. - readingMore: makeBitMapDescriptor(kReadingMore), - dataEmitted: makeBitMapDescriptor(kDataEmitted) -}) -function ReadableState(options, stream, isDuplex) { - // Duplex streams are both readable and writable, but share - // the same options object. - // However, some cases require setting options to different - // values for the readable and the writable sides of the duplex stream. - // These options can be provided separately as readableXXX and writableXXX. - if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof __nccwpck_require__(7280) - - // Bit map field to store ReadableState more effciently with 1 bit per field - // instead of a V8 slot per field. - this.state = kEmitClose | kAutoDestroy | kConstructed | kSync - // Object stream flag. Used to make read(n) ignore n and to - // make all the buffer merging and length checks go away. - if (options && options.objectMode) this.state |= kObjectMode - if (isDuplex && options && options.readableObjectMode) this.state |= kObjectMode +module.exports = typeof queueMicrotask === 'function' ? queueMicrotask : (fn) => Promise.resolve().then(fn) - // The point at which it stops calling _read() to fill the buffer - // Note: 0 is a valid value, means "don't call _read preemptively ever" - this.highWaterMark = options - ? getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex) - : getDefaultHighWaterMark(false) - // A linked list is used to store data chunks instead of an array because the - // linked list can remove elements from the beginning faster than - // array.shift(). - this.buffer = new BufferList() - this.length = 0 - this.pipes = [] - this.flowing = null - this[kPaused] = null +/***/ }), - // Should close be emitted on destroy. Defaults to true. - if (options && options.emitClose === false) this.state &= ~kEmitClose +/***/ 69832: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // Should .destroy() be called after 'end' (and potentially 'finish'). - if (options && options.autoDestroy === false) this.state &= ~kAutoDestroy +"use strict"; - // Indicates whether the stream has errored. When true no further - // _read calls, 'data' or 'readable' events should occur. This is needed - // since when autoDestroy is disabled we need a way to tell whether the - // stream has failed. - this.errored = null - // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. - // Everything else in the universe uses 'utf8', though. 
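// --- Editorial sketch (not part of the generated bundle) --------------------
// Modules 99702 and 74799 above give readable-stream a "defer until after the
// current synchronous frame" primitive that also works outside Node: prefer
// process.nextTick, then queueMicrotask, then a Promise-based microtask. A
// standalone restatement of that fallback chain, using the same feature tests
// the bundled code uses:
const deferTick =
  typeof process !== 'undefined' && typeof process.nextTick === 'function'
    ? process.nextTick.bind(process)
    : typeof queueMicrotask === 'function'
      ? queueMicrotask
      : (fn) => Promise.resolve().then(fn);

deferTick(() => console.log('runs after the current synchronous frame'));
console.log('runs first');
// -----------------------------------------------------------------------------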
- this.defaultEncoding = (options && options.defaultEncoding) || 'utf8' +const { SymbolDispose } = __nccwpck_require__(62646) +const { AbortError, codes } = __nccwpck_require__(72184) +const { isNodeStream, isWebStream, kControllerErrorFunction } = __nccwpck_require__(58650) +const eos = __nccwpck_require__(51604) +const { ERR_INVALID_ARG_TYPE } = codes +let addAbortListener - // Ref the piped dest which we need a drain event on it - // type: null | Writable | Set. - this.awaitDrainWriters = null - this.decoder = null - this.encoding = null - if (options && options.encoding) { - this.decoder = new StringDecoder(options.encoding) - this.encoding = options.encoding +// This method is inlined here for readable-stream +// It also does not allow for signal to not exist on the stream +// https://github.com/nodejs/node/pull/36061#discussion_r533718029 +const validateAbortSignal = (signal, name) => { + if (typeof signal !== 'object' || !('aborted' in signal)) { + throw new ERR_INVALID_ARG_TYPE(name, 'AbortSignal', signal) } } -function Readable(options) { - if (!(this instanceof Readable)) return new Readable(options) - - // Checking for a Stream.Duplex instance is faster here instead of inside - // the ReadableState constructor, at least with V8 6.5. - const isDuplex = this instanceof __nccwpck_require__(7280) - this._readableState = new ReadableState(options, this, isDuplex) - if (options) { - if (typeof options.read === 'function') this._read = options.read - if (typeof options.destroy === 'function') this._destroy = options.destroy - if (typeof options.construct === 'function') this._construct = options.construct - if (options.signal && !isDuplex) addAbortSignal(options.signal, this) +module.exports.addAbortSignal = function addAbortSignal(signal, stream) { + validateAbortSignal(signal, 'signal') + if (!isNodeStream(stream) && !isWebStream(stream)) { + throw new ERR_INVALID_ARG_TYPE('stream', ['ReadableStream', 'WritableStream', 'Stream'], stream) } - Stream.call(this, options) - destroyImpl.construct(this, () => { - if (this._readableState.needReadable) { - maybeReadMore(this, this._readableState) - } - }) -} -Readable.prototype.destroy = destroyImpl.destroy -Readable.prototype._undestroy = destroyImpl.undestroy -Readable.prototype._destroy = function (err, cb) { - cb(err) -} -Readable.prototype[EE.captureRejectionSymbol] = function (err) { - this.destroy(err) + return module.exports.addAbortSignalNoValidate(signal, stream) } -Readable.prototype[SymbolAsyncDispose] = function () { - let error - if (!this.destroyed) { - error = this.readableEnded ? null : new AbortError() - this.destroy(error) +module.exports.addAbortSignalNoValidate = function (signal, stream) { + if (typeof signal !== 'object' || !('aborted' in signal)) { + return stream } - return new Promise((resolve, reject) => eos(this, (err) => (err && err !== error ? reject(err) : resolve(null)))) + const onAbort = isNodeStream(stream) + ? () => { + stream.destroy( + new AbortError(undefined, { + cause: signal.reason + }) + ) + } + : () => { + stream[kControllerErrorFunction]( + new AbortError(undefined, { + cause: signal.reason + }) + ) + } + if (signal.aborted) { + onAbort() + } else { + addAbortListener = addAbortListener || (__nccwpck_require__(5915).addAbortListener) + const disposable = addAbortListener(signal, onAbort) + eos(stream, disposable[SymbolDispose]) + } + return stream } -// Manually shove something into the read() buffer. 
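// --- Editorial sketch (not part of the generated bundle) --------------------
// The addAbortSignal helper above ties a stream's lifetime to an AbortSignal:
// on abort it calls destroy() (Node streams) or the controller error function
// (web streams) with an AbortError carrying signal.reason. Node core exposes
// the same helper as stream.addAbortSignal; a minimal sketch:
const { addAbortSignal, Readable } = require('stream');

const controller = new AbortController();
const source = addAbortSignal(controller.signal, Readable.from(['a', 'b', 'c']));

source.on('error', (err) => console.log(err.name)); // 'AbortError' after abort()
controller.abort(); // destroys the stream with an AbortError
// -----------------------------------------------------------------------------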
-// This returns true if the highWaterMark has not been hit yet, -// similar to how Writable.write() returns true if you should -// write() some more. -Readable.prototype.push = function (chunk, encoding) { - return readableAddChunk(this, chunk, encoding, false) -} -// Unshift should *always* be something directly out of read(). -Readable.prototype.unshift = function (chunk, encoding) { - return readableAddChunk(this, chunk, encoding, true) -} -function readableAddChunk(stream, chunk, encoding, addToFront) { - debug('readableAddChunk', chunk) - const state = stream._readableState - let err - if ((state.state & kObjectMode) === 0) { - if (typeof chunk === 'string') { - encoding = encoding || state.defaultEncoding - if (state.encoding !== encoding) { - if (addToFront && state.encoding) { - // When unshifting, if state.encoding is set, we have to save - // the string in the BufferList with the state encoding. - chunk = Buffer.from(chunk, encoding).toString(state.encoding) - } else { - chunk = Buffer.from(chunk, encoding) - encoding = '' - } - } - } else if (chunk instanceof Buffer) { - encoding = '' - } else if (Stream._isUint8Array(chunk)) { - chunk = Stream._uint8ArrayToBuffer(chunk) - encoding = '' - } else if (chunk != null) { - err = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk) +/***/ }), + +/***/ 92726: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { StringPrototypeSlice, SymbolIterator, TypedArrayPrototypeSet, Uint8Array } = __nccwpck_require__(62646) +const { Buffer } = __nccwpck_require__(14300) +const { inspect } = __nccwpck_require__(5915) +module.exports = class BufferList { + constructor() { + this.head = null + this.tail = null + this.length = 0 + } + push(v) { + const entry = { + data: v, + next: null } + if (this.length > 0) this.tail.next = entry + else this.head = entry + this.tail = entry + ++this.length } - if (err) { - errorOrDestroy(stream, err) - } else if (chunk === null) { - state.state &= ~kReading - onEofChunk(stream, state) - } else if ((state.state & kObjectMode) !== 0 || (chunk && chunk.length > 0)) { - if (addToFront) { - if ((state.state & kEndEmitted) !== 0) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT()) - else if (state.destroyed || state.errored) return false - else addChunk(stream, state, chunk, true) - } else if (state.ended) { - errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()) - } else if (state.destroyed || state.errored) { - return false - } else { - state.state &= ~kReading - if (state.decoder && !encoding) { - chunk = state.decoder.write(chunk) - if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false) - else maybeReadMore(stream, state) - } else { - addChunk(stream, state, chunk, false) - } + unshift(v) { + const entry = { + data: v, + next: this.head } - } else if (!addToFront) { - state.state &= ~kReading - maybeReadMore(stream, state) + if (this.length === 0) this.tail = entry + this.head = entry + ++this.length } - - // We can push more data if we are below the highWaterMark. - // Also, if we have no data yet, we can stand some more bytes. - // This is to work around cases where hwm=0, such as the repl. 
- return !state.ended && (state.length < state.highWaterMark || state.length === 0) -} -function addChunk(stream, state, chunk, addToFront) { - if (state.flowing && state.length === 0 && !state.sync && stream.listenerCount('data') > 0) { - // Use the guard to avoid creating `Set()` repeatedly - // when we have multiple pipes. - if ((state.state & kMultiAwaitDrain) !== 0) { - state.awaitDrainWriters.clear() - } else { - state.awaitDrainWriters = null + shift() { + if (this.length === 0) return + const ret = this.head.data + if (this.length === 1) this.head = this.tail = null + else this.head = this.head.next + --this.length + return ret + } + clear() { + this.head = this.tail = null + this.length = 0 + } + join(s) { + if (this.length === 0) return '' + let p = this.head + let ret = '' + p.data + while ((p = p.next) !== null) ret += s + p.data + return ret + } + concat(n) { + if (this.length === 0) return Buffer.alloc(0) + const ret = Buffer.allocUnsafe(n >>> 0) + let p = this.head + let i = 0 + while (p) { + TypedArrayPrototypeSet(ret, p.data, i) + i += p.data.length + p = p.next } - state.dataEmitted = true - stream.emit('data', chunk) - } else { - // Update the buffer info. - state.length += state.objectMode ? 1 : chunk.length - if (addToFront) state.buffer.unshift(chunk) - else state.buffer.push(chunk) - if ((state.state & kNeedReadable) !== 0) emitReadable(stream) + return ret } - maybeReadMore(stream, state) -} -Readable.prototype.isPaused = function () { - const state = this._readableState - return state[kPaused] === true || state.flowing === false -} -// Backwards compatibility. -Readable.prototype.setEncoding = function (enc) { - const decoder = new StringDecoder(enc) - this._readableState.decoder = decoder - // If setEncoding(null), decoder.encoding equals utf8. - this._readableState.encoding = this._readableState.decoder.encoding - const buffer = this._readableState.buffer - // Iterate over current buffer to convert already stored Buffers: - let content = '' - for (const data of buffer) { - content += decoder.write(data) + // Consumes a specified amount of bytes or characters from the buffered data. + consume(n, hasStrings) { + const data = this.head.data + if (n < data.length) { + // `slice` is the same for buffers and strings. + const slice = data.slice(0, n) + this.head.data = data.slice(n) + return slice + } + if (n === data.length) { + // First chunk is a perfect match. + return this.shift() + } + // Result spans more than one buffer. + return hasStrings ? this._getString(n) : this._getBuffer(n) } - buffer.clear() - if (content !== '') buffer.push(content) - this._readableState.length = content.length - return this -} - -// Don't raise the hwm > 1GB. -const MAX_HWM = 0x40000000 -function computeNewHighWaterMark(n) { - if (n > MAX_HWM) { - throw new ERR_OUT_OF_RANGE('size', '<= 1GiB', n) - } else { - // Get the next highest power of 2 to prevent increasing hwm excessively in - // tiny amounts. - n-- - n |= n >>> 1 - n |= n >>> 2 - n |= n >>> 4 - n |= n >>> 8 - n |= n >>> 16 - n++ + first() { + return this.head.data } - return n -} - -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function howMuchToRead(n, state) { - if (n <= 0 || (state.length === 0 && state.ended)) return 0 - if ((state.state & kObjectMode) !== 0) return 1 - if (NumberIsNaN(n)) { - // Only flow one buffer at a time. 
- if (state.flowing && state.length) return state.buffer.first().length - return state.length + *[SymbolIterator]() { + for (let p = this.head; p; p = p.next) { + yield p.data + } } - if (n <= state.length) return n - return state.ended ? state.length : 0 -} -// You can override either this method, or the async _read(n) below. -Readable.prototype.read = function (n) { - debug('read', n) - // Same as parseInt(undefined, 10), however V8 7.3 performance regressed - // in this scenario, so we are doing it manually. - if (n === undefined) { - n = NaN - } else if (!NumberIsInteger(n)) { - n = NumberParseInt(n, 10) + // Consumes a specified amount of characters from the buffered data. + _getString(n) { + let ret = '' + let p = this.head + let c = 0 + do { + const str = p.data + if (n > str.length) { + ret += str + n -= str.length + } else { + if (n === str.length) { + ret += str + ++c + if (p.next) this.head = p.next + else this.head = this.tail = null + } else { + ret += StringPrototypeSlice(str, 0, n) + this.head = p + p.data = StringPrototypeSlice(str, n) + } + break + } + ++c + } while ((p = p.next) !== null) + this.length -= c + return ret } - const state = this._readableState - const nOrig = n - // If we're asking for more than the current hwm, then raise the hwm. - if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n) - if (n !== 0) state.state &= ~kEmittedReadable - - // If we're doing read(0) to trigger a readable event, but we - // already have a bunch of data in the buffer, then just trigger - // the 'readable' event and move on. - if ( - n === 0 && - state.needReadable && - ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended) - ) { - debug('read: emitReadable', state.length, state.ended) - if (state.length === 0 && state.ended) endReadable(this) - else emitReadable(this) - return null + // Consumes a specified amount of bytes from the buffered data. + _getBuffer(n) { + const ret = Buffer.allocUnsafe(n) + const retLen = n + let p = this.head + let c = 0 + do { + const buf = p.data + if (n > buf.length) { + TypedArrayPrototypeSet(ret, buf, retLen - n) + n -= buf.length + } else { + if (n === buf.length) { + TypedArrayPrototypeSet(ret, buf, retLen - n) + ++c + if (p.next) this.head = p.next + else this.head = this.tail = null + } else { + TypedArrayPrototypeSet(ret, new Uint8Array(buf.buffer, buf.byteOffset, n), retLen - n) + this.head = p + p.data = buf.slice(n) + } + break + } + ++c + } while ((p = p.next) !== null) + this.length -= c + return ret } - n = howMuchToRead(n, state) - // If we've ended, and we're now clear, then finish it up. - if (n === 0 && state.ended) { - if (state.length === 0) endReadable(this) - return null + // Make sure the linked list only shows the minimal necessary information. + [Symbol.for('nodejs.util.inspect.custom')](_, options) { + return inspect(this, { + ...options, + // Only inspect one level. + depth: 0, + // It should not recurse. + customInspect: false + }) } +} - // All the actual chunk generation logic needs to be - // *below* the call to _read. The reason is that in certain - // synthetic stream cases, such as passthrough streams, _read - // may be a completely synchronous operation which may change - // the state of the read buffer, providing enough data when - // before there was *not* enough. - // - // So, the steps are: - // 1. Figure out what the state of things will be after we do - // a read from the buffer. - // - // 2. 
If that resulting state will trigger a _read, then call _read. - // Note that this may be asynchronous, or synchronous. Yes, it is - // deeply ugly to write APIs this way, but that still doesn't mean - // that the Readable class should behave improperly, as streams are - // designed to be sync/async agnostic. - // Take note if the _read call is sync or async (ie, if the read call - // has returned yet), so that we know whether or not it's safe to emit - // 'readable' etc. - // - // 3. Actually pull the requested chunks out of the buffer and return. - // if we need a readable event, then we need to do some reading. - let doRead = (state.state & kNeedReadable) !== 0 - debug('need readable', doRead) +/***/ }), - // If we currently have less than the highWaterMark, then also read some. - if (state.length === 0 || state.length - n < state.highWaterMark) { - doRead = true - debug('length less than watermark', doRead) - } +/***/ 70724: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // However, if we've ended, then there's no point, if we're already - // reading, then it's unnecessary, if we're constructing we have to wait, - // and if we're destroyed or errored, then it's not allowed, - if (state.ended || state.reading || state.destroyed || state.errored || !state.constructed) { - doRead = false - debug('reading, ended or constructing', doRead) - } else if (doRead) { - debug('do read') - state.state |= kReading | kSync - // If the length is currently zero, then we *need* a readable event. - if (state.length === 0) state.state |= kNeedReadable +"use strict"; - // Call internal read method - try { - this._read(state.highWaterMark) - } catch (err) { - errorOrDestroy(this, err) - } - state.state &= ~kSync - // If _read pushed data synchronously, then `reading` will be false, - // and we need to re-evaluate how much data we can return to the user. - if (!state.reading) n = howMuchToRead(nOrig, state) +const { pipeline } = __nccwpck_require__(77866) +const Duplex = __nccwpck_require__(67799) +const { destroyer } = __nccwpck_require__(80064) +const { + isNodeStream, + isReadable, + isWritable, + isWebStream, + isTransformStream, + isWritableStream, + isReadableStream +} = __nccwpck_require__(58650) +const { + AbortError, + codes: { ERR_INVALID_ARG_VALUE, ERR_MISSING_ARGS } +} = __nccwpck_require__(72184) +const eos = __nccwpck_require__(51604) +module.exports = function compose(...streams) { + if (streams.length === 0) { + throw new ERR_MISSING_ARGS('streams') } - let ret - if (n > 0) ret = fromList(n, state) - else ret = null - if (ret === null) { - state.needReadable = state.length <= state.highWaterMark - n = 0 - } else { - state.length -= n - if (state.multiAwaitDrain) { - state.awaitDrainWriters.clear() - } else { - state.awaitDrainWriters = null - } + if (streams.length === 1) { + return Duplex.from(streams[0]) } - if (state.length === 0) { - // If we have nothing in the buffer, then we want to know - // as soon as we *do* get something into the buffer. - if (!state.ended) state.needReadable = true - - // If we tried to read() past the EOF, then emit end on the next tick. 
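// --- Editorial sketch (not part of the generated bundle) --------------------
// The BufferList class added above (module 92726) is the singly linked list
// that backs a Readable's internal buffer: shift() from the head is O(1),
// unlike Array.prototype.shift(), and consume(n) can slice a partial chunk or
// stitch several chunks together. A standalone sketch of the same shape, kept
// to push/shift to show the O(1) head removal:
class MiniBufferList {
  constructor() { this.head = null; this.tail = null; this.length = 0; }
  push(data) {
    const entry = { data, next: null };
    if (this.length > 0) this.tail.next = entry; else this.head = entry;
    this.tail = entry;
    ++this.length;
  }
  shift() {
    if (this.length === 0) return;
    const ret = this.head.data;
    if (this.length === 1) this.head = this.tail = null;
    else this.head = this.head.next;
    --this.length;
    return ret;
  }
}

const list = new MiniBufferList();
list.push(Buffer.from('ab'));
list.push(Buffer.from('cd'));
console.log(list.shift().toString()); // 'ab' -- O(1) removal from the head
// -----------------------------------------------------------------------------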
- if (nOrig !== n && state.ended) endReadable(this) + const orgStreams = [...streams] + if (typeof streams[0] === 'function') { + streams[0] = Duplex.from(streams[0]) } - if (ret !== null && !state.errorEmitted && !state.closeEmitted) { - state.dataEmitted = true - this.emit('data', ret) + if (typeof streams[streams.length - 1] === 'function') { + const idx = streams.length - 1 + streams[idx] = Duplex.from(streams[idx]) } - return ret -} -function onEofChunk(stream, state) { - debug('onEofChunk') - if (state.ended) return - if (state.decoder) { - const chunk = state.decoder.end() - if (chunk && chunk.length) { - state.buffer.push(chunk) - state.length += state.objectMode ? 1 : chunk.length + for (let n = 0; n < streams.length; ++n) { + if (!isNodeStream(streams[n]) && !isWebStream(streams[n])) { + // TODO(ronag): Add checks for non streams. + continue } - } - state.ended = true - if (state.sync) { - // If we are sync, wait until next tick to emit the data. - // Otherwise we risk emitting data in the flow() - // the readable code triggers during a read() call. - emitReadable(stream) - } else { - // Emit 'readable' now to make sure it gets picked up. - state.needReadable = false - state.emittedReadable = true - // We have to emit readable now that we are EOF. Modules - // in the ecosystem (e.g. dicer) rely on this event being sync. - emitReadable_(stream) - } -} - -// Don't emit readable right away in sync mode, because this can trigger -// another read() call => stack overflow. This way, it might trigger -// a nextTick recursion warning, but that's not so bad. -function emitReadable(stream) { - const state = stream._readableState - debug('emitReadable', state.needReadable, state.emittedReadable) - state.needReadable = false - if (!state.emittedReadable) { - debug('emitReadable', state.flowing) - state.emittedReadable = true - process.nextTick(emitReadable_, stream) - } -} -function emitReadable_(stream) { - const state = stream._readableState - debug('emitReadable_', state.destroyed, state.length, state.ended) - if (!state.destroyed && !state.errored && (state.length || state.ended)) { - stream.emit('readable') - state.emittedReadable = false - } - - // The stream needs another readable event if: - // 1. It is not flowing, as the flow mechanism will take - // care of it. - // 2. It is not ended. - // 3. It is below the highWaterMark, so we can schedule - // another readable later. - state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark - flow(stream) -} - -// At this point, the user has presumably seen the 'readable' event, -// and called read() to consume some data. that may have triggered -// in turn another _read(n) call, in which case reading = true if -// it's in progress. -// However, if we're not ended, or reading, and the length < hwm, -// then go ahead and try to read some more preemptively. -function maybeReadMore(stream, state) { - if (!state.readingMore && state.constructed) { - state.readingMore = true - process.nextTick(maybeReadMore_, stream, state) - } -} -function maybeReadMore_(stream, state) { - // Attempt to read more data if we should. - // - // The conditions for reading more data are (one of): - // - Not enough data buffered (state.length < state.highWaterMark). The loop - // is responsible for filling the buffer with enough data if such data - // is available. If highWaterMark is 0 and we are not in the flowing mode - // we should _not_ attempt to buffer any extra data. 
We'll get more data - // when the stream consumer calls read() instead. - // - No data in the buffer, and the stream is in flowing mode. In this mode - // the loop below is responsible for ensuring read() is called. Failing to - // call read here would abort the flow and there's no other mechanism for - // continuing the flow if the stream consumer has just subscribed to the - // 'data' event. - // - // In addition to the above conditions to keep reading data, the following - // conditions prevent the data from being read: - // - The stream has ended (state.ended). - // - There is already a pending 'read' operation (state.reading). This is a - // case where the stream has called the implementation defined _read() - // method, but they are processing the call asynchronously and have _not_ - // called push() with new data. In this case we skip performing more - // read()s. The execution ends in this method again after the _read() ends - // up calling push() with more data. - while ( - !state.reading && - !state.ended && - (state.length < state.highWaterMark || (state.flowing && state.length === 0)) - ) { - const len = state.length - debug('maybeReadMore read 0') - stream.read(0) - if (len === state.length) - // Didn't get any data, stop spinning. - break - } - state.readingMore = false -} - -// Abstract method. to be overridden in specific implementation classes. -// call cb(er, data) where data is <= n in length. -// for virtual (non-string, non-buffer) streams, "length" is somewhat -// arbitrary, and perhaps not very meaningful. -Readable.prototype._read = function (n) { - throw new ERR_METHOD_NOT_IMPLEMENTED('_read()') -} -Readable.prototype.pipe = function (dest, pipeOpts) { - const src = this - const state = this._readableState - if (state.pipes.length === 1) { - if (!state.multiAwaitDrain) { - state.multiAwaitDrain = true - state.awaitDrainWriters = new SafeSet(state.awaitDrainWriters ? [state.awaitDrainWriters] : []) + if ( + n < streams.length - 1 && + !(isReadable(streams[n]) || isReadableStream(streams[n]) || isTransformStream(streams[n])) + ) { + throw new ERR_INVALID_ARG_VALUE(`streams[${n}]`, orgStreams[n], 'must be readable') } - } - state.pipes.push(dest) - debug('pipe count=%d opts=%j', state.pipes.length, pipeOpts) - const doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr - const endFn = doEnd ? onend : unpipe - if (state.endEmitted) process.nextTick(endFn) - else src.once('end', endFn) - dest.on('unpipe', onunpipe) - function onunpipe(readable, unpipeInfo) { - debug('onunpipe') - if (readable === src) { - if (unpipeInfo && unpipeInfo.hasUnpiped === false) { - unpipeInfo.hasUnpiped = true - cleanup() - } + if (n > 0 && !(isWritable(streams[n]) || isWritableStream(streams[n]) || isTransformStream(streams[n]))) { + throw new ERR_INVALID_ARG_VALUE(`streams[${n}]`, orgStreams[n], 'must be writable') } } - function onend() { - debug('onend') - dest.end() - } let ondrain - let cleanedUp = false - function cleanup() { - debug('cleanup') - // Cleanup event handlers once the pipe is broken. 
- dest.removeListener('close', onclose) - dest.removeListener('finish', onfinish) - if (ondrain) { - dest.removeListener('drain', ondrain) + let onfinish + let onreadable + let onclose + let d + function onfinished(err) { + const cb = onclose + onclose = null + if (cb) { + cb(err) + } else if (err) { + d.destroy(err) + } else if (!readable && !writable) { + d.destroy() } - dest.removeListener('error', onerror) - dest.removeListener('unpipe', onunpipe) - src.removeListener('end', onend) - src.removeListener('end', unpipe) - src.removeListener('data', ondata) - cleanedUp = true - - // If the reader is waiting for a drain event from this - // specific writer, then it would cause it to never start - // flowing again. - // So, if this is awaiting a drain, then we just call it now. - // If we don't know, then assume that we are waiting for one. - if (ondrain && state.awaitDrainWriters && (!dest._writableState || dest._writableState.needDrain)) ondrain() } - function pause() { - // If the user unpiped during `dest.write()`, it is possible - // to get stuck in a permanently paused state if that write - // also returned false. - // => Check whether `dest` is still a piping destination. - if (!cleanedUp) { - if (state.pipes.length === 1 && state.pipes[0] === dest) { - debug('false write response, pause', 0) - state.awaitDrainWriters = dest - state.multiAwaitDrain = false - } else if (state.pipes.length > 1 && state.pipes.includes(dest)) { - debug('false write response, pause', state.awaitDrainWriters.size) - state.awaitDrainWriters.add(dest) + const head = streams[0] + const tail = pipeline(streams, onfinished) + const writable = !!(isWritable(head) || isWritableStream(head) || isTransformStream(head)) + const readable = !!(isReadable(tail) || isReadableStream(tail) || isTransformStream(tail)) + + // TODO(ronag): Avoid double buffering. + // Implement Writable/Readable/Duplex traits. + // See, https://github.com/nodejs/node/pull/33515. + d = new Duplex({ + // TODO (ronag): highWaterMark? + writableObjectMode: !!(head !== null && head !== undefined && head.writableObjectMode), + readableObjectMode: !!(tail !== null && tail !== undefined && tail.readableObjectMode), + writable, + readable + }) + if (writable) { + if (isNodeStream(head)) { + d._write = function (chunk, encoding, callback) { + if (head.write(chunk, encoding)) { + callback() + } else { + ondrain = callback + } + } + d._final = function (callback) { + head.end() + onfinish = callback + } + head.on('drain', function () { + if (ondrain) { + const cb = ondrain + ondrain = null + cb() + } + }) + } else if (isWebStream(head)) { + const writable = isTransformStream(head) ? head.writable : head + const writer = writable.getWriter() + d._write = async function (chunk, encoding, callback) { + try { + await writer.ready + writer.write(chunk).catch(() => {}) + callback() + } catch (err) { + callback(err) + } + } + d._final = async function (callback) { + try { + await writer.ready + writer.close().catch(() => {}) + onfinish = callback + } catch (err) { + callback(err) + } } - src.pause() - } - if (!ondrain) { - // When the dest drains, it reduces the awaitDrain counter - // on the source. This would be more elegant with a .once() - // handler in flow(), but adding and removing repeatedly is - // too slow. - ondrain = pipeOnDrain(src, dest) - dest.on('drain', ondrain) } + const toRead = isTransformStream(tail) ? 
tail.readable : tail + eos(toRead, () => { + if (onfinish) { + const cb = onfinish + onfinish = null + cb() + } + }) } - src.on('data', ondata) - function ondata(chunk) { - debug('ondata') - const ret = dest.write(chunk) - debug('dest.write', ret) - if (ret === false) { - pause() + if (readable) { + if (isNodeStream(tail)) { + tail.on('readable', function () { + if (onreadable) { + const cb = onreadable + onreadable = null + cb() + } + }) + tail.on('end', function () { + d.push(null) + }) + d._read = function () { + while (true) { + const buf = tail.read() + if (buf === null) { + onreadable = d._read + return + } + if (!d.push(buf)) { + return + } + } + } + } else if (isWebStream(tail)) { + const readable = isTransformStream(tail) ? tail.readable : tail + const reader = readable.getReader() + d._read = async function () { + while (true) { + try { + const { value, done } = await reader.read() + if (!d.push(value)) { + return + } + if (done) { + d.push(null) + return + } + } catch { + return + } + } + } } } - - // If the dest has an error, then stop piping into it. - // However, don't suppress the throwing behavior for this. - function onerror(er) { - debug('onerror', er) - unpipe() - dest.removeListener('error', onerror) - if (dest.listenerCount('error') === 0) { - const s = dest._writableState || dest._readableState - if (s && !s.errorEmitted) { - // User incorrectly emitted 'error' directly on the stream. - errorOrDestroy(dest, er) - } else { - dest.emit('error', er) + d._destroy = function (err, callback) { + if (!err && onclose !== null) { + err = new AbortError() + } + onreadable = null + ondrain = null + onfinish = null + if (onclose === null) { + callback(err) + } else { + onclose = callback + if (isNodeStream(tail)) { + destroyer(tail, err) } } } + return d +} - // Make sure our error handler is attached before userland ones. - prependListener(dest, 'error', onerror) - // Both close and finish should trigger unpipe, but only once. - function onclose() { - dest.removeListener('finish', onfinish) - unpipe() - } - dest.once('close', onclose) - function onfinish() { - debug('onfinish') - dest.removeListener('close', onclose) - unpipe() - } - dest.once('finish', onfinish) - function unpipe() { - debug('unpipe') - src.unpipe(dest) - } +/***/ }), - // Tell the dest that it's being piped to. - dest.emit('pipe', src) +/***/ 80064: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // Start the flow if it hasn't been started already. +"use strict"; - if (dest.writableNeedDrain === true) { - pause() - } else if (!state.flowing) { - debug('pipe resume') - src.resume() - } - return dest -} -function pipeOnDrain(src, dest) { - return function pipeOnDrainFunctionResult() { - const state = src._readableState - // `ondrain` will call directly, - // `this` maybe not a reference to dest, - // so we use the real dest here. 
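// --- Editorial sketch (not part of the generated bundle) --------------------
// compose() above (module 70724) pipelines the given streams and wraps the
// result in a single Duplex: writes go into the first stream, reads come out
// of the last, and destroying the Duplex tears the whole chain down. Node core
// ships this as the (experimental) stream.compose; a minimal sketch:
const { compose, Transform } = require('stream');

const upper = new Transform({
  transform(chunk, _enc, cb) { cb(null, chunk.toString().toUpperCase()); }
});
const exclaim = new Transform({
  transform(chunk, _enc, cb) { cb(null, chunk + '!'); }
});

const shout = compose(upper, exclaim); // one Duplex: writable head, readable tail
shout.on('data', (chunk) => console.log(chunk.toString())); // 'HI!'
shout.end('hi');
// -----------------------------------------------------------------------------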
- if (state.awaitDrainWriters === dest) { - debug('pipeOnDrain', 1) - state.awaitDrainWriters = null - } else if (state.multiAwaitDrain) { - debug('pipeOnDrain', state.awaitDrainWriters.size) - state.awaitDrainWriters.delete(dest) +/* replacement start */ + +const process = __nccwpck_require__(76341) + +/* replacement end */ + +const { + aggregateTwoErrors, + codes: { ERR_MULTIPLE_CALLBACK }, + AbortError +} = __nccwpck_require__(72184) +const { Symbol } = __nccwpck_require__(62646) +const { kIsDestroyed, isDestroyed, isFinished, isServerRequest } = __nccwpck_require__(58650) +const kDestroy = Symbol('kDestroy') +const kConstruct = Symbol('kConstruct') +function checkError(err, w, r) { + if (err) { + // Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364 + err.stack // eslint-disable-line no-unused-expressions + + if (w && !w.errored) { + w.errored = err } - if ((!state.awaitDrainWriters || state.awaitDrainWriters.size === 0) && src.listenerCount('data')) { - src.resume() + if (r && !r.errored) { + r.errored = err } } } -Readable.prototype.unpipe = function (dest) { - const state = this._readableState - const unpipeInfo = { - hasUnpiped: false - } - // If we're not piping anywhere, then do nothing. - if (state.pipes.length === 0) return this - if (!dest) { - // remove all. - const dests = state.pipes - state.pipes = [] - this.pause() - for (let i = 0; i < dests.length; i++) - dests[i].emit('unpipe', this, { - hasUnpiped: false - }) +// Backwards compat. cb() is undocumented and unused in core but +// unfortunately might be used by modules. +function destroy(err, cb) { + const r = this._readableState + const w = this._writableState + // With duplex streams we use the writable side for state. + const s = w || r + if ((w !== null && w !== undefined && w.destroyed) || (r !== null && r !== undefined && r.destroyed)) { + if (typeof cb === 'function') { + cb() + } return this } - // Try to find the right one. - const index = ArrayPrototypeIndexOf(state.pipes, dest) - if (index === -1) return this - state.pipes.splice(index, 1) - if (state.pipes.length === 0) this.pause() - dest.emit('unpipe', this, unpipeInfo) - return this -} + // We set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks + checkError(err, w, r) + if (w) { + w.destroyed = true + } + if (r) { + r.destroyed = true + } -// Set up data events if they are asked for -// Ensure readable listeners eventually get something. -Readable.prototype.on = function (ev, fn) { - const res = Stream.prototype.on.call(this, ev, fn) - const state = this._readableState - if (ev === 'data') { - // Update readableListening so that resume() may be a no-op - // a few lines down. This is needed to support once('readable'). - state.readableListening = this.listenerCount('readable') > 0 - - // Try start flowing on next tick if stream isn't explicitly paused. 
- if (state.flowing !== false) this.resume() - } else if (ev === 'readable') { - if (!state.endEmitted && !state.readableListening) { - state.readableListening = state.needReadable = true - state.flowing = false - state.emittedReadable = false - debug('on readable', state.length, state.reading) - if (state.length) { - emitReadable(this) - } else if (!state.reading) { - process.nextTick(nReadingNextTick, this) - } - } - } - return res -} -Readable.prototype.addListener = Readable.prototype.on -Readable.prototype.removeListener = function (ev, fn) { - const res = Stream.prototype.removeListener.call(this, ev, fn) - if (ev === 'readable') { - // We need to check if there is someone still listening to - // readable and reset the state. However this needs to happen - // after readable has been emitted but before I/O (nextTick) to - // support once('readable', fn) cycles. This means that calling - // resume within the same tick will have no - // effect. - process.nextTick(updateReadableListening, this) + // If still constructing then defer calling _destroy. + if (!s.constructed) { + this.once(kDestroy, function (er) { + _destroy(this, aggregateTwoErrors(er, err), cb) + }) + } else { + _destroy(this, err, cb) } - return res + return this } -Readable.prototype.off = Readable.prototype.removeListener -Readable.prototype.removeAllListeners = function (ev) { - const res = Stream.prototype.removeAllListeners.apply(this, arguments) - if (ev === 'readable' || ev === undefined) { - // We need to check if there is someone still listening to - // readable and reset the state. However this needs to happen - // after readable has been emitted but before I/O (nextTick) to - // support once('readable', fn) cycles. This means that calling - // resume within the same tick will have no - // effect. - process.nextTick(updateReadableListening, this) +function _destroy(self, err, cb) { + let called = false + function onDestroy(err) { + if (called) { + return + } + called = true + const r = self._readableState + const w = self._writableState + checkError(err, w, r) + if (w) { + w.closed = true + } + if (r) { + r.closed = true + } + if (typeof cb === 'function') { + cb(err) + } + if (err) { + process.nextTick(emitErrorCloseNT, self, err) + } else { + process.nextTick(emitCloseNT, self) + } } - return res -} -function updateReadableListening(self) { - const state = self._readableState - state.readableListening = self.listenerCount('readable') > 0 - if (state.resumeScheduled && state[kPaused] === false) { - // Flowing needs to be set to true now, otherwise - // the upcoming resume will not flow. - state.flowing = true - - // Crude way to check if we should resume. - } else if (self.listenerCount('data') > 0) { - self.resume() - } else if (!state.readableListening) { - state.flowing = null + try { + self._destroy(err || null, onDestroy) + } catch (err) { + onDestroy(err) } } -function nReadingNextTick(self) { - debug('readable nexttick read 0') - self.read(0) +function emitErrorCloseNT(self, err) { + emitErrorNT(self, err) + emitCloseNT(self) } - -// pause() and resume() are remnants of the legacy readable stream API -// If the user uses them, then switch into old mode. -Readable.prototype.resume = function () { - const state = this._readableState - if (!state.flowing) { - debug('resume') - // We flow only if there is no one listening - // for readable, but we still have to call - // resume(). 
- state.flowing = !state.readableListening - resume(this, state) +function emitCloseNT(self) { + const r = self._readableState + const w = self._writableState + if (w) { + w.closeEmitted = true } - state[kPaused] = false - return this -} -function resume(stream, state) { - if (!state.resumeScheduled) { - state.resumeScheduled = true - process.nextTick(resume_, stream, state) + if (r) { + r.closeEmitted = true } -} -function resume_(stream, state) { - debug('resume', state.reading) - if (!state.reading) { - stream.read(0) + if ((w !== null && w !== undefined && w.emitClose) || (r !== null && r !== undefined && r.emitClose)) { + self.emit('close') } - state.resumeScheduled = false - stream.emit('resume') - flow(stream) - if (state.flowing && !state.reading) stream.read(0) } -Readable.prototype.pause = function () { - debug('call pause flowing=%j', this._readableState.flowing) - if (this._readableState.flowing !== false) { - debug('pause') - this._readableState.flowing = false - this.emit('pause') +function emitErrorNT(self, err) { + const r = self._readableState + const w = self._writableState + if ((w !== null && w !== undefined && w.errorEmitted) || (r !== null && r !== undefined && r.errorEmitted)) { + return } - this._readableState[kPaused] = true - return this + if (w) { + w.errorEmitted = true + } + if (r) { + r.errorEmitted = true + } + self.emit('error', err) } -function flow(stream) { - const state = stream._readableState - debug('flow', state.flowing) - while (state.flowing && stream.read() !== null); +function undestroy() { + const r = this._readableState + const w = this._writableState + if (r) { + r.constructed = true + r.closed = false + r.closeEmitted = false + r.destroyed = false + r.errored = null + r.errorEmitted = false + r.reading = false + r.ended = r.readable === false + r.endEmitted = r.readable === false + } + if (w) { + w.constructed = true + w.destroyed = false + w.closed = false + w.closeEmitted = false + w.errored = null + w.errorEmitted = false + w.finalCalled = false + w.prefinished = false + w.ended = w.writable === false + w.ending = w.writable === false + w.finished = w.writable === false + } } +function errorOrDestroy(stream, err, sync) { + // We have tests that rely on errors being emitted + // in the same tick, so changing this is semver major. + // For now when you opt-in to autoDestroy we allow + // the error to be emitted nextTick. In a future + // semver major update we should change the default to this. -// Wrap an old-style stream as the async data source. -// This is *not* part of the readable stream interface. -// It is an ugly unfortunate mess of history. -Readable.prototype.wrap = function (stream) { - let paused = false - - // TODO (ronag): Should this.destroy(err) emit - // 'error' on the wrapped stream? Would require - // a static factory method, e.g. Readable.wrap(stream). 
+ const r = stream._readableState + const w = stream._writableState + if ((w !== null && w !== undefined && w.destroyed) || (r !== null && r !== undefined && r.destroyed)) { + return this + } + if ((r !== null && r !== undefined && r.autoDestroy) || (w !== null && w !== undefined && w.autoDestroy)) + stream.destroy(err) + else if (err) { + // Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364 + err.stack // eslint-disable-line no-unused-expressions - stream.on('data', (chunk) => { - if (!this.push(chunk) && stream.pause) { - paused = true - stream.pause() + if (w && !w.errored) { + w.errored = err } - }) - stream.on('end', () => { - this.push(null) - }) - stream.on('error', (err) => { - errorOrDestroy(this, err) - }) - stream.on('close', () => { - this.destroy() - }) - stream.on('destroy', () => { - this.destroy() - }) - this._read = () => { - if (paused && stream.resume) { - paused = false - stream.resume() + if (r && !r.errored) { + r.errored = err } - } - - // Proxy all the other methods. Important when wrapping filters and duplexes. - const streamKeys = ObjectKeys(stream) - for (let j = 1; j < streamKeys.length; j++) { - const i = streamKeys[j] - if (this[i] === undefined && typeof stream[i] === 'function') { - this[i] = stream[i].bind(stream) + if (sync) { + process.nextTick(emitErrorNT, stream, err) + } else { + emitErrorNT(stream, err) } } - return this } -Readable.prototype[SymbolAsyncIterator] = function () { - return streamToAsyncIterator(this) -} -Readable.prototype.iterator = function (options) { - if (options !== undefined) { - validateObject(options, 'options') +function construct(stream, cb) { + if (typeof stream._construct !== 'function') { + return } - return streamToAsyncIterator(this, options) -} -function streamToAsyncIterator(stream, options) { - if (typeof stream.read !== 'function') { - stream = Readable.wrap(stream, { - objectMode: true - }) + const r = stream._readableState + const w = stream._writableState + if (r) { + r.constructed = false } - const iter = createAsyncIterator(stream, options) - iter.stream = stream - return iter + if (w) { + w.constructed = false + } + stream.once(kConstruct, cb) + if (stream.listenerCount(kConstruct) > 1) { + // Duplex + return + } + process.nextTick(constructNT, stream) } -async function* createAsyncIterator(stream, options) { - let callback = nop - function next(resolve) { - if (this === stream) { - callback() - callback = nop - } else { - callback = resolve +function constructNT(stream) { + let called = false + function onConstruct(err) { + if (called) { + errorOrDestroy(stream, err !== null && err !== undefined ? err : new ERR_MULTIPLE_CALLBACK()) + return } - } - stream.on('readable', next) - let error - const cleanup = eos( - stream, - { - writable: false - }, - (err) => { - error = err ? aggregateTwoErrors(error, err) : null - callback() - callback = nop + called = true + const r = stream._readableState + const w = stream._writableState + const s = w || r + if (r) { + r.constructed = true } - ) - try { - while (true) { - const chunk = stream.destroyed ? null : stream.read() - if (chunk !== null) { - yield chunk - } else if (error) { - throw error - } else if (error === null) { - return - } else { - await new Promise(next) - } + if (w) { + w.constructed = true } - } catch (err) { - error = aggregateTwoErrors(error, err) - throw error - } finally { - if ( - (error || (options === null || options === undefined ? 
undefined : options.destroyOnReturn) !== false) && - (error === undefined || stream._readableState.autoDestroy) - ) { - destroyImpl.destroyer(stream, null) + if (s.destroyed) { + stream.emit(kDestroy, err) + } else if (err) { + errorOrDestroy(stream, err, true) } else { - stream.off('readable', next) - cleanup() + process.nextTick(emitConstructNT, stream) } } + try { + stream._construct((err) => { + process.nextTick(onConstruct, err) + }) + } catch (err) { + process.nextTick(onConstruct, err) + } +} +function emitConstructNT(stream) { + stream.emit(kConstruct) +} +function isRequest(stream) { + return (stream === null || stream === undefined ? undefined : stream.setHeader) && typeof stream.abort === 'function' +} +function emitCloseLegacy(stream) { + stream.emit('close') +} +function emitErrorCloseLegacy(stream, err) { + stream.emit('error', err) + process.nextTick(emitCloseLegacy, stream) } -// Making it explicit these properties are not enumerable -// because otherwise some prototype manipulation in -// userland will fail. -ObjectDefineProperties(Readable.prototype, { - readable: { - __proto__: null, - get() { - const r = this._readableState - // r.readable === false means that this is part of a Duplex stream - // where the readable side was disabled upon construction. - // Compat. The user might manually disable readable side through - // deprecated setter. - return !!r && r.readable !== false && !r.destroyed && !r.errorEmitted && !r.endEmitted - }, - set(val) { - // Backwards compat. - if (this._readableState) { - this._readableState.readable = !!val - } +// Normalize destroy for legacy. +function destroyer(stream, err) { + if (!stream || isDestroyed(stream)) { + return + } + if (!err && !isFinished(stream)) { + err = new AbortError() + } + + // TODO: Remove isRequest branches. + if (isServerRequest(stream)) { + stream.socket = null + stream.destroy(err) + } else if (isRequest(stream)) { + stream.abort() + } else if (isRequest(stream.req)) { + stream.req.abort() + } else if (typeof stream.destroy === 'function') { + stream.destroy(err) + } else if (typeof stream.close === 'function') { + // TODO: Don't lose err? + stream.close() + } else if (err) { + process.nextTick(emitErrorCloseLegacy, stream, err) + } else { + process.nextTick(emitCloseLegacy, stream) + } + if (!stream.destroyed) { + stream[kIsDestroyed] = true + } +} +module.exports = { + construct, + destroyer, + destroy, + undestroy, + errorOrDestroy +} + + +/***/ }), + +/***/ 67799: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototype inheritance, this class +// prototypically inherits from Readable, and then parasitically from +// Writable. + + + +const { + ObjectDefineProperties, + ObjectGetOwnPropertyDescriptor, + ObjectKeys, + ObjectSetPrototypeOf +} = __nccwpck_require__(62646) +module.exports = Duplex +const Readable = __nccwpck_require__(83176) +const Writable = __nccwpck_require__(59482) +ObjectSetPrototypeOf(Duplex.prototype, Readable.prototype) +ObjectSetPrototypeOf(Duplex, Readable) +{ + const keys = ObjectKeys(Writable.prototype) + // Allow the keys array to be GC'ed. + for (let i = 0; i < keys.length; i++) { + const method = keys[i] + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method] + } +} +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options) + Readable.call(this, options) + Writable.call(this, options) + if (options) { + this.allowHalfOpen = options.allowHalfOpen !== false + if (options.readable === false) { + this._readableState.readable = false + this._readableState.ended = true + this._readableState.endEmitted = true + } + if (options.writable === false) { + this._writableState.writable = false + this._writableState.ending = true + this._writableState.ended = true + this._writableState.finished = true } + } else { + this.allowHalfOpen = true + } +} +ObjectDefineProperties(Duplex.prototype, { + writable: { + __proto__: null, + ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writable') }, - readableDidRead: { + writableHighWaterMark: { __proto__: null, - enumerable: false, - get: function () { - return this._readableState.dataEmitted - } + ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableHighWaterMark') }, - readableAborted: { + writableObjectMode: { __proto__: null, - enumerable: false, - get: function () { - return !!( - this._readableState.readable !== false && - (this._readableState.destroyed || this._readableState.errored) && - !this._readableState.endEmitted - ) - } - }, - readableHighWaterMark: { - __proto__: null, - enumerable: false, - get: function () { - return this._readableState.highWaterMark - } - }, - readableBuffer: { - __proto__: null, - enumerable: false, - get: function () { - return this._readableState && this._readableState.buffer - } + ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableObjectMode') }, - readableFlowing: { + writableBuffer: { __proto__: null, - enumerable: false, - get: function () { - return this._readableState.flowing - }, - set: function (state) { - if (this._readableState) { - this._readableState.flowing = state - } - } + ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableBuffer') }, - readableLength: { + writableLength: { __proto__: null, - enumerable: false, - get() { - return this._readableState.length - } + ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableLength') }, - readableObjectMode: { + writableFinished: { __proto__: null, - enumerable: false, - get() { - return this._readableState ? 
this._readableState.objectMode : false - } + ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableFinished') }, - readableEncoding: { + writableCorked: { __proto__: null, - enumerable: false, - get() { - return this._readableState ? this._readableState.encoding : null - } + ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableCorked') }, - errored: { + writableEnded: { __proto__: null, - enumerable: false, - get() { - return this._readableState ? this._readableState.errored : null - } + ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableEnded') }, - closed: { + writableNeedDrain: { __proto__: null, - get() { - return this._readableState ? this._readableState.closed : false - } + ...ObjectGetOwnPropertyDescriptor(Writable.prototype, 'writableNeedDrain') }, destroyed: { __proto__: null, - enumerable: false, get() { - return this._readableState ? this._readableState.destroyed : false + if (this._readableState === undefined || this._writableState === undefined) { + return false + } + return this._readableState.destroyed && this._writableState.destroyed }, set(value) { - // We ignore the value if the stream - // has not been initialized yet. - if (!this._readableState) { - return - } - // Backward compatibility, the user is explicitly // managing destroyed. - this._readableState.destroyed = value - } - }, - readableEnded: { - __proto__: null, - enumerable: false, - get() { - return this._readableState ? this._readableState.endEmitted : false - } - } -}) -ObjectDefineProperties(ReadableState.prototype, { - // Legacy getter for `pipesCount`. - pipesCount: { - __proto__: null, - get() { - return this.pipes.length - } - }, - // Legacy property for `paused`. - paused: { - __proto__: null, - get() { - return this[kPaused] !== false - }, - set(value) { - this[kPaused] = !!value - } - } -}) - -// Exposed for testing purposes only. -Readable._fromList = fromList - -// Pluck off n bytes from an array of buffers. -// Length is the combined lengths of all the buffers in the list. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function fromList(n, state) { - // nothing buffered. - if (state.length === 0) return null - let ret - if (state.objectMode) ret = state.buffer.shift() - else if (!n || n >= state.length) { - // Read it all, truncate the list. - if (state.decoder) ret = state.buffer.join('') - else if (state.buffer.length === 1) ret = state.buffer.first() - else ret = state.buffer.concat(state.length) - state.buffer.clear() - } else { - // read part of list. - ret = state.buffer.consume(n, state.decoder) - } - return ret -} -function endReadable(stream) { - const state = stream._readableState - debug('endReadable', state.endEmitted) - if (!state.endEmitted) { - state.ended = true - process.nextTick(endReadableNT, state, stream) - } -} -function endReadableNT(state, stream) { - debug('endReadableNT', state.endEmitted, state.length) - - // Check that we didn't get one last unshift. - if (!state.errored && !state.closeEmitted && !state.endEmitted && state.length === 0) { - state.endEmitted = true - stream.emit('end') - if (stream.writable && stream.allowHalfOpen === false) { - process.nextTick(endWritableNT, stream) - } else if (state.autoDestroy) { - // In case of duplex streams we need a way to detect - // if the writable side is ready for autoDestroy as well. 
- const wState = stream._writableState - const autoDestroy = - !wState || - (wState.autoDestroy && - // We don't expect the writable to ever 'finish' - // if writable is explicitly set to false. - (wState.finished || wState.writable === false)) - if (autoDestroy) { - stream.destroy() + if (this._readableState && this._writableState) { + this._readableState.destroyed = value + this._writableState.destroyed = value } } } -} -function endWritableNT(stream) { - const writable = stream.writable && !stream.writableEnded && !stream.destroyed - if (writable) { - stream.end() - } -} -Readable.from = function (iterable, opts) { - return from(Readable, iterable, opts) -} +}) let webStreamsAdapters // Lazy to avoid circular references @@ -76820,30784 +68593,29206 @@ function lazyWebStreams() { if (webStreamsAdapters === undefined) webStreamsAdapters = {} return webStreamsAdapters } -Readable.fromWeb = function (readableStream, options) { - return lazyWebStreams().newStreamReadableFromReadableStream(readableStream, options) -} -Readable.toWeb = function (streamReadable, options) { - return lazyWebStreams().newReadableStreamFromStreamReadable(streamReadable, options) -} -Readable.wrap = function (src, options) { - var _ref, _src$readableObjectMo - return new Readable({ - objectMode: - (_ref = - (_src$readableObjectMo = src.readableObjectMode) !== null && _src$readableObjectMo !== undefined - ? _src$readableObjectMo - : src.objectMode) !== null && _ref !== undefined - ? _ref - : true, - ...options, - destroy(err, callback) { - destroyImpl.destroyer(src, err) - callback(err) - } - }).wrap(src) -} - - -/***/ }), - -/***/ 89025: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const { MathFloor, NumberIsInteger } = __nccwpck_require__(62141) -const { validateInteger } = __nccwpck_require__(95529) -const { ERR_INVALID_ARG_VALUE } = (__nccwpck_require__(49939).codes) -let defaultHighWaterMarkBytes = 16 * 1024 -let defaultHighWaterMarkObjectMode = 16 -function highWaterMarkFrom(options, isDuplex, duplexKey) { - return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null -} -function getDefaultHighWaterMark(objectMode) { - return objectMode ? defaultHighWaterMarkObjectMode : defaultHighWaterMarkBytes +Duplex.fromWeb = function (pair, options) { + return lazyWebStreams().newStreamDuplexFromReadableWritablePair(pair, options) } -function setDefaultHighWaterMark(objectMode, value) { - validateInteger(value, 'value', 0) - if (objectMode) { - defaultHighWaterMarkObjectMode = value - } else { - defaultHighWaterMarkBytes = value - } +Duplex.toWeb = function (duplex) { + return lazyWebStreams().newReadableWritablePairFromDuplex(duplex) } -function getHighWaterMark(state, options, duplexKey, isDuplex) { - const hwm = highWaterMarkFrom(options, isDuplex, duplexKey) - if (hwm != null) { - if (!NumberIsInteger(hwm) || hwm < 0) { - const name = isDuplex ? `options.${duplexKey}` : 'options.highWaterMark' - throw new ERR_INVALID_ARG_VALUE(name, hwm) - } - return MathFloor(hwm) +let duplexify +Duplex.from = function (body) { + if (!duplexify) { + duplexify = __nccwpck_require__(76306) } - - // Default value - return getDefaultHighWaterMark(state.objectMode) -} -module.exports = { - getHighWaterMark, - getDefaultHighWaterMark, - setDefaultHighWaterMark + return duplexify(body, 'body') } /***/ }), -/***/ 84186: +/***/ 76306: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; -// Copyright Joyent, Inc. 
and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. +/* replacement start */ -// a transform stream is a readable/writable stream where you do -// something with the data. Sometimes it's called a "filter", -// but that's not a great name for it, since that implies a thing where -// some bits pass through, and others are simply ignored. (That would -// be a valid example of a transform, of course.) -// -// While the output is causally related to the input, it's not a -// necessarily symmetric or synchronous transformation. For example, -// a zlib stream might take multiple plain-text writes(), and then -// emit a single compressed chunk some time in the future. -// -// Here's how this works: -// -// The Transform stream has all the aspects of the readable and writable -// stream classes. When you write(chunk), that calls _write(chunk,cb) -// internally, and returns false if there's a lot of pending writes -// buffered up. When you call read(), that calls _read(n) until -// there's enough pending readable data buffered up. -// -// In a transform stream, the written data is placed in a buffer. When -// _read(n) is called, it transforms the queued up data, calling the -// buffered _write cb's as it consumes chunks. If consuming a single -// written chunk would result in multiple output chunks, then the first -// outputted bit calls the readcb, and subsequent chunks just go into -// the read buffer, and will cause it to emit 'readable' if necessary. -// -// This way, back-pressure is actually determined by the reading side, -// since _read has to be called to start processing a new chunk. However, -// a pathological inflate type of transform can cause excessive buffering -// here. For example, imagine a stream where every byte of input is -// interpreted as an integer from 0-255, and then results in that many -// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in -// 1kb of data being output. In this case, you could write a very small -// amount of input, and end up with a very large amount of output. In -// such a pathological inflating mechanism, there'd be no way to tell -// the system to stop doing the transform. A single 4MB write could -// cause the system to run out of memory. -// -// However, even in such a pathological case, only a single written chunk -// would be consumed, and then the rest would wait (un-transformed) until -// the results of the previous transformed chunk were consumed. 
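// ---------------------------------------------------------------------------
// Illustrative sketch (editor's addition, not part of the bundled diff): the
// removed comment block above explains that a Transform only does work when
// the readable side pulls — _transform() is invoked as read() drains the
// queued writes, so back-pressure is driven by the consumer. A minimal
// example, assuming only the public Node.js 'stream' API:
const { Transform } = require('stream')

const upper = new Transform({
  transform(chunk, encoding, callback) {
    // callback(err, data) pushes `data` to the readable side and signals
    // readiness for the next written chunk.
    callback(null, chunk.toString().toUpperCase())
  }
})

// Usage: reading from `upper` is what ultimately schedules _transform();
// writes buffer until the consumer pulls.
// process.stdin.pipe(upper).pipe(process.stdout)
// ---------------------------------------------------------------------------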
+const process = __nccwpck_require__(76341) +/* replacement end */ + +;('use strict') +const bufferModule = __nccwpck_require__(14300) +const { + isReadable, + isWritable, + isIterable, + isNodeStream, + isReadableNodeStream, + isWritableNodeStream, + isDuplexNodeStream, + isReadableStream, + isWritableStream +} = __nccwpck_require__(58650) +const eos = __nccwpck_require__(51604) +const { + AbortError, + codes: { ERR_INVALID_ARG_TYPE, ERR_INVALID_RETURN_VALUE } +} = __nccwpck_require__(72184) +const { destroyer } = __nccwpck_require__(80064) +const Duplex = __nccwpck_require__(67799) +const Readable = __nccwpck_require__(83176) +const Writable = __nccwpck_require__(59482) +const { createDeferredPromise } = __nccwpck_require__(5915) +const from = __nccwpck_require__(80169) +const Blob = globalThis.Blob || bufferModule.Blob +const isBlob = + typeof Blob !== 'undefined' + ? function isBlob(b) { + return b instanceof Blob + } + : function isBlob(b) { + return false + } +const AbortController = globalThis.AbortController || (__nccwpck_require__(71935).AbortController) +const { FunctionPrototypeCall } = __nccwpck_require__(62646) +// This is needed for pre node 17. +class Duplexify extends Duplex { + constructor(options) { + super(options) -const { ObjectSetPrototypeOf, Symbol } = __nccwpck_require__(62141) -module.exports = Transform -const { ERR_METHOD_NOT_IMPLEMENTED } = (__nccwpck_require__(49939).codes) -const Duplex = __nccwpck_require__(7280) -const { getHighWaterMark } = __nccwpck_require__(89025) -ObjectSetPrototypeOf(Transform.prototype, Duplex.prototype) -ObjectSetPrototypeOf(Transform, Duplex) -const kCallback = Symbol('kCallback') -function Transform(options) { - if (!(this instanceof Transform)) return new Transform(options) + // https://github.com/nodejs/node/pull/34385 - // TODO (ronag): This should preferably always be - // applied but would be semver-major. Or even better; - // make Transform a Readable with the Writable interface. - const readableHighWaterMark = options ? getHighWaterMark(this, options, 'readableHighWaterMark', true) : null - if (readableHighWaterMark === 0) { - // A Duplex will buffer both on the writable and readable side while - // a Transform just wants to buffer hwm number of elements. To avoid - // buffering twice we disable buffering on the writable side. - options = { - ...options, - highWaterMark: null, - readableHighWaterMark, - // TODO (ronag): 0 is not optimal since we have - // a "bug" where we check needDrain before calling _write and not after. - // Refs: https://github.com/nodejs/node/pull/32887 - // Refs: https://github.com/nodejs/node/pull/35941 - writableHighWaterMark: options.writableHighWaterMark || 0 + if ((options === null || options === undefined ? undefined : options.readable) === false) { + this._readableState.readable = false + this._readableState.ended = true + this._readableState.endEmitted = true + } + if ((options === null || options === undefined ? undefined : options.writable) === false) { + this._writableState.writable = false + this._writableState.ending = true + this._writableState.ended = true + this._writableState.finished = true } } - Duplex.call(this, options) - - // We have implemented the _read method, and done the other things - // that Readable wants before the first _read call, so unset the - // sync guard flag. 
- this._readableState.sync = false - this[kCallback] = null - if (options) { - if (typeof options.transform === 'function') this._transform = options.transform - if (typeof options.flush === 'function') this._flush = options.flush - } - - // When the writable side finishes, then flush out anything remaining. - // Backwards compat. Some Transform streams incorrectly implement _final - // instead of or in addition to _flush. By using 'prefinish' instead of - // implementing _final we continue supporting this unfortunate use case. - this.on('prefinish', prefinish) } -function final(cb) { - if (typeof this._flush === 'function' && !this.destroyed) { - this._flush((er, data) => { - if (er) { - if (cb) { - cb(er) - } else { - this.destroy(er) - } - return - } - if (data != null) { - this.push(data) - } - this.push(null) - if (cb) { - cb() - } +module.exports = function duplexify(body, name) { + if (isDuplexNodeStream(body)) { + return body + } + if (isReadableNodeStream(body)) { + return _duplexify({ + readable: body }) - } else { - this.push(null) - if (cb) { - cb() - } } -} -function prefinish() { - if (this._final !== final) { - final.call(this) + if (isWritableNodeStream(body)) { + return _duplexify({ + writable: body + }) } -} -Transform.prototype._final = final -Transform.prototype._transform = function (chunk, encoding, callback) { - throw new ERR_METHOD_NOT_IMPLEMENTED('_transform()') -} -Transform.prototype._write = function (chunk, encoding, callback) { - const rState = this._readableState - const wState = this._writableState - const length = rState.length - this._transform(chunk, encoding, (err, val) => { - if (err) { - callback(err) - return - } - if (val != null) { - this.push(val) + if (isNodeStream(body)) { + return _duplexify({ + writable: false, + readable: false + }) + } + if (isReadableStream(body)) { + return _duplexify({ + readable: Readable.fromWeb(body) + }) + } + if (isWritableStream(body)) { + return _duplexify({ + writable: Writable.fromWeb(body) + }) + } + if (typeof body === 'function') { + const { value, write, final, destroy } = fromAsyncGen(body) + if (isIterable(value)) { + return from(Duplexify, value, { + // TODO (ronag): highWaterMark? + objectMode: true, + write, + final, + destroy + }) } - if ( - wState.ended || - // Backwards compat. - length === rState.length || - // Backwards compat. - rState.length < rState.highWaterMark - ) { - callback() - } else { - this[kCallback] = callback + const then = value === null || value === undefined ? undefined : value.then + if (typeof then === 'function') { + let d + const promise = FunctionPrototypeCall( + then, + value, + (val) => { + if (val != null) { + throw new ERR_INVALID_RETURN_VALUE('nully', 'body', val) + } + }, + (err) => { + destroyer(d, err) + } + ) + return (d = new Duplexify({ + // TODO (ronag): highWaterMark? + objectMode: true, + readable: false, + write, + final(cb) { + final(async () => { + try { + await promise + process.nextTick(cb, null) + } catch (err) { + process.nextTick(cb, err) + } + }) + }, + destroy + })) } - }) + throw new ERR_INVALID_RETURN_VALUE('Iterable, AsyncIterable or AsyncFunction', name, value) + } + if (isBlob(body)) { + return duplexify(body.arrayBuffer()) + } + if (isIterable(body)) { + return from(Duplexify, body, { + // TODO (ronag): highWaterMark? + objectMode: true, + writable: false + }) + } + if ( + isReadableStream(body === null || body === undefined ? undefined : body.readable) && + isWritableStream(body === null || body === undefined ? 
undefined : body.writable) + ) { + return Duplexify.fromWeb(body) + } + if ( + typeof (body === null || body === undefined ? undefined : body.writable) === 'object' || + typeof (body === null || body === undefined ? undefined : body.readable) === 'object' + ) { + const readable = + body !== null && body !== undefined && body.readable + ? isReadableNodeStream(body === null || body === undefined ? undefined : body.readable) + ? body === null || body === undefined + ? undefined + : body.readable + : duplexify(body.readable) + : undefined + const writable = + body !== null && body !== undefined && body.writable + ? isWritableNodeStream(body === null || body === undefined ? undefined : body.writable) + ? body === null || body === undefined + ? undefined + : body.writable + : duplexify(body.writable) + : undefined + return _duplexify({ + readable, + writable + }) + } + const then = body === null || body === undefined ? undefined : body.then + if (typeof then === 'function') { + let d + FunctionPrototypeCall( + then, + body, + (val) => { + if (val != null) { + d.push(val) + } + d.push(null) + }, + (err) => { + destroyer(d, err) + } + ) + return (d = new Duplexify({ + objectMode: true, + writable: false, + read() {} + })) + } + throw new ERR_INVALID_ARG_TYPE( + name, + [ + 'Blob', + 'ReadableStream', + 'WritableStream', + 'Stream', + 'Iterable', + 'AsyncIterable', + 'Function', + '{ readable, writable } pair', + 'Promise' + ], + body + ) } -Transform.prototype._read = function () { - if (this[kCallback]) { - const callback = this[kCallback] - this[kCallback] = null - callback() +function fromAsyncGen(fn) { + let { promise, resolve } = createDeferredPromise() + const ac = new AbortController() + const signal = ac.signal + const value = fn( + (async function* () { + while (true) { + const _promise = promise + promise = null + const { chunk, done, cb } = await _promise + process.nextTick(cb) + if (done) return + if (signal.aborted) + throw new AbortError(undefined, { + cause: signal.reason + }) + ;({ promise, resolve } = createDeferredPromise()) + yield chunk + } + })(), + { + signal + } + ) + return { + value, + write(chunk, encoding, cb) { + const _resolve = resolve + resolve = null + _resolve({ + chunk, + done: false, + cb + }) + }, + final(cb) { + const _resolve = resolve + resolve = null + _resolve({ + done: true, + cb + }) + }, + destroy(err, cb) { + ac.abort() + cb(err) + } + } +} +function _duplexify(pair) { + const r = pair.readable && typeof pair.readable.read !== 'function' ? Readable.wrap(pair.readable) : pair.readable + const w = pair.writable + let readable = !!isReadable(r) + let writable = !!isWritable(w) + let ondrain + let onfinish + let onreadable + let onclose + let d + function onfinished(err) { + const cb = onclose + onclose = null + if (cb) { + cb(err) + } else if (err) { + d.destroy(err) + } + } + + // TODO(ronag): Avoid double buffering. + // Implement Writable/Readable/Duplex traits. + // See, https://github.com/nodejs/node/pull/33515. + d = new Duplexify({ + // TODO (ronag): highWaterMark? 
+ readableObjectMode: !!(r !== null && r !== undefined && r.readableObjectMode), + writableObjectMode: !!(w !== null && w !== undefined && w.writableObjectMode), + readable, + writable + }) + if (writable) { + eos(w, (err) => { + writable = false + if (err) { + destroyer(r, err) + } + onfinished(err) + }) + d._write = function (chunk, encoding, callback) { + if (w.write(chunk, encoding)) { + callback() + } else { + ondrain = callback + } + } + d._final = function (callback) { + w.end() + onfinish = callback + } + w.on('drain', function () { + if (ondrain) { + const cb = ondrain + ondrain = null + cb() + } + }) + w.on('finish', function () { + if (onfinish) { + const cb = onfinish + onfinish = null + cb() + } + }) + } + if (readable) { + eos(r, (err) => { + readable = false + if (err) { + destroyer(r, err) + } + onfinished(err) + }) + r.on('readable', function () { + if (onreadable) { + const cb = onreadable + onreadable = null + cb() + } + }) + r.on('end', function () { + d.push(null) + }) + d._read = function () { + while (true) { + const buf = r.read() + if (buf === null) { + onreadable = d._read + return + } + if (!d.push(buf)) { + return + } + } + } + } + d._destroy = function (err, callback) { + if (!err && onclose !== null) { + err = new AbortError() + } + onreadable = null + ondrain = null + onfinish = null + if (onclose === null) { + callback(err) + } else { + onclose = callback + destroyer(w, err) + destroyer(r, err) + } } + return d } /***/ }), -/***/ 46995: +/***/ 51604: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; - - -const { SymbolAsyncIterator, SymbolIterator, SymbolFor } = __nccwpck_require__(62141) - -// We need to use SymbolFor to make these globally available -// for interopt with readable-stream, i.e. readable-stream -// and node core needs to be able to read/write private state -// from each other for proper interoperability. -const kIsDestroyed = SymbolFor('nodejs.stream.destroyed') -const kIsErrored = SymbolFor('nodejs.stream.errored') -const kIsReadable = SymbolFor('nodejs.stream.readable') -const kIsWritable = SymbolFor('nodejs.stream.writable') -const kIsDisturbed = SymbolFor('nodejs.stream.disturbed') -const kIsClosedPromise = SymbolFor('nodejs.webstream.isClosedPromise') -const kControllerErrorFunction = SymbolFor('nodejs.webstream.controllerErrorFunction') -function isReadableNodeStream(obj, strict = false) { - var _obj$_readableState - return !!( - ( - obj && - typeof obj.pipe === 'function' && - typeof obj.on === 'function' && - (!strict || (typeof obj.pause === 'function' && typeof obj.resume === 'function')) && - (!obj._writableState || - ((_obj$_readableState = obj._readableState) === null || _obj$_readableState === undefined - ? undefined - : _obj$_readableState.readable) !== false) && - // Duplex - (!obj._writableState || obj._readableState) - ) // Writable has .pipe. - ) -} - -function isWritableNodeStream(obj) { - var _obj$_writableState - return !!( - ( - obj && - typeof obj.write === 'function' && - typeof obj.on === 'function' && - (!obj._readableState || - ((_obj$_writableState = obj._writableState) === null || _obj$_writableState === undefined - ? 
undefined - : _obj$_writableState.writable) !== false) - ) // Duplex - ) -} - -function isDuplexNodeStream(obj) { - return !!( - obj && - typeof obj.pipe === 'function' && - obj._readableState && - typeof obj.on === 'function' && - typeof obj.write === 'function' - ) -} -function isNodeStream(obj) { - return ( - obj && - (obj._readableState || - obj._writableState || - (typeof obj.write === 'function' && typeof obj.on === 'function') || - (typeof obj.pipe === 'function' && typeof obj.on === 'function')) - ) -} -function isReadableStream(obj) { - return !!( - obj && - !isNodeStream(obj) && - typeof obj.pipeThrough === 'function' && - typeof obj.getReader === 'function' && - typeof obj.cancel === 'function' - ) -} -function isWritableStream(obj) { - return !!(obj && !isNodeStream(obj) && typeof obj.getWriter === 'function' && typeof obj.abort === 'function') -} -function isTransformStream(obj) { - return !!(obj && !isNodeStream(obj) && typeof obj.readable === 'object' && typeof obj.writable === 'object') -} -function isWebStream(obj) { - return isReadableStream(obj) || isWritableStream(obj) || isTransformStream(obj) -} -function isIterable(obj, isAsync) { - if (obj == null) return false - if (isAsync === true) return typeof obj[SymbolAsyncIterator] === 'function' - if (isAsync === false) return typeof obj[SymbolIterator] === 'function' - return typeof obj[SymbolAsyncIterator] === 'function' || typeof obj[SymbolIterator] === 'function' -} -function isDestroyed(stream) { - if (!isNodeStream(stream)) return null - const wState = stream._writableState - const rState = stream._readableState - const state = wState || rState - return !!(stream.destroyed || stream[kIsDestroyed] || (state !== null && state !== undefined && state.destroyed)) -} - -// Have been end():d. -function isWritableEnded(stream) { - if (!isWritableNodeStream(stream)) return null - if (stream.writableEnded === true) return true - const wState = stream._writableState - if (wState !== null && wState !== undefined && wState.errored) return false - if (typeof (wState === null || wState === undefined ? undefined : wState.ended) !== 'boolean') return null - return wState.ended -} +/* replacement start */ -// Have emitted 'finish'. -function isWritableFinished(stream, strict) { - if (!isWritableNodeStream(stream)) return null - if (stream.writableFinished === true) return true - const wState = stream._writableState - if (wState !== null && wState !== undefined && wState.errored) return false - if (typeof (wState === null || wState === undefined ? undefined : wState.finished) !== 'boolean') return null - return !!(wState.finished || (strict === false && wState.ended === true && wState.length === 0)) -} +const process = __nccwpck_require__(76341) -// Have been push(null):d. -function isReadableEnded(stream) { - if (!isReadableNodeStream(stream)) return null - if (stream.readableEnded === true) return true - const rState = stream._readableState - if (!rState || rState.errored) return false - if (typeof (rState === null || rState === undefined ? undefined : rState.ended) !== 'boolean') return null - return rState.ended -} +/* replacement end */ +// Ported from https://github.com/mafintosh/end-of-stream with +// permission from the author, Mathias Buus (@mafintosh). -// Have emitted 'end'. 
-function isReadableFinished(stream, strict) { - if (!isReadableNodeStream(stream)) return null - const rState = stream._readableState - if (rState !== null && rState !== undefined && rState.errored) return false - if (typeof (rState === null || rState === undefined ? undefined : rState.endEmitted) !== 'boolean') return null - return !!(rState.endEmitted || (strict === false && rState.ended === true && rState.length === 0)) -} -function isReadable(stream) { - if (stream && stream[kIsReadable] != null) return stream[kIsReadable] - if (typeof (stream === null || stream === undefined ? undefined : stream.readable) !== 'boolean') return null - if (isDestroyed(stream)) return false - return isReadableNodeStream(stream) && stream.readable && !isReadableFinished(stream) -} -function isWritable(stream) { - if (stream && stream[kIsWritable] != null) return stream[kIsWritable] - if (typeof (stream === null || stream === undefined ? undefined : stream.writable) !== 'boolean') return null - if (isDestroyed(stream)) return false - return isWritableNodeStream(stream) && stream.writable && !isWritableEnded(stream) -} -function isFinished(stream, opts) { - if (!isNodeStream(stream)) { - return null - } - if (isDestroyed(stream)) { - return true - } - if ((opts === null || opts === undefined ? undefined : opts.readable) !== false && isReadable(stream)) { - return false - } - if ((opts === null || opts === undefined ? undefined : opts.writable) !== false && isWritable(stream)) { - return false - } - return true -} -function isWritableErrored(stream) { - var _stream$_writableStat, _stream$_writableStat2 - if (!isNodeStream(stream)) { - return null - } - if (stream.writableErrored) { - return stream.writableErrored - } - return (_stream$_writableStat = - (_stream$_writableStat2 = stream._writableState) === null || _stream$_writableStat2 === undefined - ? undefined - : _stream$_writableStat2.errored) !== null && _stream$_writableStat !== undefined - ? 
_stream$_writableStat - : null +;('use strict') +const { AbortError, codes } = __nccwpck_require__(72184) +const { ERR_INVALID_ARG_TYPE, ERR_STREAM_PREMATURE_CLOSE } = codes +const { kEmptyObject, once } = __nccwpck_require__(5915) +const { validateAbortSignal, validateFunction, validateObject, validateBoolean } = __nccwpck_require__(89371) +const { Promise, PromisePrototypeThen, SymbolDispose } = __nccwpck_require__(62646) +const { + isClosed, + isReadable, + isReadableNodeStream, + isReadableStream, + isReadableFinished, + isReadableErrored, + isWritable, + isWritableNodeStream, + isWritableStream, + isWritableFinished, + isWritableErrored, + isNodeStream, + willEmitClose: _willEmitClose, + kIsClosedPromise +} = __nccwpck_require__(58650) +let addAbortListener +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function' } -function isReadableErrored(stream) { - var _stream$_readableStat, _stream$_readableStat2 - if (!isNodeStream(stream)) { - return null +const nop = () => {} +function eos(stream, options, callback) { + var _options$readable, _options$writable + if (arguments.length === 2) { + callback = options + options = kEmptyObject + } else if (options == null) { + options = kEmptyObject + } else { + validateObject(options, 'options') } - if (stream.readableErrored) { - return stream.readableErrored + validateFunction(callback, 'callback') + validateAbortSignal(options.signal, 'options.signal') + callback = once(callback) + if (isReadableStream(stream) || isWritableStream(stream)) { + return eosWeb(stream, options, callback) } - return (_stream$_readableStat = - (_stream$_readableStat2 = stream._readableState) === null || _stream$_readableStat2 === undefined - ? undefined - : _stream$_readableStat2.errored) !== null && _stream$_readableStat !== undefined - ? _stream$_readableStat - : null -} -function isClosed(stream) { if (!isNodeStream(stream)) { - return null - } - if (typeof stream.closed === 'boolean') { - return stream.closed + throw new ERR_INVALID_ARG_TYPE('stream', ['ReadableStream', 'WritableStream', 'Stream'], stream) } + const readable = + (_options$readable = options.readable) !== null && _options$readable !== undefined + ? _options$readable + : isReadableNodeStream(stream) + const writable = + (_options$writable = options.writable) !== null && _options$writable !== undefined + ? _options$writable + : isWritableNodeStream(stream) const wState = stream._writableState const rState = stream._readableState - if ( - typeof (wState === null || wState === undefined ? undefined : wState.closed) === 'boolean' || - typeof (rState === null || rState === undefined ? undefined : rState.closed) === 'boolean' - ) { - return ( - (wState === null || wState === undefined ? undefined : wState.closed) || - (rState === null || rState === undefined ? 
undefined : rState.closed) - ) - } - if (typeof stream._closed === 'boolean' && isOutgoingMessage(stream)) { - return stream._closed + const onlegacyfinish = () => { + if (!stream.writable) { + onfinish() + } } - return null -} -function isOutgoingMessage(stream) { - return ( - typeof stream._closed === 'boolean' && - typeof stream._defaultKeepAlive === 'boolean' && - typeof stream._removedConnection === 'boolean' && - typeof stream._removedContLen === 'boolean' - ) -} -function isServerResponse(stream) { - return typeof stream._sent100 === 'boolean' && isOutgoingMessage(stream) -} -function isServerRequest(stream) { - var _stream$req - return ( - typeof stream._consuming === 'boolean' && - typeof stream._dumped === 'boolean' && - ((_stream$req = stream.req) === null || _stream$req === undefined ? undefined : _stream$req.upgradeOrConnect) === - undefined - ) -} -function willEmitClose(stream) { - if (!isNodeStream(stream)) return null - const wState = stream._writableState - const rState = stream._readableState - const state = wState || rState - return ( - (!state && isServerResponse(stream)) || !!(state && state.autoDestroy && state.emitClose && state.closed === false) - ) -} -function isDisturbed(stream) { - var _stream$kIsDisturbed - return !!( - stream && - ((_stream$kIsDisturbed = stream[kIsDisturbed]) !== null && _stream$kIsDisturbed !== undefined - ? _stream$kIsDisturbed - : stream.readableDidRead || stream.readableAborted) - ) + + // TODO (ronag): Improve soft detection to include core modules and + // common ecosystem modules that do properly emit 'close' but fail + // this generic check. + let willEmitClose = + _willEmitClose(stream) && isReadableNodeStream(stream) === readable && isWritableNodeStream(stream) === writable + let writableFinished = isWritableFinished(stream, false) + const onfinish = () => { + writableFinished = true + // Stream should not be destroyed here. If it is that + // means that user space is doing something differently and + // we cannot trust willEmitClose. + if (stream.destroyed) { + willEmitClose = false + } + if (willEmitClose && (!stream.readable || readable)) { + return + } + if (!readable || readableFinished) { + callback.call(stream) + } + } + let readableFinished = isReadableFinished(stream, false) + const onend = () => { + readableFinished = true + // Stream should not be destroyed here. If it is that + // means that user space is doing something differently and + // we cannot trust willEmitClose. 
+ if (stream.destroyed) { + willEmitClose = false + } + if (willEmitClose && (!stream.writable || writable)) { + return + } + if (!writable || writableFinished) { + callback.call(stream) + } + } + const onerror = (err) => { + callback.call(stream, err) + } + let closed = isClosed(stream) + const onclose = () => { + closed = true + const errored = isWritableErrored(stream) || isReadableErrored(stream) + if (errored && typeof errored !== 'boolean') { + return callback.call(stream, errored) + } + if (readable && !readableFinished && isReadableNodeStream(stream, true)) { + if (!isReadableFinished(stream, false)) return callback.call(stream, new ERR_STREAM_PREMATURE_CLOSE()) + } + if (writable && !writableFinished) { + if (!isWritableFinished(stream, false)) return callback.call(stream, new ERR_STREAM_PREMATURE_CLOSE()) + } + callback.call(stream) + } + const onclosed = () => { + closed = true + const errored = isWritableErrored(stream) || isReadableErrored(stream) + if (errored && typeof errored !== 'boolean') { + return callback.call(stream, errored) + } + callback.call(stream) + } + const onrequest = () => { + stream.req.on('finish', onfinish) + } + if (isRequest(stream)) { + stream.on('complete', onfinish) + if (!willEmitClose) { + stream.on('abort', onclose) + } + if (stream.req) { + onrequest() + } else { + stream.on('request', onrequest) + } + } else if (writable && !wState) { + // legacy streams + stream.on('end', onlegacyfinish) + stream.on('close', onlegacyfinish) + } + + // Not all streams will emit 'close' after 'aborted'. + if (!willEmitClose && typeof stream.aborted === 'boolean') { + stream.on('aborted', onclose) + } + stream.on('end', onend) + stream.on('finish', onfinish) + if (options.error !== false) { + stream.on('error', onerror) + } + stream.on('close', onclose) + if (closed) { + process.nextTick(onclose) + } else if ( + (wState !== null && wState !== undefined && wState.errorEmitted) || + (rState !== null && rState !== undefined && rState.errorEmitted) + ) { + if (!willEmitClose) { + process.nextTick(onclosed) + } + } else if ( + !readable && + (!willEmitClose || isReadable(stream)) && + (writableFinished || isWritable(stream) === false) + ) { + process.nextTick(onclosed) + } else if ( + !writable && + (!willEmitClose || isWritable(stream)) && + (readableFinished || isReadable(stream) === false) + ) { + process.nextTick(onclosed) + } else if (rState && stream.req && stream.aborted) { + process.nextTick(onclosed) + } + const cleanup = () => { + callback = nop + stream.removeListener('aborted', onclose) + stream.removeListener('complete', onfinish) + stream.removeListener('abort', onclose) + stream.removeListener('request', onrequest) + if (stream.req) stream.req.removeListener('finish', onfinish) + stream.removeListener('end', onlegacyfinish) + stream.removeListener('close', onlegacyfinish) + stream.removeListener('finish', onfinish) + stream.removeListener('end', onend) + stream.removeListener('error', onerror) + stream.removeListener('close', onclose) + } + if (options.signal && !closed) { + const abort = () => { + // Keep it because cleanup removes it. 
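+ // (cleanup() swaps `callback` for a no-op, so the reference must be
+ // captured before invoking it.)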
+ const endCallback = callback + cleanup() + endCallback.call( + stream, + new AbortError(undefined, { + cause: options.signal.reason + }) + ) + } + if (options.signal.aborted) { + process.nextTick(abort) + } else { + addAbortListener = addAbortListener || (__nccwpck_require__(5915).addAbortListener) + const disposable = addAbortListener(options.signal, abort) + const originalCallback = callback + callback = once((...args) => { + disposable[SymbolDispose]() + originalCallback.apply(stream, args) + }) + } + } + return cleanup } -function isErrored(stream) { - var _ref, - _ref2, - _ref3, - _ref4, - _ref5, - _stream$kIsErrored, - _stream$_readableStat3, - _stream$_writableStat3, - _stream$_readableStat4, - _stream$_writableStat4 - return !!( - stream && - ((_ref = - (_ref2 = - (_ref3 = - (_ref4 = - (_ref5 = - (_stream$kIsErrored = stream[kIsErrored]) !== null && _stream$kIsErrored !== undefined - ? _stream$kIsErrored - : stream.readableErrored) !== null && _ref5 !== undefined - ? _ref5 - : stream.writableErrored) !== null && _ref4 !== undefined - ? _ref4 - : (_stream$_readableStat3 = stream._readableState) === null || _stream$_readableStat3 === undefined - ? undefined - : _stream$_readableStat3.errorEmitted) !== null && _ref3 !== undefined - ? _ref3 - : (_stream$_writableStat3 = stream._writableState) === null || _stream$_writableStat3 === undefined - ? undefined - : _stream$_writableStat3.errorEmitted) !== null && _ref2 !== undefined - ? _ref2 - : (_stream$_readableStat4 = stream._readableState) === null || _stream$_readableStat4 === undefined - ? undefined - : _stream$_readableStat4.errored) !== null && _ref !== undefined - ? _ref - : (_stream$_writableStat4 = stream._writableState) === null || _stream$_writableStat4 === undefined - ? undefined - : _stream$_writableStat4.errored) - ) +function eosWeb(stream, options, callback) { + let isAborted = false + let abort = nop + if (options.signal) { + abort = () => { + isAborted = true + callback.call( + stream, + new AbortError(undefined, { + cause: options.signal.reason + }) + ) + } + if (options.signal.aborted) { + process.nextTick(abort) + } else { + addAbortListener = addAbortListener || (__nccwpck_require__(5915).addAbortListener) + const disposable = addAbortListener(options.signal, abort) + const originalCallback = callback + callback = once((...args) => { + disposable[SymbolDispose]() + originalCallback.apply(stream, args) + }) + } + } + const resolverFn = (...args) => { + if (!isAborted) { + process.nextTick(() => callback.apply(stream, args)) + } + } + PromisePrototypeThen(stream[kIsClosedPromise].promise, resolverFn, resolverFn) + return nop } -module.exports = { - isDestroyed, - kIsDestroyed, - isDisturbed, - kIsDisturbed, - isErrored, - kIsErrored, - isReadable, - kIsReadable, - kIsClosedPromise, - kControllerErrorFunction, - kIsWritable, - isClosed, - isDuplexNodeStream, - isFinished, - isIterable, - isReadableNodeStream, - isReadableStream, - isReadableEnded, - isReadableFinished, - isReadableErrored, - isNodeStream, - isWebStream, - isWritable, - isWritableNodeStream, - isWritableStream, - isWritableEnded, - isWritableFinished, - isWritableErrored, - isServerRequest, - isServerResponse, - willEmitClose, - isTransformStream +function finished(stream, opts) { + var _opts + let autoCleanup = false + if (opts === null) { + opts = kEmptyObject + } + if ((_opts = opts) !== null && _opts !== undefined && _opts.cleanup) { + validateBoolean(opts.cleanup, 'cleanup') + autoCleanup = opts.cleanup + } + return new Promise((resolve, reject) => 
{ + const cleanup = eos(stream, opts, (err) => { + if (autoCleanup) { + cleanup() + } + if (err) { + reject(err) + } else { + resolve() + } + }) + }) } +module.exports = eos +module.exports.finished = finished /***/ }), -/***/ 99601: +/***/ 80169: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +"use strict"; + + /* replacement start */ -const process = __nccwpck_require__(54340) +const process = __nccwpck_require__(76341) /* replacement end */ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -// A bit simpler than readable streams. -// Implement an async ._write(chunk, encoding, cb), and it'll handle all -// the drain event emission and buffering. -;('use strict') -const { - ArrayPrototypeSlice, - Error, - FunctionPrototypeSymbolHasInstance, - ObjectDefineProperty, - ObjectDefineProperties, - ObjectSetPrototypeOf, - StringPrototypeToLowerCase, - Symbol, - SymbolHasInstance -} = __nccwpck_require__(62141) -module.exports = Writable -Writable.WritableState = WritableState -const { EventEmitter: EE } = __nccwpck_require__(82361) -const Stream = (__nccwpck_require__(26501).Stream) +const { PromisePrototypeThen, SymbolAsyncIterator, SymbolIterator } = __nccwpck_require__(62646) const { Buffer } = __nccwpck_require__(14300) -const destroyImpl = __nccwpck_require__(47493) -const { addAbortSignal } = __nccwpck_require__(50275) -const { getHighWaterMark, getDefaultHighWaterMark } = __nccwpck_require__(89025) -const { - ERR_INVALID_ARG_TYPE, - ERR_METHOD_NOT_IMPLEMENTED, - ERR_MULTIPLE_CALLBACK, - ERR_STREAM_CANNOT_PIPE, - ERR_STREAM_DESTROYED, - ERR_STREAM_ALREADY_FINISHED, - ERR_STREAM_NULL_VALUES, - ERR_STREAM_WRITE_AFTER_END, - ERR_UNKNOWN_ENCODING -} = (__nccwpck_require__(49939).codes) -const { errorOrDestroy } = destroyImpl -ObjectSetPrototypeOf(Writable.prototype, Stream.prototype) -ObjectSetPrototypeOf(Writable, Stream) -function nop() {} -const kOnFinished = Symbol('kOnFinished') -function WritableState(options, stream, isDuplex) { - // Duplex streams are both readable and writable, but share - // the same options object. - // However, some cases require setting options to different - // values for the readable and the writable sides of the duplex stream, - // e.g. options.readableObjectMode vs. options.writableObjectMode, etc. 
- if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof __nccwpck_require__(7280) +const { ERR_INVALID_ARG_TYPE, ERR_STREAM_NULL_VALUES } = (__nccwpck_require__(72184).codes) +function from(Readable, iterable, opts) { + let iterator + if (typeof iterable === 'string' || iterable instanceof Buffer) { + return new Readable({ + objectMode: true, + ...opts, + read() { + this.push(iterable) + this.push(null) + } + }) + } + let isAsync + if (iterable && iterable[SymbolAsyncIterator]) { + isAsync = true + iterator = iterable[SymbolAsyncIterator]() + } else if (iterable && iterable[SymbolIterator]) { + isAsync = false + iterator = iterable[SymbolIterator]() + } else { + throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable) + } + const readable = new Readable({ + objectMode: true, + highWaterMark: 1, + // TODO(ronag): What options should be allowed? + ...opts + }) - // Object stream flag to indicate whether or not this stream - // contains buffers or objects. - this.objectMode = !!(options && options.objectMode) - if (isDuplex) this.objectMode = this.objectMode || !!(options && options.writableObjectMode) + // Flag to protect against _read + // being called before last iteration completion. + let reading = false + readable._read = function () { + if (!reading) { + reading = true + next() + } + } + readable._destroy = function (error, cb) { + PromisePrototypeThen( + close(error), + () => process.nextTick(cb, error), + // nextTick is here in case cb throws + (e) => process.nextTick(cb, e || error) + ) + } + async function close(error) { + const hadError = error !== undefined && error !== null + const hasThrow = typeof iterator.throw === 'function' + if (hadError && hasThrow) { + const { value, done } = await iterator.throw(error) + await value + if (done) { + return + } + } + if (typeof iterator.return === 'function') { + const { value } = await iterator.return() + await value + } + } + async function next() { + for (;;) { + try { + const { value, done } = isAsync ? await iterator.next() : iterator.next() + if (done) { + readable.push(null) + } else { + const res = value && typeof value.then === 'function' ? await value : value + if (res === null) { + reading = false + throw new ERR_STREAM_NULL_VALUES() + } else if (readable.push(res)) { + continue + } else { + reading = false + } + } + } catch (err) { + readable.destroy(err) + } + break + } + } + return readable +} +module.exports = from - // The point at which write() starts returning false - // Note: 0 is a valid value, means that we always return false if - // the entire buffer is not flushed immediately on write(). - this.highWaterMark = options - ? getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex) - : getDefaultHighWaterMark(false) - // if _final has been called. - this.finalCalled = false +/***/ }), - // drain event flag. - this.needDrain = false - // At the start of calling end() - this.ending = false - // When end() has been called, and returned. - this.ended = false - // When 'finish' is emitted. - this.finished = false +/***/ 27722: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // Has it been destroyed - this.destroyed = false +"use strict"; - // Should we decode strings into buffers before passing to _write? - // this is here so that some node-core streams can optimize string - // handling at a lower level. - const noDecode = !!(options && options.decodeStrings === false) - this.decodeStrings = !noDecode - // Crypto is kind of old and crusty. 
Historically, its default string - // encoding is 'binary' so we have to make this configurable. - // Everything else in the universe uses 'utf8', though. - this.defaultEncoding = (options && options.defaultEncoding) || 'utf8' +const { ArrayIsArray, ObjectSetPrototypeOf } = __nccwpck_require__(62646) +const { EventEmitter: EE } = __nccwpck_require__(82361) +function Stream(opts) { + EE.call(this, opts) +} +ObjectSetPrototypeOf(Stream.prototype, EE.prototype) +ObjectSetPrototypeOf(Stream, EE) +Stream.prototype.pipe = function (dest, options) { + const source = this + function ondata(chunk) { + if (dest.writable && dest.write(chunk) === false && source.pause) { + source.pause() + } + } + source.on('data', ondata) + function ondrain() { + if (source.readable && source.resume) { + source.resume() + } + } + dest.on('drain', ondrain) - // Not an actual buffer we keep track of, but a measurement - // of how much we're waiting to get pushed to some underlying - // socket or file. - this.length = 0 - - // A flag to see when we're in the middle of a write. - this.writing = false - - // When true all writes will be buffered until .uncork() call. - this.corked = 0 - - // A flag to be able to tell if the onwrite cb is called immediately, - // or on a later tick. We set this to true at first, because any - // actions that shouldn't happen until "later" should generally also - // not happen before the first write call. - this.sync = true - - // A flag to know if we're processing previously buffered items, which - // may call the _write() callback in the same tick, so that we don't - // end up in an overlapped onwrite situation. - this.bufferProcessing = false - - // The callback that's passed to _write(chunk, cb). - this.onwrite = onwrite.bind(undefined, stream) - - // The callback that the user supplies to write(chunk, encoding, cb). - this.writecb = null - - // The amount that is being written when _write is called. - this.writelen = 0 + // If the 'end' option is not supplied, dest.end() will be called when + // source gets the 'end' or 'close' events. Only dest.end() once. + if (!dest._isStdio && (!options || options.end !== false)) { + source.on('end', onend) + source.on('close', onclose) + } + let didOnEnd = false + function onend() { + if (didOnEnd) return + didOnEnd = true + dest.end() + } + function onclose() { + if (didOnEnd) return + didOnEnd = true + if (typeof dest.destroy === 'function') dest.destroy() + } - // Storage for data passed to the afterWrite() callback in case of - // synchronous _write() completion. - this.afterWriteTickInfo = null - resetBuffer(this) + // Don't leave dangling pipes when there are errors. + function onerror(er) { + cleanup() + if (EE.listenerCount(this, 'error') === 0) { + this.emit('error', er) + } + } + prependListener(source, 'error', onerror) + prependListener(dest, 'error', onerror) - // Number of pending user-supplied write callbacks - // this must be 0 before 'finish' can be emitted. - this.pendingcb = 0 + // Remove all the event listeners that were added. 
+ function cleanup() { + source.removeListener('data', ondata) + dest.removeListener('drain', ondrain) + source.removeListener('end', onend) + source.removeListener('close', onclose) + source.removeListener('error', onerror) + dest.removeListener('error', onerror) + source.removeListener('end', cleanup) + source.removeListener('close', cleanup) + dest.removeListener('close', cleanup) + } + source.on('end', cleanup) + source.on('close', cleanup) + dest.on('close', cleanup) + dest.emit('pipe', source) - // Stream is still being constructed and cannot be - // destroyed until construction finished or failed. - // Async construction is opt in, therefore we start as - // constructed. - this.constructed = true + // Allow for unix-like usage: A.pipe(B).pipe(C) + return dest +} +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn) - // Emit prefinish if the only thing we're waiting for is _write cbs - // This is relevant for synchronous Transform streams. - this.prefinished = false + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn) + else if (ArrayIsArray(emitter._events[event])) emitter._events[event].unshift(fn) + else emitter._events[event] = [fn, emitter._events[event]] +} +module.exports = { + Stream, + prependListener +} - // True if the error was already emitted and should not be thrown again. - this.errorEmitted = false - // Should close be emitted on destroy. Defaults to true. - this.emitClose = !options || options.emitClose !== false +/***/ }), - // Should .destroy() be called after 'finish' (and potentially 'end'). - this.autoDestroy = !options || options.autoDestroy !== false +/***/ 23957: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - // Indicates whether the stream has errored. When true all write() calls - // should return false. This is needed since when autoDestroy - // is disabled we need a way to tell whether the stream has failed. - this.errored = null +"use strict"; - // Indicates whether the stream has finished destroying. - this.closed = false - // True if close has been emitted or would have been emitted - // depending on emitClose. 
- this.closeEmitted = false - this[kOnFinished] = [] -} -function resetBuffer(state) { - state.buffered = [] - state.bufferedIndex = 0 - state.allBuffers = true - state.allNoop = true -} -WritableState.prototype.getBuffer = function getBuffer() { - return ArrayPrototypeSlice(this.buffered, this.bufferedIndex) -} -ObjectDefineProperty(WritableState.prototype, 'bufferedRequestCount', { - __proto__: null, - get() { - return this.buffered.length - this.bufferedIndex +const AbortController = globalThis.AbortController || (__nccwpck_require__(71935).AbortController) +const { + codes: { ERR_INVALID_ARG_VALUE, ERR_INVALID_ARG_TYPE, ERR_MISSING_ARGS, ERR_OUT_OF_RANGE }, + AbortError +} = __nccwpck_require__(72184) +const { validateAbortSignal, validateInteger, validateObject } = __nccwpck_require__(89371) +const kWeakHandler = (__nccwpck_require__(62646).Symbol)('kWeak') +const kResistStopPropagation = (__nccwpck_require__(62646).Symbol)('kResistStopPropagation') +const { finished } = __nccwpck_require__(51604) +const staticCompose = __nccwpck_require__(70724) +const { addAbortSignalNoValidate } = __nccwpck_require__(69832) +const { isWritable, isNodeStream } = __nccwpck_require__(58650) +const { deprecate } = __nccwpck_require__(5915) +const { + ArrayPrototypePush, + Boolean, + MathFloor, + Number, + NumberIsNaN, + Promise, + PromiseReject, + PromiseResolve, + PromisePrototypeThen, + Symbol +} = __nccwpck_require__(62646) +const kEmpty = Symbol('kEmpty') +const kEof = Symbol('kEof') +function compose(stream, options) { + if (options != null) { + validateObject(options, 'options') } -}) -function Writable(options) { - // Writable ctor is applied to Duplexes, too. - // `realHasInstance` is necessary because using plain `instanceof` - // would return false, as no `_writableState` property is attached. - - // Trying to use the custom `instanceof` for Writable here will also break the - // Node.js LazyTransform implementation, which has a non-trivial getter for - // `_writableState` that would lead to infinite recursion. - - // Checking for a Stream.Duplex instance is faster here instead of inside - // the WritableState constructor, at least with V8 6.5. - const isDuplex = this instanceof __nccwpck_require__(7280) - if (!isDuplex && !FunctionPrototypeSymbolHasInstance(Writable, this)) return new Writable(options) - this._writableState = new WritableState(options, this, isDuplex) - if (options) { - if (typeof options.write === 'function') this._write = options.write - if (typeof options.writev === 'function') this._writev = options.writev - if (typeof options.destroy === 'function') this._destroy = options.destroy - if (typeof options.final === 'function') this._final = options.final - if (typeof options.construct === 'function') this._construct = options.construct - if (options.signal) addAbortSignal(options.signal, this) + if ((options === null || options === undefined ? 
undefined : options.signal) != null) { + validateAbortSignal(options.signal, 'options.signal') } - Stream.call(this, options) - destroyImpl.construct(this, () => { - const state = this._writableState - if (!state.writing) { - clearBuffer(this, state) - } - finishMaybe(this, state) - }) -} -ObjectDefineProperty(Writable, SymbolHasInstance, { - __proto__: null, - value: function (object) { - if (FunctionPrototypeSymbolHasInstance(this, object)) return true - if (this !== Writable) return false - return object && object._writableState instanceof WritableState + if (isNodeStream(stream) && !isWritable(stream)) { + throw new ERR_INVALID_ARG_VALUE('stream', stream, 'must be writable') } -}) - -// Otherwise people can pipe Writable streams, which is just wrong. -Writable.prototype.pipe = function () { - errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()) + const composedStream = staticCompose(this, stream) + if (options !== null && options !== undefined && options.signal) { + // Not validating as we already validated before + addAbortSignalNoValidate(options.signal, composedStream) + } + return composedStream } -function _write(stream, chunk, encoding, cb) { - const state = stream._writableState - if (typeof encoding === 'function') { - cb = encoding - encoding = state.defaultEncoding - } else { - if (!encoding) encoding = state.defaultEncoding - else if (encoding !== 'buffer' && !Buffer.isEncoding(encoding)) throw new ERR_UNKNOWN_ENCODING(encoding) - if (typeof cb !== 'function') cb = nop +function map(fn, options) { + if (typeof fn !== 'function') { + throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn) } - if (chunk === null) { - throw new ERR_STREAM_NULL_VALUES() - } else if (!state.objectMode) { - if (typeof chunk === 'string') { - if (state.decodeStrings !== false) { - chunk = Buffer.from(chunk, encoding) - encoding = 'buffer' - } - } else if (chunk instanceof Buffer) { - encoding = 'buffer' - } else if (Stream._isUint8Array(chunk)) { - chunk = Stream._uint8ArrayToBuffer(chunk) - encoding = 'buffer' - } else { - throw new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk) - } + if (options != null) { + validateObject(options, 'options') } - let err - if (state.ending) { - err = new ERR_STREAM_WRITE_AFTER_END() - } else if (state.destroyed) { - err = new ERR_STREAM_DESTROYED('write') + if ((options === null || options === undefined ? undefined : options.signal) != null) { + validateAbortSignal(options.signal, 'options.signal') } - if (err) { - process.nextTick(cb, err) - errorOrDestroy(stream, err, true) - return err + let concurrency = 1 + if ((options === null || options === undefined ? undefined : options.concurrency) != null) { + concurrency = MathFloor(options.concurrency) } - state.pendingcb++ - return writeOrBuffer(stream, state, chunk, encoding, cb) -} -Writable.prototype.write = function (chunk, encoding, cb) { - return _write(this, chunk, encoding, cb) === true -} -Writable.prototype.cork = function () { - this._writableState.corked++ -} -Writable.prototype.uncork = function () { - const state = this._writableState - if (state.corked) { - state.corked-- - if (!state.writing) clearBuffer(this, state) + let highWaterMark = concurrency - 1 + if ((options === null || options === undefined ? undefined : options.highWaterMark) != null) { + highWaterMark = MathFloor(options.highWaterMark) } -} -Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { - // node::ParseEncoding() requires lower case. 
- if (typeof encoding === 'string') encoding = StringPrototypeToLowerCase(encoding) - if (!Buffer.isEncoding(encoding)) throw new ERR_UNKNOWN_ENCODING(encoding) - this._writableState.defaultEncoding = encoding - return this -} - -// If we're already writing something, then just put this -// in the queue, and wait our turn. Otherwise, call _write -// If we return false, then we need a drain event, so set that flag. -function writeOrBuffer(stream, state, chunk, encoding, callback) { - const len = state.objectMode ? 1 : chunk.length - state.length += len - - // stream._write resets state.length - const ret = state.length < state.highWaterMark - // We must ensure that previous needDrain will not be reset to false. - if (!ret) state.needDrain = true - if (state.writing || state.corked || state.errored || !state.constructed) { - state.buffered.push({ - chunk, - encoding, - callback - }) - if (state.allBuffers && encoding !== 'buffer') { - state.allBuffers = false - } - if (state.allNoop && callback !== nop) { - state.allNoop = false + validateInteger(concurrency, 'options.concurrency', 1) + validateInteger(highWaterMark, 'options.highWaterMark', 0) + highWaterMark += concurrency + return async function* map() { + const signal = (__nccwpck_require__(5915).AbortSignalAny)( + [options === null || options === undefined ? undefined : options.signal].filter(Boolean) + ) + const stream = this + const queue = [] + const signalOpt = { + signal } - } else { - state.writelen = len - state.writecb = callback - state.writing = true - state.sync = true - stream._write(chunk, encoding, state.onwrite) - state.sync = false - } - - // Return false if errored or destroyed in order to break - // any synchronous while(stream.write(data)) loops. - return ret && !state.errored && !state.destroyed -} -function doWrite(stream, state, writev, len, chunk, encoding, cb) { - state.writelen = len - state.writecb = cb - state.writing = true - state.sync = true - if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write')) - else if (writev) stream._writev(chunk, state.onwrite) - else stream._write(chunk, encoding, state.onwrite) - state.sync = false -} -function onwriteError(stream, state, er, cb) { - --state.pendingcb - cb(er) - // Ensure callbacks are invoked even when autoDestroy is - // not enabled. Passing `er` here doesn't make sense since - // it's related to one specific write, not to the buffered - // writes. - errorBuffer(state) - // This can emit error, but error must always follow cb. - errorOrDestroy(stream, er) -} -function onwrite(stream, er) { - const state = stream._writableState - const sync = state.sync - const cb = state.writecb - if (typeof cb !== 'function') { - errorOrDestroy(stream, new ERR_MULTIPLE_CALLBACK()) - return - } - state.writing = false - state.writecb = null - state.length -= state.writelen - state.writelen = 0 - if (er) { - // Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364 - er.stack // eslint-disable-line no-unused-expressions - - if (!state.errored) { - state.errored = er + let next + let resume + let done = false + let cnt = 0 + function onCatch() { + done = true + afterItemProcessed() } - - // In case of duplex streams we need to notify the readable side of the - // error. 
- if (stream._readableState && !stream._readableState.errored) { - stream._readableState.errored = er + function afterItemProcessed() { + cnt -= 1 + maybeResume() } - if (sync) { - process.nextTick(onwriteError, stream, state, er, cb) - } else { - onwriteError(stream, state, er, cb) + function maybeResume() { + if (resume && !done && cnt < concurrency && queue.length < highWaterMark) { + resume() + resume = null + } } - } else { - if (state.buffered.length > state.bufferedIndex) { - clearBuffer(stream, state) + async function pump() { + try { + for await (let val of stream) { + if (done) { + return + } + if (signal.aborted) { + throw new AbortError() + } + try { + val = fn(val, signalOpt) + if (val === kEmpty) { + continue + } + val = PromiseResolve(val) + } catch (err) { + val = PromiseReject(err) + } + cnt += 1 + PromisePrototypeThen(val, afterItemProcessed, onCatch) + queue.push(val) + if (next) { + next() + next = null + } + if (!done && (queue.length >= highWaterMark || cnt >= concurrency)) { + await new Promise((resolve) => { + resume = resolve + }) + } + } + queue.push(kEof) + } catch (err) { + const val = PromiseReject(err) + PromisePrototypeThen(val, afterItemProcessed, onCatch) + queue.push(val) + } finally { + done = true + if (next) { + next() + next = null + } + } } - if (sync) { - // It is a common case that the callback passed to .write() is always - // the same. In that case, we do not schedule a new nextTick(), but - // rather just increase a counter, to improve performance and avoid - // memory allocations. - if (state.afterWriteTickInfo !== null && state.afterWriteTickInfo.cb === cb) { - state.afterWriteTickInfo.count++ - } else { - state.afterWriteTickInfo = { - count: 1, - cb, - stream, - state + pump() + try { + while (true) { + while (queue.length > 0) { + const val = await queue[0] + if (val === kEof) { + return + } + if (signal.aborted) { + throw new AbortError() + } + if (val !== kEmpty) { + yield val + } + queue.shift() + maybeResume() } - process.nextTick(afterWriteTick, state.afterWriteTickInfo) + await new Promise((resolve) => { + next = resolve + }) + } + } finally { + done = true + if (resume) { + resume() + resume = null } - } else { - afterWrite(stream, state, 1, cb) } - } -} -function afterWriteTick({ stream, state, count, cb }) { - state.afterWriteTickInfo = null - return afterWrite(stream, state, count, cb) + }.call(this) } -function afterWrite(stream, state, count, cb) { - const needDrain = !state.ending && !stream.destroyed && state.length === 0 && state.needDrain - if (needDrain) { - state.needDrain = false - stream.emit('drain') - } - while (count-- > 0) { - state.pendingcb-- - cb() - } - if (state.destroyed) { - errorBuffer(state) +function asIndexedPairs(options = undefined) { + if (options != null) { + validateObject(options, 'options') } - finishMaybe(stream, state) -} - -// If there's something in the buffer waiting, then invoke callbacks. -function errorBuffer(state) { - if (state.writing) { - return + if ((options === null || options === undefined ? undefined : options.signal) != null) { + validateAbortSignal(options.signal, 'options.signal') } - for (let n = state.bufferedIndex; n < state.buffered.length; ++n) { - var _state$errored - const { chunk, callback } = state.buffered[n] - const len = state.objectMode ? 1 : chunk.length - state.length -= len - callback( - (_state$errored = state.errored) !== null && _state$errored !== undefined - ? 
_state$errored - : new ERR_STREAM_DESTROYED('write') - ) + return async function* asIndexedPairs() { + let index = 0 + for await (const val of this) { + var _options$signal + if ( + options !== null && + options !== undefined && + (_options$signal = options.signal) !== null && + _options$signal !== undefined && + _options$signal.aborted + ) { + throw new AbortError({ + cause: options.signal.reason + }) + } + yield [index++, val] + } + }.call(this) +} +async function some(fn, options = undefined) { + for await (const unused of filter.call(this, fn, options)) { + return true } - const onfinishCallbacks = state[kOnFinished].splice(0) - for (let i = 0; i < onfinishCallbacks.length; i++) { - var _state$errored2 - onfinishCallbacks[i]( - (_state$errored2 = state.errored) !== null && _state$errored2 !== undefined - ? _state$errored2 - : new ERR_STREAM_DESTROYED('end') - ) + return false +} +async function every(fn, options = undefined) { + if (typeof fn !== 'function') { + throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn) } - resetBuffer(state) + // https://en.wikipedia.org/wiki/De_Morgan%27s_laws + return !(await some.call( + this, + async (...args) => { + return !(await fn(...args)) + }, + options + )) } - -// If there's something in the buffer waiting, then process it. -function clearBuffer(stream, state) { - if (state.corked || state.bufferProcessing || state.destroyed || !state.constructed) { - return +async function find(fn, options) { + for await (const result of filter.call(this, fn, options)) { + return result } - const { buffered, bufferedIndex, objectMode } = state - const bufferedLength = buffered.length - bufferedIndex - if (!bufferedLength) { - return + return undefined +} +async function forEach(fn, options) { + if (typeof fn !== 'function') { + throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn) } - let i = bufferedIndex - state.bufferProcessing = true - if (bufferedLength > 1 && stream._writev) { - state.pendingcb -= bufferedLength - 1 - const callback = state.allNoop - ? nop - : (err) => { - for (let n = i; n < buffered.length; ++n) { - buffered[n].callback(err) - } - } - // Make a copy of `buffered` if it's going to be used by `callback` above, - // since `doWrite` will mutate the array. - const chunks = state.allNoop && i === 0 ? buffered : ArrayPrototypeSlice(buffered, i) - chunks.allBuffers = state.allBuffers - doWrite(stream, state, true, state.length, chunks, '', callback) - resetBuffer(state) - } else { - do { - const { chunk, encoding, callback } = buffered[i] - buffered[i++] = null - const len = objectMode ? 
1 : chunk.length - doWrite(stream, state, false, len, chunk, encoding, callback) - } while (i < buffered.length && !state.writing) - if (i === buffered.length) { - resetBuffer(state) - } else if (i > 256) { - buffered.splice(0, i) - state.bufferedIndex = 0 - } else { - state.bufferedIndex = i + async function forEachFn(value, options) { + await fn(value, options) + return kEmpty + } + // eslint-disable-next-line no-unused-vars + for await (const unused of map.call(this, forEachFn, options)); +} +function filter(fn, options) { + if (typeof fn !== 'function') { + throw new ERR_INVALID_ARG_TYPE('fn', ['Function', 'AsyncFunction'], fn) + } + async function filterFn(value, options) { + if (await fn(value, options)) { + return value } + return kEmpty } - state.bufferProcessing = false + return map.call(this, filterFn, options) } -Writable.prototype._write = function (chunk, encoding, cb) { - if (this._writev) { - this._writev( - [ - { - chunk, - encoding - } - ], - cb - ) - } else { - throw new ERR_METHOD_NOT_IMPLEMENTED('_write()') + +// Specific to provide better error to reduce since the argument is only +// missing if the stream has no items in it - but the code is still appropriate +class ReduceAwareErrMissingArgs extends ERR_MISSING_ARGS { + constructor() { + super('reduce') + this.message = 'Reduce of an empty stream requires an initial value' } } -Writable.prototype._writev = null -Writable.prototype.end = function (chunk, encoding, cb) { - const state = this._writableState - if (typeof chunk === 'function') { - cb = chunk - chunk = null - encoding = null - } else if (typeof encoding === 'function') { - cb = encoding - encoding = null +async function reduce(reducer, initialValue, options) { + var _options$signal2 + if (typeof reducer !== 'function') { + throw new ERR_INVALID_ARG_TYPE('reducer', ['Function', 'AsyncFunction'], reducer) } - let err - if (chunk !== null && chunk !== undefined) { - const ret = _write(this, chunk, encoding) - if (ret instanceof Error) { - err = ret - } + if (options != null) { + validateObject(options, 'options') } - - // .end() fully uncorks. - if (state.corked) { - state.corked = 1 - this.uncork() + if ((options === null || options === undefined ? undefined : options.signal) != null) { + validateAbortSignal(options.signal, 'options.signal') } - if (err) { - // Do nothing... - } else if (!state.errored && !state.ending) { - // This is forgiving in terms of unnecessary calls to end() and can hide - // logic errors. However, usually such errors are harmless and causing a - // hard error can be disproportionately destructive. It is not always - // trivial for the user to determine whether end() needs to be called - // or not. 
- - state.ending = true - finishMaybe(this, state, true) - state.ended = true - } else if (state.finished) { - err = new ERR_STREAM_ALREADY_FINISHED('end') - } else if (state.destroyed) { - err = new ERR_STREAM_DESTROYED('end') + let hasInitialValue = arguments.length > 1 + if ( + options !== null && + options !== undefined && + (_options$signal2 = options.signal) !== null && + _options$signal2 !== undefined && + _options$signal2.aborted + ) { + const err = new AbortError(undefined, { + cause: options.signal.reason + }) + this.once('error', () => {}) // The error is already propagated + await finished(this.destroy(err)) + throw err } - if (typeof cb === 'function') { - if (err || state.finished) { - process.nextTick(cb, err) - } else { - state[kOnFinished].push(cb) + const ac = new AbortController() + const signal = ac.signal + if (options !== null && options !== undefined && options.signal) { + const opts = { + once: true, + [kWeakHandler]: this, + [kResistStopPropagation]: true } + options.signal.addEventListener('abort', () => ac.abort(), opts) } - return this -} -function needFinish(state) { - return ( - state.ending && - !state.destroyed && - state.constructed && - state.length === 0 && - !state.errored && - state.buffered.length === 0 && - !state.finished && - !state.writing && - !state.errorEmitted && - !state.closeEmitted - ) -} -function callFinal(stream, state) { - let called = false - function onFinish(err) { - if (called) { - errorOrDestroy(stream, err !== null && err !== undefined ? err : ERR_MULTIPLE_CALLBACK()) - return - } - called = true - state.pendingcb-- - if (err) { - const onfinishCallbacks = state[kOnFinished].splice(0) - for (let i = 0; i < onfinishCallbacks.length; i++) { - onfinishCallbacks[i](err) + let gotAnyItemFromStream = false + try { + for await (const value of this) { + var _options$signal3 + gotAnyItemFromStream = true + if ( + options !== null && + options !== undefined && + (_options$signal3 = options.signal) !== null && + _options$signal3 !== undefined && + _options$signal3.aborted + ) { + throw new AbortError() + } + if (!hasInitialValue) { + initialValue = value + hasInitialValue = true + } else { + initialValue = await reducer(initialValue, value, { + signal + }) } - errorOrDestroy(stream, err, state.sync) - } else if (needFinish(state)) { - state.prefinished = true - stream.emit('prefinish') - // Backwards compat. Don't check state.sync here. - // Some streams assume 'finish' will be emitted - // asynchronously relative to _final callback. - state.pendingcb++ - process.nextTick(finish, stream, state) } + if (!gotAnyItemFromStream && !hasInitialValue) { + throw new ReduceAwareErrMissingArgs() + } + } finally { + ac.abort() } - state.sync = true - state.pendingcb++ - try { - stream._final(onFinish) - } catch (err) { - onFinish(err) - } - state.sync = false + return initialValue } -function prefinish(stream, state) { - if (!state.prefinished && !state.finalCalled) { - if (typeof stream._final === 'function' && !state.destroyed) { - state.finalCalled = true - callFinal(stream, state) - } else { - state.prefinished = true - stream.emit('prefinish') +async function toArray(options) { + if (options != null) { + validateObject(options, 'options') + } + if ((options === null || options === undefined ? 
undefined : options.signal) != null) { + validateAbortSignal(options.signal, 'options.signal') + } + const result = [] + for await (const val of this) { + var _options$signal4 + if ( + options !== null && + options !== undefined && + (_options$signal4 = options.signal) !== null && + _options$signal4 !== undefined && + _options$signal4.aborted + ) { + throw new AbortError(undefined, { + cause: options.signal.reason + }) } + ArrayPrototypePush(result, val) } + return result } -function finishMaybe(stream, state, sync) { - if (needFinish(state)) { - prefinish(stream, state) - if (state.pendingcb === 0) { - if (sync) { - state.pendingcb++ - process.nextTick( - (stream, state) => { - if (needFinish(state)) { - finish(stream, state) - } else { - state.pendingcb-- - } - }, - stream, - state - ) - } else if (needFinish(state)) { - state.pendingcb++ - finish(stream, state) - } +function flatMap(fn, options) { + const values = map.call(this, fn, options) + return async function* flatMap() { + for await (const val of values) { + yield* val } - } + }.call(this) } -function finish(stream, state) { - state.pendingcb-- - state.finished = true - const onfinishCallbacks = state[kOnFinished].splice(0) - for (let i = 0; i < onfinishCallbacks.length; i++) { - onfinishCallbacks[i]() +function toIntegerOrInfinity(number) { + // We coerce here to align with the spec + // https://github.com/tc39/proposal-iterator-helpers/issues/169 + number = Number(number) + if (NumberIsNaN(number)) { + return 0 } - stream.emit('finish') - if (state.autoDestroy) { - // In case of duplex streams we need a way to detect - // if the readable side is ready for autoDestroy as well. - const rState = stream._readableState - const autoDestroy = - !rState || - (rState.autoDestroy && - // We don't expect the readable to ever 'end' - // if readable is explicitly set to false. - (rState.endEmitted || rState.readable === false)) - if (autoDestroy) { - stream.destroy() - } + if (number < 0) { + throw new ERR_OUT_OF_RANGE('number', '>= 0', number) } + return number } -ObjectDefineProperties(Writable.prototype, { - closed: { - __proto__: null, - get() { - return this._writableState ? this._writableState.closed : false +function drop(number, options = undefined) { + if (options != null) { + validateObject(options, 'options') + } + if ((options === null || options === undefined ? undefined : options.signal) != null) { + validateAbortSignal(options.signal, 'options.signal') + } + number = toIntegerOrInfinity(number) + return async function* drop() { + var _options$signal5 + if ( + options !== null && + options !== undefined && + (_options$signal5 = options.signal) !== null && + _options$signal5 !== undefined && + _options$signal5.aborted + ) { + throw new AbortError() } - }, - destroyed: { - __proto__: null, - get() { - return this._writableState ? this._writableState.destroyed : false - }, - set(value) { - // Backward compatibility, the user is explicitly managing destroyed. - if (this._writableState) { - this._writableState.destroyed = value + for await (const val of this) { + var _options$signal6 + if ( + options !== null && + options !== undefined && + (_options$signal6 = options.signal) !== null && + _options$signal6 !== undefined && + _options$signal6.aborted + ) { + throw new AbortError() } - } - }, - writable: { - __proto__: null, - get() { - const w = this._writableState - // w.writable === false means that this is part of a Duplex stream - // where the writable side was disabled upon construction. - // Compat. 
The user might manually disable writable side through - // deprecated setter. - return !!w && w.writable !== false && !w.destroyed && !w.errored && !w.ending && !w.ended - }, - set(val) { - // Backwards compatible. - if (this._writableState) { - this._writableState.writable = !!val + if (number-- <= 0) { + yield val } } - }, - writableFinished: { - __proto__: null, - get() { - return this._writableState ? this._writableState.finished : false - } - }, - writableObjectMode: { - __proto__: null, - get() { - return this._writableState ? this._writableState.objectMode : false - } - }, - writableBuffer: { - __proto__: null, - get() { - return this._writableState && this._writableState.getBuffer() - } - }, - writableEnded: { - __proto__: null, - get() { - return this._writableState ? this._writableState.ending : false - } - }, - writableNeedDrain: { - __proto__: null, - get() { - const wState = this._writableState - if (!wState) return false - return !wState.destroyed && !wState.ending && wState.needDrain - } - }, - writableHighWaterMark: { - __proto__: null, - get() { - return this._writableState && this._writableState.highWaterMark - } - }, - writableCorked: { - __proto__: null, - get() { - return this._writableState ? this._writableState.corked : 0 - } - }, - writableLength: { - __proto__: null, - get() { - return this._writableState && this._writableState.length - } - }, - errored: { - __proto__: null, - enumerable: false, - get() { - return this._writableState ? this._writableState.errored : null - } - }, - writableAborted: { - __proto__: null, - enumerable: false, - get: function () { - return !!( - this._writableState.writable !== false && - (this._writableState.destroyed || this._writableState.errored) && - !this._writableState.finished - ) - } + }.call(this) +} +function take(number, options = undefined) { + if (options != null) { + validateObject(options, 'options') } -}) -const destroy = destroyImpl.destroy -Writable.prototype.destroy = function (err, cb) { - const state = this._writableState - - // Invoke pending callbacks. - if (!state.destroyed && (state.bufferedIndex < state.buffered.length || state[kOnFinished].length)) { - process.nextTick(errorBuffer, state) + if ((options === null || options === undefined ? 
undefined : options.signal) != null) { + validateAbortSignal(options.signal, 'options.signal') } - destroy.call(this, err, cb) - return this -} -Writable.prototype._undestroy = destroyImpl.undestroy -Writable.prototype._destroy = function (err, cb) { - cb(err) -} -Writable.prototype[EE.captureRejectionSymbol] = function (err) { - this.destroy(err) -} -let webStreamsAdapters + number = toIntegerOrInfinity(number) + return async function* take() { + var _options$signal7 + if ( + options !== null && + options !== undefined && + (_options$signal7 = options.signal) !== null && + _options$signal7 !== undefined && + _options$signal7.aborted + ) { + throw new AbortError() + } + for await (const val of this) { + var _options$signal8 + if ( + options !== null && + options !== undefined && + (_options$signal8 = options.signal) !== null && + _options$signal8 !== undefined && + _options$signal8.aborted + ) { + throw new AbortError() + } + if (number-- > 0) { + yield val + } -// Lazy to avoid circular references -function lazyWebStreams() { - if (webStreamsAdapters === undefined) webStreamsAdapters = {} - return webStreamsAdapters + // Don't get another item from iterator in case we reached the end + if (number <= 0) { + return + } + } + }.call(this) } -Writable.fromWeb = function (writableStream, options) { - return lazyWebStreams().newStreamWritableFromWritableStream(writableStream, options) +module.exports.streamReturningOperators = { + asIndexedPairs: deprecate(asIndexedPairs, 'readable.asIndexedPairs will be removed in a future version.'), + drop, + filter, + flatMap, + map, + take, + compose } -Writable.toWeb = function (streamWritable) { - return lazyWebStreams().newWritableStreamFromStreamWritable(streamWritable) +module.exports.promiseReturningOperators = { + every, + forEach, + reduce, + toArray, + some, + find } /***/ }), -/***/ 95529: +/***/ 12875: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -/* eslint jsdoc/require-jsdoc: "error" */ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. 
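+// A minimal usage sketch (illustrative only; the `source`/`destination`
+// streams are assumed and are not part of this bundle): a PassThrough makes
+// a handy identity stage for tapping a pipeline:
+//   const pt = new PassThrough()
+//   pt.on('data', (chunk) => console.log('passed', chunk.length, 'bytes'))
+//   source.pipe(pt).pipe(destination)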
-const { - ArrayIsArray, - ArrayPrototypeIncludes, - ArrayPrototypeJoin, - ArrayPrototypeMap, - NumberIsInteger, - NumberIsNaN, - NumberMAX_SAFE_INTEGER, - NumberMIN_SAFE_INTEGER, - NumberParseInt, - ObjectPrototypeHasOwnProperty, - RegExpPrototypeExec, - String, - StringPrototypeToUpperCase, - StringPrototypeTrim -} = __nccwpck_require__(62141) -const { - hideStackFrames, - codes: { ERR_SOCKET_BAD_PORT, ERR_INVALID_ARG_TYPE, ERR_INVALID_ARG_VALUE, ERR_OUT_OF_RANGE, ERR_UNKNOWN_SIGNAL } -} = __nccwpck_require__(49939) -const { normalizeEncoding } = __nccwpck_require__(32504) -const { isAsyncFunction, isArrayBufferView } = (__nccwpck_require__(32504).types) -const signals = {} -/** - * @param {*} value - * @returns {boolean} - */ -function isInt32(value) { - return value === (value | 0) +const { ObjectSetPrototypeOf } = __nccwpck_require__(62646) +module.exports = PassThrough +const Transform = __nccwpck_require__(16994) +ObjectSetPrototypeOf(PassThrough.prototype, Transform.prototype) +ObjectSetPrototypeOf(PassThrough, Transform) +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options) + Transform.call(this, options) } - -/** - * @param {*} value - * @returns {boolean} - */ -function isUint32(value) { - return value === value >>> 0 +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk) } -const octalReg = /^[0-7]+$/ -const modeDesc = 'must be a 32-bit unsigned integer or an octal string' -/** - * Parse and validate values that will be converted into mode_t (the S_* - * constants). Only valid numbers and octal strings are allowed. They could be - * converted to 32-bit unsigned integers or non-negative signed integers in the - * C++ land, but any value higher than 0o777 will result in platform-specific - * behaviors. - * @param {*} value Values to be validated - * @param {string} name Name of the argument - * @param {number} [def] If specified, will be returned for invalid values - * @returns {number} - */ -function parseFileMode(value, name, def) { - if (typeof value === 'undefined') { - value = def - } - if (typeof value === 'string') { - if (RegExpPrototypeExec(octalReg, value) === null) { - throw new ERR_INVALID_ARG_VALUE(name, value, modeDesc) - } - value = NumberParseInt(value, 8) - } - validateUint32(value, name) - return value -} -/** - * @callback validateInteger - * @param {*} value - * @param {string} name - * @param {number} [min] - * @param {number} [max] - * @returns {asserts value is number} - */ +/***/ }), -/** @type {validateInteger} */ -const validateInteger = hideStackFrames((value, name, min = NumberMIN_SAFE_INTEGER, max = NumberMAX_SAFE_INTEGER) => { - if (typeof value !== 'number') throw new ERR_INVALID_ARG_TYPE(name, 'number', value) - if (!NumberIsInteger(value)) throw new ERR_OUT_OF_RANGE(name, 'an integer', value) - if (value < min || value > max) throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value) -}) +/***/ 77866: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -/** - * @callback validateInt32 - * @param {*} value - * @param {string} name - * @param {number} [min] - * @param {number} [max] - * @returns {asserts value is number} - */ +/* replacement start */ -/** @type {validateInt32} */ -const validateInt32 = hideStackFrames((value, name, min = -2147483648, max = 2147483647) => { - // The defaults for min and max correspond to the limits of 32-bit integers. 
- if (typeof value !== 'number') { - throw new ERR_INVALID_ARG_TYPE(name, 'number', value) - } - if (!NumberIsInteger(value)) { - throw new ERR_OUT_OF_RANGE(name, 'an integer', value) - } - if (value < min || value > max) { - throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value) - } -}) +const process = __nccwpck_require__(76341) -/** - * @callback validateUint32 - * @param {*} value - * @param {string} name - * @param {number|boolean} [positive=false] - * @returns {asserts value is number} - */ +/* replacement end */ +// Ported from https://github.com/mafintosh/pump with +// permission from the author, Mathias Buus (@mafintosh). -/** @type {validateUint32} */ -const validateUint32 = hideStackFrames((value, name, positive = false) => { - if (typeof value !== 'number') { - throw new ERR_INVALID_ARG_TYPE(name, 'number', value) +;('use strict') +const { ArrayIsArray, Promise, SymbolAsyncIterator, SymbolDispose } = __nccwpck_require__(62646) +const eos = __nccwpck_require__(51604) +const { once } = __nccwpck_require__(5915) +const destroyImpl = __nccwpck_require__(80064) +const Duplex = __nccwpck_require__(67799) +const { + aggregateTwoErrors, + codes: { + ERR_INVALID_ARG_TYPE, + ERR_INVALID_RETURN_VALUE, + ERR_MISSING_ARGS, + ERR_STREAM_DESTROYED, + ERR_STREAM_PREMATURE_CLOSE + }, + AbortError +} = __nccwpck_require__(72184) +const { validateFunction, validateAbortSignal } = __nccwpck_require__(89371) +const { + isIterable, + isReadable, + isReadableNodeStream, + isNodeStream, + isTransformStream, + isWebStream, + isReadableStream, + isReadableFinished +} = __nccwpck_require__(58650) +const AbortController = globalThis.AbortController || (__nccwpck_require__(71935).AbortController) +let PassThrough +let Readable +let addAbortListener +function destroyer(stream, reading, writing) { + let finished = false + stream.on('close', () => { + finished = true + }) + const cleanup = eos( + stream, + { + readable: reading, + writable: writing + }, + (err) => { + finished = !err + } + ) + return { + destroy: (err) => { + if (finished) return + finished = true + destroyImpl.destroyer(stream, err || new ERR_STREAM_DESTROYED('pipe')) + }, + cleanup } - if (!NumberIsInteger(value)) { - throw new ERR_OUT_OF_RANGE(name, 'an integer', value) +} +function popCallback(streams) { + // Streams should never be an empty array. It should always contain at least + // a single stream. Therefore optimize for the average case instead of + // checking for length === 0 as well. + validateFunction(streams[streams.length - 1], 'streams[stream.length - 1]') + return streams.pop() +} +function makeAsyncIterable(val) { + if (isIterable(val)) { + return val + } else if (isReadableNodeStream(val)) { + // Legacy streams are not Iterable. + return fromReadable(val) } - const min = positive ? 
1 : 0 - // 2 ** 32 === 4294967296 - const max = 4294967295 - if (value < min || value > max) { - throw new ERR_OUT_OF_RANGE(name, `>= ${min} && <= ${max}`, value) + throw new ERR_INVALID_ARG_TYPE('val', ['Readable', 'Iterable', 'AsyncIterable'], val) +} +async function* fromReadable(val) { + if (!Readable) { + Readable = __nccwpck_require__(83176) } -}) - -/** - * @callback validateString - * @param {*} value - * @param {string} name - * @returns {asserts value is string} - */ - -/** @type {validateString} */ -function validateString(value, name) { - if (typeof value !== 'string') throw new ERR_INVALID_ARG_TYPE(name, 'string', value) + yield* Readable.prototype[SymbolAsyncIterator].call(val) } - -/** - * @callback validateNumber - * @param {*} value - * @param {string} name - * @param {number} [min] - * @param {number} [max] - * @returns {asserts value is number} - */ - -/** @type {validateNumber} */ -function validateNumber(value, name, min = undefined, max) { - if (typeof value !== 'number') throw new ERR_INVALID_ARG_TYPE(name, 'number', value) - if ( - (min != null && value < min) || - (max != null && value > max) || - ((min != null || max != null) && NumberIsNaN(value)) - ) { - throw new ERR_OUT_OF_RANGE( - name, - `${min != null ? `>= ${min}` : ''}${min != null && max != null ? ' && ' : ''}${max != null ? `<= ${max}` : ''}`, - value - ) +async function pumpToNode(iterable, writable, finish, { end }) { + let error + let onresolve = null + const resume = (err) => { + if (err) { + error = err + } + if (onresolve) { + const callback = onresolve + onresolve = null + callback() + } + } + const wait = () => + new Promise((resolve, reject) => { + if (error) { + reject(error) + } else { + onresolve = () => { + if (error) { + reject(error) + } else { + resolve() + } + } + } + }) + writable.on('drain', resume) + const cleanup = eos( + writable, + { + readable: false + }, + resume + ) + try { + if (writable.writableNeedDrain) { + await wait() + } + for await (const chunk of iterable) { + if (!writable.write(chunk)) { + await wait() + } + } + if (end) { + writable.end() + await wait() + } + finish() + } catch (err) { + finish(error !== err ? aggregateTwoErrors(error, err) : err) + } finally { + cleanup() + writable.off('drain', resume) } } - -/** - * @callback validateOneOf - * @template T - * @param {T} value - * @param {string} name - * @param {T[]} oneOf - */ - -/** @type {validateOneOf} */ -const validateOneOf = hideStackFrames((value, name, oneOf) => { - if (!ArrayPrototypeIncludes(oneOf, value)) { - const allowed = ArrayPrototypeJoin( - ArrayPrototypeMap(oneOf, (v) => (typeof v === 'string' ? 
`'${v}'` : String(v))), - ', ' - ) - const reason = 'must be one of: ' + allowed - throw new ERR_INVALID_ARG_VALUE(name, value, reason) +async function pumpToWeb(readable, writable, finish, { end }) { + if (isTransformStream(writable)) { + writable = writable.writable + } + // https://streams.spec.whatwg.org/#example-manual-write-with-backpressure + const writer = writable.getWriter() + try { + for await (const chunk of readable) { + await writer.ready + writer.write(chunk).catch(() => {}) + } + await writer.ready + if (end) { + await writer.close() + } + finish() + } catch (err) { + try { + await writer.abort(err) + finish(err) + } catch (err) { + finish(err) + } } -}) - -/** - * @callback validateBoolean - * @param {*} value - * @param {string} name - * @returns {asserts value is boolean} - */ - -/** @type {validateBoolean} */ -function validateBoolean(value, name) { - if (typeof value !== 'boolean') throw new ERR_INVALID_ARG_TYPE(name, 'boolean', value) } - -/** - * @param {any} options - * @param {string} key - * @param {boolean} defaultValue - * @returns {boolean} - */ -function getOwnPropertyValueOrDefault(options, key, defaultValue) { - return options == null || !ObjectPrototypeHasOwnProperty(options, key) ? defaultValue : options[key] +function pipeline(...streams) { + return pipelineImpl(streams, once(popCallback(streams))) } - -/** - * @callback validateObject - * @param {*} value - * @param {string} name - * @param {{ - * allowArray?: boolean, - * allowFunction?: boolean, - * nullable?: boolean - * }} [options] - */ - -/** @type {validateObject} */ -const validateObject = hideStackFrames((value, name, options = null) => { - const allowArray = getOwnPropertyValueOrDefault(options, 'allowArray', false) - const allowFunction = getOwnPropertyValueOrDefault(options, 'allowFunction', false) - const nullable = getOwnPropertyValueOrDefault(options, 'nullable', false) - if ( - (!nullable && value === null) || - (!allowArray && ArrayIsArray(value)) || - (typeof value !== 'object' && (!allowFunction || typeof value !== 'function')) - ) { - throw new ERR_INVALID_ARG_TYPE(name, 'Object', value) +function pipelineImpl(streams, callback, opts) { + if (streams.length === 1 && ArrayIsArray(streams[0])) { + streams = streams[0] } -}) - -/** - * @callback validateDictionary - We are using the Web IDL Standard definition - * of "dictionary" here, which means any value - * whose Type is either Undefined, Null, or - * Object (which includes functions). - * @param {*} value - * @param {string} name - * @see https://webidl.spec.whatwg.org/#es-dictionary - * @see https://tc39.es/ecma262/#table-typeof-operator-results - */ - -/** @type {validateDictionary} */ -const validateDictionary = hideStackFrames((value, name) => { - if (value != null && typeof value !== 'object' && typeof value !== 'function') { - throw new ERR_INVALID_ARG_TYPE(name, 'a dictionary', value) + if (streams.length < 2) { + throw new ERR_MISSING_ARGS('streams') } -}) - -/** - * @callback validateArray - * @param {*} value - * @param {string} name - * @param {number} [minLength] - * @returns {asserts value is any[]} - */ + const ac = new AbortController() + const signal = ac.signal + const outerSignal = opts === null || opts === undefined ? 
undefined : opts.signal -/** @type {validateArray} */ -const validateArray = hideStackFrames((value, name, minLength = 0) => { - if (!ArrayIsArray(value)) { - throw new ERR_INVALID_ARG_TYPE(name, 'Array', value) + // Need to cleanup event listeners if last stream is readable + // https://github.com/nodejs/node/issues/35452 + const lastStreamCleanup = [] + validateAbortSignal(outerSignal, 'options.signal') + function abort() { + finishImpl(new AbortError()) } - if (value.length < minLength) { - const reason = `must be longer than ${minLength}` - throw new ERR_INVALID_ARG_VALUE(name, value, reason) + addAbortListener = addAbortListener || (__nccwpck_require__(5915).addAbortListener) + let disposable + if (outerSignal) { + disposable = addAbortListener(outerSignal, abort) } -}) - -/** - * @callback validateStringArray - * @param {*} value - * @param {string} name - * @returns {asserts value is string[]} - */ - -/** @type {validateStringArray} */ -function validateStringArray(value, name) { - validateArray(value, name) - for (let i = 0; i < value.length; i++) { - validateString(value[i], `${name}[${i}]`) - } -} - -/** - * @callback validateBooleanArray - * @param {*} value - * @param {string} name - * @returns {asserts value is boolean[]} - */ - -/** @type {validateBooleanArray} */ -function validateBooleanArray(value, name) { - validateArray(value, name) - for (let i = 0; i < value.length; i++) { - validateBoolean(value[i], `${name}[${i}]`) + let error + let value + const destroys = [] + let finishCount = 0 + function finish(err) { + finishImpl(err, --finishCount === 0) } -} - -/** - * @callback validateAbortSignalArray - * @param {*} value - * @param {string} name - * @returns {asserts value is AbortSignal[]} - */ - -/** @type {validateAbortSignalArray} */ -function validateAbortSignalArray(value, name) { - validateArray(value, name) - for (let i = 0; i < value.length; i++) { - const signal = value[i] - const indexedName = `${name}[${i}]` - if (signal == null) { - throw new ERR_INVALID_ARG_TYPE(indexedName, 'AbortSignal', signal) + function finishImpl(err, final) { + var _disposable + if (err && (!error || error.code === 'ERR_STREAM_PREMATURE_CLOSE')) { + error = err + } + if (!error && !final) { + return + } + while (destroys.length) { + destroys.shift()(error) + } + ;(_disposable = disposable) === null || _disposable === undefined ? undefined : _disposable[SymbolDispose]() + ac.abort() + if (final) { + if (!error) { + lastStreamCleanup.forEach((fn) => fn()) + } + process.nextTick(callback, error, value) } - validateAbortSignal(signal, indexedName) } -} + let ret + for (let i = 0; i < streams.length; i++) { + const stream = streams[i] + const reading = i < streams.length - 1 + const writing = i > 0 + const end = reading || (opts === null || opts === undefined ? 
undefined : opts.end) !== false + const isLastStream = i === streams.length - 1 + if (isNodeStream(stream)) { + if (end) { + const { destroy, cleanup } = destroyer(stream, reading, writing) + destroys.push(destroy) + if (isReadable(stream) && isLastStream) { + lastStreamCleanup.push(cleanup) + } + } -/** - * @param {*} signal - * @param {string} [name='signal'] - * @returns {asserts signal is keyof signals} - */ -function validateSignalName(signal, name = 'signal') { - validateString(signal, name) - if (signals[signal] === undefined) { - if (signals[StringPrototypeToUpperCase(signal)] !== undefined) { - throw new ERR_UNKNOWN_SIGNAL(signal + ' (signals must use all capital letters)') + // Catch stream errors that occur after pipe/pump has completed. + function onError(err) { + if (err && err.name !== 'AbortError' && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { + finish(err) + } + } + stream.on('error', onError) + if (isReadable(stream) && isLastStream) { + lastStreamCleanup.push(() => { + stream.removeListener('error', onError) + }) + } } - throw new ERR_UNKNOWN_SIGNAL(signal) - } -} + if (i === 0) { + if (typeof stream === 'function') { + ret = stream({ + signal + }) + if (!isIterable(ret)) { + throw new ERR_INVALID_RETURN_VALUE('Iterable, AsyncIterable or Stream', 'source', ret) + } + } else if (isIterable(stream) || isReadableNodeStream(stream) || isTransformStream(stream)) { + ret = stream + } else { + ret = Duplex.from(stream) + } + } else if (typeof stream === 'function') { + if (isTransformStream(ret)) { + var _ret + ret = makeAsyncIterable((_ret = ret) === null || _ret === undefined ? undefined : _ret.readable) + } else { + ret = makeAsyncIterable(ret) + } + ret = stream(ret, { + signal + }) + if (reading) { + if (!isIterable(ret, true)) { + throw new ERR_INVALID_RETURN_VALUE('AsyncIterable', `transform[${i - 1}]`, ret) + } + } else { + var _ret2 + if (!PassThrough) { + PassThrough = __nccwpck_require__(12875) + } -/** - * @callback validateBuffer - * @param {*} buffer - * @param {string} [name='buffer'] - * @returns {asserts buffer is ArrayBufferView} - */ + // If the last argument to pipeline is not a stream + // we must create a proxy stream so that pipeline(...) + // always returns a stream which can be further + // composed through `.pipe(stream)`. -/** @type {validateBuffer} */ -const validateBuffer = hideStackFrames((buffer, name = 'buffer') => { - if (!isArrayBufferView(buffer)) { - throw new ERR_INVALID_ARG_TYPE(name, ['Buffer', 'TypedArray', 'DataView'], buffer) - } -}) + const pt = new PassThrough({ + objectMode: true + }) -/** - * @param {string} data - * @param {string} encoding - */ -function validateEncoding(data, encoding) { - const normalizedEncoding = normalizeEncoding(encoding) - const length = data.length - if (normalizedEncoding === 'hex' && length % 2 !== 0) { - throw new ERR_INVALID_ARG_VALUE('encoding', encoding, `is invalid for data of length ${length}`) + // Handle Promises/A+ spec, `then` could be a getter that throws on + // second use. + const then = (_ret2 = ret) === null || _ret2 === undefined ? 
undefined : _ret2.then + if (typeof then === 'function') { + finishCount++ + then.call( + ret, + (val) => { + value = val + if (val != null) { + pt.write(val) + } + if (end) { + pt.end() + } + process.nextTick(finish) + }, + (err) => { + pt.destroy(err) + process.nextTick(finish, err) + } + ) + } else if (isIterable(ret, true)) { + finishCount++ + pumpToNode(ret, pt, finish, { + end + }) + } else if (isReadableStream(ret) || isTransformStream(ret)) { + const toRead = ret.readable || ret + finishCount++ + pumpToNode(toRead, pt, finish, { + end + }) + } else { + throw new ERR_INVALID_RETURN_VALUE('AsyncIterable or Promise', 'destination', ret) + } + ret = pt + const { destroy, cleanup } = destroyer(ret, false, true) + destroys.push(destroy) + if (isLastStream) { + lastStreamCleanup.push(cleanup) + } + } + } else if (isNodeStream(stream)) { + if (isReadableNodeStream(ret)) { + finishCount += 2 + const cleanup = pipe(ret, stream, finish, { + end + }) + if (isReadable(stream) && isLastStream) { + lastStreamCleanup.push(cleanup) + } + } else if (isTransformStream(ret) || isReadableStream(ret)) { + const toRead = ret.readable || ret + finishCount++ + pumpToNode(toRead, stream, finish, { + end + }) + } else if (isIterable(ret)) { + finishCount++ + pumpToNode(ret, stream, finish, { + end + }) + } else { + throw new ERR_INVALID_ARG_TYPE( + 'val', + ['Readable', 'Iterable', 'AsyncIterable', 'ReadableStream', 'TransformStream'], + ret + ) + } + ret = stream + } else if (isWebStream(stream)) { + if (isReadableNodeStream(ret)) { + finishCount++ + pumpToWeb(makeAsyncIterable(ret), stream, finish, { + end + }) + } else if (isReadableStream(ret) || isIterable(ret)) { + finishCount++ + pumpToWeb(ret, stream, finish, { + end + }) + } else if (isTransformStream(ret)) { + finishCount++ + pumpToWeb(ret.readable, stream, finish, { + end + }) + } else { + throw new ERR_INVALID_ARG_TYPE( + 'val', + ['Readable', 'Iterable', 'AsyncIterable', 'ReadableStream', 'TransformStream'], + ret + ) + } + ret = stream + } else { + ret = Duplex.from(stream) + } } -} - -/** - * Check that the port number is not NaN when coerced to a number, - * is an integer and that it falls within the legal range of port numbers. 
- * @param {*} port - * @param {string} [name='Port'] - * @param {boolean} [allowZero=true] - * @returns {number} - */ -function validatePort(port, name = 'Port', allowZero = true) { if ( - (typeof port !== 'number' && typeof port !== 'string') || - (typeof port === 'string' && StringPrototypeTrim(port).length === 0) || - +port !== +port >>> 0 || - port > 0xffff || - (port === 0 && !allowZero) + (signal !== null && signal !== undefined && signal.aborted) || + (outerSignal !== null && outerSignal !== undefined && outerSignal.aborted) ) { - throw new ERR_SOCKET_BAD_PORT(name, port, allowZero) - } - return port | 0 -} - -/** - * @callback validateAbortSignal - * @param {*} signal - * @param {string} name - */ - -/** @type {validateAbortSignal} */ -const validateAbortSignal = hideStackFrames((signal, name) => { - if (signal !== undefined && (signal === null || typeof signal !== 'object' || !('aborted' in signal))) { - throw new ERR_INVALID_ARG_TYPE(name, 'AbortSignal', signal) - } -}) - -/** - * @callback validateFunction - * @param {*} value - * @param {string} name - * @returns {asserts value is Function} - */ - -/** @type {validateFunction} */ -const validateFunction = hideStackFrames((value, name) => { - if (typeof value !== 'function') throw new ERR_INVALID_ARG_TYPE(name, 'Function', value) -}) - -/** - * @callback validatePlainFunction - * @param {*} value - * @param {string} name - * @returns {asserts value is Function} - */ - -/** @type {validatePlainFunction} */ -const validatePlainFunction = hideStackFrames((value, name) => { - if (typeof value !== 'function' || isAsyncFunction(value)) throw new ERR_INVALID_ARG_TYPE(name, 'Function', value) -}) - -/** - * @callback validateUndefined - * @param {*} value - * @param {string} name - * @returns {asserts value is undefined} - */ - -/** @type {validateUndefined} */ -const validateUndefined = hideStackFrames((value, name) => { - if (value !== undefined) throw new ERR_INVALID_ARG_TYPE(name, 'undefined', value) -}) - -/** - * @template T - * @param {T} value - * @param {string} name - * @param {T[]} union - */ -function validateUnion(value, name, union) { - if (!ArrayPrototypeIncludes(union, value)) { - throw new ERR_INVALID_ARG_TYPE(name, `('${ArrayPrototypeJoin(union, '|')}')`, value) + process.nextTick(abort) } + return ret } +function pipe(src, dst, finish, { end }) { + let ended = false + dst.on('close', () => { + if (!ended) { + // Finish if the destination closes before the source has completed. + finish(new ERR_STREAM_PREMATURE_CLOSE()) + } + }) + src.pipe(dst, { + end: false + }) // If end is true we already will have a listener to end dst. -/* - The rules for the Link header field are described here: - https://www.rfc-editor.org/rfc/rfc8288.html#section-3 - - This regex validates any string surrounded by angle brackets - (not necessarily a valid URI reference) followed by zero or more - link-params separated by semicolons. -*/ -const linkValueRegExp = /^(?:<[^>]*>)(?:\s*;\s*[^;"\s]+(?:=(")?[^;"\s]*\1)?)*$/ - -/** - * @param {any} value - * @param {string} name - */ -function validateLinkHeaderFormat(value, name) { - if (typeof value === 'undefined' || !RegExpPrototypeExec(linkValueRegExp, value)) { - throw new ERR_INVALID_ARG_VALUE( - name, - value, - 'must be an array or string of format "; rel=preload; as=style"' - ) - } -} + if (end) { + // Compat. Before node v10.12.0 stdio used to throw an error so + // pipe() did/does not end() stdio destinations. + // Now they allow it but "secretly" don't close the underlying fd. 
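+    // For example (an illustrative sketch, not from this bundle): after
+    // `pipeline(readable, process.stdout, cb)`, the source ending triggers
+    // `process.stdout.end()` below, which marks stdout finished without
+    // actually closing file descriptor 1.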
-/** - * @param {any} hints - * @return {string} - */ -function validateLinkHeaderValue(hints) { - if (typeof hints === 'string') { - validateLinkHeaderFormat(hints, 'hints') - return hints - } else if (ArrayIsArray(hints)) { - const hintsLength = hints.length - let result = '' - if (hintsLength === 0) { - return result + function endFn() { + ended = true + dst.end() } - for (let i = 0; i < hintsLength; i++) { - const link = hints[i] - validateLinkHeaderFormat(link, 'hints') - result += link - if (i !== hintsLength - 1) { - result += ', ' - } + if (isReadableFinished(src)) { + // End the destination if the source has already ended. + process.nextTick(endFn) + } else { + src.once('end', endFn) } - return result + } else { + finish() } - throw new ERR_INVALID_ARG_VALUE( - 'hints', - hints, - 'must be an array or string of format "; rel=preload; as=style"' + eos( + src, + { + readable: true, + writable: false + }, + (err) => { + const rState = src._readableState + if ( + err && + err.code === 'ERR_STREAM_PREMATURE_CLOSE' && + rState && + rState.ended && + !rState.errored && + !rState.errorEmitted + ) { + // Some readable streams will emit 'close' before 'end'. However, since + // this is on the readable side 'end' should still be emitted if the + // stream has been ended and no error emitted. This should be allowed in + // favor of backwards compatibility. Since the stream is piped to a + // destination this should not result in any observable difference. + // We don't need to check if this is a writable premature close since + // eos will only fail with premature close on the reading side for + // duplex streams. + src.once('end', finish).once('error', finish) + } else { + finish(err) + } + } + ) + return eos( + dst, + { + readable: false, + writable: true + }, + finish ) } module.exports = { - isInt32, - isUint32, - parseFileMode, - validateArray, - validateStringArray, - validateBooleanArray, - validateAbortSignalArray, - validateBoolean, - validateBuffer, - validateDictionary, - validateEncoding, - validateFunction, - validateInt32, - validateInteger, - validateNumber, - validateObject, - validateOneOf, - validatePlainFunction, - validatePort, - validateSignalName, - validateString, - validateUint32, - validateUndefined, - validateUnion, - validateAbortSignal, - validateLinkHeaderValue + pipelineImpl, + pipeline } /***/ }), -/***/ 49939: +/***/ 83176: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; - - -const { format, inspect, AggregateError: CustomAggregateError } = __nccwpck_require__(32504) - -/* - This file is a reduced and adapted version of the main lib/internal/errors.js file defined at +/* replacement start */ - https://github.com/nodejs/node/blob/master/lib/internal/errors.js +const process = __nccwpck_require__(76341) - Don't try to replace with the original file and keep it up to date (starting from E(...) definitions) - with the upstream file. -*/ +/* replacement end */ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. -const AggregateError = globalThis.AggregateError || CustomAggregateError -const kIsNodeError = Symbol('kIsNodeError') -const kTypes = [ - 'string', - 'function', - 'number', - 'object', - // Accept 'Function' and 'Object' as alternative to the lower cased version. - 'Function', - 'Object', - 'boolean', - 'bigint', - 'symbol' -] -const classRegExp = /^([A-Z][a-z0-9]*)+$/ -const nodeInternalPrefix = '__node_internal_' -const codes = {} -function assert(value, message) { - if (!value) { - throw new codes.ERR_INTERNAL_ASSERTION(message) - } -} +;('use strict') +const { + ArrayPrototypeIndexOf, + NumberIsInteger, + NumberIsNaN, + NumberParseInt, + ObjectDefineProperties, + ObjectKeys, + ObjectSetPrototypeOf, + Promise, + SafeSet, + SymbolAsyncDispose, + SymbolAsyncIterator, + Symbol +} = __nccwpck_require__(62646) +module.exports = Readable +Readable.ReadableState = ReadableState +const { EventEmitter: EE } = __nccwpck_require__(82361) +const { Stream, prependListener } = __nccwpck_require__(27722) +const { Buffer } = __nccwpck_require__(14300) +const { addAbortSignal } = __nccwpck_require__(69832) +const eos = __nccwpck_require__(51604) +let debug = (__nccwpck_require__(5915).debuglog)('stream', (fn) => { + debug = fn +}) +const BufferList = __nccwpck_require__(92726) +const destroyImpl = __nccwpck_require__(80064) +const { getHighWaterMark, getDefaultHighWaterMark } = __nccwpck_require__(3813) +const { + aggregateTwoErrors, + codes: { + ERR_INVALID_ARG_TYPE, + ERR_METHOD_NOT_IMPLEMENTED, + ERR_OUT_OF_RANGE, + ERR_STREAM_PUSH_AFTER_EOF, + ERR_STREAM_UNSHIFT_AFTER_END_EVENT + }, + AbortError +} = __nccwpck_require__(72184) +const { validateObject } = __nccwpck_require__(89371) +const kPaused = Symbol('kPaused') +const { StringDecoder } = __nccwpck_require__(71576) +const from = __nccwpck_require__(80169) +ObjectSetPrototypeOf(Readable.prototype, Stream.prototype) +ObjectSetPrototypeOf(Readable, Stream) +const nop = () => {} +const { errorOrDestroy } = destroyImpl +const kObjectMode = 1 << 0 +const kEnded = 1 << 1 +const kEndEmitted = 1 << 2 +const kReading = 1 << 3 +const kConstructed = 1 << 4 +const kSync = 1 << 5 +const kNeedReadable = 1 << 6 +const kEmittedReadable = 1 << 7 +const kReadableListening = 1 << 8 +const kResumeScheduled = 1 << 9 +const kErrorEmitted = 1 << 10 +const kEmitClose = 1 << 11 +const kAutoDestroy = 1 << 12 +const kDestroyed = 1 << 13 +const kClosed = 1 << 14 +const kCloseEmitted = 1 << 15 
+const kMultiAwaitDrain = 1 << 16 +const kReadingMore = 1 << 17 +const kDataEmitted = 1 << 18 -// Only use this for integers! Decimal numbers do not work with this function. -function addNumericalSeparator(val) { - let res = '' - let i = val.length - const start = val[0] === '-' ? 1 : 0 - for (; i >= start + 4; i -= 3) { - res = `_${val.slice(i - 3, i)}${res}` +// TODO(benjamingr) it is likely slower to do it this way than with free functions +function makeBitMapDescriptor(bit) { + return { + enumerable: false, + get() { + return (this.state & bit) !== 0 + }, + set(value) { + if (value) this.state |= bit + else this.state &= ~bit + } } - return `${val.slice(0, i)}${res}` } -function getMessage(key, msg, args) { - if (typeof msg === 'function') { - assert( - msg.length <= args.length, - // Default options do not count. - `Code: ${key}; The provided arguments length (${args.length}) does not match the required ones (${msg.length}).` - ) - return msg(...args) - } - const expectedLength = (msg.match(/%[dfijoOs]/g) || []).length - assert( - expectedLength === args.length, - `Code: ${key}; The provided arguments length (${args.length}) does not match the required ones (${expectedLength}).` - ) - if (args.length === 0) { - return msg +ObjectDefineProperties(ReadableState.prototype, { + objectMode: makeBitMapDescriptor(kObjectMode), + ended: makeBitMapDescriptor(kEnded), + endEmitted: makeBitMapDescriptor(kEndEmitted), + reading: makeBitMapDescriptor(kReading), + // Stream is still being constructed and cannot be + // destroyed until construction finished or failed. + // Async construction is opt in, therefore we start as + // constructed. + constructed: makeBitMapDescriptor(kConstructed), + // A flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + sync: makeBitMapDescriptor(kSync), + // Whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + needReadable: makeBitMapDescriptor(kNeedReadable), + emittedReadable: makeBitMapDescriptor(kEmittedReadable), + readableListening: makeBitMapDescriptor(kReadableListening), + resumeScheduled: makeBitMapDescriptor(kResumeScheduled), + // True if the error was already emitted and should not be thrown again. + errorEmitted: makeBitMapDescriptor(kErrorEmitted), + emitClose: makeBitMapDescriptor(kEmitClose), + autoDestroy: makeBitMapDescriptor(kAutoDestroy), + // Has it been destroyed. + destroyed: makeBitMapDescriptor(kDestroyed), + // Indicates whether the stream has finished destroying. + closed: makeBitMapDescriptor(kClosed), + // True if close has been emitted or would have been emitted + // depending on emitClose. + closeEmitted: makeBitMapDescriptor(kCloseEmitted), + multiAwaitDrain: makeBitMapDescriptor(kMultiAwaitDrain), + // If true, a maybeReadMore has been scheduled. + readingMore: makeBitMapDescriptor(kReadingMore), + dataEmitted: makeBitMapDescriptor(kDataEmitted) +}) +function ReadableState(options, stream, isDuplex) { + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. 
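+  // For example (an illustrative sketch, not part of the bundled source):
+  //   new Duplex({ readableObjectMode: true, writableHighWaterMark: 1024 })
+  // puts only the readable side in object mode while giving the writable
+  // side its own buffering threshold.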
+ if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof __nccwpck_require__(67799) + + // Bit map field to store ReadableState more effciently with 1 bit per field + // instead of a V8 slot per field. + this.state = kEmitClose | kAutoDestroy | kConstructed | kSync + // Object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away. + if (options && options.objectMode) this.state |= kObjectMode + if (isDuplex && options && options.readableObjectMode) this.state |= kObjectMode + + // The point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + this.highWaterMark = options + ? getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex) + : getDefaultHighWaterMark(false) + + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift(). + this.buffer = new BufferList() + this.length = 0 + this.pipes = [] + this.flowing = null + this[kPaused] = null + + // Should close be emitted on destroy. Defaults to true. + if (options && options.emitClose === false) this.state &= ~kEmitClose + + // Should .destroy() be called after 'end' (and potentially 'finish'). + if (options && options.autoDestroy === false) this.state &= ~kAutoDestroy + + // Indicates whether the stream has errored. When true no further + // _read calls, 'data' or 'readable' events should occur. This is needed + // since when autoDestroy is disabled we need a way to tell whether the + // stream has failed. + this.errored = null + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = (options && options.defaultEncoding) || 'utf8' + + // Ref the piped dest which we need a drain event on it + // type: null | Writable | Set. + this.awaitDrainWriters = null + this.decoder = null + this.encoding = null + if (options && options.encoding) { + this.decoder = new StringDecoder(options.encoding) + this.encoding = options.encoding } - return format(msg, ...args) } -function E(code, message, Base) { - if (!Base) { - Base = Error - } - class NodeError extends Base { - constructor(...args) { - super(getMessage(code, message, args)) - } - toString() { - return `${this.name} [${code}]: ${this.message}` - } +function Readable(options) { + if (!(this instanceof Readable)) return new Readable(options) + + // Checking for a Stream.Duplex instance is faster here instead of inside + // the ReadableState constructor, at least with V8 6.5. 
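/*
 * Illustration (assumes only Node's public stream API): the highWaterMark
 * initialized above is advisory — push() keeps buffering past it but returns
 * false to tell the producer to back off.
 *
 *   const { Readable } = require('node:stream')
 *   const r = new Readable({ highWaterMark: 4, read() {} })
 *   r.push(Buffer.alloc(2))  // true  — still under the 4-byte mark
 *   r.push(Buffer.alloc(8))  // false — buffered anyway, but producer should pause
 */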
+ const isDuplex = this instanceof __nccwpck_require__(67799) + this._readableState = new ReadableState(options, this, isDuplex) + if (options) { + if (typeof options.read === 'function') this._read = options.read + if (typeof options.destroy === 'function') this._destroy = options.destroy + if (typeof options.construct === 'function') this._construct = options.construct + if (options.signal && !isDuplex) addAbortSignal(options.signal, this) } - Object.defineProperties(NodeError.prototype, { - name: { - value: Base.name, - writable: true, - enumerable: false, - configurable: true - }, - toString: { - value() { - return `${this.name} [${code}]: ${this.message}` - }, - writable: true, - enumerable: false, - configurable: true + Stream.call(this, options) + destroyImpl.construct(this, () => { + if (this._readableState.needReadable) { + maybeReadMore(this, this._readableState) } }) - NodeError.prototype.code = code - NodeError.prototype[kIsNodeError] = true - codes[code] = NodeError } -function hideStackFrames(fn) { - // We rename the functions that will be hidden to cut off the stacktrace - // at the outermost one - const hidden = nodeInternalPrefix + fn.name - Object.defineProperty(fn, 'name', { - value: hidden - }) - return fn +Readable.prototype.destroy = destroyImpl.destroy +Readable.prototype._undestroy = destroyImpl.undestroy +Readable.prototype._destroy = function (err, cb) { + cb(err) } -function aggregateTwoErrors(innerError, outerError) { - if (innerError && outerError && innerError !== outerError) { - if (Array.isArray(outerError.errors)) { - // If `outerError` is already an `AggregateError`. - outerError.errors.push(innerError) - return outerError - } - const err = new AggregateError([outerError, innerError], outerError.message) - err.code = outerError.code - return err - } - return innerError || outerError +Readable.prototype[EE.captureRejectionSymbol] = function (err) { + this.destroy(err) } -class AbortError extends Error { - constructor(message = 'The operation was aborted', options = undefined) { - if (options !== undefined && typeof options !== 'object') { - throw new codes.ERR_INVALID_ARG_TYPE('options', 'Object', options) - } - super(message, options) - this.code = 'ABORT_ERR' - this.name = 'AbortError' +Readable.prototype[SymbolAsyncDispose] = function () { + let error + if (!this.destroyed) { + error = this.readableEnded ? null : new AbortError() + this.destroy(error) } + return new Promise((resolve, reject) => eos(this, (err) => (err && err !== error ? reject(err) : resolve(null)))) } -E('ERR_ASSERTION', '%s', Error) -E( - 'ERR_INVALID_ARG_TYPE', - (name, expected, actual) => { - assert(typeof name === 'string', "'name' must be a string") - if (!Array.isArray(expected)) { - expected = [expected] - } - let msg = 'The ' - if (name.endsWith(' argument')) { - // For cases like 'first argument' - msg += `${name} ` - } else { - msg += `"${name}" ${name.includes('.') ? 'property' : 'argument'} ` - } - msg += 'must be ' - const types = [] - const instances = [] - const other = [] - for (const value of expected) { - assert(typeof value === 'string', 'All expected entries have to be of type string') - if (kTypes.includes(value)) { - types.push(value.toLowerCase()) - } else if (classRegExp.test(value)) { - instances.push(value) - } else { - assert(value !== 'object', 'The value "object" should be written as "Object"') - other.push(value) - } - } - // Special handle `object` in case other instances are allowed to outline - // the differences between each other. 
- if (instances.length > 0) { - const pos = types.indexOf('object') - if (pos !== -1) { - types.splice(types, pos, 1) - instances.push('Object') - } - } - if (types.length > 0) { - switch (types.length) { - case 1: - msg += `of type ${types[0]}` - break - case 2: - msg += `one of type ${types[0]} or ${types[1]}` - break - default: { - const last = types.pop() - msg += `one of type ${types.join(', ')}, or ${last}` - } - } - if (instances.length > 0 || other.length > 0) { - msg += ' or ' - } - } - if (instances.length > 0) { - switch (instances.length) { - case 1: - msg += `an instance of ${instances[0]}` - break - case 2: - msg += `an instance of ${instances[0]} or ${instances[1]}` - break - default: { - const last = instances.pop() - msg += `an instance of ${instances.join(', ')}, or ${last}` - } - } - if (other.length > 0) { - msg += ' or ' - } - } - switch (other.length) { - case 0: - break - case 1: - if (other[0].toLowerCase() !== other[0]) { - msg += 'an ' +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. +Readable.prototype.push = function (chunk, encoding) { + return readableAddChunk(this, chunk, encoding, false) +} + +// Unshift should *always* be something directly out of read(). +Readable.prototype.unshift = function (chunk, encoding) { + return readableAddChunk(this, chunk, encoding, true) +} +function readableAddChunk(stream, chunk, encoding, addToFront) { + debug('readableAddChunk', chunk) + const state = stream._readableState + let err + if ((state.state & kObjectMode) === 0) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding + if (state.encoding !== encoding) { + if (addToFront && state.encoding) { + // When unshifting, if state.encoding is set, we have to save + // the string in the BufferList with the state encoding. + chunk = Buffer.from(chunk, encoding).toString(state.encoding) + } else { + chunk = Buffer.from(chunk, encoding) + encoding = '' } - msg += `${other[0]}` - break - case 2: - msg += `one of ${other[0]} or ${other[1]}` - break - default: { - const last = other.pop() - msg += `one of ${other.join(', ')}, or ${last}` } + } else if (chunk instanceof Buffer) { + encoding = '' + } else if (Stream._isUint8Array(chunk)) { + chunk = Stream._uint8ArrayToBuffer(chunk) + encoding = '' + } else if (chunk != null) { + err = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk) } - if (actual == null) { - msg += `. Received ${actual}` - } else if (typeof actual === 'function' && actual.name) { - msg += `. Received function ${actual.name}` - } else if (typeof actual === 'object') { - var _actual$constructor - if ( - (_actual$constructor = actual.constructor) !== null && - _actual$constructor !== undefined && - _actual$constructor.name - ) { - msg += `. Received an instance of ${actual.constructor.name}` - } else { - const inspected = inspect(actual, { - depth: -1 - }) - msg += `. 
Received ${inspected}` - } + } + if (err) { + errorOrDestroy(stream, err) + } else if (chunk === null) { + state.state &= ~kReading + onEofChunk(stream, state) + } else if ((state.state & kObjectMode) !== 0 || (chunk && chunk.length > 0)) { + if (addToFront) { + if ((state.state & kEndEmitted) !== 0) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT()) + else if (state.destroyed || state.errored) return false + else addChunk(stream, state, chunk, true) + } else if (state.ended) { + errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()) + } else if (state.destroyed || state.errored) { + return false } else { - let inspected = inspect(actual, { - colors: false - }) - if (inspected.length > 25) { - inspected = `${inspected.slice(0, 25)}...` + state.state &= ~kReading + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk) + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false) + else maybeReadMore(stream, state) + } else { + addChunk(stream, state, chunk, false) } - msg += `. Received type ${typeof actual} (${inspected})` } - return msg - }, - TypeError -) -E( - 'ERR_INVALID_ARG_VALUE', - (name, value, reason = 'is invalid') => { - let inspected = inspect(value) - if (inspected.length > 128) { - inspected = inspected.slice(0, 128) + '...' + } else if (!addToFront) { + state.state &= ~kReading + maybeReadMore(stream, state) + } + + // We can push more data if we are below the highWaterMark. + // Also, if we have no data yet, we can stand some more bytes. + // This is to work around cases where hwm=0, such as the repl. + return !state.ended && (state.length < state.highWaterMark || state.length === 0) +} +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync && stream.listenerCount('data') > 0) { + // Use the guard to avoid creating `Set()` repeatedly + // when we have multiple pipes. + if ((state.state & kMultiAwaitDrain) !== 0) { + state.awaitDrainWriters.clear() + } else { + state.awaitDrainWriters = null } - const type = name.includes('.') ? 'property' : 'argument' - return `The ${type} '${name}' ${reason}. Received ${inspected}` - }, - TypeError -) -E( - 'ERR_INVALID_RETURN_VALUE', - (input, name, value) => { - var _value$constructor - const type = - value !== null && - value !== undefined && - (_value$constructor = value.constructor) !== null && - _value$constructor !== undefined && - _value$constructor.name - ? `instance of ${value.constructor.name}` - : `type ${typeof value}` - return `Expected ${input} to be returned from the "${name}"` + ` function but got ${type}.` - }, - TypeError -) -E( - 'ERR_MISSING_ARGS', - (...args) => { - assert(args.length > 0, 'At least one arg needs to be specified') - let msg - const len = args.length - args = (Array.isArray(args) ? 
args : [args]).map((a) => `"${a}"`).join(' or ') - switch (len) { - case 1: - msg += `The ${args[0]} argument` - break - case 2: - msg += `The ${args[0]} and ${args[1]} arguments` - break - default: - { - const last = args.pop() - msg += `The ${args.join(', ')}, and ${last} arguments` - } - break - } - return `${msg} must be specified` - }, - TypeError -) -E( - 'ERR_OUT_OF_RANGE', - (str, range, input) => { - assert(range, 'Missing "range" argument') - let received - if (Number.isInteger(input) && Math.abs(input) > 2 ** 32) { - received = addNumericalSeparator(String(input)) - } else if (typeof input === 'bigint') { - received = String(input) - if (input > 2n ** 32n || input < -(2n ** 32n)) { - received = addNumericalSeparator(received) - } - received += 'n' - } else { - received = inspect(input) - } - return `The value of "${str}" is out of range. It must be ${range}. Received ${received}` - }, - RangeError -) -E('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times', Error) -E('ERR_METHOD_NOT_IMPLEMENTED', 'The %s method is not implemented', Error) -E('ERR_STREAM_ALREADY_FINISHED', 'Cannot call %s after a stream was finished', Error) -E('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable', Error) -E('ERR_STREAM_DESTROYED', 'Cannot call %s after a stream was destroyed', Error) -E('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError) -E('ERR_STREAM_PREMATURE_CLOSE', 'Premature close', Error) -E('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF', Error) -E('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event', Error) -E('ERR_STREAM_WRITE_AFTER_END', 'write after end', Error) -E('ERR_UNKNOWN_ENCODING', 'Unknown encoding: %s', TypeError) -module.exports = { - AbortError, - aggregateTwoErrors: hideStackFrames(aggregateTwoErrors), - hideStackFrames, - codes + state.dataEmitted = true + stream.emit('data', chunk) + } else { + // Update the buffer info. + state.length += state.objectMode ? 
1 : chunk.length + if (addToFront) state.buffer.unshift(chunk) + else state.buffer.push(chunk) + if ((state.state & kNeedReadable) !== 0) emitReadable(stream) + } + maybeReadMore(stream, state) } - - -/***/ }), - -/***/ 92567: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const Stream = __nccwpck_require__(12781) -if (Stream && process.env.READABLE_STREAM === 'disable') { - const promises = Stream.promises - - // Explicit export naming is needed for ESM - module.exports._uint8ArrayToBuffer = Stream._uint8ArrayToBuffer - module.exports._isUint8Array = Stream._isUint8Array - module.exports.isDisturbed = Stream.isDisturbed - module.exports.isErrored = Stream.isErrored - module.exports.isReadable = Stream.isReadable - module.exports.Readable = Stream.Readable - module.exports.Writable = Stream.Writable - module.exports.Duplex = Stream.Duplex - module.exports.Transform = Stream.Transform - module.exports.PassThrough = Stream.PassThrough - module.exports.addAbortSignal = Stream.addAbortSignal - module.exports.finished = Stream.finished - module.exports.destroy = Stream.destroy - module.exports.pipeline = Stream.pipeline - module.exports.compose = Stream.compose - Object.defineProperty(Stream, 'promises', { - configurable: true, - enumerable: true, - get() { - return promises - } - }) - module.exports.Stream = Stream.Stream -} else { - const CustomStream = __nccwpck_require__(93039) - const promises = __nccwpck_require__(20652) - const originalDestroy = CustomStream.Readable.destroy - module.exports = CustomStream.Readable - - // Explicit export naming is needed for ESM - module.exports._uint8ArrayToBuffer = CustomStream._uint8ArrayToBuffer - module.exports._isUint8Array = CustomStream._isUint8Array - module.exports.isDisturbed = CustomStream.isDisturbed - module.exports.isErrored = CustomStream.isErrored - module.exports.isReadable = CustomStream.isReadable - module.exports.Readable = CustomStream.Readable - module.exports.Writable = CustomStream.Writable - module.exports.Duplex = CustomStream.Duplex - module.exports.Transform = CustomStream.Transform - module.exports.PassThrough = CustomStream.PassThrough - module.exports.addAbortSignal = CustomStream.addAbortSignal - module.exports.finished = CustomStream.finished - module.exports.destroy = CustomStream.destroy - module.exports.destroy = originalDestroy - module.exports.pipeline = CustomStream.pipeline - module.exports.compose = CustomStream.compose - Object.defineProperty(CustomStream, 'promises', { - configurable: true, - enumerable: true, - get() { - return promises - } - }) - module.exports.Stream = CustomStream.Stream +Readable.prototype.isPaused = function () { + const state = this._readableState + return state[kPaused] === true || state.flowing === false } -// Allow default importing -module.exports["default"] = module.exports - - -/***/ }), +// Backwards compatibility. +Readable.prototype.setEncoding = function (enc) { + const decoder = new StringDecoder(enc) + this._readableState.decoder = decoder + // If setEncoding(null), decoder.encoding equals utf8. 
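/*
 * Illustration (public API sketch): after setEncoding, read() and 'data'
 * produce strings instead of Buffers, and chunks already buffered are
 * re-encoded by the loop just below.
 *
 *   const { Readable } = require('node:stream')
 *   const r = new Readable({ read() {} })
 *   r.push(Buffer.from('68690a', 'hex'))  // the bytes of 'hi\n'
 *   r.setEncoding('utf8')
 *   r.read()  // => 'hi\n' — a string, not a Buffer
 */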
+ this._readableState.encoding = this._readableState.decoder.encoding + const buffer = this._readableState.buffer + // Iterate over current buffer to convert already stored Buffers: + let content = '' + for (const data of buffer) { + content += decoder.write(data) + } + buffer.clear() + if (content !== '') buffer.push(content) + this._readableState.length = content.length + return this +} -/***/ 62141: -/***/ ((module) => { +// Don't raise the hwm > 1GB. +const MAX_HWM = 0x40000000 +function computeNewHighWaterMark(n) { + if (n > MAX_HWM) { + throw new ERR_OUT_OF_RANGE('size', '<= 1GiB', n) + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts. + n-- + n |= n >>> 1 + n |= n >>> 2 + n |= n >>> 4 + n |= n >>> 8 + n |= n >>> 16 + n++ + } + return n +} -"use strict"; +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || (state.length === 0 && state.ended)) return 0 + if ((state.state & kObjectMode) !== 0) return 1 + if (NumberIsNaN(n)) { + // Only flow one buffer at a time. + if (state.flowing && state.length) return state.buffer.first().length + return state.length + } + if (n <= state.length) return n + return state.ended ? state.length : 0 +} +// You can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n) + // Same as parseInt(undefined, 10), however V8 7.3 performance regressed + // in this scenario, so we are doing it manually. + if (n === undefined) { + n = NaN + } else if (!NumberIsInteger(n)) { + n = NumberParseInt(n, 10) + } + const state = this._readableState + const nOrig = n -/* - This file is a reduced and adapted version of the main lib/internal/per_context/primordials.js file defined at + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n) + if (n !== 0) state.state &= ~kEmittedReadable - https://github.com/nodejs/node/blob/master/lib/internal/per_context/primordials.js + // If we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if ( + n === 0 && + state.needReadable && + ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended) + ) { + debug('read: emitReadable', state.length, state.ended) + if (state.length === 0 && state.ended) endReadable(this) + else emitReadable(this) + return null + } + n = howMuchToRead(n, state) - Don't try to replace with the original file and keep it up to date with the upstream file. 
-*/ -module.exports = { - ArrayIsArray(self) { - return Array.isArray(self) - }, - ArrayPrototypeIncludes(self, el) { - return self.includes(el) - }, - ArrayPrototypeIndexOf(self, el) { - return self.indexOf(el) - }, - ArrayPrototypeJoin(self, sep) { - return self.join(sep) - }, - ArrayPrototypeMap(self, fn) { - return self.map(fn) - }, - ArrayPrototypePop(self, el) { - return self.pop(el) - }, - ArrayPrototypePush(self, el) { - return self.push(el) - }, - ArrayPrototypeSlice(self, start, end) { - return self.slice(start, end) - }, - Error, - FunctionPrototypeCall(fn, thisArgs, ...args) { - return fn.call(thisArgs, ...args) - }, - FunctionPrototypeSymbolHasInstance(self, instance) { - return Function.prototype[Symbol.hasInstance].call(self, instance) - }, - MathFloor: Math.floor, - Number, - NumberIsInteger: Number.isInteger, - NumberIsNaN: Number.isNaN, - NumberMAX_SAFE_INTEGER: Number.MAX_SAFE_INTEGER, - NumberMIN_SAFE_INTEGER: Number.MIN_SAFE_INTEGER, - NumberParseInt: Number.parseInt, - ObjectDefineProperties(self, props) { - return Object.defineProperties(self, props) - }, - ObjectDefineProperty(self, name, prop) { - return Object.defineProperty(self, name, prop) - }, - ObjectGetOwnPropertyDescriptor(self, name) { - return Object.getOwnPropertyDescriptor(self, name) - }, - ObjectKeys(obj) { - return Object.keys(obj) - }, - ObjectSetPrototypeOf(target, proto) { - return Object.setPrototypeOf(target, proto) - }, - Promise, - PromisePrototypeCatch(self, fn) { - return self.catch(fn) - }, - PromisePrototypeThen(self, thenFn, catchFn) { - return self.then(thenFn, catchFn) - }, - PromiseReject(err) { - return Promise.reject(err) - }, - PromiseResolve(val) { - return Promise.resolve(val) - }, - ReflectApply: Reflect.apply, - RegExpPrototypeTest(self, value) { - return self.test(value) - }, - SafeSet: Set, - String, - StringPrototypeSlice(self, start, end) { - return self.slice(start, end) - }, - StringPrototypeToLowerCase(self) { - return self.toLowerCase() - }, - StringPrototypeToUpperCase(self) { - return self.toUpperCase() - }, - StringPrototypeTrim(self) { - return self.trim() - }, - Symbol, - SymbolFor: Symbol.for, - SymbolAsyncIterator: Symbol.asyncIterator, - SymbolHasInstance: Symbol.hasInstance, - SymbolIterator: Symbol.iterator, - SymbolDispose: Symbol.dispose || Symbol('Symbol.dispose'), - SymbolAsyncDispose: Symbol.asyncDispose || Symbol('Symbol.asyncDispose'), - TypedArrayPrototypeSet(self, buf, len) { - return self.set(buf, len) - }, - Boolean: Boolean, - Uint8Array -} + // If we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this) + return null + } + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. 
+ // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. -/***/ }), + // if we need a readable event, then we need to do some reading. + let doRead = (state.state & kNeedReadable) !== 0 + debug('need readable', doRead) -/***/ 32504: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // If we currently have less than the highWaterMark, then also read some. + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true + debug('length less than watermark', doRead) + } -"use strict"; + // However, if we've ended, then there's no point, if we're already + // reading, then it's unnecessary, if we're constructing we have to wait, + // and if we're destroyed or errored, then it's not allowed, + if (state.ended || state.reading || state.destroyed || state.errored || !state.constructed) { + doRead = false + debug('reading, ended or constructing', doRead) + } else if (doRead) { + debug('do read') + state.state |= kReading | kSync + // If the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.state |= kNeedReadable + // Call internal read method + try { + this._read(state.highWaterMark) + } catch (err) { + errorOrDestroy(this, err) + } + state.state &= ~kSync -const bufferModule = __nccwpck_require__(14300) -const { kResistStopPropagation, SymbolDispose } = __nccwpck_require__(62141) -const AbortSignal = globalThis.AbortSignal || (__nccwpck_require__(43747).AbortSignal) -const AbortController = globalThis.AbortController || (__nccwpck_require__(43747).AbortController) -const AsyncFunction = Object.getPrototypeOf(async function () {}).constructor -const Blob = globalThis.Blob || bufferModule.Blob -/* eslint-disable indent */ -const isBlob = - typeof Blob !== 'undefined' - ? function isBlob(b) { - // eslint-disable-next-line indent - return b instanceof Blob - } - : function isBlob(b) { - return false - } -/* eslint-enable indent */ + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) n = howMuchToRead(nOrig, state) + } + let ret + if (n > 0) ret = fromList(n, state) + else ret = null + if (ret === null) { + state.needReadable = state.length <= state.highWaterMark + n = 0 + } else { + state.length -= n + if (state.multiAwaitDrain) { + state.awaitDrainWriters.clear() + } else { + state.awaitDrainWriters = null + } + } + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true -const validateAbortSignal = (signal, name) => { - if (signal !== undefined && (signal === null || typeof signal !== 'object' || !('aborted' in signal))) { - throw new ERR_INVALID_ARG_TYPE(name, 'AbortSignal', signal) + // If we tried to read() past the EOF, then emit end on the next tick. 
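/*
 * Illustration (public API sketch): read(n) returns null rather than a short
 * chunk when fewer than n bytes are buffered, and an n above the current
 * highWaterMark raises the mark via computeNewHighWaterMark, which rounds up
 * to the next power of two (e.g. 17 becomes 32).
 *
 *   const { Readable } = require('node:stream')
 *   const r = new Readable({ read() {} })
 *   r.push(Buffer.alloc(3))
 *   r.read(5)  // => null — only 3 of the requested 5 bytes are buffered
 *   r.read(3)  // => <Buffer 00 00 00>
 */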
+ if (nOrig !== n && state.ended) endReadable(this) + } + if (ret !== null && !state.errorEmitted && !state.closeEmitted) { + state.dataEmitted = true + this.emit('data', ret) } + return ret } -const validateFunction = (value, name) => { - if (typeof value !== 'function') throw new ERR_INVALID_ARG_TYPE(name, 'Function', value) +function onEofChunk(stream, state) { + debug('onEofChunk') + if (state.ended) return + if (state.decoder) { + const chunk = state.decoder.end() + if (chunk && chunk.length) { + state.buffer.push(chunk) + state.length += state.objectMode ? 1 : chunk.length + } + } + state.ended = true + if (state.sync) { + // If we are sync, wait until next tick to emit the data. + // Otherwise we risk emitting data in the flow() + // the readable code triggers during a read() call. + emitReadable(stream) + } else { + // Emit 'readable' now to make sure it gets picked up. + state.needReadable = false + state.emittedReadable = true + // We have to emit readable now that we are EOF. Modules + // in the ecosystem (e.g. dicer) rely on this event being sync. + emitReadable_(stream) + } } -// This is a simplified version of AggregateError -class AggregateError extends Error { - constructor(errors) { - if (!Array.isArray(errors)) { - throw new TypeError(`Expected input to be an Array, got ${typeof errors}`) - } - let message = '' - for (let i = 0; i < errors.length; i++) { - message += ` ${errors[i].stack}\n` - } - super(message) - this.name = 'AggregateError' - this.errors = errors +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + const state = stream._readableState + debug('emitReadable', state.needReadable, state.emittedReadable) + state.needReadable = false + if (!state.emittedReadable) { + debug('emitReadable', state.flowing) + state.emittedReadable = true + process.nextTick(emitReadable_, stream) } } -module.exports = { - AggregateError, - kEmptyObject: Object.freeze({}), - once(callback) { - let called = false - return function (...args) { - if (called) { - return - } - called = true - callback.apply(this, args) - } - }, - createDeferredPromise: function () { - let resolve - let reject +function emitReadable_(stream) { + const state = stream._readableState + debug('emitReadable_', state.destroyed, state.length, state.ended) + if (!state.destroyed && !state.errored && (state.length || state.ended)) { + stream.emit('readable') + state.emittedReadable = false + } - // eslint-disable-next-line promise/param-names - const promise = new Promise((res, rej) => { - resolve = res - reject = rej - }) - return { - promise, - resolve, - reject - } - }, - promisify(fn) { - return new Promise((resolve, reject) => { - fn((err, ...args) => { - if (err) { - return reject(err) - } - return resolve(...args) - }) - }) - }, - debuglog() { - return function () {} - }, - format(format, ...args) { - // Simplified version of https://nodejs.org/api/util.html#utilformatformat-args - return format.replace(/%([sdifj])/g, function (...[_unused, type]) { - const replacement = args.shift() - if (type === 'f') { - return replacement.toFixed(6) - } else if (type === 'j') { - return JSON.stringify(replacement) - } else if (type === 's' && typeof replacement === 'object') { - const ctor = replacement.constructor !== Object ? 
replacement.constructor.name : '' - return `${ctor} {}`.trim() - } else { - return replacement.toString() + // The stream needs another readable event if: + // 1. It is not flowing, as the flow mechanism will take + // care of it. + // 2. It is not ended. + // 3. It is below the highWaterMark, so we can schedule + // another readable later. + state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark + flow(stream) +} + +// At this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore && state.constructed) { + state.readingMore = true + process.nextTick(maybeReadMore_, stream, state) + } +} +function maybeReadMore_(stream, state) { + // Attempt to read more data if we should. + // + // The conditions for reading more data are (one of): + // - Not enough data buffered (state.length < state.highWaterMark). The loop + // is responsible for filling the buffer with enough data if such data + // is available. If highWaterMark is 0 and we are not in the flowing mode + // we should _not_ attempt to buffer any extra data. We'll get more data + // when the stream consumer calls read() instead. + // - No data in the buffer, and the stream is in flowing mode. In this mode + // the loop below is responsible for ensuring read() is called. Failing to + // call read here would abort the flow and there's no other mechanism for + // continuing the flow if the stream consumer has just subscribed to the + // 'data' event. + // + // In addition to the above conditions to keep reading data, the following + // conditions prevent the data from being read: + // - The stream has ended (state.ended). + // - There is already a pending 'read' operation (state.reading). This is a + // case where the stream has called the implementation defined _read() + // method, but they are processing the call asynchronously and have _not_ + // called push() with new data. In this case we skip performing more + // read()s. The execution ends in this method again after the _read() ends + // up calling push() with more data. + while ( + !state.reading && + !state.ended && + (state.length < state.highWaterMark || (state.flowing && state.length === 0)) + ) { + const len = state.length + debug('maybeReadMore read 0') + stream.read(0) + if (len === state.length) + // Didn't get any data, stop spinning. + break + } + state.readingMore = false +} + +// Abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. +// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function (n) { + throw new ERR_METHOD_NOT_IMPLEMENTED('_read()') +} +Readable.prototype.pipe = function (dest, pipeOpts) { + const src = this + const state = this._readableState + if (state.pipes.length === 1) { + if (!state.multiAwaitDrain) { + state.multiAwaitDrain = true + state.awaitDrainWriters = new SafeSet(state.awaitDrainWriters ? 
[state.awaitDrainWriters] : []) + } + } + state.pipes.push(dest) + debug('pipe count=%d opts=%j', state.pipes.length, pipeOpts) + const doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr + const endFn = doEnd ? onend : unpipe + if (state.endEmitted) process.nextTick(endFn) + else src.once('end', endFn) + dest.on('unpipe', onunpipe) + function onunpipe(readable, unpipeInfo) { + debug('onunpipe') + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true + cleanup() } - }) - }, - inspect(value) { - // Vastly simplified version of https://nodejs.org/api/util.html#utilinspectobject-options - switch (typeof value) { - case 'string': - if (value.includes("'")) { - if (!value.includes('"')) { - return `"${value}"` - } else if (!value.includes('`') && !value.includes('${')) { - return `\`${value}\`` - } - } - return `'${value}'` - case 'number': - if (isNaN(value)) { - return 'NaN' - } else if (Object.is(value, -0)) { - return String(value) - } - return value - case 'bigint': - return `${String(value)}n` - case 'boolean': - case 'undefined': - return String(value) - case 'object': - return '{}' } - }, - types: { - isAsyncFunction(fn) { - return fn instanceof AsyncFunction - }, - isArrayBufferView(arr) { - return ArrayBuffer.isView(arr) + } + function onend() { + debug('onend') + dest.end() + } + let ondrain + let cleanedUp = false + function cleanup() { + debug('cleanup') + // Cleanup event handlers once the pipe is broken. + dest.removeListener('close', onclose) + dest.removeListener('finish', onfinish) + if (ondrain) { + dest.removeListener('drain', ondrain) } - }, - isBlob, - deprecate(fn, message) { - return fn - }, - addAbortListener: - (__nccwpck_require__(82361).addAbortListener) || - function addAbortListener(signal, listener) { - if (signal === undefined) { - throw new ERR_INVALID_ARG_TYPE('signal', 'AbortSignal', signal) + dest.removeListener('error', onerror) + dest.removeListener('unpipe', onunpipe) + src.removeListener('end', onend) + src.removeListener('end', unpipe) + src.removeListener('data', ondata) + cleanedUp = true + + // If the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (ondrain && state.awaitDrainWriters && (!dest._writableState || dest._writableState.needDrain)) ondrain() + } + function pause() { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if (!cleanedUp) { + if (state.pipes.length === 1 && state.pipes[0] === dest) { + debug('false write response, pause', 0) + state.awaitDrainWriters = dest + state.multiAwaitDrain = false + } else if (state.pipes.length > 1 && state.pipes.includes(dest)) { + debug('false write response, pause', state.awaitDrainWriters.size) + state.awaitDrainWriters.add(dest) } - validateAbortSignal(signal, 'signal') - validateFunction(listener, 'listener') - let removeEventListener - if (signal.aborted) { - queueMicrotask(() => listener()) + src.pause() + } + if (!ondrain) { + // When the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. 
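/*
 * Illustration (public API sketch): the drain bookkeeping above is what makes
 * pipe() honor backpressure — when dest.write() returns false the source
 * pauses until the destination emits 'drain'.
 *
 *   const { Readable, Writable } = require('node:stream')
 *   const src = Readable.from(['a', 'b', 'c'])
 *   const dest = new Writable({
 *     highWaterMark: 1,
 *     write(chunk, enc, cb) { setTimeout(cb, 10) }  // deliberately slow consumer
 *   })
 *   src.pipe(dest)  // pauses on each false write, resumes on 'drain'
 */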
+ ondrain = pipeOnDrain(src, dest) + dest.on('drain', ondrain) + } + } + src.on('data', ondata) + function ondata(chunk) { + debug('ondata') + const ret = dest.write(chunk) + debug('dest.write', ret) + if (ret === false) { + pause() + } + } + + // If the dest has an error, then stop piping into it. + // However, don't suppress the throwing behavior for this. + function onerror(er) { + debug('onerror', er) + unpipe() + dest.removeListener('error', onerror) + if (dest.listenerCount('error') === 0) { + const s = dest._writableState || dest._readableState + if (s && !s.errorEmitted) { + // User incorrectly emitted 'error' directly on the stream. + errorOrDestroy(dest, er) } else { - signal.addEventListener('abort', listener, { - __proto__: null, - once: true, - [kResistStopPropagation]: true - }) - removeEventListener = () => { - signal.removeEventListener('abort', listener) - } - } - return { - __proto__: null, - [SymbolDispose]() { - var _removeEventListener - ;(_removeEventListener = removeEventListener) === null || _removeEventListener === undefined - ? undefined - : _removeEventListener() - } - } - }, - AbortSignalAny: - AbortSignal.any || - function AbortSignalAny(signals) { - // Fast path if there is only one signal. - if (signals.length === 1) { - return signals[0] + dest.emit('error', er) } - const ac = new AbortController() - const abort = () => ac.abort() - signals.forEach((signal) => { - validateAbortSignal(signal, 'signals') - signal.addEventListener('abort', abort, { - once: true - }) - }) - ac.signal.addEventListener( - 'abort', - () => { - signals.forEach((signal) => signal.removeEventListener('abort', abort)) - }, - { - once: true - } - ) - return ac.signal } -} -module.exports.promisify.custom = Symbol.for('nodejs.util.promisify.custom') - - -/***/ }), + } -/***/ 93039: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror) -/* replacement start */ + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish) + unpipe() + } + dest.once('close', onclose) + function onfinish() { + debug('onfinish') + dest.removeListener('close', onclose) + unpipe() + } + dest.once('finish', onfinish) + function unpipe() { + debug('unpipe') + src.unpipe(dest) + } -const { Buffer } = __nccwpck_require__(14300) + // Tell the dest that it's being piped to. + dest.emit('pipe', src) -/* replacement end */ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. + // Start the flow if it hasn't been started already. -;('use strict') -const { ObjectDefineProperty, ObjectKeys, ReflectApply } = __nccwpck_require__(62141) -const { - promisify: { custom: customPromisify } -} = __nccwpck_require__(32504) -const { streamReturningOperators, promiseReturningOperators } = __nccwpck_require__(71563) -const { - codes: { ERR_ILLEGAL_CONSTRUCTOR } -} = __nccwpck_require__(49939) -const compose = __nccwpck_require__(83913) -const { setDefaultHighWaterMark, getDefaultHighWaterMark } = __nccwpck_require__(89025) -const { pipeline } = __nccwpck_require__(32361) -const { destroyer } = __nccwpck_require__(47493) -const eos = __nccwpck_require__(89217) -const internalBuffer = {} -const promises = __nccwpck_require__(20652) -const utils = __nccwpck_require__(46995) -const Stream = (module.exports = __nccwpck_require__(26501).Stream) -Stream.isDestroyed = utils.isDestroyed -Stream.isDisturbed = utils.isDisturbed -Stream.isErrored = utils.isErrored -Stream.isReadable = utils.isReadable -Stream.isWritable = utils.isWritable -Stream.Readable = __nccwpck_require__(14252) -for (const key of ObjectKeys(streamReturningOperators)) { - const op = streamReturningOperators[key] - function fn(...args) { - if (new.target) { - throw ERR_ILLEGAL_CONSTRUCTOR() - } - return Stream.Readable.from(ReflectApply(op, this, args)) + if (dest.writableNeedDrain === true) { + pause() + } else if (!state.flowing) { + debug('pipe resume') + src.resume() } - ObjectDefineProperty(fn, 'name', { - __proto__: null, - value: op.name - }) - ObjectDefineProperty(fn, 'length', { - __proto__: null, - value: op.length - }) - ObjectDefineProperty(Stream.Readable.prototype, key, { - __proto__: null, - value: fn, - enumerable: false, - configurable: true, - writable: true - }) + return dest } -for (const key of ObjectKeys(promiseReturningOperators)) { - const op = promiseReturningOperators[key] - function fn(...args) { - if (new.target) { - throw ERR_ILLEGAL_CONSTRUCTOR() +function pipeOnDrain(src, dest) { + return function pipeOnDrainFunctionResult() { + const state = src._readableState + + // `ondrain` will call directly, + // `this` maybe not a reference to dest, + // so we use the real dest here. 
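/*
 * Illustration (a sketch; _readableState is an internal detail shown here
 * only for inspection): with two or more destinations the source tracks
 * pending 'drain' events in a Set and resumes only after every destination
 * has drained.
 *
 *   const { Readable, PassThrough } = require('node:stream')
 *   const src = new Readable({ read() {} })
 *   src.pipe(new PassThrough({ highWaterMark: 1 }))
 *   src.pipe(new PassThrough({ highWaterMark: 1 }))
 *   src._readableState.multiAwaitDrain  // => true once a second pipe exists
 */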
+ if (state.awaitDrainWriters === dest) { + debug('pipeOnDrain', 1) + state.awaitDrainWriters = null + } else if (state.multiAwaitDrain) { + debug('pipeOnDrain', state.awaitDrainWriters.size) + state.awaitDrainWriters.delete(dest) + } + if ((!state.awaitDrainWriters || state.awaitDrainWriters.size === 0) && src.listenerCount('data')) { + src.resume() } - return ReflectApply(op, this, args) } - ObjectDefineProperty(fn, 'name', { - __proto__: null, - value: op.name - }) - ObjectDefineProperty(fn, 'length', { - __proto__: null, - value: op.length - }) - ObjectDefineProperty(Stream.Readable.prototype, key, { - __proto__: null, - value: fn, - enumerable: false, - configurable: true, - writable: true - }) } -Stream.Writable = __nccwpck_require__(99601) -Stream.Duplex = __nccwpck_require__(7280) -Stream.Transform = __nccwpck_require__(84186) -Stream.PassThrough = __nccwpck_require__(23046) -Stream.pipeline = pipeline -const { addAbortSignal } = __nccwpck_require__(50275) -Stream.addAbortSignal = addAbortSignal -Stream.finished = eos -Stream.destroy = destroyer -Stream.compose = compose -Stream.setDefaultHighWaterMark = setDefaultHighWaterMark -Stream.getDefaultHighWaterMark = getDefaultHighWaterMark -ObjectDefineProperty(Stream, 'promises', { - __proto__: null, - configurable: true, - enumerable: true, - get() { - return promises - } -}) -ObjectDefineProperty(pipeline, customPromisify, { - __proto__: null, - enumerable: true, - get() { - return promises.pipeline +Readable.prototype.unpipe = function (dest) { + const state = this._readableState + const unpipeInfo = { + hasUnpiped: false } -}) -ObjectDefineProperty(eos, customPromisify, { - __proto__: null, - enumerable: true, - get() { - return promises.finished + + // If we're not piping anywhere, then do nothing. + if (state.pipes.length === 0) return this + if (!dest) { + // remove all. + const dests = state.pipes + state.pipes = [] + this.pause() + for (let i = 0; i < dests.length; i++) + dests[i].emit('unpipe', this, { + hasUnpiped: false + }) + return this } -}) -// Backwards-compat with node 0.4.x -Stream.Stream = Stream -Stream._isUint8Array = function isUint8Array(value) { - return value instanceof Uint8Array -} -Stream._uint8ArrayToBuffer = function _uint8ArrayToBuffer(chunk) { - return Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) + // Try to find the right one. + const index = ArrayPrototypeIndexOf(state.pipes, dest) + if (index === -1) return this + state.pipes.splice(index, 1) + if (state.pipes.length === 0) this.pause() + dest.emit('unpipe', this, unpipeInfo) + return this } +// Set up data events if they are asked for +// Ensure readable listeners eventually get something. +Readable.prototype.on = function (ev, fn) { + const res = Stream.prototype.on.call(this, ev, fn) + const state = this._readableState + if (ev === 'data') { + // Update readableListening so that resume() may be a no-op + // a few lines down. This is needed to support once('readable'). 
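/*
 * Illustration (public API sketch): adding a 'data' listener switches the
 * stream into flowing mode, while adding 'readable' forces paused mode, as
 * the branches below implement.
 *
 *   const { Readable } = require('node:stream')
 *   const r = Readable.from(['x'])
 *   r.on('readable', () => {
 *     let chunk
 *     while ((chunk = r.read()) !== null) console.log(chunk)
 *   })
 *   // r.readableFlowing === false — 'readable' listeners pin the paused mode
 */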
+ state.readableListening = this.listenerCount('readable') > 0 -/***/ }), - -/***/ 20652: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const { ArrayPrototypePop, Promise } = __nccwpck_require__(62141) -const { isIterable, isNodeStream, isWebStream } = __nccwpck_require__(46995) -const { pipelineImpl: pl } = __nccwpck_require__(32361) -const { finished } = __nccwpck_require__(89217) -__nccwpck_require__(93039) -function pipeline(...streams) { - return new Promise((resolve, reject) => { - let signal - let end - const lastArg = streams[streams.length - 1] - if ( - lastArg && - typeof lastArg === 'object' && - !isNodeStream(lastArg) && - !isIterable(lastArg) && - !isWebStream(lastArg) - ) { - const options = ArrayPrototypePop(streams) - signal = options.signal - end = options.end + // Try start flowing on next tick if stream isn't explicitly paused. + if (state.flowing !== false) this.resume() + } else if (ev === 'readable') { + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true + state.flowing = false + state.emittedReadable = false + debug('on readable', state.length, state.reading) + if (state.length) { + emitReadable(this) + } else if (!state.reading) { + process.nextTick(nReadingNextTick, this) + } } - pl( - streams, - (err, value) => { - if (err) { - reject(err) - } else { - resolve(value) - } - }, - { - signal, - end - } - ) - }) + } + return res } -module.exports = { - finished, - pipeline +Readable.prototype.addListener = Readable.prototype.on +Readable.prototype.removeListener = function (ev, fn) { + const res = Stream.prototype.removeListener.call(this, ev, fn) + if (ev === 'readable') { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this) + } + return res +} +Readable.prototype.off = Readable.prototype.removeListener +Readable.prototype.removeAllListeners = function (ev) { + const res = Stream.prototype.removeAllListeners.apply(this, arguments) + if (ev === 'readable' || ev === undefined) { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this) + } + return res } +function updateReadableListening(self) { + const state = self._readableState + state.readableListening = self.listenerCount('readable') > 0 + if (state.resumeScheduled && state[kPaused] === false) { + // Flowing needs to be set to true now, otherwise + // the upcoming resume will not flow. + state.flowing = true + // Crude way to check if we should resume. + } else if (self.listenerCount('data') > 0) { + self.resume() + } else if (!state.readableListening) { + state.flowing = null + } +} +function nReadingNextTick(self) { + debug('readable nexttick read 0') + self.read(0) +} -/***/ }), +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. 
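/*
 * Illustration (public API sketch): pause() stops 'data' delivery and
 * resume() restarts it, toggling the flowing state managed below.
 *
 *   const { Readable } = require('node:stream')
 *   const r = Readable.from(['a', 'b'])
 *   r.on('data', (chunk) => {
 *     console.log(chunk)
 *     r.pause()                          // no more 'data' for now
 *     setTimeout(() => r.resume(), 100)  // pick the flow back up later
 *   })
 */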
+Readable.prototype.resume = function () { + const state = this._readableState + if (!state.flowing) { + debug('resume') + // We flow only if there is no one listening + // for readable, but we still have to call + // resume(). + state.flowing = !state.readableListening + resume(this, state) + } + state[kPaused] = false + return this +} +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true + process.nextTick(resume_, stream, state) + } +} +function resume_(stream, state) { + debug('resume', state.reading) + if (!state.reading) { + stream.read(0) + } + state.resumeScheduled = false + stream.emit('resume') + flow(stream) + if (state.flowing && !state.reading) stream.read(0) +} +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing) + if (this._readableState.flowing !== false) { + debug('pause') + this._readableState.flowing = false + this.emit('pause') + } + this._readableState[kPaused] = true + return this +} +function flow(stream) { + const state = stream._readableState + debug('flow', state.flowing) + while (state.flowing && stream.read() !== null); +} -/***/ 64156: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +// Wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + let paused = false -module.exports = readdirGlob; + // TODO (ronag): Should this.destroy(err) emit + // 'error' on the wrapped stream? Would require + // a static factory method, e.g. Readable.wrap(stream). -const fs = __nccwpck_require__(57147); -const { EventEmitter } = __nccwpck_require__(82361); -const { Minimatch } = __nccwpck_require__(56753); -const { resolve } = __nccwpck_require__(71017); + stream.on('data', (chunk) => { + if (!this.push(chunk) && stream.pause) { + paused = true + stream.pause() + } + }) + stream.on('end', () => { + this.push(null) + }) + stream.on('error', (err) => { + errorOrDestroy(this, err) + }) + stream.on('close', () => { + this.destroy() + }) + stream.on('destroy', () => { + this.destroy() + }) + this._read = () => { + if (paused && stream.resume) { + paused = false + stream.resume() + } + } -function readdir(dir, strict) { - return new Promise((resolve, reject) => { - fs.readdir(dir, {withFileTypes: true} ,(err, files) => { - if(err) { - switch (err.code) { - case 'ENOTDIR': // Not a directory - if(strict) { - reject(err); - } else { - resolve([]); - } - break; - case 'ENOTSUP': // Operation not supported - case 'ENOENT': // No such file or directory - case 'ENAMETOOLONG': // Filename too long - case 'UNKNOWN': - resolve([]); - break; - case 'ELOOP': // Too many levels of symbolic links - default: - reject(err); - break; - } - } else { - resolve(files); - } - }); - }); + // Proxy all the other methods. Important when wrapping filters and duplexes. + const streamKeys = ObjectKeys(stream) + for (let j = 1; j < streamKeys.length; j++) { + const i = streamKeys[j] + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = stream[i].bind(stream) + } + } + return this } -function stat(file, followSymlinks) { - return new Promise((resolve, reject) => { - const statFunc = followSymlinks ? 
fs.stat : fs.lstat; - statFunc(file, (err, stats) => { - if(err) { - switch (err.code) { - case 'ENOENT': - if(followSymlinks) { - // Fallback to lstat to handle broken links as files - resolve(stat(file, false)); - } else { - resolve(null); - } - break; - default: - resolve(null); - break; - } +Readable.prototype[SymbolAsyncIterator] = function () { + return streamToAsyncIterator(this) +} +Readable.prototype.iterator = function (options) { + if (options !== undefined) { + validateObject(options, 'options') + } + return streamToAsyncIterator(this, options) +} +function streamToAsyncIterator(stream, options) { + if (typeof stream.read !== 'function') { + stream = Readable.wrap(stream, { + objectMode: true + }) + } + const iter = createAsyncIterator(stream, options) + iter.stream = stream + return iter +} +async function* createAsyncIterator(stream, options) { + let callback = nop + function next(resolve) { + if (this === stream) { + callback() + callback = nop + } else { + callback = resolve + } + } + stream.on('readable', next) + let error + const cleanup = eos( + stream, + { + writable: false + }, + (err) => { + error = err ? aggregateTwoErrors(error, err) : null + callback() + callback = nop + } + ) + try { + while (true) { + const chunk = stream.destroyed ? null : stream.read() + if (chunk !== null) { + yield chunk + } else if (error) { + throw error + } else if (error === null) { + return } else { - resolve(stats); + await new Promise(next) } - }); - }); + } + } catch (err) { + error = aggregateTwoErrors(error, err) + throw error + } finally { + if ( + (error || (options === null || options === undefined ? undefined : options.destroyOnReturn) !== false) && + (error === undefined || stream._readableState.autoDestroy) + ) { + destroyImpl.destroyer(stream, null) + } else { + stream.off('readable', next) + cleanup() + } + } } -async function* exploreWalkAsync(dir, path, followSymlinks, useStat, shouldSkip, strict) { - let files = await readdir(path + dir, strict); - for(const file of files) { - let name = file.name; - if(name === undefined) { - // undefined file.name means the `withFileTypes` options is not supported by node - // we have to call the stat function to know if file is directory or not. - name = file; - useStat = true; +// Making it explicit these properties are not enumerable +// because otherwise some prototype manipulation in +// userland will fail. +ObjectDefineProperties(Readable.prototype, { + readable: { + __proto__: null, + get() { + const r = this._readableState + // r.readable === false means that this is part of a Duplex stream + // where the readable side was disabled upon construction. + // Compat. The user might manually disable readable side through + // deprecated setter. + return !!r && r.readable !== false && !r.destroyed && !r.errorEmitted && !r.endEmitted + }, + set(val) { + // Backwards compat. 
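/*
 * Illustration (public API sketch): the Symbol.asyncIterator support defined
 * in this block lets a Readable be consumed with for await; leaving the loop
 * early destroys the stream unless iterator({ destroyOnReturn: false }) is
 * used instead.
 *
 *   const { Readable } = require('node:stream')
 *   async function main() {
 *     for await (const chunk of Readable.from(['a', 'b'])) {
 *       console.log(chunk)  // 'a', then 'b'
 *     }
 *   }
 *   main()
 */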
+ if (this._readableState) { + this._readableState.readable = !!val + } } - const filename = dir + '/' + name; - const relative = filename.slice(1); // Remove the leading / - const absolute = path + '/' + relative; - let stats = null; - if(useStat || followSymlinks) { - stats = await stat(absolute, followSymlinks); + }, + readableDidRead: { + __proto__: null, + enumerable: false, + get: function () { + return this._readableState.dataEmitted } - if(!stats && file.name !== undefined) { - stats = file; + }, + readableAborted: { + __proto__: null, + enumerable: false, + get: function () { + return !!( + this._readableState.readable !== false && + (this._readableState.destroyed || this._readableState.errored) && + !this._readableState.endEmitted + ) } - if(stats === null) { - stats = { isDirectory: () => false }; + }, + readableHighWaterMark: { + __proto__: null, + enumerable: false, + get: function () { + return this._readableState.highWaterMark } - - if(stats.isDirectory()) { - if(!shouldSkip(relative)) { - yield {relative, absolute, stats}; - yield* exploreWalkAsync(filename, path, followSymlinks, useStat, shouldSkip, false); + }, + readableBuffer: { + __proto__: null, + enumerable: false, + get: function () { + return this._readableState && this._readableState.buffer + } + }, + readableFlowing: { + __proto__: null, + enumerable: false, + get: function () { + return this._readableState.flowing + }, + set: function (state) { + if (this._readableState) { + this._readableState.flowing = state } - } else { - yield {relative, absolute, stats}; } - } -} -async function* explore(path, followSymlinks, useStat, shouldSkip) { - yield* exploreWalkAsync('', path, followSymlinks, useStat, shouldSkip, true); -} - - -function readOptions(options) { - return { - pattern: options.pattern, - dot: !!options.dot, - noglobstar: !!options.noglobstar, - matchBase: !!options.matchBase, - nocase: !!options.nocase, - ignore: options.ignore, - skip: options.skip, - - follow: !!options.follow, - stat: !!options.stat, - nodir: !!options.nodir, - mark: !!options.mark, - silent: !!options.silent, - absolute: !!options.absolute - }; -} - -class ReaddirGlob extends EventEmitter { - constructor(cwd, options, cb) { - super(); - if(typeof options === 'function') { - cb = options; - options = null; + }, + readableLength: { + __proto__: null, + enumerable: false, + get() { + return this._readableState.length } - - this.options = readOptions(options || {}); - - this.matchers = []; - if(this.options.pattern) { - const matchers = Array.isArray(this.options.pattern) ? this.options.pattern : [this.options.pattern]; - this.matchers = matchers.map( m => - new Minimatch(m, { - dot: this.options.dot, - noglobstar:this.options.noglobstar, - matchBase:this.options.matchBase, - nocase:this.options.nocase - }) - ); + }, + readableObjectMode: { + __proto__: null, + enumerable: false, + get() { + return this._readableState ? this._readableState.objectMode : false } - - this.ignoreMatchers = []; - if(this.options.ignore) { - const ignorePatterns = Array.isArray(this.options.ignore) ? this.options.ignore : [this.options.ignore]; - this.ignoreMatchers = ignorePatterns.map( ignore => - new Minimatch(ignore, {dot: true}) - ); + }, + readableEncoding: { + __proto__: null, + enumerable: false, + get() { + return this._readableState ? this._readableState.encoding : null } - - this.skipMatchers = []; - if(this.options.skip) { - const skipPatterns = Array.isArray(this.options.skip) ? 
this.options.skip : [this.options.skip]; - this.skipMatchers = skipPatterns.map( skip => - new Minimatch(skip, {dot: true}) - ); + }, + errored: { + __proto__: null, + enumerable: false, + get() { + return this._readableState ? this._readableState.errored : null } - - this.iterator = explore(resolve(cwd || '.'), this.options.follow, this.options.stat, this._shouldSkipDirectory.bind(this)); - this.paused = false; - this.inactive = false; - this.aborted = false; - - if(cb) { - this._matches = []; - this.on('match', match => this._matches.push(this.options.absolute ? match.absolute : match.relative)); - this.on('error', err => cb(err)); - this.on('end', () => cb(null, this._matches)); + }, + closed: { + __proto__: null, + get() { + return this._readableState ? this._readableState.closed : false } + }, + destroyed: { + __proto__: null, + enumerable: false, + get() { + return this._readableState ? this._readableState.destroyed : false + }, + set(value) { + // We ignore the value if the stream + // has not been initialized yet. + if (!this._readableState) { + return + } - setTimeout( () => this._next(), 0); + // Backward compatibility, the user is explicitly + // managing destroyed. + this._readableState.destroyed = value + } + }, + readableEnded: { + __proto__: null, + enumerable: false, + get() { + return this._readableState ? this._readableState.endEmitted : false + } } - - _shouldSkipDirectory(relative) { - //console.log(relative, this.skipMatchers.some(m => m.match(relative))); - return this.skipMatchers.some(m => m.match(relative)); +}) +ObjectDefineProperties(ReadableState.prototype, { + // Legacy getter for `pipesCount`. + pipesCount: { + __proto__: null, + get() { + return this.pipes.length + } + }, + // Legacy property for `paused`. + paused: { + __proto__: null, + get() { + return this[kPaused] !== false + }, + set(value) { + this[kPaused] = !!value + } } +}) - _fileMatches(relative, isDirectory) { - const file = relative + (isDirectory ? '/' : ''); - return (this.matchers.length === 0 || this.matchers.some(m => m.match(file))) - && !this.ignoreMatchers.some(m => m.match(file)) - && (!this.options.nodir || !isDirectory); - } +// Exposed for testing purposes only. +Readable._fromList = fromList - _next() { - if(!this.paused && !this.aborted) { - this.iterator.next() - .then((obj)=> { - if(!obj.done) { - const isDirectory = obj.value.stats.isDirectory(); - if(this._fileMatches(obj.value.relative, isDirectory )) { - let relative = obj.value.relative; - let absolute = obj.value.absolute; - if(this.options.mark && isDirectory) { - relative += '/'; - absolute += '/'; - } - if(this.options.stat) { - this.emit('match', {relative, absolute, stat:obj.value.stats}); - } else { - this.emit('match', {relative, absolute}); - } - } - this._next(this.iterator); - } else { - this.emit('end'); - } - }) - .catch((err) => { - this.abort(); - this.emit('error', err); - if(!err.code && !this.options.silent) { - console.error(err); - } - }); - } else { - this.inactive = true; - } - } - - abort() { - this.aborted = true; +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromList(n, state) { + // nothing buffered. + if (state.length === 0) return null + let ret + if (state.objectMode) ret = state.buffer.shift() + else if (!n || n >= state.length) { + // Read it all, truncate the list. 
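+    // Three sub-cases: with a string decoder the buffered strings are
+    // joined; a single buffered chunk is returned as-is via first(); any
+    // other buffer is concatenated up to state.length. The buffer is then
+    // cleared in one shot.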
+ if (state.decoder) ret = state.buffer.join('') + else if (state.buffer.length === 1) ret = state.buffer.first() + else ret = state.buffer.concat(state.length) + state.buffer.clear() + } else { + // read part of list. + ret = state.buffer.consume(n, state.decoder) } - - pause() { - this.paused = true; + return ret +} +function endReadable(stream) { + const state = stream._readableState + debug('endReadable', state.endEmitted) + if (!state.endEmitted) { + state.ended = true + process.nextTick(endReadableNT, state, stream) } +} +function endReadableNT(state, stream) { + debug('endReadableNT', state.endEmitted, state.length) - resume() { - this.paused = false; - if(this.inactive) { - this.inactive = false; - this._next(); + // Check that we didn't get one last unshift. + if (!state.errored && !state.closeEmitted && !state.endEmitted && state.length === 0) { + state.endEmitted = true + stream.emit('end') + if (stream.writable && stream.allowHalfOpen === false) { + process.nextTick(endWritableNT, stream) + } else if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the writable side is ready for autoDestroy as well. + const wState = stream._writableState + const autoDestroy = + !wState || + (wState.autoDestroy && + // We don't expect the writable to ever 'finish' + // if writable is explicitly set to false. + (wState.finished || wState.writable === false)) + if (autoDestroy) { + stream.destroy() + } } } } - - -function readdirGlob(pattern, options, cb) { - return new ReaddirGlob(pattern, options, cb); +function endWritableNT(stream) { + const writable = stream.writable && !stream.writableEnded && !stream.destroyed + if (writable) { + stream.end() + } } -readdirGlob.ReaddirGlob = ReaddirGlob; - -/***/ }), - -/***/ 81330: -/***/ ((module) => { +Readable.from = function (iterable, opts) { + return from(Readable, iterable, opts) +} +let webStreamsAdapters -const isWindows = typeof process === 'object' && - process && - process.platform === 'win32' -module.exports = isWindows ? { sep: '\\' } : { sep: '/' } +// Lazy to avoid circular references +function lazyWebStreams() { + if (webStreamsAdapters === undefined) webStreamsAdapters = {} + return webStreamsAdapters +} +Readable.fromWeb = function (readableStream, options) { + return lazyWebStreams().newStreamReadableFromReadableStream(readableStream, options) +} +Readable.toWeb = function (streamReadable, options) { + return lazyWebStreams().newReadableStreamFromStreamReadable(streamReadable, options) +} +Readable.wrap = function (src, options) { + var _ref, _src$readableObjectMo + return new Readable({ + objectMode: + (_ref = + (_src$readableObjectMo = src.readableObjectMode) !== null && _src$readableObjectMo !== undefined + ? _src$readableObjectMo + : src.objectMode) !== null && _ref !== undefined + ? _ref + : true, + ...options, + destroy(err, callback) { + destroyImpl.destroyer(src, err) + callback(err) + } + }).wrap(src) +} /***/ }), -/***/ 56753: +/***/ 3813: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -const minimatch = module.exports = (p, pattern, options = {}) => { - assertValidPattern(pattern) - - // shortcut: comments match nothing. 
- if (!options.nocomment && pattern.charAt(0) === '#') { - return false - } - - return new Minimatch(pattern, options).match(p) -} - -module.exports = minimatch - -const path = __nccwpck_require__(81330) -minimatch.sep = path.sep +"use strict"; -const GLOBSTAR = Symbol('globstar **') -minimatch.GLOBSTAR = GLOBSTAR -const expand = __nccwpck_require__(44416) -const plTypes = { - '!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, - '?': { open: '(?:', close: ')?' }, - '+': { open: '(?:', close: ')+' }, - '*': { open: '(?:', close: ')*' }, - '@': { open: '(?:', close: ')' } +const { MathFloor, NumberIsInteger } = __nccwpck_require__(62646) +const { validateInteger } = __nccwpck_require__(89371) +const { ERR_INVALID_ARG_VALUE } = (__nccwpck_require__(72184).codes) +let defaultHighWaterMarkBytes = 16 * 1024 +let defaultHighWaterMarkObjectMode = 16 +function highWaterMarkFrom(options, isDuplex, duplexKey) { + return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null } - -// any single thing other than / -// don't need to escape / when using new RegExp() -const qmark = '[^/]' - -// * => any number of characters -const star = qmark + '*?' - -// ** when dots are allowed. Anything goes, except .. and . -// not (^ or / followed by one or two dots followed by $ or /), -// followed by anything, any number of times. -const twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' - -// not a ^ or / followed by a dot, -// followed by anything, any number of times. -const twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' - -// "abc" -> { a:true, b:true, c:true } -const charSet = s => s.split('').reduce((set, c) => { - set[c] = true - return set -}, {}) - -// characters that need to be escaped in RegExp. -const reSpecials = charSet('().*{}+?[]^$\\!') - -// characters that indicate we have to add the pattern start -const addPatternStartSet = charSet('[.(') - -// normalizes slashes. -const slashSplit = /\/+/ - -minimatch.filter = (pattern, options = {}) => - (p, i, list) => minimatch(p, pattern, options) - -const ext = (a, b = {}) => { - const t = {} - Object.keys(a).forEach(k => t[k] = a[k]) - Object.keys(b).forEach(k => t[k] = b[k]) - return t +function getDefaultHighWaterMark(objectMode) { + return objectMode ? defaultHighWaterMarkObjectMode : defaultHighWaterMarkBytes } - -minimatch.defaults = def => { - if (!def || typeof def !== 'object' || !Object.keys(def).length) { - return minimatch +function setDefaultHighWaterMark(objectMode, value) { + validateInteger(value, 'value', 0) + if (objectMode) { + defaultHighWaterMarkObjectMode = value + } else { + defaultHighWaterMarkBytes = value } - - const orig = minimatch - - const m = (p, pattern, options) => orig(p, pattern, ext(def, options)) - m.Minimatch = class Minimatch extends orig.Minimatch { - constructor (pattern, options) { - super(pattern, ext(def, options)) +} +function getHighWaterMark(state, options, duplexKey, isDuplex) { + const hwm = highWaterMarkFrom(options, isDuplex, duplexKey) + if (hwm != null) { + if (!NumberIsInteger(hwm) || hwm < 0) { + const name = isDuplex ? 
`options.${duplexKey}` : 'options.highWaterMark' + throw new ERR_INVALID_ARG_VALUE(name, hwm) } + return MathFloor(hwm) } - m.Minimatch.defaults = options => orig.defaults(ext(def, options)).Minimatch - m.filter = (pattern, options) => orig.filter(pattern, ext(def, options)) - m.defaults = options => orig.defaults(ext(def, options)) - m.makeRe = (pattern, options) => orig.makeRe(pattern, ext(def, options)) - m.braceExpand = (pattern, options) => orig.braceExpand(pattern, ext(def, options)) - m.match = (list, pattern, options) => orig.match(list, pattern, ext(def, options)) - return m + // Default value + return getDefaultHighWaterMark(state.objectMode) +} +module.exports = { + getHighWaterMark, + getDefaultHighWaterMark, + setDefaultHighWaterMark } +/***/ }), +/***/ 16994: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. -// Brace expansion: -// a{b,c}d -> abd acd -// a{b,}c -> abc ac -// a{0..3}d -> a0d a1d a2d a3d -// a{b,c{d,e}f}g -> abg acdfg acefg -// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) // -// Invalid sets are not expanded. -// a{2..}b -> a{2..}b -// a{b}c -> a{b}c -minimatch.braceExpand = (pattern, options) => braceExpand(pattern, options) +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. 
If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. -const braceExpand = (pattern, options = {}) => { - assertValidPattern(pattern) - // Thanks to Yeting Li for - // improving this regexp to avoid a ReDOS vulnerability. - if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) { - // shortcut. no need to expand. - return [pattern] - } - return expand(pattern) -} +const { ObjectSetPrototypeOf, Symbol } = __nccwpck_require__(62646) +module.exports = Transform +const { ERR_METHOD_NOT_IMPLEMENTED } = (__nccwpck_require__(72184).codes) +const Duplex = __nccwpck_require__(67799) +const { getHighWaterMark } = __nccwpck_require__(3813) +ObjectSetPrototypeOf(Transform.prototype, Duplex.prototype) +ObjectSetPrototypeOf(Transform, Duplex) +const kCallback = Symbol('kCallback') +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options) -const MAX_PATTERN_LENGTH = 1024 * 64 -const assertValidPattern = pattern => { - if (typeof pattern !== 'string') { - throw new TypeError('invalid pattern') + // TODO (ronag): This should preferably always be + // applied but would be semver-major. Or even better; + // make Transform a Readable with the Writable interface. + const readableHighWaterMark = options ? getHighWaterMark(this, options, 'readableHighWaterMark', true) : null + if (readableHighWaterMark === 0) { + // A Duplex will buffer both on the writable and readable side while + // a Transform just wants to buffer hwm number of elements. To avoid + // buffering twice we disable buffering on the writable side. + options = { + ...options, + highWaterMark: null, + readableHighWaterMark, + // TODO (ronag): 0 is not optimal since we have + // a "bug" where we check needDrain before calling _write and not after. + // Refs: https://github.com/nodejs/node/pull/32887 + // Refs: https://github.com/nodejs/node/pull/35941 + writableHighWaterMark: options.writableHighWaterMark || 0 + } } + Duplex.call(this, options) - if (pattern.length > MAX_PATTERN_LENGTH) { - throw new TypeError('pattern is too long') + // We have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. 
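+  // (Usage sketch, assuming Node's stream API: the option hooks wired up
+  // below let callers avoid subclassing, e.g.
+  //   new Transform({
+  //     transform(chunk, enc, cb) { cb(null, chunk.toString().toUpperCase()) }
+  //   })
+  // behaves like a subclass that overrides _transform.)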
+ this._readableState.sync = false + this[kCallback] = null + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform + if (typeof options.flush === 'function') this._flush = options.flush } -} - -// parse a component of the expanded set. -// At this point, no pattern may contain "/" in it -// so we're going to return a 2d array, where each entry is the full -// pattern, split on '/', and then turned into a regular expression. -// A regexp is made at the end which joins each array with an -// escaped /, and another full one which joins each regexp with |. -// -// Following the lead of Bash 4.1, note that "**" only has special meaning -// when it is the *only* thing in a path portion. Otherwise, any series -// of * is equivalent to a single *. Globstar behavior is enabled by -// default, and can be disabled by setting options.noglobstar. -const SUBPARSE = Symbol('subparse') - -minimatch.makeRe = (pattern, options) => - new Minimatch(pattern, options || {}).makeRe() -minimatch.match = (list, pattern, options = {}) => { - const mm = new Minimatch(pattern, options) - list = list.filter(f => mm.match(f)) - if (mm.options.nonull && !list.length) { - list.push(pattern) - } - return list + // When the writable side finishes, then flush out anything remaining. + // Backwards compat. Some Transform streams incorrectly implement _final + // instead of or in addition to _flush. By using 'prefinish' instead of + // implementing _final we continue supporting this unfortunate use case. + this.on('prefinish', prefinish) } - -// replace stuff like \* with * -const globUnescape = s => s.replace(/\\(.)/g, '$1') -const charUnescape = s => s.replace(/\\([^-\]])/g, '$1') -const regExpEscape = s => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&') -const braExpEscape = s => s.replace(/[[\]\\]/g, '\\$&') - -class Minimatch { - constructor (pattern, options) { - assertValidPattern(pattern) - - if (!options) options = {} - - this.options = options - this.set = [] - this.pattern = pattern - this.windowsPathsNoEscape = !!options.windowsPathsNoEscape || - options.allowWindowsEscape === false - if (this.windowsPathsNoEscape) { - this.pattern = this.pattern.replace(/\\/g, '/') +function final(cb) { + if (typeof this._flush === 'function' && !this.destroyed) { + this._flush((er, data) => { + if (er) { + if (cb) { + cb(er) + } else { + this.destroy(er) + } + return + } + if (data != null) { + this.push(data) + } + this.push(null) + if (cb) { + cb() + } + }) + } else { + this.push(null) + if (cb) { + cb() } - this.regexp = null - this.negate = false - this.comment = false - this.empty = false - this.partial = !!options.partial - - // make the set of regexps etc. - this.make() } - - debug () {} - - make () { - const pattern = this.pattern - const options = this.options - - // empty patterns and comments match nothing. 
- if (!options.nocomment && pattern.charAt(0) === '#') { - this.comment = true +} +function prefinish() { + if (this._final !== final) { + final.call(this) + } +} +Transform.prototype._final = final +Transform.prototype._transform = function (chunk, encoding, callback) { + throw new ERR_METHOD_NOT_IMPLEMENTED('_transform()') +} +Transform.prototype._write = function (chunk, encoding, callback) { + const rState = this._readableState + const wState = this._writableState + const length = rState.length + this._transform(chunk, encoding, (err, val) => { + if (err) { + callback(err) return } - if (!pattern) { - this.empty = true - return + if (val != null) { + this.push(val) + } + if ( + wState.ended || + // Backwards compat. + length === rState.length || + // Backwards compat. + rState.length < rState.highWaterMark + ) { + callback() + } else { + this[kCallback] = callback } + }) +} +Transform.prototype._read = function () { + if (this[kCallback]) { + const callback = this[kCallback] + this[kCallback] = null + callback() + } +} - // step 1: figure out negation, etc. - this.parseNegate() - // step 2: expand braces - let set = this.globSet = this.braceExpand() +/***/ }), - if (options.debug) this.debug = (...args) => console.error(...args) +/***/ 58650: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - this.debug(this.pattern, set) +"use strict"; - // step 3: now we have a set, so turn each one into a series of path-portion - // matching patterns. - // These will be regexps, except in the case of "**", which is - // set to the GLOBSTAR object for globstar behavior, - // and will not contain any / characters - set = this.globParts = set.map(s => s.split(slashSplit)) - this.debug(this.pattern, set) - - // glob --> regexps - set = set.map((s, si, set) => s.map(this.parse, this)) +const { SymbolAsyncIterator, SymbolIterator, SymbolFor } = __nccwpck_require__(62646) - this.debug(this.pattern, set) - - // filter out everything that didn't compile properly. - set = set.filter(s => s.indexOf(false) === -1) +// We need to use SymbolFor to make these globally available +// for interopt with readable-stream, i.e. readable-stream +// and node core needs to be able to read/write private state +// from each other for proper interoperability. +const kIsDestroyed = SymbolFor('nodejs.stream.destroyed') +const kIsErrored = SymbolFor('nodejs.stream.errored') +const kIsReadable = SymbolFor('nodejs.stream.readable') +const kIsWritable = SymbolFor('nodejs.stream.writable') +const kIsDisturbed = SymbolFor('nodejs.stream.disturbed') +const kIsClosedPromise = SymbolFor('nodejs.webstream.isClosedPromise') +const kControllerErrorFunction = SymbolFor('nodejs.webstream.controllerErrorFunction') +function isReadableNodeStream(obj, strict = false) { + var _obj$_readableState + return !!( + ( + obj && + typeof obj.pipe === 'function' && + typeof obj.on === 'function' && + (!strict || (typeof obj.pause === 'function' && typeof obj.resume === 'function')) && + (!obj._writableState || + ((_obj$_readableState = obj._readableState) === null || _obj$_readableState === undefined + ? undefined + : _obj$_readableState.readable) !== false) && + // Duplex + (!obj._writableState || obj._readableState) + ) // Writable has .pipe. 
+ ) +} - this.debug(this.pattern, set) +function isWritableNodeStream(obj) { + var _obj$_writableState + return !!( + ( + obj && + typeof obj.write === 'function' && + typeof obj.on === 'function' && + (!obj._readableState || + ((_obj$_writableState = obj._writableState) === null || _obj$_writableState === undefined + ? undefined + : _obj$_writableState.writable) !== false) + ) // Duplex + ) +} - this.set = set - } +function isDuplexNodeStream(obj) { + return !!( + obj && + typeof obj.pipe === 'function' && + obj._readableState && + typeof obj.on === 'function' && + typeof obj.write === 'function' + ) +} +function isNodeStream(obj) { + return ( + obj && + (obj._readableState || + obj._writableState || + (typeof obj.write === 'function' && typeof obj.on === 'function') || + (typeof obj.pipe === 'function' && typeof obj.on === 'function')) + ) +} +function isReadableStream(obj) { + return !!( + obj && + !isNodeStream(obj) && + typeof obj.pipeThrough === 'function' && + typeof obj.getReader === 'function' && + typeof obj.cancel === 'function' + ) +} +function isWritableStream(obj) { + return !!(obj && !isNodeStream(obj) && typeof obj.getWriter === 'function' && typeof obj.abort === 'function') +} +function isTransformStream(obj) { + return !!(obj && !isNodeStream(obj) && typeof obj.readable === 'object' && typeof obj.writable === 'object') +} +function isWebStream(obj) { + return isReadableStream(obj) || isWritableStream(obj) || isTransformStream(obj) +} +function isIterable(obj, isAsync) { + if (obj == null) return false + if (isAsync === true) return typeof obj[SymbolAsyncIterator] === 'function' + if (isAsync === false) return typeof obj[SymbolIterator] === 'function' + return typeof obj[SymbolAsyncIterator] === 'function' || typeof obj[SymbolIterator] === 'function' +} +function isDestroyed(stream) { + if (!isNodeStream(stream)) return null + const wState = stream._writableState + const rState = stream._readableState + const state = wState || rState + return !!(stream.destroyed || stream[kIsDestroyed] || (state !== null && state !== undefined && state.destroyed)) +} - parseNegate () { - if (this.options.nonegate) return +// Have been end():d. +function isWritableEnded(stream) { + if (!isWritableNodeStream(stream)) return null + if (stream.writableEnded === true) return true + const wState = stream._writableState + if (wState !== null && wState !== undefined && wState.errored) return false + if (typeof (wState === null || wState === undefined ? undefined : wState.ended) !== 'boolean') return null + return wState.ended +} - const pattern = this.pattern - let negate = false - let negateOffset = 0 +// Have emitted 'finish'. +function isWritableFinished(stream, strict) { + if (!isWritableNodeStream(stream)) return null + if (stream.writableFinished === true) return true + const wState = stream._writableState + if (wState !== null && wState !== undefined && wState.errored) return false + if (typeof (wState === null || wState === undefined ? undefined : wState.finished) !== 'boolean') return null + return !!(wState.finished || (strict === false && wState.ended === true && wState.length === 0)) +} - for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) { - negate = !negate - negateOffset++ - } +// Have been push(null):d. 
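+// (Illustration, assuming Node's Readable: after
+//   const r = new Readable({ read() {} }); r.push('a'); r.push(null)
+// isReadableEnded(r) is already true, because push(null) set rState.ended,
+// while isReadableFinished(r) stays false until the buffered 'a' has been
+// read out and 'end' has actually been emitted.)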
+function isReadableEnded(stream) { + if (!isReadableNodeStream(stream)) return null + if (stream.readableEnded === true) return true + const rState = stream._readableState + if (!rState || rState.errored) return false + if (typeof (rState === null || rState === undefined ? undefined : rState.ended) !== 'boolean') return null + return rState.ended +} - if (negateOffset) this.pattern = pattern.slice(negateOffset) - this.negate = negate +// Have emitted 'end'. +function isReadableFinished(stream, strict) { + if (!isReadableNodeStream(stream)) return null + const rState = stream._readableState + if (rState !== null && rState !== undefined && rState.errored) return false + if (typeof (rState === null || rState === undefined ? undefined : rState.endEmitted) !== 'boolean') return null + return !!(rState.endEmitted || (strict === false && rState.ended === true && rState.length === 0)) +} +function isReadable(stream) { + if (stream && stream[kIsReadable] != null) return stream[kIsReadable] + if (typeof (stream === null || stream === undefined ? undefined : stream.readable) !== 'boolean') return null + if (isDestroyed(stream)) return false + return isReadableNodeStream(stream) && stream.readable && !isReadableFinished(stream) +} +function isWritable(stream) { + if (stream && stream[kIsWritable] != null) return stream[kIsWritable] + if (typeof (stream === null || stream === undefined ? undefined : stream.writable) !== 'boolean') return null + if (isDestroyed(stream)) return false + return isWritableNodeStream(stream) && stream.writable && !isWritableEnded(stream) +} +function isFinished(stream, opts) { + if (!isNodeStream(stream)) { + return null + } + if (isDestroyed(stream)) { + return true + } + if ((opts === null || opts === undefined ? undefined : opts.readable) !== false && isReadable(stream)) { + return false + } + if ((opts === null || opts === undefined ? undefined : opts.writable) !== false && isWritable(stream)) { + return false + } + return true +} +function isWritableErrored(stream) { + var _stream$_writableStat, _stream$_writableStat2 + if (!isNodeStream(stream)) { + return null + } + if (stream.writableErrored) { + return stream.writableErrored + } + return (_stream$_writableStat = + (_stream$_writableStat2 = stream._writableState) === null || _stream$_writableStat2 === undefined + ? undefined + : _stream$_writableStat2.errored) !== null && _stream$_writableStat !== undefined + ? _stream$_writableStat + : null +} +function isReadableErrored(stream) { + var _stream$_readableStat, _stream$_readableStat2 + if (!isNodeStream(stream)) { + return null + } + if (stream.readableErrored) { + return stream.readableErrored + } + return (_stream$_readableStat = + (_stream$_readableStat2 = stream._readableState) === null || _stream$_readableStat2 === undefined + ? undefined + : _stream$_readableStat2.errored) !== null && _stream$_readableStat !== undefined + ? _stream$_readableStat + : null +} +function isClosed(stream) { + if (!isNodeStream(stream)) { + return null + } + if (typeof stream.closed === 'boolean') { + return stream.closed + } + const wState = stream._writableState + const rState = stream._readableState + if ( + typeof (wState === null || wState === undefined ? undefined : wState.closed) === 'boolean' || + typeof (rState === null || rState === undefined ? undefined : rState.closed) === 'boolean' + ) { + return ( + (wState === null || wState === undefined ? undefined : wState.closed) || + (rState === null || rState === undefined ? 
undefined : rState.closed) + ) + } + if (typeof stream._closed === 'boolean' && isOutgoingMessage(stream)) { + return stream._closed } + return null +} +function isOutgoingMessage(stream) { + return ( + typeof stream._closed === 'boolean' && + typeof stream._defaultKeepAlive === 'boolean' && + typeof stream._removedConnection === 'boolean' && + typeof stream._removedContLen === 'boolean' + ) +} +function isServerResponse(stream) { + return typeof stream._sent100 === 'boolean' && isOutgoingMessage(stream) +} +function isServerRequest(stream) { + var _stream$req + return ( + typeof stream._consuming === 'boolean' && + typeof stream._dumped === 'boolean' && + ((_stream$req = stream.req) === null || _stream$req === undefined ? undefined : _stream$req.upgradeOrConnect) === + undefined + ) +} +function willEmitClose(stream) { + if (!isNodeStream(stream)) return null + const wState = stream._writableState + const rState = stream._readableState + const state = wState || rState + return ( + (!state && isServerResponse(stream)) || !!(state && state.autoDestroy && state.emitClose && state.closed === false) + ) +} +function isDisturbed(stream) { + var _stream$kIsDisturbed + return !!( + stream && + ((_stream$kIsDisturbed = stream[kIsDisturbed]) !== null && _stream$kIsDisturbed !== undefined + ? _stream$kIsDisturbed + : stream.readableDidRead || stream.readableAborted) + ) +} +function isErrored(stream) { + var _ref, + _ref2, + _ref3, + _ref4, + _ref5, + _stream$kIsErrored, + _stream$_readableStat3, + _stream$_writableStat3, + _stream$_readableStat4, + _stream$_writableStat4 + return !!( + stream && + ((_ref = + (_ref2 = + (_ref3 = + (_ref4 = + (_ref5 = + (_stream$kIsErrored = stream[kIsErrored]) !== null && _stream$kIsErrored !== undefined + ? _stream$kIsErrored + : stream.readableErrored) !== null && _ref5 !== undefined + ? _ref5 + : stream.writableErrored) !== null && _ref4 !== undefined + ? _ref4 + : (_stream$_readableStat3 = stream._readableState) === null || _stream$_readableStat3 === undefined + ? undefined + : _stream$_readableStat3.errorEmitted) !== null && _ref3 !== undefined + ? _ref3 + : (_stream$_writableStat3 = stream._writableState) === null || _stream$_writableStat3 === undefined + ? undefined + : _stream$_writableStat3.errorEmitted) !== null && _ref2 !== undefined + ? _ref2 + : (_stream$_readableStat4 = stream._readableState) === null || _stream$_readableStat4 === undefined + ? undefined + : _stream$_readableStat4.errored) !== null && _ref !== undefined + ? _ref + : (_stream$_writableStat4 = stream._writableState) === null || _stream$_writableStat4 === undefined + ? undefined + : _stream$_writableStat4.errored) + ) +} +module.exports = { + isDestroyed, + kIsDestroyed, + isDisturbed, + kIsDisturbed, + isErrored, + kIsErrored, + isReadable, + kIsReadable, + kIsClosedPromise, + kControllerErrorFunction, + kIsWritable, + isClosed, + isDuplexNodeStream, + isFinished, + isIterable, + isReadableNodeStream, + isReadableStream, + isReadableEnded, + isReadableFinished, + isReadableErrored, + isNodeStream, + isWebStream, + isWritable, + isWritableNodeStream, + isWritableStream, + isWritableEnded, + isWritableFinished, + isWritableErrored, + isServerRequest, + isServerResponse, + willEmitClose, + isTransformStream +} - // set partial to true to test if, for example, - // "/a/b" matches the start of "/*/b/*/d" - // Partial means, if you run out of file before you run - // out of pattern, then that's fine, as long as all - // the parts match. 
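+// (Hedged usage sketch of the duck-typing helpers above, assuming Node's
+// built-in streams and a WHATWG ReadableStream global:
+//   isReadableNodeStream(fs.createReadStream(file)) // true: has .pipe/.on
+//   isWritableNodeStream(process.stdout)            // true: has .write/.on
+//   isReadableStream(new ReadableStream())          // true: has .getReader
+// The checks are purely structural, so they also recognize stream objects
+// produced by a different copy of readable-stream.)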
- matchOne (file, pattern, partial) { - var options = this.options - this.debug('matchOne', - { 'this': this, file: file, pattern: pattern }) +/***/ }), - this.debug('matchOne', file.length, pattern.length) +/***/ 59482: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - for (var fi = 0, - pi = 0, - fl = file.length, - pl = pattern.length - ; (fi < fl) && (pi < pl) - ; fi++, pi++) { - this.debug('matchOne loop') - var p = pattern[pi] - var f = file[fi] +/* replacement start */ - this.debug(pattern, p, f) +const process = __nccwpck_require__(76341) - // should be impossible. - // some invalid regexp stuff in the set. - /* istanbul ignore if */ - if (p === false) return false +/* replacement end */ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. - if (p === GLOBSTAR) { - this.debug('GLOBSTAR', [pattern, p, f]) +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. - // "**" - // a/**/b/**/c would match the following: - // a/b/x/y/z/c - // a/x/y/z/b/c - // a/b/x/b/x/c - // a/b/c - // To do this, take the rest of the pattern after - // the **, and see if it would match the file remainder. - // If so, return success. - // If not, the ** "swallows" a segment, and try again. - // This is recursively awful. - // - // a/**/b/**/c matching a/b/x/y/z/c - // - a matches a - // - doublestar - // - matchOne(b/x/y/z/c, b/**/c) - // - b matches b - // - doublestar - // - matchOne(x/y/z/c, c) -> no - // - matchOne(y/z/c, c) -> no - // - matchOne(z/c, c) -> no - // - matchOne(c, c) yes, hit - var fr = fi - var pr = pi + 1 - if (pr === pl) { - this.debug('** at the end') - // a ** at the end will just swallow the rest. - // We have found a match. - // however, it will not swallow /.x, unless - // options.dot is set. - // . and .. are *never* matched by **, for explosively - // exponential reasons. - for (; fi < fl; fi++) { - if (file[fi] === '.' || file[fi] === '..' 
|| - (!options.dot && file[fi].charAt(0) === '.')) return false - } - return true - } +;('use strict') +const { + ArrayPrototypeSlice, + Error, + FunctionPrototypeSymbolHasInstance, + ObjectDefineProperty, + ObjectDefineProperties, + ObjectSetPrototypeOf, + StringPrototypeToLowerCase, + Symbol, + SymbolHasInstance +} = __nccwpck_require__(62646) +module.exports = Writable +Writable.WritableState = WritableState +const { EventEmitter: EE } = __nccwpck_require__(82361) +const Stream = (__nccwpck_require__(27722).Stream) +const { Buffer } = __nccwpck_require__(14300) +const destroyImpl = __nccwpck_require__(80064) +const { addAbortSignal } = __nccwpck_require__(69832) +const { getHighWaterMark, getDefaultHighWaterMark } = __nccwpck_require__(3813) +const { + ERR_INVALID_ARG_TYPE, + ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK, + ERR_STREAM_CANNOT_PIPE, + ERR_STREAM_DESTROYED, + ERR_STREAM_ALREADY_FINISHED, + ERR_STREAM_NULL_VALUES, + ERR_STREAM_WRITE_AFTER_END, + ERR_UNKNOWN_ENCODING +} = (__nccwpck_require__(72184).codes) +const { errorOrDestroy } = destroyImpl +ObjectSetPrototypeOf(Writable.prototype, Stream.prototype) +ObjectSetPrototypeOf(Writable, Stream) +function nop() {} +const kOnFinished = Symbol('kOnFinished') +function WritableState(options, stream, isDuplex) { + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream, + // e.g. options.readableObjectMode vs. options.writableObjectMode, etc. + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof __nccwpck_require__(67799) - // ok, let's see if we can swallow whatever we can. - while (fr < fl) { - var swallowee = file[fr] + // Object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!(options && options.objectMode) + if (isDuplex) this.objectMode = this.objectMode || !!(options && options.writableObjectMode) - this.debug('\nglobstar while', file, fr, pattern, pr, swallowee) + // The point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write(). + this.highWaterMark = options + ? getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex) + : getDefaultHighWaterMark(false) - // XXX remove this slice. Just pass the start index. - if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { - this.debug('globstar found match!', fr, fl, swallowee) - // found a match. - return true - } else { - // can't swallow "." or ".." ever. - // can only swallow ".foo" when explicitly asked. - if (swallowee === '.' || swallowee === '..' || - (!options.dot && swallowee.charAt(0) === '.')) { - this.debug('dot detected!', file, fr, pattern, pr) - break - } + // if _final has been called. + this.finalCalled = false - // ** swallows a segment, and continue. - this.debug('globstar swallow a segment, and continue') - fr++ - } - } + // drain event flag. + this.needDrain = false + // At the start of calling end() + this.ending = false + // When end() has been called, and returned. + this.ended = false + // When 'finish' is emitted. + this.finished = false - // no match was found. - // However, in partial mode, we can't say this is necessarily over. 
- // If there's more *pattern* left, then - /* istanbul ignore if */ - if (partial) { - // ran out of file - this.debug('\n>>> no match, partial?', file, fr, pattern, pr) - if (fr === fl) return true - } - return false - } - - // something other than ** - // non-magic patterns just have to match exactly - // patterns with magic have been turned into regexps. - var hit - if (typeof p === 'string') { - hit = f === p - this.debug('string match', p, f, hit) - } else { - hit = f.match(p) - this.debug('pattern match', p, f, hit) - } - - if (!hit) return false - } - - // Note: ending in / means that we'll get a final "" - // at the end of the pattern. This can only match a - // corresponding "" at the end of the file. - // If the file ends in /, then it can only match a - // a pattern that ends in /, unless the pattern just - // doesn't have any more for it. But, a/b/ should *not* - // match "a/b/*", even though "" matches against the - // [^/]*? pattern, except in partial mode, where it might - // simply not be reached yet. - // However, a/b/ should still satisfy a/* - - // now either we fell off the end of the pattern, or we're done. - if (fi === fl && pi === pl) { - // ran out of pattern and filename at the same time. - // an exact hit! - return true - } else if (fi === fl) { - // ran out of file, but still had pattern left. - // this is ok if we're doing the match as part of - // a glob fs traversal. - return partial - } else /* istanbul ignore else */ if (pi === pl) { - // ran out of pattern, still have file left. - // this is only acceptable if we're on the very last - // empty segment of a file with a trailing slash. - // a/* should match a/b/ - return (fi === fl - 1) && (file[fi] === '') - } + // Has it been destroyed + this.destroyed = false - // should be unreachable. - /* istanbul ignore next */ - throw new Error('wtf?') - } + // Should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + const noDecode = !!(options && options.decodeStrings === false) + this.decodeStrings = !noDecode - braceExpand () { - return braceExpand(this.pattern, this.options) - } + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = (options && options.defaultEncoding) || 'utf8' - parse (pattern, isSub) { - assertValidPattern(pattern) + // Not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0 - const options = this.options + // A flag to see when we're in the middle of a write. + this.writing = false - // shortcuts - if (pattern === '**') { - if (!options.noglobstar) - return GLOBSTAR - else - pattern = '*' - } - if (pattern === '') return '' + // When true all writes will be buffered until .uncork() call. + this.corked = 0 - let re = '' - let hasMagic = false - let escaping = false - // ? => one single character - const patternListStack = [] - const negativeLists = [] - let stateChar - let inClass = false - let reClassStart = -1 - let classStart = -1 - let cs - let pl - let sp - // . and .. never match anything that doesn't start with ., - // even when options.dot is set. However, if the pattern - // starts with ., then traversal patterns can match. - let dotTravAllowed = pattern.charAt(0) === '.' 
- let dotFileAllowed = options.dot || dotTravAllowed - const patternStart = () => - dotTravAllowed - ? '' - : dotFileAllowed - ? '(?!(?:^|\\/)\\.{1,2}(?:$|\\/))' - : '(?!\\.)' - const subPatternStart = (p) => - p.charAt(0) === '.' - ? '' - : options.dot - ? '(?!(?:^|\\/)\\.{1,2}(?:$|\\/))' - : '(?!\\.)' + // A flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true + // A flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false - const clearStateChar = () => { - if (stateChar) { - // we had some state-tracking character - // that wasn't consumed by this pass. - switch (stateChar) { - case '*': - re += star - hasMagic = true - break - case '?': - re += qmark - hasMagic = true - break - default: - re += '\\' + stateChar - break - } - this.debug('clearStateChar %j %j', stateChar, re) - stateChar = false - } - } + // The callback that's passed to _write(chunk, cb). + this.onwrite = onwrite.bind(undefined, stream) - for (let i = 0, c; (i < pattern.length) && (c = pattern.charAt(i)); i++) { - this.debug('%s\t%s %s %j', pattern, i, re, c) + // The callback that the user supplies to write(chunk, encoding, cb). + this.writecb = null - // skip over any that are escaped. - if (escaping) { - /* istanbul ignore next - completely not allowed, even escaped. */ - if (c === '/') { - return false - } + // The amount that is being written when _write is called. + this.writelen = 0 - if (reSpecials[c]) { - re += '\\' - } - re += c - escaping = false - continue - } + // Storage for data passed to the afterWrite() callback in case of + // synchronous _write() completion. + this.afterWriteTickInfo = null + resetBuffer(this) - switch (c) { - /* istanbul ignore next */ - case '/': { - // Should already be path-split by now. - return false - } + // Number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted. + this.pendingcb = 0 - case '\\': - if (inClass && pattern.charAt(i + 1) === '-') { - re += c - continue - } + // Stream is still being constructed and cannot be + // destroyed until construction finished or failed. + // Async construction is opt in, therefore we start as + // constructed. + this.constructed = true - clearStateChar() - escaping = true - continue + // Emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams. + this.prefinished = false - // the various stateChar values - // for the "extglob" stuff. - case '?': - case '*': - case '+': - case '@': - case '!': - this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c) + // True if the error was already emitted and should not be thrown again. + this.errorEmitted = false - // all of those are literals inside a class, except that - // the glob [!a] means [^a] in regexp - if (inClass) { - this.debug(' in class') - if (c === '!' && i === classStart + 1) c = '^' - re += c - continue - } + // Should close be emitted on destroy. Defaults to true. + this.emitClose = !options || options.emitClose !== false - // if we already have a stateChar, then it means - // that there was something like ** or +? in there. - // Handle the stateChar, then proceed with this one. 
- this.debug('call clearStateChar %j', stateChar) - clearStateChar() - stateChar = c - // if extglob is disabled, then +(asdf|foo) isn't a thing. - // just clear the statechar *now*, rather than even diving into - // the patternList stuff. - if (options.noext) clearStateChar() - continue + // Should .destroy() be called after 'finish' (and potentially 'end'). + this.autoDestroy = !options || options.autoDestroy !== false - case '(': { - if (inClass) { - re += '(' - continue - } + // Indicates whether the stream has errored. When true all write() calls + // should return false. This is needed since when autoDestroy + // is disabled we need a way to tell whether the stream has failed. + this.errored = null - if (!stateChar) { - re += '\\(' - continue - } + // Indicates whether the stream has finished destroying. + this.closed = false - const plEntry = { - type: stateChar, - start: i - 1, - reStart: re.length, - open: plTypes[stateChar].open, - close: plTypes[stateChar].close, - } - this.debug(this.pattern, '\t', plEntry) - patternListStack.push(plEntry) - // negation is (?:(?!(?:js)(?:))[^/]*) - re += plEntry.open - // next entry starts with a dot maybe? - if (plEntry.start === 0 && plEntry.type !== '!') { - dotTravAllowed = true - re += subPatternStart(pattern.slice(i + 1)) - } - this.debug('plType %j %j', stateChar, re) - stateChar = false - continue - } + // True if close has been emitted or would have been emitted + // depending on emitClose. + this.closeEmitted = false + this[kOnFinished] = [] +} +function resetBuffer(state) { + state.buffered = [] + state.bufferedIndex = 0 + state.allBuffers = true + state.allNoop = true +} +WritableState.prototype.getBuffer = function getBuffer() { + return ArrayPrototypeSlice(this.buffered, this.bufferedIndex) +} +ObjectDefineProperty(WritableState.prototype, 'bufferedRequestCount', { + __proto__: null, + get() { + return this.buffered.length - this.bufferedIndex + } +}) +function Writable(options) { + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. - case ')': { - const plEntry = patternListStack[patternListStack.length - 1] - if (inClass || !plEntry) { - re += '\\)' - continue - } - patternListStack.pop() + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. - // closing an extglob - clearStateChar() - hasMagic = true - pl = plEntry - // negation is (?:(?!js)[^/]*) - // The others are (?:) - re += pl.close - if (pl.type === '!') { - negativeLists.push(Object.assign(pl, { reEnd: re.length })) - } - continue - } + // Checking for a Stream.Duplex instance is faster here instead of inside + // the WritableState constructor, at least with V8 6.5. 
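+  // (Net effect of the guard below, for illustration: calling Writable(opts)
+  // without `new` still returns an instance, and Duplex can reuse this
+  // constructor via Writable.call(this, options) even though a Duplex is not
+  // an instanceof Writable through its prototype chain.)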
+ const isDuplex = this instanceof __nccwpck_require__(67799) + if (!isDuplex && !FunctionPrototypeSymbolHasInstance(Writable, this)) return new Writable(options) + this._writableState = new WritableState(options, this, isDuplex) + if (options) { + if (typeof options.write === 'function') this._write = options.write + if (typeof options.writev === 'function') this._writev = options.writev + if (typeof options.destroy === 'function') this._destroy = options.destroy + if (typeof options.final === 'function') this._final = options.final + if (typeof options.construct === 'function') this._construct = options.construct + if (options.signal) addAbortSignal(options.signal, this) + } + Stream.call(this, options) + destroyImpl.construct(this, () => { + const state = this._writableState + if (!state.writing) { + clearBuffer(this, state) + } + finishMaybe(this, state) + }) +} +ObjectDefineProperty(Writable, SymbolHasInstance, { + __proto__: null, + value: function (object) { + if (FunctionPrototypeSymbolHasInstance(this, object)) return true + if (this !== Writable) return false + return object && object._writableState instanceof WritableState + } +}) - case '|': { - const plEntry = patternListStack[patternListStack.length - 1] - if (inClass || !plEntry) { - re += '\\|' - continue - } +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()) +} +function _write(stream, chunk, encoding, cb) { + const state = stream._writableState + if (typeof encoding === 'function') { + cb = encoding + encoding = state.defaultEncoding + } else { + if (!encoding) encoding = state.defaultEncoding + else if (encoding !== 'buffer' && !Buffer.isEncoding(encoding)) throw new ERR_UNKNOWN_ENCODING(encoding) + if (typeof cb !== 'function') cb = nop + } + if (chunk === null) { + throw new ERR_STREAM_NULL_VALUES() + } else if (!state.objectMode) { + if (typeof chunk === 'string') { + if (state.decodeStrings !== false) { + chunk = Buffer.from(chunk, encoding) + encoding = 'buffer' + } + } else if (chunk instanceof Buffer) { + encoding = 'buffer' + } else if (Stream._isUint8Array(chunk)) { + chunk = Stream._uint8ArrayToBuffer(chunk) + encoding = 'buffer' + } else { + throw new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk) + } + } + let err + if (state.ending) { + err = new ERR_STREAM_WRITE_AFTER_END() + } else if (state.destroyed) { + err = new ERR_STREAM_DESTROYED('write') + } + if (err) { + process.nextTick(cb, err) + errorOrDestroy(stream, err, true) + return err + } + state.pendingcb++ + return writeOrBuffer(stream, state, chunk, encoding, cb) +} +Writable.prototype.write = function (chunk, encoding, cb) { + return _write(this, chunk, encoding, cb) === true +} +Writable.prototype.cork = function () { + this._writableState.corked++ +} +Writable.prototype.uncork = function () { + const state = this._writableState + if (state.corked) { + state.corked-- + if (!state.writing) clearBuffer(this, state) + } +} +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. + if (typeof encoding === 'string') encoding = StringPrototypeToLowerCase(encoding) + if (!Buffer.isEncoding(encoding)) throw new ERR_UNKNOWN_ENCODING(encoding) + this._writableState.defaultEncoding = encoding + return this +} - clearStateChar() - re += '|' - // next subpattern can start with a dot? 
- if (plEntry.start === 0 && plEntry.type !== '!') { - dotTravAllowed = true - re += subPatternStart(pattern.slice(i + 1)) - } - continue - } +// If we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, chunk, encoding, callback) { + const len = state.objectMode ? 1 : chunk.length + state.length += len - // these are mostly the same in regexp and glob - case '[': - // swallow any state-tracking char before the [ - clearStateChar() + // stream._write resets state.length + const ret = state.length < state.highWaterMark + // We must ensure that previous needDrain will not be reset to false. + if (!ret) state.needDrain = true + if (state.writing || state.corked || state.errored || !state.constructed) { + state.buffered.push({ + chunk, + encoding, + callback + }) + if (state.allBuffers && encoding !== 'buffer') { + state.allBuffers = false + } + if (state.allNoop && callback !== nop) { + state.allNoop = false + } + } else { + state.writelen = len + state.writecb = callback + state.writing = true + state.sync = true + stream._write(chunk, encoding, state.onwrite) + state.sync = false + } - if (inClass) { - re += '\\' + c - continue - } + // Return false if errored or destroyed in order to break + // any synchronous while(stream.write(data)) loops. + return ret && !state.errored && !state.destroyed +} +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len + state.writecb = cb + state.writing = true + state.sync = true + if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write')) + else if (writev) stream._writev(chunk, state.onwrite) + else stream._write(chunk, encoding, state.onwrite) + state.sync = false +} +function onwriteError(stream, state, er, cb) { + --state.pendingcb + cb(er) + // Ensure callbacks are invoked even when autoDestroy is + // not enabled. Passing `er` here doesn't make sense since + // it's related to one specific write, not to the buffered + // writes. + errorBuffer(state) + // This can emit error, but error must always follow cb. + errorOrDestroy(stream, er) +} +function onwrite(stream, er) { + const state = stream._writableState + const sync = state.sync + const cb = state.writecb + if (typeof cb !== 'function') { + errorOrDestroy(stream, new ERR_MULTIPLE_CALLBACK()) + return + } + state.writing = false + state.writecb = null + state.length -= state.writelen + state.writelen = 0 + if (er) { + // Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364 + er.stack // eslint-disable-line no-unused-expressions - inClass = true - classStart = i - reClassStart = re.length - re += c - continue + if (!state.errored) { + state.errored = er + } - case ']': - // a right bracket shall lose its special - // meaning and represent itself in - // a bracket expression if it occurs - // first in the list. -- POSIX.2 2.8.3.2 - if (i === classStart + 1 || !inClass) { - re += '\\' + c - continue - } - - // split where the last [ was, make sure we don't have - // an invalid re. if so, re-walk the contents of the - // would-be class to re-translate any characters that - // were passed through as-is - // TODO: It would probably be faster to determine this - // without a try/catch and a new RegExp, but it's tricky - // to do safely. For now, this is safe and works. 
- cs = pattern.substring(classStart + 1, i) - try { - RegExp('[' + braExpEscape(charUnescape(cs)) + ']') - // looks good, finish up the class. - re += c - } catch (er) { - // out of order ranges in JS are errors, but in glob syntax, - // they're just a range that matches nothing. - re = re.substring(0, reClassStart) + '(?:$.)' // match nothing ever - } - hasMagic = true - inClass = false - continue - - default: - // swallow any state char that wasn't consumed - clearStateChar() - - if (reSpecials[c] && !(c === '^' && inClass)) { - re += '\\' - } - - re += c - break - - } // switch - } // for - - // handle the case where we left a class open. - // "[abc" is valid, equivalent to "\[abc" - if (inClass) { - // split where the last [ was, and escape it - // this is a huge pita. We now have to re-walk - // the contents of the would-be class to re-translate - // any characters that were passed through as-is - cs = pattern.slice(classStart + 1) - sp = this.parse(cs, SUBPARSE) - re = re.substring(0, reClassStart) + '\\[' + sp[0] - hasMagic = hasMagic || sp[1] - } - - // handle the case where we had a +( thing at the *end* - // of the pattern. - // each pattern list stack adds 3 chars, and we need to go through - // and escape any | chars that were passed through as-is for the regexp. - // Go through and escape them, taking care not to double-escape any - // | chars that were already escaped. - for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { - let tail - tail = re.slice(pl.reStart + pl.open.length) - this.debug('setting tail', re, pl) - // maybe some even number of \, then maybe 1 \, followed by a | - tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, (_, $1, $2) => { - /* istanbul ignore else - should already be done */ - if (!$2) { - // the | isn't already escaped, so escape it. - $2 = '\\' - } - - // need to escape all those slashes *again*, without escaping the - // one that we need for escaping the | character. As it works out, - // escaping an even number of slashes can be done by simply repeating - // it exactly after itself. That's why this trick works. - // - // I am sorry that you have to see this. - return $1 + $1 + $2 + '|' - }) - - this.debug('tail=%j\n %s', tail, tail, pl, re) - const t = pl.type === '*' ? star - : pl.type === '?' ? qmark - : '\\' + pl.type - - hasMagic = true - re = re.slice(0, pl.reStart) + t + '\\(' + tail - } - - // handle trailing things that only matter at the very end. - clearStateChar() - if (escaping) { - // trailing \\ - re += '\\\\' - } - - // only need to apply the nodot start if the re starts with - // something that could conceivably capture a dot - const addPatternStart = addPatternStartSet[re.charAt(0)] - - // Hack to work around lack of negative lookbehind in JS - // A pattern like: *.!(x).!(y|z) needs to ensure that a name - // like 'a.xyz.yz' doesn't match. So, the first negative - // lookahead, has to look ALL the way ahead, to the end of - // the pattern. - for (let n = negativeLists.length - 1; n > -1; n--) { - const nl = negativeLists[n] - - const nlBefore = re.slice(0, nl.reStart) - const nlFirst = re.slice(nl.reStart, nl.reEnd - 8) - let nlAfter = re.slice(nl.reEnd) - const nlLast = re.slice(nl.reEnd - 8, nl.reEnd) + nlAfter - - // Handle nested stuff like *(*.js|!(*.json)), where open parens - // mean that we should *not* include the ) in the bit that is considered - // "after" the negated section. 
- const closeParensBefore = nlBefore.split(')').length - const openParensBefore = nlBefore.split('(').length - closeParensBefore - let cleanAfter = nlAfter - for (let i = 0; i < openParensBefore; i++) { - cleanAfter = cleanAfter.replace(/\)[+*?]?/, '') - } - nlAfter = cleanAfter - - const dollar = nlAfter === '' && isSub !== SUBPARSE ? '(?:$|\\/)' : '' - - re = nlBefore + nlFirst + nlAfter + dollar + nlLast - } - - // if the re is not "" at this point, then we need to make sure - // it doesn't match against an empty path part. - // Otherwise a/* will match a/, which it should not. - if (re !== '' && hasMagic) { - re = '(?=.)' + re - } - - if (addPatternStart) { - re = patternStart() + re - } - - // parsing just a piece of a larger pattern. - if (isSub === SUBPARSE) { - return [re, hasMagic] - } - - // if it's nocase, and the lcase/uppercase don't match, it's magic - if (options.nocase && !hasMagic) { - hasMagic = pattern.toUpperCase() !== pattern.toLowerCase() - } - - // skip the regexp for non-magical patterns - // unescape anything in it, though, so that it'll be - // an exact match against a file etc. - if (!hasMagic) { - return globUnescape(pattern) + // In case of duplex streams we need to notify the readable side of the + // error. + if (stream._readableState && !stream._readableState.errored) { + stream._readableState.errored = er } - - const flags = options.nocase ? 'i' : '' - try { - return Object.assign(new RegExp('^' + re + '$', flags), { - _glob: pattern, - _src: re, - }) - } catch (er) /* istanbul ignore next - should be impossible */ { - // If it was an invalid regular expression, then it can't match - // anything. This trick looks for a character after the end of - // the string, which is of course impossible, except in multi-line - // mode, but it's not a /m regex. - return new RegExp('$.') + if (sync) { + process.nextTick(onwriteError, stream, state, er, cb) + } else { + onwriteError(stream, state, er, cb) } - } - - makeRe () { - if (this.regexp || this.regexp === false) return this.regexp - - // at this point, this.set is a 2d array of partial - // pattern strings, or "**". - // - // It's better to use .match(). This function shouldn't - // be used, really, but it's pretty convenient sometimes, - // when you just want to work with a regex. - const set = this.set - - if (!set.length) { - this.regexp = false - return this.regexp + } else { + if (state.buffered.length > state.bufferedIndex) { + clearBuffer(stream, state) } - const options = this.options - - const twoStar = options.noglobstar ? star - : options.dot ? twoStarDot - : twoStarNoDot - const flags = options.nocase ? 'i' : '' - - // coalesce globstars and regexpify non-globstar patterns - // if it's the only item, then we just do one twoStar - // if it's the first, and there are more, prepend (\/|twoStar\/)? to next - // if it's the last, append (\/twoStar|) to previous - // if it's in the middle, append (\/|\/twoStar\/) to previous - // then filter out GLOBSTAR symbols - let re = set.map(pattern => { - pattern = pattern.map(p => - typeof p === 'string' ? regExpEscape(p) - : p === GLOBSTAR ? GLOBSTAR - : p._src - ).reduce((set, p) => { - if (!(set[set.length - 1] === GLOBSTAR && p === GLOBSTAR)) { - set.push(p) - } - return set - }, []) - pattern.forEach((p, i) => { - if (p !== GLOBSTAR || pattern[i-1] === GLOBSTAR) { - return - } - if (i === 0) { - if (pattern.length > 1) { - pattern[i+1] = '(?:\\\/|' + twoStar + '\\\/)?' 
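
// --- Editorial sketch, not part of the bundled source ---
// The '(?=.)' prefix added above is observable through minimatch's public
// API: a/* must not match "a/", exactly as the comment says. This assumes
// the classic CommonJS entry point of the standalone minimatch package.
const minimatch = require('minimatch')

console.log(minimatch('a/b', 'a/*')) // true
console.log(minimatch('a/', 'a/*'))  // false, '(?=.)' rejects the empty part
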
+ pattern[i+1] - } else { - pattern[i] = twoStar - } - } else if (i === pattern.length - 1) { - pattern[i-1] += '(?:\\\/|' + twoStar + ')?' - } else { - pattern[i-1] += '(?:\\\/|\\\/' + twoStar + '\\\/)' + pattern[i+1] - pattern[i+1] = GLOBSTAR + if (sync) { + // It is a common case that the callback passed to .write() is always + // the same. In that case, we do not schedule a new nextTick(), but + // rather just increase a counter, to improve performance and avoid + // memory allocations. + if (state.afterWriteTickInfo !== null && state.afterWriteTickInfo.cb === cb) { + state.afterWriteTickInfo.count++ + } else { + state.afterWriteTickInfo = { + count: 1, + cb, + stream, + state } - }) - return pattern.filter(p => p !== GLOBSTAR).join('/') - }).join('|') - - // must match entire pattern - // ending in a * or ** will make it less strict. - re = '^(?:' + re + ')$' - - // can match anything, as long as it's not this. - if (this.negate) re = '^(?!' + re + ').*$' - - try { - this.regexp = new RegExp(re, flags) - } catch (ex) /* istanbul ignore next - should be impossible */ { - this.regexp = false - } - return this.regexp - } - - match (f, partial = this.partial) { - this.debug('match', f, this.pattern) - // short-circuit in the case of busted things. - // comments, etc. - if (this.comment) return false - if (this.empty) return f === '' - - if (f === '/' && partial) return true - - const options = this.options - - // windows: need to use /, not \ - if (path.sep !== '/') { - f = f.split(path.sep).join('/') - } - - // treat the test path as a set of pathparts. - f = f.split(slashSplit) - this.debug(this.pattern, 'split', f) - - // just ONE of the pattern sets in this.set needs to match - // in order for it to be valid. If negating, then just one - // match means that we have failed. - // Either way, return on the first hit. - - const set = this.set - this.debug(this.pattern, 'set', set) - - // Find the basename of the path by looking for the last non-empty segment - let filename - for (let i = f.length - 1; i >= 0; i--) { - filename = f[i] - if (filename) break - } - - for (let i = 0; i < set.length; i++) { - const pattern = set[i] - let file = f - if (options.matchBase && pattern.length === 1) { - file = [filename] - } - const hit = this.matchOne(file, pattern, partial) - if (hit) { - if (options.flipNegate) return true - return !this.negate + process.nextTick(afterWriteTick, state.afterWriteTickInfo) } + } else { + afterWrite(stream, state, 1, cb) } - - // didn't get any hits. this is success if it's a negative - // pattern, failure otherwise. - if (options.flipNegate) return false - return this.negate - } - - static defaults (def) { - return minimatch.defaults(def).Minimatch } } - -minimatch.Minimatch = Minimatch - - -/***/ }), - -/***/ 78838: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -;(function (sax) { // wrapper for non-node envs - sax.parser = function (strict, opt) { return new SAXParser(strict, opt) } - sax.SAXParser = SAXParser - sax.SAXStream = SAXStream - sax.createStream = createStream - - // When we pass the MAX_BUFFER_LENGTH position, start checking for buffer overruns. - // When we check, schedule the next check for MAX_BUFFER_LENGTH - (max(buffer lengths)), - // since that's the earliest that a buffer overrun could occur. This way, checks are - // as rare as required, but as often as necessary to ensure never crossing this bound. 
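
// --- Editorial sketch, not part of the bundled source ---
// The afterWriteTickInfo branch above coalesces nextTick scheduling: when
// consecutive writes share one callback, only a counter is bumped and a
// single tick invokes it `count` times. The same pattern in isolation
// (all names here are illustrative, not from the bundle):
let pending = null

function scheduleCoalesced(cb) {
  if (pending !== null && pending.cb === cb) {
    pending.count++ // same callback: just count, no extra nextTick
  } else {
    const info = { cb, count: 1 }
    pending = info
    process.nextTick(() => {
      if (pending === info) pending = null
      for (let i = 0; i < info.count; i++) info.cb()
    })
  }
}

let calls = 0
const logDone = () => { calls++ }
scheduleCoalesced(logDone)
scheduleCoalesced(logDone) // coalesced into the already-scheduled tick
process.nextTick(() => console.log(calls)) // 2
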
- // Furthermore, buffers are only tested at most once per write(), so passing a very - // large string into write() might have undesirable effects, but this is manageable by - // the caller, so it is assumed to be safe. Thus, a call to write() may, in the extreme - // edge case, result in creating at most one complete copy of the string passed in. - // Set to Infinity to have unlimited buffers. - sax.MAX_BUFFER_LENGTH = 64 * 1024 - - var buffers = [ - 'comment', 'sgmlDecl', 'textNode', 'tagName', 'doctype', - 'procInstName', 'procInstBody', 'entity', 'attribName', - 'attribValue', 'cdata', 'script' - ] - - sax.EVENTS = [ - 'text', - 'processinginstruction', - 'sgmldeclaration', - 'doctype', - 'comment', - 'opentagstart', - 'attribute', - 'opentag', - 'closetag', - 'opencdata', - 'cdata', - 'closecdata', - 'error', - 'end', - 'ready', - 'script', - 'opennamespace', - 'closenamespace' - ] - - function SAXParser (strict, opt) { - if (!(this instanceof SAXParser)) { - return new SAXParser(strict, opt) - } - - var parser = this - clearBuffers(parser) - parser.q = parser.c = '' - parser.bufferCheckPosition = sax.MAX_BUFFER_LENGTH - parser.opt = opt || {} - parser.opt.lowercase = parser.opt.lowercase || parser.opt.lowercasetags - parser.looseCase = parser.opt.lowercase ? 'toLowerCase' : 'toUpperCase' - parser.tags = [] - parser.closed = parser.closedRoot = parser.sawRoot = false - parser.tag = parser.error = null - parser.strict = !!strict - parser.noscript = !!(strict || parser.opt.noscript) - parser.state = S.BEGIN - parser.strictEntities = parser.opt.strictEntities - parser.ENTITIES = parser.strictEntities ? Object.create(sax.XML_ENTITIES) : Object.create(sax.ENTITIES) - parser.attribList = [] - - // namespaces form a prototype chain. - // it always points at the current tag, - // which protos to its parent tag. - if (parser.opt.xmlns) { - parser.ns = Object.create(rootNS) - } - - // disallow unquoted attribute values if not otherwise configured - // and strict mode is true - if (parser.opt.unquotedAttributeValues === undefined) { - parser.opt.unquotedAttributeValues = !strict; - } - - // mostly just for error reporting - parser.trackPosition = parser.opt.position !== false - if (parser.trackPosition) { - parser.position = parser.line = parser.column = 0 - } - emit(parser, 'onready') - } - - if (!Object.create) { - Object.create = function (o) { - function F () {} - F.prototype = o - var newf = new F() - return newf - } - } - - if (!Object.keys) { - Object.keys = function (o) { - var a = [] - for (var i in o) if (o.hasOwnProperty(i)) a.push(i) - return a - } - } - - function checkBufferLength (parser) { - var maxAllowed = Math.max(sax.MAX_BUFFER_LENGTH, 10) - var maxActual = 0 - for (var i = 0, l = buffers.length; i < l; i++) { - var len = parser[buffers[i]].length - if (len > maxAllowed) { - // Text/cdata nodes can get big, and since they're buffered, - // we can get here under normal conditions. - // Avoid issues by emitting the text node now, - // so at least it won't get any bigger. - switch (buffers[i]) { - case 'textNode': - closeText(parser) - break - - case 'cdata': - emitNode(parser, 'oncdata', parser.cdata) - parser.cdata = '' - break - - case 'script': - emitNode(parser, 'onscript', parser.script) - parser.script = '' - break - - default: - error(parser, 'Max buffer length exceeded: ' + buffers[i]) - } - } - maxActual = Math.max(maxActual, len) - } - // schedule the next check for the earliest possible buffer overrun. 
- var m = sax.MAX_BUFFER_LENGTH - maxActual - parser.bufferCheckPosition = m + parser.position +function afterWriteTick({ stream, state, count, cb }) { + state.afterWriteTickInfo = null + return afterWrite(stream, state, count, cb) +} +function afterWrite(stream, state, count, cb) { + const needDrain = !state.ending && !stream.destroyed && state.length === 0 && state.needDrain + if (needDrain) { + state.needDrain = false + stream.emit('drain') } - - function clearBuffers (parser) { - for (var i = 0, l = buffers.length; i < l; i++) { - parser[buffers[i]] = '' - } + while (count-- > 0) { + state.pendingcb-- + cb() } - - function flushBuffers (parser) { - closeText(parser) - if (parser.cdata !== '') { - emitNode(parser, 'oncdata', parser.cdata) - parser.cdata = '' - } - if (parser.script !== '') { - emitNode(parser, 'onscript', parser.script) - parser.script = '' - } + if (state.destroyed) { + errorBuffer(state) } + finishMaybe(stream, state) +} - SAXParser.prototype = { - end: function () { end(this) }, - write: write, - resume: function () { this.error = null; return this }, - close: function () { return this.write(null) }, - flush: function () { flushBuffers(this) } +// If there's something in the buffer waiting, then invoke callbacks. +function errorBuffer(state) { + if (state.writing) { + return } - - var Stream - try { - Stream = (__nccwpck_require__(12781).Stream) - } catch (ex) { - Stream = function () {} + for (let n = state.bufferedIndex; n < state.buffered.length; ++n) { + var _state$errored + const { chunk, callback } = state.buffered[n] + const len = state.objectMode ? 1 : chunk.length + state.length -= len + callback( + (_state$errored = state.errored) !== null && _state$errored !== undefined + ? _state$errored + : new ERR_STREAM_DESTROYED('write') + ) } - if (!Stream) Stream = function () {} - - var streamWraps = sax.EVENTS.filter(function (ev) { - return ev !== 'error' && ev !== 'end' - }) - - function createStream (strict, opt) { - return new SAXStream(strict, opt) + const onfinishCallbacks = state[kOnFinished].splice(0) + for (let i = 0; i < onfinishCallbacks.length; i++) { + var _state$errored2 + onfinishCallbacks[i]( + (_state$errored2 = state.errored) !== null && _state$errored2 !== undefined + ? _state$errored2 + : new ERR_STREAM_DESTROYED('end') + ) } + resetBuffer(state) +} - function SAXStream (strict, opt) { - if (!(this instanceof SAXStream)) { - return new SAXStream(strict, opt) - } - - Stream.apply(this) - - this._parser = new SAXParser(strict, opt) - this.writable = true - this.readable = true - - var me = this - - this._parser.onend = function () { - me.emit('end') - } - - this._parser.onerror = function (er) { - me.emit('error', er) - - // if didn't throw, then means error was handled. - // go ahead and clear error, so we can write again. - me._parser.error = null - } - - this._decoder = null - - streamWraps.forEach(function (ev) { - Object.defineProperty(me, 'on' + ev, { - get: function () { - return me._parser['on' + ev] - }, - set: function (h) { - if (!h) { - me.removeAllListeners(ev) - me._parser['on' + ev] = h - return h - } - me.on(ev, h) - }, - enumerable: true, - configurable: false - }) - }) +// If there's something in the buffer waiting, then process it. 
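
// --- Editorial sketch, not part of the bundled source ---
// checkBufferLength() above flushes oversized text buffers early and then
// schedules the next check at position + (MAX_BUFFER_LENGTH - maxActual),
// the earliest point an overrun could recur. Driving that flush through
// sax's public API (this assumes the standalone sax package is installed):
const sax = require('sax')
sax.MAX_BUFFER_LENGTH = 16 // tiny cap so the early flush is easy to observe

const parser = sax.parser(false)
parser.ontext = (t) => console.log('text flushed, length', t.length)
parser.write('<doc>')
for (let i = 0; i < 4; i++) parser.write('xxxxxxxxxx') // 10 chars per write
parser.write('</doc>').close() // the text arrives in pieces, not one event
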
+function clearBuffer(stream, state) { + if (state.corked || state.bufferProcessing || state.destroyed || !state.constructed) { + return } - - SAXStream.prototype = Object.create(Stream.prototype, { - constructor: { - value: SAXStream - } - }) - - SAXStream.prototype.write = function (data) { - if (typeof Buffer === 'function' && - typeof Buffer.isBuffer === 'function' && - Buffer.isBuffer(data)) { - if (!this._decoder) { - var SD = (__nccwpck_require__(71576).StringDecoder) - this._decoder = new SD('utf8') - } - data = this._decoder.write(data) - } - - this._parser.write(data.toString()) - this.emit('data', data) - return true + const { buffered, bufferedIndex, objectMode } = state + const bufferedLength = buffered.length - bufferedIndex + if (!bufferedLength) { + return } - - SAXStream.prototype.end = function (chunk) { - if (chunk && chunk.length) { - this.write(chunk) + let i = bufferedIndex + state.bufferProcessing = true + if (bufferedLength > 1 && stream._writev) { + state.pendingcb -= bufferedLength - 1 + const callback = state.allNoop + ? nop + : (err) => { + for (let n = i; n < buffered.length; ++n) { + buffered[n].callback(err) + } + } + // Make a copy of `buffered` if it's going to be used by `callback` above, + // since `doWrite` will mutate the array. + const chunks = state.allNoop && i === 0 ? buffered : ArrayPrototypeSlice(buffered, i) + chunks.allBuffers = state.allBuffers + doWrite(stream, state, true, state.length, chunks, '', callback) + resetBuffer(state) + } else { + do { + const { chunk, encoding, callback } = buffered[i] + buffered[i++] = null + const len = objectMode ? 1 : chunk.length + doWrite(stream, state, false, len, chunk, encoding, callback) + } while (i < buffered.length && !state.writing) + if (i === buffered.length) { + resetBuffer(state) + } else if (i > 256) { + buffered.splice(0, i) + state.bufferedIndex = 0 + } else { + state.bufferedIndex = i } - this._parser.end() - return true - } - - SAXStream.prototype.on = function (ev, handler) { - var me = this - if (!me._parser['on' + ev] && streamWraps.indexOf(ev) !== -1) { - me._parser['on' + ev] = function () { - var args = arguments.length === 1 ? [arguments[0]] : Array.apply(null, arguments) - args.splice(0, 0, ev) - me.emit.apply(me, args) - } - } - - return Stream.prototype.on.call(me, ev, handler) - } - - // this really needs to be replaced with character classes. - // XML allows all manner of ridiculous numbers and digits. - var CDATA = '[CDATA[' - var DOCTYPE = 'DOCTYPE' - var XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace' - var XMLNS_NAMESPACE = 'http://www.w3.org/2000/xmlns/' - var rootNS = { xml: XML_NAMESPACE, xmlns: XMLNS_NAMESPACE } - - // http://www.w3.org/TR/REC-xml/#NT-NameStartChar - // This implementation works on strings, a single character at a time - // as such, it cannot ever support astral-plane characters (10000-EFFFF) - // without a significant breaking change to either this parser, or the - // JavaScript language. Implementation of an emoji-capable xml parser - // is left as an exercise for the reader. 
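
// --- Editorial sketch, not part of the bundled source ---
// clearBuffer() above hands the whole backlog to _writev() in one call when
// a stream implements it. A Writable opting into that batching (the stream
// and handler names are illustrative):
const { Writable } = require('stream')

const batched = new Writable({
  writev(chunks, callback) {
    // chunks is the buffered backlog: [{ chunk, encoding }, ...]
    console.log('one _writev call for', chunks.length, 'chunks')
    callback()
  }
})

batched.cork()
batched.write('a')
batched.write('b')
batched.write('c')
process.nextTick(() => batched.uncork()) // flushes all three via one _writev
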
- var nameStart = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/
-
- var nameBody = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/
-
- var entityStart = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/
- var entityBody = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/
-
- function isWhitespace (c) {
- return c === ' ' || c === '\n' || c === '\r' || c === '\t'
- }
-
- function isQuote (c) {
- return c === '"' || c === '\''
- }
-
- function isAttribEnd (c) {
- return c === '>' || isWhitespace(c)
- }
-
- function isMatch (regex, c) {
- return regex.test(c)
- }
-
- function notMatch (regex, c) {
- return !isMatch(regex, c)
- }
-
- var S = 0
- sax.STATE = {
- BEGIN: S++, // leading byte order mark or whitespace
- BEGIN_WHITESPACE: S++, // leading whitespace
- TEXT: S++, // general stuff
- TEXT_ENTITY: S++, // &amp and such.
- OPEN_WAKA: S++, // <
- SGML_DECL: S++, // <!BLARG
- SGML_DECL_QUOTED: S++, // <!BLARG foo "bar
- DOCTYPE: S++, // <!DOCTYPE
- DOCTYPE_QUOTED: S++, // <!DOCTYPE "//blah
- DOCTYPE_DTD: S++, // <!DOCTYPE "//blah" [ ...
- DOCTYPE_DTD_QUOTED: S++, // <!DOCTYPE "//blah" [ "foo
- COMMENT_STARTING: S++, // <!-
- COMMENT: S++, // <!--
- COMMENT_ENDING: S++, // <!-- blah -
- COMMENT_ENDED: S++, // <!-- blah --
- CDATA: S++, // <![CDATA[ something
- CDATA_ENDING: S++, // ]
- CDATA_ENDING_2: S++, // ]]
- PROC_INST: S++, // <?hi
- PROC_INST_BODY: S++, // <?hi there
- PROC_INST_ENDING: S++, // <?hi "there" ?
- OPEN_TAG: S++, // <strong
- OPEN_TAG_SLASH: S++, // <strong /
- ATTRIB: S++, // <a
- ATTRIB_NAME: S++, // <a foo
- ATTRIB_NAME_SAW_WHITE: S++, // <a foo _
- ATTRIB_VALUE: S++, // <a foo=
- ATTRIB_VALUE_QUOTED: S++, // <a foo="bar
- ATTRIB_VALUE_CLOSED: S++, // <a foo="bar"
- ATTRIB_VALUE_UNQUOTED: S++, // <a foo=bar
- ATTRIB_VALUE_ENTITY_Q: S++, // <foo bar="&quot;"
- ATTRIB_VALUE_ENTITY_U: S++, // <foo bar=&quot
- CLOSE_TAG: S++, // </a
- CLOSE_TAG_SAW_WHITE: S++, // </a   >
- SCRIPT: S++, // <script> ...
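
// --- Editorial sketch, not part of the bundled source ---
// The nameStart/nameBody regexes above gate element and attribute names one
// character at a time. A strict parser surfaces a violating name start
// through onerror (this assumes the standalone sax package; the exact
// error text is sax's own and not guaranteed here):
const sax = require('sax')
const strict = sax.parser(true)
strict.onerror = (err) => console.log('rejected:', err.message.split('\n')[0])
strict.write('<1abc/>') // '1' fails the nameStart test, so this is not a tag
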