diff --git a/dist/index.js b/dist/index.js
index 0478529..46bce16 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -93645,6 +93645,132 @@ function onConnectTimeout (socket) {
 
 module.exports = buildConnector
 
+/***/ }),
+
+/***/ 14462:
+/***/ ((module) => {
+
+"use strict";
+
+
+/** @type {Record<string, string>} */
+const headerNameLowerCasedRecord = {}
+
+// https://developer.mozilla.org/docs/Web/HTTP/Headers
+const wellknownHeaderNames = [
+  'Accept',
+  'Accept-Encoding',
+  'Accept-Language',
+  'Accept-Ranges',
+  'Access-Control-Allow-Credentials',
+  'Access-Control-Allow-Headers',
+  'Access-Control-Allow-Methods',
+  'Access-Control-Allow-Origin',
+  'Access-Control-Expose-Headers',
+  'Access-Control-Max-Age',
+  'Access-Control-Request-Headers',
+  'Access-Control-Request-Method',
+  'Age',
+  'Allow',
+  'Alt-Svc',
+  'Alt-Used',
+  'Authorization',
+  'Cache-Control',
+  'Clear-Site-Data',
+  'Connection',
+  'Content-Disposition',
+  'Content-Encoding',
+  'Content-Language',
+  'Content-Length',
+  'Content-Location',
+  'Content-Range',
+  'Content-Security-Policy',
+  'Content-Security-Policy-Report-Only',
+  'Content-Type',
+  'Cookie',
+  'Cross-Origin-Embedder-Policy',
+  'Cross-Origin-Opener-Policy',
+  'Cross-Origin-Resource-Policy',
+  'Date',
+  'Device-Memory',
+  'Downlink',
+  'ECT',
+  'ETag',
+  'Expect',
+  'Expect-CT',
+  'Expires',
+  'Forwarded',
+  'From',
+  'Host',
+  'If-Match',
+  'If-Modified-Since',
+  'If-None-Match',
+  'If-Range',
+  'If-Unmodified-Since',
+  'Keep-Alive',
+  'Last-Modified',
+  'Link',
+  'Location',
+  'Max-Forwards',
+  'Origin',
+  'Permissions-Policy',
+  'Pragma',
+  'Proxy-Authenticate',
+  'Proxy-Authorization',
+  'RTT',
+  'Range',
+  'Referer',
+  'Referrer-Policy',
+  'Refresh',
+  'Retry-After',
+  'Sec-WebSocket-Accept',
+  'Sec-WebSocket-Extensions',
+  'Sec-WebSocket-Key',
+  'Sec-WebSocket-Protocol',
+  'Sec-WebSocket-Version',
+  'Server',
+  'Server-Timing',
+  'Service-Worker-Allowed',
+  'Service-Worker-Navigation-Preload',
+  'Set-Cookie',
+  'SourceMap',
+  'Strict-Transport-Security',
+  'Supports-Loading-Mode',
+  'TE',
+  'Timing-Allow-Origin',
+  'Trailer',
+  'Transfer-Encoding',
+  'Upgrade',
+  'Upgrade-Insecure-Requests',
+  'User-Agent',
+  'Vary',
+  'Via',
+  'WWW-Authenticate',
+  'X-Content-Type-Options',
+  'X-DNS-Prefetch-Control',
+  'X-Frame-Options',
+  'X-Permitted-Cross-Domain-Policies',
+  'X-Powered-By',
+  'X-Requested-With',
+  'X-XSS-Protection'
+]
+
+for (let i = 0; i < wellknownHeaderNames.length; ++i) {
+  const key = wellknownHeaderNames[i]
+  const lowerCasedKey = key.toLowerCase()
+  headerNameLowerCasedRecord[key] = headerNameLowerCasedRecord[lowerCasedKey] =
+    lowerCasedKey
+}
+
+// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`.
+Object.setPrototypeOf(headerNameLowerCasedRecord, null)
+
+module.exports = {
+  wellknownHeaderNames,
+  headerNameLowerCasedRecord
+}
+
+
 /***/ }),
 
 /***/ 48045:
@@ -94477,6 +94603,7 @@ const { InvalidArgumentError } = __nccwpck_require__(48045)
 const { Blob } = __nccwpck_require__(14300)
 const nodeUtil = __nccwpck_require__(73837)
 const { stringify } = __nccwpck_require__(63477)
+const { headerNameLowerCasedRecord } = __nccwpck_require__(14462)
 
 const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v))
 
@@ -94686,6 +94813,15 @@ function parseKeepAliveTimeout (val) {
   return m ? parseInt(m[1], 10) * 1000 : null
 }
 
+/**
+ * Retrieves a header name and returns its lowercase value.
+ * @param {string | Buffer} value Header name
+ * @returns {string}
+ */
+function headerNameToString (value) {
+  return headerNameLowerCasedRecord[value] || value.toLowerCase()
+}
+
 function parseHeaders (headers, obj = {}) {
   // For H2 support
   if (!Array.isArray(headers)) return headers
@@ -94957,6 +95093,7 @@ module.exports = {
   isIterable,
   isAsyncIterable,
   isDestroyed,
+  headerNameToString,
   parseRawHeaders,
   parseHeaders,
   parseKeepAliveTimeout,
@@ -101604,14 +101741,18 @@ const { isBlobLike, toUSVString, ReadableStreamFrom } = __nccwpck_require__(8398
 const assert = __nccwpck_require__(39491)
 const { isUint8Array } = __nccwpck_require__(29830)
 
+let supportedHashes = []
+
 // https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable
 /** @type {import('crypto')|undefined} */
 let crypto
 
 try {
   crypto = __nccwpck_require__(6113)
+  const possibleRelevantHashes = ['sha256', 'sha384', 'sha512']
+  supportedHashes = crypto.getHashes().filter((hash) => possibleRelevantHashes.includes(hash))
+/* c8 ignore next 3 */
 } catch {
-
 }
 
 function responseURL (response) {
@@ -102139,66 +102280,56 @@ function bytesMatch (bytes, metadataList) {
     return true
   }
 
-  // 3. If parsedMetadata is the empty set, return true.
+  // 3. If response is not eligible for integrity validation, return false.
+  // TODO
+
+  // 4. If parsedMetadata is the empty set, return true.
   if (parsedMetadata.length === 0) {
     return true
   }
 
-  // 4. Let metadata be the result of getting the strongest
+  // 5. Let metadata be the result of getting the strongest
   // metadata from parsedMetadata.
-  const list = parsedMetadata.sort((c, d) => d.algo.localeCompare(c.algo))
-  // get the strongest algorithm
-  const strongest = list[0].algo
-  // get all entries that use the strongest algorithm; ignore weaker
-  const metadata = list.filter((item) => item.algo === strongest)
+  const strongest = getStrongestMetadata(parsedMetadata)
+  const metadata = filterMetadataListByAlgorithm(parsedMetadata, strongest)
 
-  // 5. For each item in metadata:
+  // 6. For each item in metadata:
   for (const item of metadata) {
     // 1. Let algorithm be the alg component of item.
     const algorithm = item.algo
 
     // 2. Let expectedValue be the val component of item.
-    let expectedValue = item.hash
+    const expectedValue = item.hash
 
     // See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e
    // "be liberal with padding". This is annoying, and it's not even in the spec.
-    if (expectedValue.endsWith('==')) {
-      expectedValue = expectedValue.slice(0, -2)
-    }
-
     // 3. Let actualValue be the result of applying algorithm to bytes.
     let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64')
 
-    if (actualValue.endsWith('==')) {
-      actualValue = actualValue.slice(0, -2)
+    if (actualValue[actualValue.length - 1] === '=') {
+      if (actualValue[actualValue.length - 2] === '=') {
+        actualValue = actualValue.slice(0, -2)
+      } else {
+        actualValue = actualValue.slice(0, -1)
+      }
     }
 
     // 4. If actualValue is a case-sensitive match for expectedValue,
     // return true.
-    if (actualValue === expectedValue) {
-      return true
-    }
-
-    let actualBase64URL = crypto.createHash(algorithm).update(bytes).digest('base64url')
-
-    if (actualBase64URL.endsWith('==')) {
-      actualBase64URL = actualBase64URL.slice(0, -2)
-    }
-
-    if (actualBase64URL === expectedValue) {
+    if (compareBase64Mixed(actualValue, expectedValue)) {
      return true
    }
  }
 
-  // 6. Return false.
+  // 7. Return false.
   return false
 }
 
 // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options
 // https://www.w3.org/TR/CSP2/#source-list-syntax
 // https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1
-const parseHashWithOptions = /((?<algo>sha256|sha384|sha512)-(?<hash>[A-z0-9+/]{1}.*={0,2}))( +[\x21-\x7e]?)?/i
+const parseHashWithOptions = /(?<algo>sha256|sha384|sha512)-((?<hash>[A-Za-z0-9+/]+|[A-Za-z0-9_-]+)={0,2}(?:\s|$)( +[!-~]*)?)?/i
 
 /**
  * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
@@ -102212,8 +102343,6 @@ function parseMetadata (metadata) {
   // 2. Let empty be equal to true.
   let empty = true
 
-  const supportedHashes = crypto.getHashes()
-
   // 3. For each token returned by splitting metadata on spaces:
   for (const token of metadata.split(' ')) {
     // 1. Set empty to false.
@@ -102223,7 +102352,11 @@
     const parsedToken = parseHashWithOptions.exec(token)
 
     // 3. If token does not parse, continue to the next token.
-    if (parsedToken === null || parsedToken.groups === undefined) {
+    if (
+      parsedToken === null ||
+      parsedToken.groups === undefined ||
+      parsedToken.groups.algo === undefined
+    ) {
       // Note: Chromium blocks the request at this point, but Firefox
       // gives a warning that an invalid integrity was given. The
       // correct behavior is to ignore these, and subsequently not
       // load the resource.
       continue
     }
 
     // 4. Let algorithm be the hash-algo component of token.
-    const algorithm = parsedToken.groups.algo
+    const algorithm = parsedToken.groups.algo.toLowerCase()
 
     // 5. If algorithm is a hash function recognized by the user
     // agent, add the parsed token to result.
-    if (supportedHashes.includes(algorithm.toLowerCase())) {
+    if (supportedHashes.includes(algorithm)) {
       result.push(parsedToken.groups)
     }
   }
@@ -102249,6 +102382,82 @@
   return result
 }
 
+/**
+ * @param {{ algo: 'sha256' | 'sha384' | 'sha512' }[]} metadataList
+ */
+function getStrongestMetadata (metadataList) {
+  // Let algorithm be the algo component of the first item in metadataList.
+  // Can be sha256
+  let algorithm = metadataList[0].algo
+  // If the algorithm is sha512, then it is the strongest
+  // and we can return immediately
+  if (algorithm[3] === '5') {
+    return algorithm
+  }
+
+  for (let i = 1; i < metadataList.length; ++i) {
+    const metadata = metadataList[i]
+    // If the algorithm is sha512, then it is the strongest
+    // and we can break the loop immediately
+    if (metadata.algo[3] === '5') {
+      algorithm = 'sha512'
+      break
+      // If the algorithm is sha384, then a potential sha256 or sha384 is ignored
+    } else if (algorithm[3] === '3') {
+      continue
+      // algorithm is sha256, check if algorithm is sha384 and if so, set it as
+      // the strongest
+    } else if (metadata.algo[3] === '3') {
+      algorithm = 'sha384'
+    }
+  }
+  return algorithm
+}
+
+function filterMetadataListByAlgorithm (metadataList, algorithm) {
+  if (metadataList.length === 1) {
+    return metadataList
+  }
+
+  let pos = 0
+  for (let i = 0; i < metadataList.length; ++i) {
+    if (metadataList[i].algo === algorithm) {
+      metadataList[pos++] = metadataList[i]
+    }
+  }
+
+  metadataList.length = pos
+
+  return metadataList
+}
+
+/**
+ * Compares two base64 strings, allowing for base64url
+ * in the second string.
+ *
+* @param {string} actualValue always base64
+ * @param {string} expectedValue base64 or base64url
+ * @returns {boolean}
+ */
+function compareBase64Mixed (actualValue, expectedValue) {
+  if (actualValue.length !== expectedValue.length) {
+    return false
+  }
+  for (let i = 0; i < actualValue.length; ++i) {
+    if (actualValue[i] !== expectedValue[i]) {
+      if (
+        (actualValue[i] === '+' && expectedValue[i] === '-') ||
+        (actualValue[i] === '/' && expectedValue[i] === '_')
+      ) {
+        continue
+      }
+      return false
+    }
+  }
+
+  return true
+}
+
 // https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request
 function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) {
   // TODO
@@ -102664,7 +102873,8 @@ module.exports = {
   urlHasHttpsScheme,
   urlIsHttpHttpsScheme,
   readAllBytes,
-  normalizeMethodRecord
+  normalizeMethodRecord,
+  parseMetadata
 }
 
 
@@ -104751,12 +104961,17 @@ function parseLocation (statusCode, headers) {
 
 // https://tools.ietf.org/html/rfc7231#section-6.4.4
 function shouldRemoveHeader (header, removeContent, unknownOrigin) {
-  return (
-    (header.length === 4 && header.toString().toLowerCase() === 'host') ||
-    (removeContent && header.toString().toLowerCase().indexOf('content-') === 0) ||
-    (unknownOrigin && header.length === 13 && header.toString().toLowerCase() === 'authorization') ||
-    (unknownOrigin && header.length === 6 && header.toString().toLowerCase() === 'cookie')
-  )
+  if (header.length === 4) {
+    return util.headerNameToString(header) === 'host'
+  }
+  if (removeContent && util.headerNameToString(header).startsWith('content-')) {
+    return true
+  }
+  if (unknownOrigin && (header.length === 13 || header.length === 6 || header.length === 19)) {
+    const name = util.headerNameToString(header)
+    return name === 'authorization' || name === 'cookie' || name === 'proxy-authorization'
+  }
+  return false
 }
 
 // https://tools.ietf.org/html/rfc7231#section-6.4
@@ -109870,7 +110085,7 @@ UnzipStream.prototype._prepareOutStream = function (vars, entry) {
     var isDirectory = vars.uncompressedSize === 0 && /[\/\\]$/.test(entry.path);
 
     // protect against malicious zip files which want to extract to parent dirs
-    entry.path = entry.path.replace(/^([/\\]*[.]+[/\\]+)*[/\\]*/, "");
+    entry.path = entry.path.replace(/(?<=^|[/\\]+)[.][.]+(?=[/\\]+|$)/g, ".");
     entry.type = isDirectory ? 'Directory' : 'File';
     entry.isDirectory = isDirectory;
 
@@ -109934,7 +110149,7 @@ UnzipStream.prototype._prepareOutStream = function (vars, entry) {
 
         entry.skip = true;
         setImmediate(() => {
-            entry.emit("error", new Error(message));
+            self.emit('error', new Error(message));
         });
 
         // try to skip over this entry
diff --git a/package-lock.json b/package-lock.json
index 4098c0e..349c2c4 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1431,9 +1431,9 @@
       }
     },
     "node_modules/undici": {
-      "version": "5.28.3",
-      "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.3.tgz",
-      "integrity": "sha512-3ItfzbrhDlINjaP0duwnNsKpDQk3acHI3gVJ1z4fmwMK31k5G9OVIAMLSIaP6w4FaGkaAkN6zaQO9LUvZ1t7VA==",
+      "version": "5.28.4",
+      "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.4.tgz",
+      "integrity": "sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==",
       "dependencies": {
         "@fastify/busboy": "^2.0.0"
       },
@@ -1452,9 +1452,9 @@
       "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ=="
     },
     "node_modules/unzip-stream": {
-      "version": "0.3.1",
-      "resolved": "https://registry.npmjs.org/unzip-stream/-/unzip-stream-0.3.1.tgz",
-      "integrity": "sha512-RzaGXLNt+CW+T41h1zl6pGz3EaeVhYlK+rdAap+7DxW5kqsqePO8kRtWPaCiVqdhZc86EctSPVYNix30YOMzmw==",
+      "version": "0.3.4",
+      "resolved": "https://registry.npmjs.org/unzip-stream/-/unzip-stream-0.3.4.tgz",
+      "integrity": "sha512-PyofABPVv+d7fL7GOpusx7eRT9YETY2X04PhwbSipdj6bMxVCFJrr+nm0Mxqbf9hUiTin/UsnuFWBXlDZFy0Cw==",
       "dependencies": {
         "binary": "^0.3.0",
         "mkdirp": "^0.5.1"