diff --git a/.buildkite/bootstrap.yml b/.buildkite/bootstrap.yml
index b0b84616b3eb50..5a75106d5e6ea3 100644
--- a/.buildkite/bootstrap.yml
+++ b/.buildkite/bootstrap.yml
@@ -13,19 +13,4 @@ steps:
     agents:
       queue: "build-darwin"
     command:
-      - ".buildkite/scripts/prepare-build.sh"
-
-  - if: "build.branch == 'main' && !build.pull_request.repository.fork"
-    label: ":github:"
-    agents:
-      queue: "test-darwin"
-    depends_on:
-      - "darwin-aarch64-build-bun"
-      - "darwin-x64-build-bun"
-      - "linux-aarch64-build-bun"
-      - "linux-x64-build-bun"
-      - "linux-x64-baseline-build-bun"
-      - "windows-x64-build-bun"
-      - "windows-x64-baseline-build-bun"
-    command:
-      - ".buildkite/scripts/upload-release.sh"
+      - "node .buildkite/ci.mjs"
diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs
index 7d12333ad4211b..fcb57bfa493241 100755
--- a/.buildkite/ci.mjs
+++ b/.buildkite/ci.mjs
@@ -11,6 +11,7 @@ import {
   getBuildkiteEmoji,
   getBuildMetadata,
   getBuildNumber,
+  getCanaryRevision,
   getCommitMessage,
   getEmoji,
   getEnv,
@@ -43,7 +44,6 @@ import {
  * @property {Arch} arch
  * @property {Abi} [abi]
  * @property {boolean} [baseline]
- * @property {boolean} [canary]
  * @property {Profile} [profile]
  */
@@ -91,7 +91,6 @@ function getTargetLabel(target) {
  * @property {Arch} arch
  * @property {Abi} [abi]
  * @property {boolean} [baseline]
- * @property {boolean} [canary]
  * @property {Profile} [profile]
  * @property {Distro} [distro]
  * @property {string} release
@@ -200,19 +199,19 @@ function getImageLabel(platform) {
 /**
  * @param {Platform} platform
- * @param {boolean} [dryRun]
+ * @param {PipelineOptions} options
  * @returns {string}
  */
-function getImageName(platform, dryRun) {
+function getImageName(platform, options) {
   const { os, arch, distro, release, features } = platform;
-  let name = `${os}-${arch}-${release}`;
-  if (distro) {
-    name += `-${distro}`;
-  }
+  const { buildImages, publishImages } = options;
+  let name = distro ? `${os}-${arch}-${distro}-${release}` : `${os}-${arch}-${release}`;
   if (features?.length) {
     name += `-with-${features.join("-")}`;
   }
-  if (dryRun) {
+
+  if (buildImages && !publishImages) {
     return `${name}-build-${getBuildNumber()}`;
   }
   return `${name}-v${getBootstrapVersion(os)}`;
@@ -268,23 +267,22 @@ function getPriority() {
 /**
  * @param {Platform} platform
- * @param {Ec2Options} options
+ * @param {PipelineOptions} options
+ * @param {Ec2Options} ec2Options
  * @returns {Agent}
  */
-function getEc2Agent(platform, options) {
+function getEc2Agent(platform, options, ec2Options) {
   const { os, arch, abi, distro, release } = platform;
-  const { instanceType, cpuCount, threadsPerCore, dryRun } = options;
+  const { instanceType, cpuCount, threadsPerCore } = ec2Options;
   return {
     os,
     arch,
     abi,
     distro,
     release,
-    // The agent is created by robobun, see more details here:
-    // https://github.com/oven-sh/robobun/blob/d46c07e0ac5ac0f9ffe1012f0e98b59e1a0d387a/src/robobun.ts#L1707
     robobun: true,
     robobun2: true,
-    "image-name": getImageName(platform, dryRun),
+    "image-name": getImageName(platform, options),
     "instance-type": instanceType,
     "cpu-count": cpuCount,
     "threads-per-core": threadsPerCore,
@@ -294,9 +292,10 @@
 /**
  * @param {Platform} platform
+ * @param {PipelineOptions} options
  * @returns {Agent}
  */
-function getCppAgent(platform, dryRun) {
+function getCppAgent(platform, options) {
   const { os, arch, distro } = platform;

   if (os === "darwin") {
     return {
@@ -307,46 +306,32 @@
     };
   }

-  return getEc2Agent(platform, {
+  return getEc2Agent(platform, options, {
     instanceType: arch === "aarch64" ? "c8g.16xlarge" : "c7i.16xlarge",
     cpuCount: 32,
     threadsPerCore: 1,
-    dryRun,
   });
 }

 /**
  * @param {Platform} platform
+ * @param {PipelineOptions} options
  * @returns {Agent}
  */
-function getZigAgent(platform, dryRun) {
+function getZigAgent(platform, options) {
   const { arch } = platform;
-
   return {
     queue: "build-zig",
   };
-
-  // return getEc2Agent(
-  //   {
-  //     os: "linux",
-  //     arch,
-  //     distro: "debian",
-  //     release: "11",
-  //   },
-  //   {
-  //     instanceType: arch === "aarch64" ? "c8g.2xlarge" : "c7i.2xlarge",
-  //     cpuCount: 4,
-  //     threadsPerCore: 1,
-  //   },
-  // );
 }

 /**
  * @param {Platform} platform
+ * @param {PipelineOptions} options
  * @returns {Agent}
  */
-function getTestAgent(platform, dryRun) {
-  const { os, arch, distro } = platform;
+function getTestAgent(platform, options) {
+  const { os, arch } = platform;

   if (os === "darwin") {
     return {
@@ -358,28 +343,25 @@
   // TODO: `dev-server-ssr-110.test.ts` and `next-build.test.ts` run out of memory at 8GB of memory, so use 16GB instead.
if (os === "windows") { - return getEc2Agent(platform, { + return getEc2Agent(platform, options, { instanceType: "c7i.2xlarge", cpuCount: 2, threadsPerCore: 1, - dryRun, }); } if (arch === "aarch64") { - return getEc2Agent(platform, { + return getEc2Agent(platform, options, { instanceType: "c8g.xlarge", cpuCount: 2, threadsPerCore: 1, - dryRun, }); } - return getEc2Agent(platform, { + return getEc2Agent(platform, options, { instanceType: "c7i.xlarge", cpuCount: 2, threadsPerCore: 1, - dryRun, }); } @@ -389,16 +371,20 @@ function getTestAgent(platform, dryRun) { /** * @param {Target} target + * @param {PipelineOptions} options * @returns {Record} */ -function getBuildEnv(target) { - const { profile, baseline, canary, abi } = target; +function getBuildEnv(target, options) { + const { profile, baseline, abi } = target; const release = !profile || profile === "release"; + const { canary } = options; + const revision = typeof canary === "number" ? canary : 1; return { CMAKE_BUILD_TYPE: release ? "Release" : profile === "debug" ? "Debug" : "RelWithDebInfo", ENABLE_BASELINE: baseline ? "ON" : "OFF", - ENABLE_CANARY: canary ? "ON" : "OFF", + ENABLE_CANARY: revision > 0 ? "ON" : "OFF", + CANARY_REVISION: revision, ENABLE_ASSERTIONS: release ? "OFF" : "ON", ENABLE_LOGS: release ? "OFF" : "ON", ABI: abi === "musl" ? "musl" : undefined, @@ -407,36 +393,36 @@ function getBuildEnv(target) { /** * @param {Platform} platform - * @param {boolean} dryRun + * @param {PipelineOptions} options * @returns {Step} */ -function getBuildVendorStep(platform, dryRun) { +function getBuildVendorStep(platform, options) { return { key: `${getTargetKey(platform)}-build-vendor`, label: `${getTargetLabel(platform)} - build-vendor`, - agents: getCppAgent(platform, dryRun), + agents: getCppAgent(platform, options), retry: getRetry(), cancel_on_build_failing: isMergeQueue(), - env: getBuildEnv(platform), + env: getBuildEnv(platform, options), command: "bun run build:ci --target dependencies", }; } /** * @param {Platform} platform - * @param {boolean} dryRun + * @param {PipelineOptions} options * @returns {Step} */ -function getBuildCppStep(platform, dryRun) { +function getBuildCppStep(platform, options) { return { key: `${getTargetKey(platform)}-build-cpp`, label: `${getTargetLabel(platform)} - build-cpp`, - agents: getCppAgent(platform, dryRun), + agents: getCppAgent(platform, options), retry: getRetry(), cancel_on_build_failing: isMergeQueue(), env: { BUN_CPP_ONLY: "ON", - ...getBuildEnv(platform), + ...getBuildEnv(platform, options), }, command: "bun run build:ci --target bun", }; @@ -460,27 +446,28 @@ function getBuildToolchain(target) { /** * @param {Platform} platform + * @param {PipelineOptions} options * @returns {Step} */ -function getBuildZigStep(platform, dryRun) { +function getBuildZigStep(platform, options) { const toolchain = getBuildToolchain(platform); return { key: `${getTargetKey(platform)}-build-zig`, label: `${getTargetLabel(platform)} - build-zig`, - agents: getZigAgent(platform, dryRun), + agents: getZigAgent(platform, options), retry: getRetry(), cancel_on_build_failing: isMergeQueue(), - env: getBuildEnv(platform), + env: getBuildEnv(platform, options), command: `bun run build:ci --target bun-zig --toolchain ${toolchain}`, }; } /** * @param {Platform} platform - * @param {boolean} dryRun + * @param {PipelineOptions} options * @returns {Step} */ -function getLinkBunStep(platform, dryRun) { +function getLinkBunStep(platform, options) { return { key: `${getTargetKey(platform)}-build-bun`, label: 
`${getTargetLabel(platform)} - build-bun`, @@ -489,12 +476,12 @@ function getLinkBunStep(platform, dryRun) { `${getTargetKey(platform)}-build-cpp`, `${getTargetKey(platform)}-build-zig`, ], - agents: getCppAgent(platform, dryRun), + agents: getCppAgent(platform, options), retry: getRetry(), cancel_on_build_failing: isMergeQueue(), env: { BUN_LINK_ONLY: "ON", - ...getBuildEnv(platform), + ...getBuildEnv(platform, options), }, command: "bun run build:ci --target bun", }; @@ -502,17 +489,17 @@ function getLinkBunStep(platform, dryRun) { /** * @param {Platform} platform - * @param {boolean} dryRun + * @param {PipelineOptions} options * @returns {Step} */ -function getBuildBunStep(platform, dryRun) { +function getBuildBunStep(platform, options) { return { key: `${getTargetKey(platform)}-build-bun`, label: `${getTargetLabel(platform)} - build-bun`, - agents: getCppAgent(platform, dryRun), + agents: getCppAgent(platform, options), retry: getRetry(), cancel_on_build_failing: isMergeQueue(), - env: getBuildEnv(platform), + env: getBuildEnv(platform, options), command: "bun run build:ci", }; } @@ -527,12 +514,13 @@ function getBuildBunStep(platform, dryRun) { /** * @param {Platform} platform - * @param {TestOptions} [options] + * @param {PipelineOptions} options + * @param {TestOptions} [testOptions] * @returns {Step} */ -function getTestBunStep(platform, options = {}) { +function getTestBunStep(platform, options, testOptions = {}) { const { os } = platform; - const { buildId, unifiedTests, testFiles, dryRun } = options; + const { buildId, unifiedTests, testFiles } = testOptions; const args = [`--step=${getTargetKey(platform)}-build-bun`]; if (buildId) { @@ -551,7 +539,7 @@ function getTestBunStep(platform, options = {}) { key: `${getPlatformKey(platform)}-test-bun`, label: `${getPlatformLabel(platform)} - test-bun`, depends_on: depends, - agents: getTestAgent(platform, dryRun), + agents: getTestAgent(platform, options), cancel_on_build_failing: isMergeQueue(), retry: getRetry(), soft_fail: isMainBranch() ? true : [{ exit_status: 2 }], @@ -565,15 +553,14 @@ function getTestBunStep(platform, options = {}) { /** * @param {Platform} platform - * @param {boolean} [dryRun] + * @param {PipelineOptions} options * @returns {Step} */ -function getBuildImageStep(platform, dryRun) { +function getBuildImageStep(platform, options) { const { os, arch, distro, release, features } = platform; + const { publishImages } = options; + const action = publishImages ? "publish-image" : "create-image"; - const action = dryRun ? "create-image" : "publish-image"; - - /** @type {string[]} */ const command = [ "node", "./scripts/machine.mjs", @@ -606,10 +593,14 @@ function getBuildImageStep(platform, dryRun) { } /** - * @param {Platform[]} [buildPlatforms] + * @param {Platform[]} buildPlatforms + * @param {PipelineOptions} options * @returns {Step} */ -function getReleaseStep(buildPlatforms) { +function getReleaseStep(buildPlatforms, options) { + const { canary } = options; + const revision = typeof canary === "number" ? 
canary : 1; + return { key: "release", label: getBuildkiteEmoji("rocket"), @@ -617,6 +608,9 @@ function getReleaseStep(buildPlatforms) { queue: "test-darwin", }, depends_on: buildPlatforms.map(platform => `${getTargetKey(platform)}-build-bun`), + env: { + CANARY: revision, + }, command: ".buildkite/scripts/upload-release.sh", }; } @@ -706,7 +700,7 @@ function getReleaseStep(buildPlatforms) { * @property {string | boolean} [forceTests] * @property {string | boolean} [buildImages] * @property {string | boolean} [publishImages] - * @property {boolean} [canary] + * @property {number} [canary] * @property {Profile[]} [buildProfiles] * @property {Platform[]} [buildPlatforms] * @property {Platform[]} [testPlatforms] @@ -924,6 +918,7 @@ async function getPipelineOptions() { return; } + const canary = await getCanaryRevision(); const buildPlatformsMap = new Map(buildPlatforms.map(platform => [getTargetKey(platform), platform])); const testPlatformsMap = new Map(testPlatforms.map(platform => [getPlatformKey(platform), platform])); @@ -946,7 +941,7 @@ async function getPipelineOptions() { const buildPlatformKeys = parseArray(options["build-platforms"]); const testPlatformKeys = parseArray(options["test-platforms"]); return { - canary: parseBoolean(options["canary"]), + canary: parseBoolean(options["canary"]) ? canary : 0, skipBuilds: parseBoolean(options["skip-builds"]), forceBuilds: parseBoolean(options["force-builds"]), skipTests: parseBoolean(options["skip-tests"]), @@ -981,10 +976,11 @@ async function getPipelineOptions() { return false; }; + const isCanary = + !parseBoolean(getEnv("RELEASE", false) || "false") && + !/\[(release|build release|release build)\]/i.test(commitMessage); return { - canary: - !parseBoolean(getEnv("RELEASE", false) || "false") && - !/\[(release|build release|release build)\]/i.test(commitMessage), + canary: isCanary ? canary : 0, skipEverything: parseOption(/\[(skip ci|no ci)\]/i), skipBuilds: parseOption(/\[(skip builds?|no builds?|only tests?)\]/i), forceBuilds: parseOption(/\[(force builds?)\]/i), @@ -1033,7 +1029,7 @@ async function getPipeline(options = {}) { steps.push({ key: "build-images", group: getBuildkiteEmoji("aws"), - steps: [...Array.from(imagePlatforms.values()).map(platform => getBuildImageStep(platform, !publishImages))], + steps: [...imagePlatforms.values()].map(platform => getBuildImageStep(platform, options)), }); } @@ -1058,22 +1054,21 @@ async function getPipeline(options = {}) { .flatMap(platform => buildProfiles.map(profile => ({ ...platform, profile }))) .map(target => { const imageKey = getImageKey(target); - const imagePlatform = imagePlatforms.get(imageKey); return getStepWithDependsOn( { key: getTargetKey(target), group: getTargetLabel(target), steps: unifiedBuilds - ? [getBuildBunStep(target, dryRun)] + ? [getBuildBunStep(target, options)] : [ - getBuildVendorStep(target, dryRun), - getBuildCppStep(target, dryRun), - getBuildZigStep(target, dryRun), - getLinkBunStep(target, dryRun), + getBuildVendorStep(target, options), + getBuildCppStep(target, options), + getBuildZigStep(target, options), + getLinkBunStep(target, options), ], }, - imagePlatform ? `${imageKey}-build-image` : undefined, + imagePlatforms.has(imageKey) ? 
`${imageKey}-build-image` : undefined, ); }), ); @@ -1088,14 +1083,14 @@ async function getPipeline(options = {}) { .map(target => ({ key: getTargetKey(target), group: getTargetLabel(target), - steps: [getTestBunStep(target, { unifiedTests, testFiles, buildId })], + steps: [getTestBunStep(target, options, { unifiedTests, testFiles, buildId })], })), ); } } if (isMainBranch()) { - steps.push(getReleaseStep(buildPlatforms)); + steps.push(getReleaseStep(buildPlatforms, options)); } /** @type {Map} */ diff --git a/.buildkite/scripts/prepare-build.sh b/.buildkite/scripts/prepare-build.sh deleted file mode 100755 index b0b3f9f37eaf51..00000000000000 --- a/.buildkite/scripts/prepare-build.sh +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/bash - -set -eo pipefail - -function run_command() { - set -x - "$@" - { set +x; } 2>/dev/null -} - -run_command node ".buildkite/ci.mjs" "$@" diff --git a/.buildkite/scripts/upload-release.sh b/.buildkite/scripts/upload-release.sh index b684dfb4a3d958..fa5a2db11a143b 100755 --- a/.buildkite/scripts/upload-release.sh +++ b/.buildkite/scripts/upload-release.sh @@ -3,10 +3,6 @@ set -eo pipefail function assert_main() { - if [ "$RELEASE" == "1" ]; then - echo "info: Skipping canary release because this is a release build" - exit 0 - fi if [ -z "$BUILDKITE_REPO" ]; then echo "error: Cannot find repository for this build" exit 1 @@ -237,8 +233,7 @@ function create_release() { } function assert_canary() { - local canary="$(buildkite-agent meta-data get canary 2>/dev/null)" - if [ -z "$canary" ] || [ "$canary" == "0" ]; then + if [ -z "$CANARY" ] || [ "$CANARY" == "0" ]; then echo "warn: Skipping release because this is not a canary build" exit 0 fi diff --git a/cmake/Options.cmake b/cmake/Options.cmake index 201bf8c8e1536a..fe3219c2687686 100644 --- a/cmake/Options.cmake +++ b/cmake/Options.cmake @@ -67,13 +67,7 @@ optionx(ENABLE_ASSERTIONS BOOL "If debug assertions should be enabled" DEFAULT $ optionx(ENABLE_CANARY BOOL "If canary features should be enabled" DEFAULT ON) -if(ENABLE_CANARY AND BUILDKITE) - execute_process( - COMMAND buildkite-agent meta-data get "canary" - OUTPUT_VARIABLE DEFAULT_CANARY_REVISION - OUTPUT_STRIP_TRAILING_WHITESPACE - ) -elseif(ENABLE_CANARY) +if(ENABLE_CANARY) set(DEFAULT_CANARY_REVISION "1") else() set(DEFAULT_CANARY_REVISION "0") diff --git a/cmake/targets/BuildBun.cmake b/cmake/targets/BuildBun.cmake index 6e9eab917f8d95..9a0b625bbc0365 100644 --- a/cmake/targets/BuildBun.cmake +++ b/cmake/targets/BuildBun.cmake @@ -576,6 +576,7 @@ set_property(DIRECTORY APPEND PROPERTY CMAKE_CONFIGURE_DEPENDS "build.zig") set(BUN_USOCKETS_SOURCE ${CWD}/packages/bun-usockets) +# hand written cpp source files. 
Full list of "source" code (including codegen) is in BUN_CPP_SOURCES file(GLOB BUN_CXX_SOURCES ${CONFIGURE_DEPENDS} ${CWD}/src/io/*.cpp ${CWD}/src/bun.js/modules/*.cpp @@ -632,6 +633,7 @@ register_command( list(APPEND BUN_CPP_SOURCES ${BUN_C_SOURCES} ${BUN_CXX_SOURCES} + ${BUN_ERROR_CODE_OUTPUTS} ${VENDOR_PATH}/picohttpparser/picohttpparser.c ${NODEJS_HEADERS_PATH}/include/node/node_version.h ${BUN_ZIG_GENERATED_CLASSES_OUTPUTS} diff --git a/packages/bun-build-mdx-rs/src/lib.rs b/packages/bun-build-mdx-rs/src/lib.rs index 4b93e6037fc427..b0859b97ee5dcf 100644 --- a/packages/bun-build-mdx-rs/src/lib.rs +++ b/packages/bun-build-mdx-rs/src/lib.rs @@ -1,55 +1,25 @@ -use bun_native_plugin::{define_bun_plugin, BunLoader, OnBeforeParse}; +use bun_native_plugin::{anyhow, bun, define_bun_plugin, BunLoader, Result}; use mdxjs::{compile, Options as CompileOptions}; use napi_derive::napi; -#[macro_use] -extern crate napi; - define_bun_plugin!("bun-mdx-rs"); -#[no_mangle] -pub extern "C" fn bun_mdx_rs( - args: *const bun_native_plugin::sys::OnBeforeParseArguments, - result: *mut bun_native_plugin::sys::OnBeforeParseResult, -) { - let args = unsafe { &*args }; - - let mut handle = match OnBeforeParse::from_raw(args, result) { - Ok(handle) => handle, - Err(_) => { - return; - } - }; - - let source_str = match handle.input_source_code() { - Ok(source_str) => source_str, - Err(_) => { - handle.log_error("Failed to fetch source code"); - return; - } - }; +#[bun] +pub fn bun_mdx_rs(handle: &mut OnBeforeParse) -> Result<()> { + let source_str = handle.input_source_code()?; let mut options = CompileOptions::gfm(); // Leave it as JSX for Bun to handle options.jsx = true; - let path = match handle.path() { - Ok(path) => path, - Err(e) => { - handle.log_error(&format!("Failed to get path: {:?}", e)); - return; - } - }; + let path = handle.path()?; options.filepath = Some(path.to_string()); - match compile(&source_str, &options) { - Ok(compiled) => { - handle.set_output_source_code(compiled, BunLoader::BUN_LOADER_JSX); - } - Err(_) => { - handle.log_error("Failed to compile MDX"); - return; - } - } + let jsx = compile(&source_str, &options) + .map_err(|e| anyhow::anyhow!("Failed to compile MDX: {:?}", e))?; + + handle.set_output_source_code(jsx, BunLoader::BUN_LOADER_JSX); + + Ok(()) } diff --git a/packages/bun-native-plugin-rs/Cargo.lock b/packages/bun-native-plugin-rs/Cargo.lock index 202700fa3a3b56..0c786953f7489a 100644 --- a/packages/bun-native-plugin-rs/Cargo.lock +++ b/packages/bun-native-plugin-rs/Cargo.lock @@ -11,6 +11,12 @@ dependencies = [ "memchr", ] +[[package]] +name = "anyhow" +version = "1.0.94" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1fd03a028ef38ba2276dce7e33fcd6369c158a1bca17946c4b1b701891c1ff7" + [[package]] name = "bindgen" version = "0.70.1" @@ -37,11 +43,24 @@ version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" +[[package]] +name = "bun-macro" +version = "0.1.0" +dependencies = [ + "anyhow", + "napi", + "quote", + "syn", +] + [[package]] name = "bun-native-plugin" version = "0.1.0" dependencies = [ + "anyhow", "bindgen", + "bun-macro", + "napi", ] [[package]] @@ -70,6 +89,25 @@ dependencies = [ "libloading", ] +[[package]] +name = "convert_case" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca" +dependencies = [ + 
"unicode-segmentation", +] + +[[package]] +name = "ctor" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a2785755761f3ddc1492979ce1e48d2c00d09311c39e4466429188f3dd6501" +dependencies = [ + "quote", + "syn", +] + [[package]] name = "either" version = "1.13.0" @@ -125,6 +163,55 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" +[[package]] +name = "napi" +version = "2.16.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "214f07a80874bb96a8433b3cdfc84980d56c7b02e1a0d7ba4ba0db5cef785e2b" +dependencies = [ + "bitflags", + "ctor", + "napi-derive", + "napi-sys", + "once_cell", +] + +[[package]] +name = "napi-derive" +version = "2.16.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7cbe2585d8ac223f7d34f13701434b9d5f4eb9c332cccce8dee57ea18ab8ab0c" +dependencies = [ + "cfg-if", + "convert_case", + "napi-derive-backend", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "napi-derive-backend" +version = "1.0.75" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1639aaa9eeb76e91c6ae66da8ce3e89e921cd3885e99ec85f4abacae72fc91bf" +dependencies = [ + "convert_case", + "once_cell", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "napi-sys" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "427802e8ec3a734331fec1035594a210ce1ff4dc5bc1950530920ab717964ea3" +dependencies = [ + "libloading", +] + [[package]] name = "nom" version = "7.1.3" @@ -135,6 +222,12 @@ dependencies = [ "minimal-lexical", ] +[[package]] +name = "once_cell" +version = "1.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" + [[package]] name = "prettyplease" version = "0.2.25" @@ -221,6 +314,12 @@ version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" +[[package]] +name = "unicode-segmentation" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" + [[package]] name = "windows-targets" version = "0.52.6" diff --git a/packages/bun-native-plugin-rs/Cargo.toml b/packages/bun-native-plugin-rs/Cargo.toml index bf4d7b784b920c..55476cbd837ab9 100644 --- a/packages/bun-native-plugin-rs/Cargo.toml +++ b/packages/bun-native-plugin-rs/Cargo.toml @@ -5,3 +5,13 @@ edition = "2021" [build-dependencies] bindgen = "0.70.1" + +[dependencies] +anyhow = "1.0.94" +bun-macro = { path = "./bun-macro" } +napi = { version = "2.14.1", default-features = false, features = ["napi4"] } + +[features] +default = ["napi"] +napi = [] + diff --git a/packages/bun-native-plugin-rs/README.md b/packages/bun-native-plugin-rs/README.md index f235849872f7bd..6c57a2c9d1a703 100644 --- a/packages/bun-native-plugin-rs/README.md +++ b/packages/bun-native-plugin-rs/README.md @@ -4,7 +4,7 @@ This crate provides a Rustified wrapper over the Bun's native bundler plugin C API. 
-Some advantages to _native_ bundler plugins as opposed to regular ones implemented in JS:
+Some advantages to _native_ bundler plugins as opposed to regular ones implemented in JS are:

 - Native plugins take full advantage of Bun's parallelized bundler pipeline and run on multiple threads at the same time
 - Unlike JS, native plugins don't need to do the UTF-8 <-> UTF-16 source code string conversions

@@ -30,61 +30,84 @@
 cargo add bun-native-plugin
 ```

-Now, inside the `lib.rs` file, expose a C ABI function which has the same function signature as the plugin lifecycle hook that you want to implement.
+Now, inside the `lib.rs` file, we'll use the `bun_native_plugin::bun` proc macro to define a function which
+will implement our native plugin.

-For example, implementing `onBeforeParse`:
+Here's an example implementing the `onBeforeParse` hook:

 ```rs
-use bun_native_plugin::{define_bun_plugin, OnBeforeParse};
+use bun_native_plugin::{define_bun_plugin, OnBeforeParse, bun, Result, anyhow, BunLoader};
 use napi_derive::napi;

-/// Define with the name of the plugin
+/// Define the plugin and its name
 define_bun_plugin!("replace-foo-with-bar");

-/// This is necessary for napi-rs to compile this into a proper NAPI module
-#[napi]
-pub fn register_bun_plugin() {}
-
-/// Use `no_mangle` so that we can reference this symbol by name later
-/// when registering this native plugin in JS.
+/// Here we'll implement `onBeforeParse` with code that replaces all occurrences of
+/// `foo` with `bar`.
 ///
-/// Here we'll create a dummy plugin which replaces all occurences of
-/// `foo` with `bar`
-#[no_mangle]
-pub extern "C" fn on_before_parse_plugin_impl(
+/// We use the #[bun] macro to generate some of the boilerplate code.
+///
+/// The argument of the function (`handle: &mut OnBeforeParse`) tells
+/// the macro that this function implements the `onBeforeParse` hook.
+#[bun]
+pub fn replace_foo_with_bar(handle: &mut OnBeforeParse) -> Result<()> {
+  // Fetch the input source code.
+  let input_source_code = handle.input_source_code()?;
+
+  // Get the Loader for the file
+  let loader = handle.output_loader();
+
+
+  let output_source_code = input_source_code.replace("foo", "bar");
+
+  handle.set_output_source_code(output_source_code, BunLoader::BUN_LOADER_JSX);
+
+  Ok(())
+}
+```
+
+Internally, the `#[bun]` macro wraps your code and declares a C ABI function which implements
+the function signature of `onBeforeParse` plugins in Bun's C API for bundler plugins.
+
+Then it calls your code. The wrapper looks _roughly_ like this:
+
+```rs
+pub extern "C" fn replace_foo_with_bar(
   args: *const bun_native_plugin::sys::OnBeforeParseArguments,
   result: *mut bun_native_plugin::sys::OnBeforeParseResult,
 ) {
+  // The actual code you wrote is inlined here
+  fn __replace_foo_with_bar(handle: &mut OnBeforeParse) -> Result<()> {
+    // Fetch the input source code.
+    let input_source_code = handle.input_source_code()?;
+
+    // Get the Loader for the file
+    let loader = handle.output_loader();
+
+
+    let output_source_code = input_source_code.replace("foo", "bar");
+
+    handle.set_output_source_code(output_source_code, BunLoader::BUN_LOADER_JSX);
+
+    Ok(())
+  }
+
   let args = unsafe { &*args };

-  // This returns a handle which is a safe wrapper over the raw
-  // C API.
   let mut handle = match OnBeforeParse::from_raw(args, result) {
     Ok(handle) => handle,
     Err(_) => {
-      // `OnBeforeParse::from_raw` handles error logging
-      // so it fine to return here.
-      return;
-    }
-  };
-
-  let input_source_code = match handle.input_source_code() {
-    Ok(source_str) => source_str,
-    Err(_) => {
-      // If we encounter an error, we must log it so that
-      // Bun knows this plugin failed.
-      handle.log_error("Failed to fetch source code!");
       return;
     }
   };

-  let loader = handle.output_loader();
-  let output_source_code = source_str.replace("foo", "bar");
-  handle.set_output_source_code(output_source_code, loader);
+  if let Err(e) = __replace_foo_with_bar(&mut handle) {
+    handle.log_error(&e.to_string());
+  }
 }
 ```

-Then compile this NAPI module. If you using napi-rs, the `package.json` should have a `build` script you can run:
+Now, let's compile this NAPI module. If you're using napi-rs, the `package.json` should have a `build` script you can run:

 ```bash
 bun run build
 ```
@@ -107,7 +130,7 @@ const result = await Bun.build({
       // We tell it to use function we implemented inside of our `lib.rs` code.
       build.onBeforeParse(
         { filter: /\.ts/ },
-        { napiModule, symbol: "on_before_parse_plugin_impl" },
+        { napiModule, symbol: "replace_foo_with_bar" },
       );
     },
   },
@@ -119,19 +142,14 @@

 ### Error handling and panics

-It is highly recommended to avoid panicking as this will crash the runtime. Instead, you must handle errors and log them:
+If the `Result` returned by your plugin function is an `Err(...)`, the error will be logged to Bun's bundler.

-```rs
-let input_source_code = match handle.input_source_code() {
-  Ok(source_str) => source_str,
-  Err(_) => {
-    // If we encounter an error, we must log it so that
-    // Bun knows this plugin failed.
-    handle.log_error("Failed to fetch source code!");
-    return;
-  }
-};
-```
+It is highly advised that you return all errors and avoid `.unwrap()`'ing or `.expect()`'ing results.
+
+The `#[bun]` wrapper macro actually runs your code inside of a [`panic::catch_unwind`](https://doc.rust-lang.org/std/panic/fn.catch_unwind.html),
+which may catch _some_ panics but is **not guaranteed to catch all panics**.
+
+Therefore, it is recommended to **avoid panics at all costs**.

 ### Passing state to and from JS: `External`
@@ -199,41 +217,16 @@
 console.log("Total `foo`s encountered: ", pluginState.getFooCount());
 ```

 Finally, from the native implementation of your plugin, you can extract the external:

 ```rs
-pub extern "C" fn on_before_parse_plugin_impl(
-  args: *const bun_native_plugin::sys::OnBeforeParseArguments,
-  result: *mut bun_native_plugin::sys::OnBeforeParseResult,
-) {
-  let args = unsafe { &*args };
-
-  let mut handle = match OnBeforeParse::from_raw(args, result) {
-    Ok(handle) => handle,
-    Err(_) => {
-      // `OnBeforeParse::from_raw` handles error logging
-      // so it fine to return here.
-      return;
-    }
-  };
-
-  let plugin_state: &PluginState =
+#[bun]
+pub fn on_before_parse_plugin_impl(handle: &mut OnBeforeParse) -> Result<()> {
   // This operation is only safe if you pass in an external when registering the plugin.
   // If you don't, this could lead to a segfault or access of undefined memory.
-  match unsafe { handle.external().and_then(|state| state.ok_or(Error::Unknown)) } {
-    Ok(state) => state,
-    Err(_) => {
-      handle.log_error("Failed to get external!");
-      return;
-    }
-  };
+  let plugin_state: &PluginState =
+    unsafe { handle.external().and_then(|state| state.ok_or(Error::Unknown))? };

   // Fetch our source code again
-  let input_source_code = match handle.input_source_code() {
-    Ok(source_str) => source_str,
-    Err(_) => {
-      handle.log_error("Failed to fetch source code!");
-      return;
-    }
-  };
+  let input_source_code = handle.input_source_code()?;

   // Count the number of `foo`s and add it to our state
   let foo_count = input_source_code.matches("foo").count() as u32;
@@ -243,6 +236,6 @@

 ### Concurrency

-Your `extern "C"` plugin function can be called _on any thread_ at _any time_ and _multiple times at once_.
+Your plugin function can be called _on any thread_ at _any time_ and possibly _multiple times at once_.

-Therefore, you must design any state management to be threadsafe
+Therefore, you must design any state management to be threadsafe.
diff --git a/packages/bun-native-plugin-rs/bun-macro/Cargo.toml b/packages/bun-native-plugin-rs/bun-macro/Cargo.toml
new file mode 100644
index 00000000000000..f7491dee4b287b
--- /dev/null
+++ b/packages/bun-native-plugin-rs/bun-macro/Cargo.toml
@@ -0,0 +1,14 @@
+
+[package]
+name = "bun-macro"
+version = "0.1.0"
+edition = "2021"
+
+[lib]
+proc-macro = true
+
+[dependencies]
+syn = { version = "2.0", features = ["full"] }
+quote = "1.0"
+napi = "2.16.13"
+anyhow = "1.0.94"
\ No newline at end of file
diff --git a/packages/bun-native-plugin-rs/bun-macro/src/lib.rs b/packages/bun-native-plugin-rs/bun-macro/src/lib.rs
new file mode 100644
index 00000000000000..efedbef86e2f15
--- /dev/null
+++ b/packages/bun-native-plugin-rs/bun-macro/src/lib.rs
@@ -0,0 +1,54 @@
+use proc_macro::TokenStream;
+use quote::quote;
+use syn::{parse_macro_input, Ident, ItemFn};
+
+#[proc_macro_attribute]
+pub fn bun(_attr: TokenStream, item: TokenStream) -> TokenStream {
+    // Parse the input function
+    let input_fn = parse_macro_input!(item as ItemFn);
+    let fn_name = &input_fn.sig.ident;
+    let inner_fn_name = Ident::new(&format!("__{}", fn_name), fn_name.span());
+    let fn_block = &input_fn.block;
+
+    // Generate the wrapped function
+    let output = quote! {
+        #[no_mangle]
+        pub unsafe extern "C" fn #fn_name(
+            args_raw: *mut bun_native_plugin::sys::OnBeforeParseArguments,
+            result: *mut bun_native_plugin::sys::OnBeforeParseResult,
+        ) {
+            fn #inner_fn_name(handle: &mut bun_native_plugin::OnBeforeParse) -> Result<()> {
+                #fn_block
+            }
+
+            let args_path = unsafe { (*args_raw).path_ptr };
+            let args_path_len = unsafe { (*args_raw).path_len };
+            let result_pointer = result;
+
+            let result = std::panic::catch_unwind(|| {
+                let mut handle = match bun_native_plugin::OnBeforeParse::from_raw(args_raw, result) {
+                    Ok(handle) => handle,
+                    Err(_) => return,
+                };
+                if let Err(e) = #inner_fn_name(&mut handle) {
+                    handle.log_error(&format!("{:?}", e));
+                }
+            });
+
+            if let Err(e) = result {
+                let msg_string = format!("Plugin crashed: {:?}", e);
+                let mut log_options = bun_native_plugin::log_from_message_and_level(
+                    &msg_string,
+                    bun_native_plugin::sys::BunLogLevel::BUN_LOG_LEVEL_ERROR,
+                    args_path,
+                    args_path_len,
+                );
+                unsafe {
+                    ((*result_pointer).log.unwrap())(args_raw, &mut log_options);
+                }
+            }
+        }
+    };
+
+    output.into()
+}
diff --git a/packages/bun-native-plugin-rs/src/lib.rs b/packages/bun-native-plugin-rs/src/lib.rs
index 3e589e3bcd3e39..1a8f85941cbab3 100644
--- a/packages/bun-native-plugin-rs/src/lib.rs
+++ b/packages/bun-native-plugin-rs/src/lib.rs
@@ -244,10 +244,11 @@
 //! Your `extern "C"` plugin function can be called _on any thread_ at _any time_ and _multiple times at once_.
 //!
 //! Therefore, you must design any state management to be threadsafe
-
 #![allow(non_upper_case_globals)]
 #![allow(non_camel_case_types)]
 #![allow(non_snake_case)]

+pub use anyhow;
+pub use bun_macro::bun;

 #[repr(transparent)]
 pub struct BunPluginName(*const c_char);
@@ -261,7 +262,7 @@ impl BunPluginName {
 #[macro_export]
 macro_rules! define_bun_plugin {
     ($name:expr) => {
-        pub static BUN_PLUGIN_NAME_STRING: &str = $name;
+        pub static BUN_PLUGIN_NAME_STRING: &str = concat!($name, "\0");

         #[no_mangle]
         pub static BUN_PLUGIN_NAME: bun_native_plugin::BunPluginName =
@@ -279,7 +280,9 @@ use std::{
     borrow::Cow,
     cell::UnsafeCell,
     ffi::{c_char, c_void},
+    marker::PhantomData,
     str::Utf8Error,
+    sync::PoisonError,
 };

 pub mod sys {
@@ -323,7 +326,7 @@ impl Drop for SourceCodeContext {
 pub type BunLogLevel = sys::BunLogLevel;
 pub type BunLoader = sys::BunLoader;

-fn get_from_raw_str<'a>(ptr: *const u8, len: usize) -> Result<Cow<'a, str>> {
+fn get_from_raw_str<'a>(ptr: *const u8, len: usize) -> PluginResult<Cow<'a, str>> {
     let slice: &'a [u8] = unsafe { std::slice::from_raw_parts(ptr, len) };

     // Windows allows invalid UTF-16 strings in the filesystem. These get converted to WTF-8 in Zig.
@@ -351,9 +354,31 @@ pub enum Error {
     IncompatiblePluginVersion,
     ExternalTypeMismatch,
     Unknown,
+    LockPoisoned,
+}
+
+pub type PluginResult<T> = std::result::Result<T, Error>;
+pub type Result<T> = anyhow::Result<T>;
+
+impl std::fmt::Display for Error {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{:?}", self)
+    }
 }

-pub type Result<T> = std::result::Result<T, Error>;
+impl std::error::Error for Error {
+    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
+        None
+    }
+
+    fn description(&self) -> &str {
+        "description() is deprecated; use Display"
+    }
+
+    fn cause(&self) -> Option<&dyn std::error::Error> {
+        self.source()
+    }
+}

 impl From<Utf8Error> for Error {
     fn from(value: Utf8Error) -> Self {
@@ -361,6 +386,12 @@
     }
 }

+impl<T> From<PoisonError<T>> for Error {
+    fn from(_: PoisonError<T>) -> Self {
+        Self::LockPoisoned
+    }
+}
+
 /// A safe handle for the arguments + result struct for the
 /// `OnBeforeParse` bundler lifecycle hook.
 ///
@@ -370,9 +401,10 @@
 ///
 /// To initialize this struct, see the `from_raw` method.
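The `LockPoisoned` variant and the `PoisonError` conversion above exist so plugins that guard shared state with a `Mutex` can propagate lock failures with `?`. A hedged sketch — the `Mutex<u32>` external and the hook name are assumptions, registered from JS as the crate README describes:

```rs
use std::sync::Mutex;
use bun_native_plugin::{bun, Error, OnBeforeParse, Result};

// Counts occurrences of "foo" into shared state passed in as an external.
#[bun]
pub fn count_foos(handle: &mut OnBeforeParse) -> Result<()> {
    // Safety: only valid if a Mutex<u32> external was registered for this plugin.
    if let Some(counter) = unsafe { handle.external::<Mutex<u32>>()? } {
        let source = handle.input_source_code()?;
        // A poisoned lock becomes Error::LockPoisoned via the From impl above.
        let mut guard = counter.lock().map_err(Error::from)?;
        *guard += source.matches("foo").count() as u32;
    }
    Ok(())
}
```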
 pub struct OnBeforeParse<'a> {
-    args_raw: &'a sys::OnBeforeParseArguments,
+    pub args_raw: *mut sys::OnBeforeParseArguments,
     result_raw: *mut sys::OnBeforeParseResult,
     compilation_context: *mut SourceCodeContext,
+    __phantom: PhantomData<&'a ()>,
 }

 impl<'a> OnBeforeParse<'a> {
@@ -394,10 +426,10 @@
     /// }
     /// ```
     pub fn from_raw(
-        args: &'a sys::OnBeforeParseArguments,
+        args: *mut sys::OnBeforeParseArguments,
         result: *mut sys::OnBeforeParseResult,
-    ) -> Result<Self> {
-        if args.__struct_size < std::mem::size_of::<sys::OnBeforeParseArguments>()
+    ) -> PluginResult<Self> {
+        if unsafe { (*args).__struct_size } < std::mem::size_of::<sys::OnBeforeParseArguments>()
             || unsafe { (*result).__struct_size } < std::mem::size_of::<sys::OnBeforeParseResult>()
         {
             let message = "This plugin is not compatible with the current version of Bun.";
             let mut log_options = sys::BunLogOptions {
                 __struct_size: std::mem::size_of::<sys::BunLogOptions>(),
                 message_ptr: message.as_ptr(),
                 message_len: message.len(),
-                path_ptr: args.path_ptr,
-                path_len: args.path_len,
+                path_ptr: unsafe { (*args).path_ptr },
+                path_len: unsafe { (*args).path_len },
                 source_line_text_ptr: std::ptr::null(),
                 source_line_text_len: 0,
                 level: BunLogLevel::BUN_LOG_LEVEL_ERROR as i8,
@@ -426,15 +458,21 @@
             args_raw: args,
             result_raw: result,
             compilation_context: std::ptr::null_mut() as *mut _,
+            __phantom: Default::default(),
         })
     }

-    pub fn path(&self) -> Result<Cow<'a, str>> {
-        get_from_raw_str(self.args_raw.path_ptr, self.args_raw.path_len)
+    pub fn path(&self) -> PluginResult<Cow<'a, str>> {
+        unsafe { get_from_raw_str((*self.args_raw).path_ptr, (*self.args_raw).path_len) }
     }

-    pub fn namespace(&self) -> Result<Cow<'a, str>> {
-        get_from_raw_str(self.args_raw.namespace_ptr, self.args_raw.namespace_len)
+    pub fn namespace(&self) -> PluginResult<Cow<'a, str>> {
+        unsafe {
+            get_from_raw_str(
+                (*self.args_raw).namespace_ptr,
+                (*self.args_raw).namespace_len,
+            )
+        }
     }

     /// Get the external object from the `OnBeforeParse` arguments.
@@ -485,12 +523,13 @@
     ///   },
     /// };
     /// ```
-    pub unsafe fn external<T: 'static>(&self) -> Result<Option<&T>> {
-        if self.args_raw.external.is_null() {
+    pub unsafe fn external<T: 'static>(&self) -> PluginResult<Option<&T>> {
+        if unsafe { (*self.args_raw).external.is_null() } {
             return Ok(None);
         }

-        let external: *mut TaggedObject<T> = self.args_raw.external as *mut TaggedObject<T>;
+        let external: *mut TaggedObject<T> =
+            unsafe { (*self.args_raw).external as *mut TaggedObject<T> };

         unsafe {
             if (*external).type_id != TypeId::of::<T>() {
@@ -505,12 +544,13 @@
     ///
     /// This is unsafe as you must ensure that no other invocation of the plugin
     /// simultaneously holds a mutable reference to the external.
-    pub unsafe fn external_mut<T: 'static>(&mut self) -> Result<Option<&mut T>> {
-        if self.args_raw.external.is_null() {
+    pub unsafe fn external_mut<T: 'static>(&mut self) -> PluginResult<Option<&mut T>> {
+        if unsafe { (*self.args_raw).external.is_null() } {
             return Ok(None);
         }

-        let external: *mut TaggedObject<T> = self.args_raw.external as *mut TaggedObject<T>;
+        let external: *mut TaggedObject<T> =
+            unsafe { (*self.args_raw).external as *mut TaggedObject<T> };

         unsafe {
             if (*external).type_id != TypeId::of::<T>() {
@@ -525,9 +565,12 @@
     ///
     /// On Windows, this function may return an `Err(Error::Utf8(...))` if the
     /// source code contains invalid UTF-8.
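Since `from_raw` now takes raw `*mut` pointers, a hand-written hook (without the `#[bun]` macro) would drive this handle roughly as in the sketch below. This mirrors the macro expansion shown earlier in this diff; the exported symbol name is hypothetical:

```rs
use bun_native_plugin::{sys, BunLoader, OnBeforeParse};

// Hypothetical hand-rolled hook; #[bun] generates roughly this shape for you.
#[no_mangle]
pub unsafe extern "C" fn my_manual_hook(
    args: *mut sys::OnBeforeParseArguments,
    result: *mut sys::OnBeforeParseResult,
) {
    // `from_raw` logs the version-mismatch error itself, so returning is enough.
    let mut handle = match OnBeforeParse::from_raw(args, result) {
        Ok(handle) => handle,
        Err(_) => return,
    };

    match handle.input_source_code() {
        Ok(source) => {
            let replaced = source.replace("foo", "bar");
            handle.set_output_source_code(replaced, BunLoader::BUN_LOADER_JSX);
        }
        // Errors must be logged so Bun knows the plugin failed.
        Err(_) => handle.log_error("Failed to fetch source code"),
    }
}
```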
-    pub fn input_source_code(&self) -> Result<Cow<'a, str>> {
+    pub fn input_source_code(&self) -> PluginResult<Cow<'a, str>> {
         let fetch_result = unsafe {
-            ((*self.result_raw).fetchSourceCode.unwrap())(self.args_raw, self.result_raw)
+            ((*self.result_raw).fetchSourceCode.unwrap())(
+                self.args_raw as *const _,
+                self.result_raw,
+            )
         };

         if fetch_result != 0 {
@@ -587,7 +630,7 @@
     }

     /// Set the output loader for the current file.
-    pub fn set_output_loader(&self, loader: BunLogLevel) {
+    pub fn set_output_loader(&self, loader: BunLoader) {
         // SAFETY: We don't hand out mutable references to `result_raw` so dereferencing it is safe.
         unsafe {
             (*self.result_raw).loader = loader as u8;
@@ -606,22 +649,36 @@

     /// Log a message with the given level.
     pub fn log(&self, message: &str, level: BunLogLevel) {
-        let mut log_options = sys::BunLogOptions {
-            __struct_size: std::mem::size_of::<sys::BunLogOptions>(),
-            message_ptr: message.as_ptr(),
-            message_len: message.len(),
-            path_ptr: self.args_raw.path_ptr,
-            path_len: self.args_raw.path_len,
-            source_line_text_ptr: std::ptr::null(),
-            source_line_text_len: 0,
-            level: level as i8,
-            line: 0,
-            lineEnd: 0,
-            column: 0,
-            columnEnd: 0,
-        };
+        let mut log_options = log_from_message_and_level(
+            message,
+            level,
+            unsafe { (*self.args_raw).path_ptr },
+            unsafe { (*self.args_raw).path_len },
+        );
         unsafe {
             ((*self.result_raw).log.unwrap())(self.args_raw, &mut log_options);
         }
     }
 }
+
+pub fn log_from_message_and_level(
+    message: &str,
+    level: BunLogLevel,
+    path: *const u8,
+    path_len: usize,
+) -> sys::BunLogOptions {
+    sys::BunLogOptions {
+        __struct_size: std::mem::size_of::<sys::BunLogOptions>(),
+        message_ptr: message.as_ptr(),
+        message_len: message.len(),
+        path_ptr: path as *const _,
+        path_len,
+        source_line_text_ptr: std::ptr::null(),
+        source_line_text_len: 0,
+        level: level as i8,
+        line: 0,
+        lineEnd: 0,
+        column: 0,
+        columnEnd: 0,
+    }
+}
diff --git a/scripts/utils.mjs b/scripts/utils.mjs
index df0755b284cbd4..36ca734eb34f48 100755
--- a/scripts/utils.mjs
+++ b/scripts/utils.mjs
@@ -2237,7 +2237,7 @@ export async function waitForPort(options) {
   return cause;
 }
 /**
- * @returns {Promise<string>}
+ * @returns {Promise<number>}
  */
 export async function getCanaryRevision() {
   if (isPullRequest() || isFork()) {
diff --git a/src/allocators.zig b/src/allocators.zig
index e43c11ed074ee0..32b3e5fd8a21aa 100644
--- a/src/allocators.zig
+++ b/src/allocators.zig
@@ -4,6 +4,7 @@ const FeatureFlags = @import("./feature_flags.zig");
 const Environment = @import("./env.zig");
 const FixedBufferAllocator = std.heap.FixedBufferAllocator;
 const bun = @import("root").bun;
+const OOM = bun.OOM;

 pub fn isSliceInBufferT(comptime T: type, slice: []const T, buffer: []const T) bool {
     return (@intFromPtr(buffer.ptr) <= @intFromPtr(slice.ptr) and
@@ -328,7 +329,7 @@ pub fn BSSStringList(comptime _count: usize, comptime _item_length: usize) type
             return @constCast(slice);
         }

-        pub fn appendMutable(self: *Self, comptime AppendType: type, _value: AppendType) ![]u8 {
+        pub fn appendMutable(self: *Self, comptime AppendType: type, _value: AppendType) OOM![]u8 {
             const appended = try @call(bun.callmod_inline, append, .{ self, AppendType, _value });
             return @constCast(appended);
         }
@@ -337,17 +338,17 @@ pub fn BSSStringList(comptime _count: usize, comptime _item_length: usize) type
             return try self.appendMutable(EmptyType, EmptyType{ .len = len });
         }

-        pub fn printWithType(self: *Self, comptime fmt: []const u8, comptime Args: type, args: Args) ![]const u8 {
+        pub fn printWithType(self: *Self, comptime fmt:
[]const u8, comptime Args: type, args: Args) OOM![]const u8 { var buf = try self.appendMutable(EmptyType, EmptyType{ .len = std.fmt.count(fmt, args) + 1 }); buf[buf.len - 1] = 0; return std.fmt.bufPrint(buf.ptr[0 .. buf.len - 1], fmt, args) catch unreachable; } - pub fn print(self: *Self, comptime fmt: []const u8, args: anytype) ![]const u8 { + pub fn print(self: *Self, comptime fmt: []const u8, args: anytype) OOM![]const u8 { return try printWithType(self, fmt, @TypeOf(args), args); } - pub fn append(self: *Self, comptime AppendType: type, _value: AppendType) ![]const u8 { + pub fn append(self: *Self, comptime AppendType: type, _value: AppendType) OOM![]const u8 { self.mutex.lock(); defer self.mutex.unlock(); @@ -355,7 +356,7 @@ pub fn BSSStringList(comptime _count: usize, comptime _item_length: usize) type } threadlocal var lowercase_append_buf: bun.PathBuffer = undefined; - pub fn appendLowerCase(self: *Self, comptime AppendType: type, _value: AppendType) ![]const u8 { + pub fn appendLowerCase(self: *Self, comptime AppendType: type, _value: AppendType) OOM![]const u8 { self.mutex.lock(); defer self.mutex.unlock(); @@ -374,7 +375,7 @@ pub fn BSSStringList(comptime _count: usize, comptime _item_length: usize) type self: *Self, comptime AppendType: type, _value: AppendType, - ) ![]const u8 { + ) OOM![]const u8 { const value_len: usize = brk: { switch (comptime AppendType) { EmptyType, []const u8, []u8, [:0]const u8, [:0]u8 => { diff --git a/src/bun.js/bindings/BunProcess.cpp b/src/bun.js/bindings/BunProcess.cpp index d4c21b6768274f..62cc3fa6c1f55c 100644 --- a/src/bun.js/bindings/BunProcess.cpp +++ b/src/bun.js/bindings/BunProcess.cpp @@ -428,12 +428,23 @@ JSC_DEFINE_HOST_FUNCTION(Process_functionDlopen, EncodedJSValue exportsValue = JSC::JSValue::encode(exports); JSC::JSValue resultValue = JSValue::decode(napi_register_module_v1(globalObject, exportsValue)); - // TODO: think about the finalizer here - // currently we do not dealloc napi modules so we don't have to worry about it right now - auto* meta = new Bun::NapiModuleMeta(globalObject->m_pendingNapiModuleDlopenHandle); - Bun::NapiExternal* napi_external = Bun::NapiExternal::create(vm, globalObject->NapiExternalStructure(), meta, nullptr, nullptr); - bool success = resultValue.getObject()->putDirect(vm, WebCore::builtinNames(vm).napiDlopenHandlePrivateName(), napi_external, JSC::PropertyAttribute::DontDelete | JSC::PropertyAttribute::ReadOnly); - ASSERT(success); + if (auto resultObject = resultValue.getObject()) { +#if OS(DARWIN) || OS(LINUX) + // If this is a native bundler plugin we want to store the handle from dlopen + // as we are going to call `dlsym()` on it later to get the plugin implementation. 
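The symbol probed just below is the one `define_bun_plugin!` exports from the Rust side (see the macro earlier in this diff). Roughly, the expansion looks like this sketch — the `BunPluginName` constructor name is an assumption, since the macro body is truncated here:

```rs
// Roughly what define_bun_plugin!("my-plugin") expands to (constructor assumed):
pub static BUN_PLUGIN_NAME_STRING: &str = concat!("my-plugin", "\0");

// The NUL terminator above lets the C++ side read this as a C string after
// resolving the `BUN_PLUGIN_NAME` symbol with dlsym()/GetProcAddress().
#[no_mangle]
pub static BUN_PLUGIN_NAME: bun_native_plugin::BunPluginName =
    bun_native_plugin::BunPluginName::new(BUN_PLUGIN_NAME_STRING.as_ptr() as *const _);
```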
+ const char** pointer_to_plugin_name = (const char**)dlsym(handle, "BUN_PLUGIN_NAME"); +#elif OS(WINDOWS) + const char** pointer_to_plugin_name = (const char**)GetProcAddress(handle, "BUN_PLUGIN_NAME"); +#endif + if (pointer_to_plugin_name) { + // TODO: think about the finalizer here + // currently we do not dealloc napi modules so we don't have to worry about it right now + auto* meta = new Bun::NapiModuleMeta(globalObject->m_pendingNapiModuleDlopenHandle); + Bun::NapiExternal* napi_external = Bun::NapiExternal::create(vm, globalObject->NapiExternalStructure(), meta, nullptr, nullptr); + bool success = resultObject->putDirect(vm, WebCore::builtinNames(vm).napiDlopenHandlePrivateName(), napi_external, JSC::PropertyAttribute::DontDelete | JSC::PropertyAttribute::ReadOnly); + ASSERT(success); + } + } RETURN_IF_EXCEPTION(scope, {}); diff --git a/src/bun.js/bindings/ErrorCode.cpp b/src/bun.js/bindings/ErrorCode.cpp index 2d9142401f7725..26d7664ef520ad 100644 --- a/src/bun.js/bindings/ErrorCode.cpp +++ b/src/bun.js/bindings/ErrorCode.cpp @@ -47,6 +47,9 @@ static JSC::JSObject* createErrorPrototype(JSC::VM& vm, JSC::JSGlobalObject* glo case JSC::ErrorType::Error: prototype = JSC::constructEmptyObject(globalObject, globalObject->errorPrototype()); break; + case JSC::ErrorType::URIError: + prototype = JSC::constructEmptyObject(globalObject, globalObject->m_URIErrorStructure.prototype(globalObject)); + break; default: { RELEASE_ASSERT_NOT_REACHED_WITH_MESSAGE("TODO: Add support for more error types"); break; diff --git a/src/bun.js/bindings/ErrorCode.ts b/src/bun.js/bindings/ErrorCode.ts index a2184f7215f807..7460631180416f 100644 --- a/src/bun.js/bindings/ErrorCode.ts +++ b/src/bun.js/bindings/ErrorCode.ts @@ -1,84 +1,86 @@ +// used by generate-node-errors.ts type ErrorCodeMapping = Array< [ /** error.code */ string, /** Constructor **/ typeof TypeError | typeof RangeError | typeof Error | typeof SyntaxError, - /** error.name */ + /** error.name. 
Defaults to `Constructor.name` (that is, mapping[1].name */ string, ] >; export default [ ["ABORT_ERR", Error, "AbortError"], - ["ERR_CRYPTO_INVALID_DIGEST", TypeError, "TypeError"], - ["ERR_ENCODING_INVALID_ENCODED_DATA", TypeError, "TypeError"], - ["ERR_INVALID_ARG_TYPE", TypeError, "TypeError"], - ["ERR_INVALID_ARG_VALUE", TypeError, "TypeError"], - ["ERR_INVALID_PROTOCOL", TypeError, "TypeError"], - ["ERR_INVALID_THIS", TypeError, "TypeError"], - ["ERR_IPC_CHANNEL_CLOSED", Error, "Error"], - ["ERR_IPC_DISCONNECTED", Error, "Error"], - ["ERR_MISSING_ARGS", TypeError, "TypeError"], - ["ERR_OUT_OF_RANGE", RangeError, "RangeError"], - ["ERR_PARSE_ARGS_INVALID_OPTION_VALUE", TypeError, "TypeError"], - ["ERR_PARSE_ARGS_UNEXPECTED_POSITIONAL", TypeError, "TypeError"], - ["ERR_PARSE_ARGS_UNKNOWN_OPTION", TypeError, "TypeError"], - ["ERR_SERVER_NOT_RUNNING", Error, "Error"], - ["ERR_SOCKET_BAD_TYPE", TypeError, "TypeError"], - ["ERR_STREAM_ALREADY_FINISHED", TypeError, "TypeError"], - ["ERR_STREAM_CANNOT_PIPE", TypeError, "TypeError"], - ["ERR_STREAM_DESTROYED", TypeError, "TypeError"], - ["ERR_STREAM_NULL_VALUES", TypeError, "TypeError"], - ["ERR_STREAM_WRITE_AFTER_END", TypeError, "TypeError"], - ["ERR_ZLIB_INITIALIZATION_FAILED", Error, "Error"], - ["ERR_STRING_TOO_LONG", Error, "Error"], - ["ERR_CRYPTO_SCRYPT_INVALID_PARAMETER", Error, "Error"], - ["ERR_CRYPTO_INVALID_SCRYPT_PARAMS", RangeError, "RangeError"], - ["MODULE_NOT_FOUND", Error, "Error"], - ["ERR_ILLEGAL_CONSTRUCTOR", TypeError, "TypeError"], - ["ERR_INVALID_URL", TypeError, "TypeError"], - ["ERR_BUFFER_TOO_LARGE", RangeError, "RangeError"], - ["ERR_BROTLI_INVALID_PARAM", RangeError, "RangeError"], - ["ERR_UNKNOWN_ENCODING", TypeError, "TypeError"], - ["ERR_INVALID_STATE", Error, "Error"], - ["ERR_BUFFER_OUT_OF_BOUNDS", RangeError, "RangeError"], - ["ERR_UNKNOWN_SIGNAL", TypeError, "TypeError"], - ["ERR_SOCKET_BAD_PORT", RangeError, "RangeError"], + ["ERR_CRYPTO_INVALID_DIGEST", TypeError], + ["ERR_ENCODING_INVALID_ENCODED_DATA", TypeError], + ["ERR_INVALID_ARG_TYPE", TypeError], + ["ERR_INVALID_ARG_VALUE", TypeError], + ["ERR_INVALID_PROTOCOL", TypeError], + ["ERR_INVALID_THIS", TypeError], + ["ERR_IPC_CHANNEL_CLOSED", Error], + ["ERR_IPC_DISCONNECTED", Error], + ["ERR_MISSING_ARGS", TypeError], + ["ERR_OUT_OF_RANGE", RangeError], + ["ERR_PARSE_ARGS_INVALID_OPTION_VALUE", TypeError], + ["ERR_PARSE_ARGS_UNEXPECTED_POSITIONAL", TypeError], + ["ERR_PARSE_ARGS_UNKNOWN_OPTION", TypeError], + ["ERR_SERVER_NOT_RUNNING", Error], + ["ERR_SOCKET_BAD_TYPE", TypeError], + ["ERR_STREAM_ALREADY_FINISHED", TypeError], + ["ERR_STREAM_CANNOT_PIPE", TypeError], + ["ERR_STREAM_DESTROYED", TypeError], + ["ERR_STREAM_NULL_VALUES", TypeError], + ["ERR_STREAM_WRITE_AFTER_END", TypeError], + ["ERR_ZLIB_INITIALIZATION_FAILED", Error], + ["ERR_STRING_TOO_LONG", Error], + ["ERR_CRYPTO_SCRYPT_INVALID_PARAMETER", Error], + ["ERR_CRYPTO_INVALID_SCRYPT_PARAMS", RangeError], + ["MODULE_NOT_FOUND", Error], + ["ERR_ILLEGAL_CONSTRUCTOR", TypeError], + ["ERR_INVALID_URL", TypeError], + ["ERR_BUFFER_TOO_LARGE", RangeError], + ["ERR_BROTLI_INVALID_PARAM", RangeError], + ["ERR_UNKNOWN_ENCODING", TypeError], + ["ERR_INVALID_STATE", Error], + ["ERR_BUFFER_OUT_OF_BOUNDS", RangeError], + ["ERR_UNKNOWN_SIGNAL", TypeError], + ["ERR_SOCKET_BAD_PORT", RangeError], ["ERR_STREAM_RELEASE_LOCK", Error, "AbortError"], + ["ERR_INVALID_URI", URIError, "URIError"], // Bun-specific - ["ERR_FORMDATA_PARSE_ERROR", TypeError, "TypeError"], - ["ERR_BODY_ALREADY_USED", Error, "Error"], - 
["ERR_STREAM_WRAP", Error, "Error"], - ["ERR_BORINGSSL", Error, "Error"], + ["ERR_FORMDATA_PARSE_ERROR", TypeError], + ["ERR_BODY_ALREADY_USED", Error], + ["ERR_STREAM_WRAP", Error], + ["ERR_BORINGSSL", Error], //NET - ["ERR_SOCKET_CLOSED_BEFORE_CONNECTION", Error, "Error"], - ["ERR_SOCKET_CLOSED", Error, "Error"], + ["ERR_SOCKET_CLOSED_BEFORE_CONNECTION", Error], + ["ERR_SOCKET_CLOSED", Error], //HTTP2 - ["ERR_INVALID_HTTP_TOKEN", TypeError, "TypeError"], - ["ERR_HTTP2_PSEUDOHEADER_NOT_ALLOWED", TypeError, "TypeError"], - ["ERR_HTTP2_SEND_FILE", Error, "Error"], - ["ERR_HTTP2_SEND_FILE_NOSEEK", Error, "Error"], + ["ERR_INVALID_HTTP_TOKEN", TypeError], + ["ERR_HTTP2_PSEUDOHEADER_NOT_ALLOWED", TypeError], + ["ERR_HTTP2_SEND_FILE", Error], + ["ERR_HTTP2_SEND_FILE_NOSEEK", Error], ["ERR_HTTP2_HEADERS_SENT", Error, "ERR_HTTP2_HEADERS_SENT"], - ["ERR_HTTP2_INFO_STATUS_NOT_ALLOWED", RangeError, "RangeError"], - ["ERR_HTTP2_STATUS_INVALID", RangeError, "RangeError"], - ["ERR_HTTP2_INVALID_PSEUDOHEADER", TypeError, "TypeError"], - ["ERR_HTTP2_INVALID_HEADER_VALUE", TypeError, "TypeError"], - ["ERR_HTTP2_PING_CANCEL", Error, "Error"], - ["ERR_HTTP2_STREAM_ERROR", Error, "Error"], - ["ERR_HTTP2_INVALID_SINGLE_VALUE_HEADER", TypeError, "TypeError"], - ["ERR_HTTP2_SESSION_ERROR", Error, "Error"], - ["ERR_HTTP2_INVALID_SESSION", Error, "Error"], - ["ERR_HTTP2_INVALID_HEADERS", Error, "Error"], - ["ERR_HTTP2_PING_LENGTH", RangeError, "RangeError"], - ["ERR_HTTP2_INVALID_STREAM", Error, "Error"], - ["ERR_HTTP2_TRAILERS_ALREADY_SENT", Error, "Error"], - ["ERR_HTTP2_TRAILERS_NOT_READY", Error, "Error"], - ["ERR_HTTP2_PAYLOAD_FORBIDDEN", Error, "Error"], - ["ERR_HTTP2_NO_SOCKET_MANIPULATION", Error, "Error"], - ["ERR_HTTP2_SOCKET_UNBOUND", Error, "Error"], - ["ERR_HTTP2_ERROR", Error, "Error"], - ["ERR_HTTP2_OUT_OF_STREAMS", Error, "Error"], + ["ERR_HTTP2_INFO_STATUS_NOT_ALLOWED", RangeError], + ["ERR_HTTP2_STATUS_INVALID", RangeError], + ["ERR_HTTP2_INVALID_PSEUDOHEADER", TypeError], + ["ERR_HTTP2_INVALID_HEADER_VALUE", TypeError], + ["ERR_HTTP2_PING_CANCEL", Error], + ["ERR_HTTP2_STREAM_ERROR", Error], + ["ERR_HTTP2_INVALID_SINGLE_VALUE_HEADER", TypeError], + ["ERR_HTTP2_SESSION_ERROR", Error], + ["ERR_HTTP2_INVALID_SESSION", Error], + ["ERR_HTTP2_INVALID_HEADERS", Error], + ["ERR_HTTP2_PING_LENGTH", RangeError], + ["ERR_HTTP2_INVALID_STREAM", Error], + ["ERR_HTTP2_TRAILERS_ALREADY_SENT", Error], + ["ERR_HTTP2_TRAILERS_NOT_READY", Error], + ["ERR_HTTP2_PAYLOAD_FORBIDDEN", Error], + ["ERR_HTTP2_NO_SOCKET_MANIPULATION", Error], + ["ERR_HTTP2_SOCKET_UNBOUND", Error], + ["ERR_HTTP2_ERROR", Error], + ["ERR_HTTP2_OUT_OF_STREAMS", Error], ] as ErrorCodeMapping; diff --git a/src/bun.js/bindings/JSBundlerPlugin.cpp b/src/bun.js/bindings/JSBundlerPlugin.cpp index fe1d28a8a48016..fea22a07a47b9d 100644 --- a/src/bun.js/bindings/JSBundlerPlugin.cpp +++ b/src/bun.js/bindings/JSBundlerPlugin.cpp @@ -38,6 +38,7 @@ namespace Bun { extern "C" int OnBeforeParsePlugin__isDone(void* context); +extern "C" void OnBeforeParseResult__reset(OnBeforeParseResult* result); #define WRAP_BUNDLER_PLUGIN(argName) jsDoubleNumber(std::bit_cast(reinterpret_cast(argName))) #define UNWRAP_BUNDLER_PLUGIN(callFrame) reinterpret_cast(std::bit_cast(callFrame->argument(0).asDouble())) @@ -61,21 +62,18 @@ void BundlerPlugin::NamespaceList::append(JSC::VM& vm, JSC::RegExp* filter, Stri if (nsGroup == nullptr) { namespaces.append(namespaceString); - groups.append(Vector {}); + groups.append(Vector {}); nsGroup = &groups.last(); index = 
namespaces.size() - 1; } - Yarr::RegularExpression regex( - StringView(filter->pattern()), - filter->flags()); - - nsGroup->append(WTFMove(regex)); + auto pattern = filter->pattern(); + auto filter_regexp = FilterRegExp(pattern, filter->flags()); + nsGroup->append(WTFMove(filter_regexp)); } static bool anyMatchesForNamespace(JSC::VM& vm, BundlerPlugin::NamespaceList& list, const BunString* namespaceStr, const BunString* path) { - constexpr bool usesPatternContextBuffer = false; if (list.fileNamespace.isEmpty() && list.namespaces.isEmpty()) return false; @@ -92,8 +90,7 @@ static bool anyMatchesForNamespace(JSC::VM& vm, BundlerPlugin::NamespaceList& li auto pathString = path->toWTFString(BunString::ZeroCopy); for (auto& filter : filters) { - Yarr::MatchingContextHolder regExpContext(vm, usesPatternContextBuffer, nullptr, Yarr::MatchFrom::CompilerThread); - if (filter.match(pathString) > -1) { + if (filter.match(vm, pathString)) { return true; } } @@ -243,18 +240,14 @@ void BundlerPlugin::NativePluginList::append(JSC::VM& vm, JSC::RegExp* filter, S if (nsGroup == nullptr) { namespaces.append(namespaceString); - groups.append(Vector {}); + groups.append(Vector {}); nsGroup = &groups.last(); index = namespaces.size() - 1; } - Yarr::RegularExpression regex( - StringView(filter->pattern()), - filter->flags()); - - NativeFilterRegexp nativeFilterRegexp = std::make_pair(regex, std::make_shared()); - - nsGroup->append(nativeFilterRegexp); + auto pattern = filter->pattern(); + auto filter_regexp = FilterRegExp(pattern, filter->flags()); + nsGroup->append(WTFMove(filter_regexp)); } if (index == std::numeric_limits::max()) { @@ -271,45 +264,54 @@ void BundlerPlugin::NativePluginList::append(JSC::VM& vm, JSC::RegExp* filter, S } } +bool BundlerPlugin::FilterRegExp::match(JSC::VM& vm, const String& path) +{ + WTF::Locker locker { lock }; + constexpr bool usesPatternContextBuffer = false; + Yarr::MatchingContextHolder regExpContext(vm, usesPatternContextBuffer, nullptr, Yarr::MatchFrom::CompilerThread); + return regex.match(path) != -1; +} + extern "C" void CrashHandler__setInsideNativePlugin(const char* plugin_name); -int BundlerPlugin::NativePluginList::call(JSC::VM& vm, BundlerPlugin* plugin, int* shouldContinue, void* bunContextPtr, const BunString* namespaceStr, const BunString* pathString, void* onBeforeParseArgs, void* onBeforeParseResult) +int BundlerPlugin::NativePluginList::call(JSC::VM& vm, BundlerPlugin* plugin, int* shouldContinue, void* bunContextPtr, const BunString* namespaceStr, const BunString* pathString, OnBeforeParseArguments* onBeforeParseArgs, OnBeforeParseResult* onBeforeParseResult) { unsigned index = 0; - const auto* group = this->group(namespaceStr->toWTFString(BunString::ZeroCopy), index); - if (group == nullptr) { + auto* groupPtr = this->group(namespaceStr->toWTFString(BunString::ZeroCopy), index); + if (groupPtr == nullptr) { return -1; } + auto& filters = *groupPtr; const auto& callbacks = index == std::numeric_limits::max() ? 
this->fileCallbacks : this->namespaceCallbacks[index]; - ASSERT_WITH_MESSAGE(callbacks.size() == group->size(), "Number of callbacks and filters must match"); + ASSERT_WITH_MESSAGE(callbacks.size() == filters.size(), "Number of callbacks and filters must match"); if (callbacks.isEmpty()) { return -1; } int count = 0; - constexpr bool usesPatternContextBuffer = false; const WTF::String& path = pathString->toWTFString(BunString::ZeroCopy); for (size_t i = 0, total = callbacks.size(); i < total && *shouldContinue; ++i) { - Yarr::MatchingContextHolder regExpContext(vm, usesPatternContextBuffer, nullptr, Yarr::MatchFrom::CompilerThread); - - // Need to lock the mutex to access the regular expression - { - std::lock_guard lock(*group->at(i).second); - if (group->at(i).first.match(path) > -1) { - Bun::NapiExternal* external = callbacks[i].external; - if (external) { - ((OnBeforeParseArguments*)(onBeforeParseArgs))->external = external->value(); - } - - JSBundlerPluginNativeOnBeforeParseCallback callback = callbacks[i].callback; - const char* name = callbacks[i].name ? callbacks[i].name : ""; - CrashHandler__setInsideNativePlugin(name); - callback(onBeforeParseArgs, onBeforeParseResult); - CrashHandler__setInsideNativePlugin(nullptr); - - count++; + + if (i > 0) { + OnBeforeParseResult__reset(onBeforeParseResult); + } + + if (filters[i].match(vm, path)) { + Bun::NapiExternal* external = callbacks[i].external; + if (external) { + onBeforeParseArgs->external = external->value(); + } else { + onBeforeParseArgs->external = nullptr; } + + JSBundlerPluginNativeOnBeforeParseCallback callback = callbacks[i].callback; + const char* name = callbacks[i].name ? callbacks[i].name : ""; + CrashHandler__setInsideNativePlugin(name); + callback(onBeforeParseArgs, onBeforeParseResult); + CrashHandler__setInsideNativePlugin(nullptr); + + count++; } if (OnBeforeParsePlugin__isDone(bunContextPtr)) { @@ -373,7 +375,7 @@ JSC_DEFINE_HOST_FUNCTION(jsBundlerPluginFunction_onBeforeParse, (JSC::JSGlobalOb #endif if (!on_before_parse_symbol_ptr) { - Bun::throwError(globalObject, scope, ErrorCode::ERR_INVALID_ARG_TYPE, "Expected on_before_parse_symbol (3rd argument) to be a valid symbol"_s); + Bun::throwError(globalObject, scope, ErrorCode::ERR_INVALID_ARG_TYPE, makeString("Could not find the symbol \""_s, on_before_parse_symbol, "\" in the given napi module."_s)); return {}; } @@ -648,7 +650,7 @@ extern "C" int JSBundlerPlugin__callOnBeforeParsePlugins( const BunString* namespaceStr, const BunString* pathString, OnBeforeParseArguments* onBeforeParseArgs, - void* onBeforeParseResult, + OnBeforeParseResult* onBeforeParseResult, int* shouldContinue) { return plugin->plugin.onBeforeParse.call(plugin->vm(), &plugin->plugin, shouldContinue, bunContextPtr, namespaceStr, pathString, onBeforeParseArgs, onBeforeParseResult); diff --git a/src/bun.js/bindings/JSBundlerPlugin.h b/src/bun.js/bindings/JSBundlerPlugin.h index da28a8e4857335..7bef5769fa2626 100644 --- a/src/bun.js/bindings/JSBundlerPlugin.h +++ b/src/bun.js/bindings/JSBundlerPlugin.h @@ -1,5 +1,6 @@ #pragma once +#include "bun-native-bundler-plugin-api/bundler_plugin.h" #include "root.h" #include "headers-handwritten.h" #include @@ -10,7 +11,7 @@ typedef void (*JSBundlerPluginAddErrorCallback)(void*, void*, JSC::EncodedJSValue, JSC::EncodedJSValue); typedef void (*JSBundlerPluginOnLoadAsyncCallback)(void*, void*, JSC::EncodedJSValue, JSC::EncodedJSValue); typedef void (*JSBundlerPluginOnResolveAsyncCallback)(void*, void*, JSC::EncodedJSValue, JSC::EncodedJSValue, 
JSC::EncodedJSValue);
-typedef void (*JSBundlerPluginNativeOnBeforeParseCallback)(void*, void*);
+typedef void (*JSBundlerPluginNativeOnBeforeParseCallback)(const OnBeforeParseArguments*, OnBeforeParseResult*);
namespace Bun {
@@ -18,14 +19,38 @@ using namespace JSC;
class BundlerPlugin final {
public:
+ /// In native plugins, the regular expression could be called concurrently on multiple threads.
+ /// Therefore, we need a mutex to synchronize access.
+ class FilterRegExp {
+ public:
+ String m_pattern;
+ Yarr::RegularExpression regex;
+ WTF::Lock lock {};
+
+ FilterRegExp(FilterRegExp&& other)
+ : m_pattern(WTFMove(other.m_pattern))
+ , regex(WTFMove(other.regex))
+ {
+ }
+
+ FilterRegExp(const String& pattern, OptionSet<Yarr::Flags> flags)
+ // Ensure it's safe for cross-thread usage.
+ : m_pattern(pattern.isolatedCopy())
+ , regex(m_pattern, flags)
+ {
+ }
+
+ bool match(JSC::VM& vm, const String& path);
+ };
+
class NamespaceList {
public:
- Vector<Yarr::RegularExpression> fileNamespace = {};
+ Vector<FilterRegExp> fileNamespace = {};
Vector<String> namespaces = {};
- Vector<Vector<Yarr::RegularExpression>> groups = {};
+ Vector<Vector<FilterRegExp>> groups = {};
BunPluginTarget target { BunPluginTargetBun };
- Vector<Yarr::RegularExpression>* group(const String& namespaceStr, unsigned& index)
+ Vector<FilterRegExp>* group(const String& namespaceStr, unsigned& index)
{
if (namespaceStr.isEmpty()) {
index = std::numeric_limits<unsigned>::max();
@@ -46,10 +71,6 @@ class BundlerPlugin final {
void append(JSC::VM& vm, JSC::RegExp* filter, String& namespaceString, unsigned& index);
};
- /// In native plugins, the regular expression could be called concurrently on multiple threads.
- /// Therefore, we need a mutex to synchronize access.
- typedef std::pair<Yarr::RegularExpression, std::shared_ptr<std::mutex>> NativeFilterRegexp;
-
struct NativePluginCallback {
JSBundlerPluginNativeOnBeforeParseCallback callback;
Bun::NapiExternal* external;
@@ -65,18 +86,18 @@ public:
using PerNamespaceCallbackList = Vector<NativePluginCallback>;
- Vector<NativeFilterRegexp> fileNamespace = {};
+ Vector<FilterRegExp> fileNamespace = {};
Vector<String> namespaces = {};
- Vector<Vector<NativeFilterRegexp>> groups = {};
+ Vector<Vector<FilterRegExp>> groups = {};
BunPluginTarget target { BunPluginTargetBun };
PerNamespaceCallbackList fileCallbacks = {};
Vector<PerNamespaceCallbackList> namespaceCallbacks = {};
- int call(JSC::VM& vm, BundlerPlugin* plugin, int* shouldContinue, void* bunContextPtr, const BunString* namespaceStr, const BunString* pathString, void* onBeforeParseArgs, void* onBeforeParseResult);
+ int call(JSC::VM& vm, BundlerPlugin* plugin, int* shouldContinue, void* bunContextPtr, const BunString* namespaceStr, const BunString* pathString, OnBeforeParseArguments* onBeforeParseArgs, OnBeforeParseResult* onBeforeParseResult);
void append(JSC::VM& vm, JSC::RegExp* filter, String& namespaceString, JSBundlerPluginNativeOnBeforeParseCallback callback, const char* name, NapiExternal* external);
- Vector<NativeFilterRegexp>* group(const String& namespaceStr, unsigned& index)
+ Vector<FilterRegExp>* group(const String& namespaceStr, unsigned& index)
{
if (namespaceStr.isEmpty()) {
index = std::numeric_limits<unsigned>::max();
diff --git a/src/bun.js/bindings/webcore/Event.cpp b/src/bun.js/bindings/webcore/Event.cpp
index e68c829d92473a..a834a5e33697b3 100644
--- a/src/bun.js/bindings/webcore/Event.cpp
+++ b/src/bun.js/bindings/webcore/Event.cpp
@@ -137,11 +137,16 @@ void Event::setCurrentTarget(EventTarget* currentTarget, std::optional<bool> isI
m_currentTargetIsInShadowTree = false;
// m_currentTargetIsInShadowTree = isInShadowTree ? *isInShadowTree : (is<Node>(currentTarget) && downcast<Node>(*currentTarget).isInShadowTree());
}
-Vector<EventTarget*> Event::composedPath() const
+void Event::setEventPath(const EventPath& path)
{
- // if (!m_eventPath)
- return Vector<EventTarget*>();
- // return m_eventPath->computePathUnclosedToTarget(*m_currentTarget);
+ m_eventPath = &path;
+}
+
+Vector<Ref<EventTarget>> Event::composedPath() const
+{
+ if (!m_eventPath)
+ return Vector<Ref<EventTarget>>();
+ return m_eventPath->computePathUnclosedToTarget(*m_currentTarget);
}
void Event::setUnderlyingEvent(Event* underlyingEvent)
diff --git a/src/bun.js/bindings/webcore/Event.h b/src/bun.js/bindings/webcore/Event.h
index 5b7d5cba2a88e6..748251f9b44f6d 100644
--- a/src/bun.js/bindings/webcore/Event.h
+++ b/src/bun.js/bindings/webcore/Event.h
@@ -91,8 +91,8 @@ class Event : public ScriptWrappable, public RefCounted<Event> {
DOMHighResTimeStamp timeStampForBindings(ScriptExecutionContext&) const;
MonotonicTime timeStamp() const { return m_createTime; }
- void setEventPath(const EventPath& path) { UNUSED_PARAM(path); }
- Vector<EventTarget*> composedPath() const;
+ void setEventPath(const EventPath&);
+ Vector<Ref<EventTarget>> composedPath() const;
void stopPropagation() { m_propagationStopped = true; }
void stopImmediatePropagation() { m_immediatePropagationStopped = true; }
diff --git a/src/bun.js/bindings/webcore/EventPath.cpp b/src/bun.js/bindings/webcore/EventPath.cpp
index bb285566c332bb..5be68a187f073f 100644
--- a/src/bun.js/bindings/webcore/EventPath.cpp
+++ b/src/bun.js/bindings/webcore/EventPath.cpp
@@ -237,56 +237,62 @@ EventPath::EventPath(Node& originalTarget, Event& event)
// #endif
-// // https://dom.spec.whatwg.org/#dom-event-composedpath
-// // Any node whose depth computed in EventPath::buildPath is greater than the context object is excluded.
-// // Because we can exit out of a closed shadow tree and re-enter another closed shadow tree via a slot,
-// // we decrease the *allowed depth* whenever we moved to a "shallower" (closer-to-document) tree.
-// Vector<EventTarget*> EventPath::computePathUnclosedToTarget(const EventTarget& target) const
-// {
-// Vector<EventTarget*> path;
-// auto pathSize = m_path.size();
-// RELEASE_ASSERT(pathSize);
-// path.reserveInitialCapacity(pathSize);
-
-// auto currentTargetIndex = m_path.findIf([&target](auto& context) {
-// return context.currentTarget() == &target;
-// });
-// RELEASE_ASSERT(currentTargetIndex != notFound);
-// auto currentTargetDepth = m_path[currentTargetIndex].closedShadowDepth();
-
-// auto appendTargetWithLesserDepth = [&path](const EventContext& currentContext, int& currentDepthAllowed) {
-// auto depth = currentContext.closedShadowDepth();
-// bool contextIsInsideInnerShadowTree = depth > currentDepthAllowed;
-// if (contextIsInsideInnerShadowTree)
-// return;
-// bool movedOutOfShadowTree = depth < currentDepthAllowed;
-// if (movedOutOfShadowTree)
-// currentDepthAllowed = depth;
-// path.unsafeAppendWithoutCapacityCheck(currentContext.currentTarget());
-// };
-
-// auto currentDepthAllowed = currentTargetDepth;
-// auto i = currentTargetIndex;
-// do {
-// appendTargetWithLesserDepth(m_path[i], currentDepthAllowed);
-// } while (i--);
-// path.reverse();
-
-// currentDepthAllowed = currentTargetDepth;
-// for (auto i = currentTargetIndex + 1; i < pathSize; ++i)
-// appendTargetWithLesserDepth(m_path[i], currentDepthAllowed);
-
-// return path;
-// }
+// https://dom.spec.whatwg.org/#dom-event-composedpath
+// Any node whose depth computed in EventPath::buildPath is greater than the context object is excluded.
+// Because we can exit out of a closed shadow tree and re-enter another closed shadow tree via a slot,
+// we decrease the *allowed depth* whenever we moved to a "shallower" (closer-to-document) tree.
+Vector<Ref<EventTarget>> EventPath::computePathUnclosedToTarget(const EventTarget& target) const
+{
+ Vector<Ref<EventTarget>> path;
+ auto pathSize = m_path.size();
+ RELEASE_ASSERT(pathSize);
+ path.reserveInitialCapacity(pathSize);
+
+ auto currentTargetIndex = m_path.findIf([&target](auto& context) {
+ return context.currentTarget() == &target;
+ });
+ RELEASE_ASSERT(currentTargetIndex != notFound);
+ auto currentTargetDepth = m_path[currentTargetIndex].closedShadowDepth();
+
+ auto appendTargetWithLesserDepth = [&path](const EventContext& currentContext, int& currentDepthAllowed) {
+ auto depth = currentContext.closedShadowDepth();
+ bool contextIsInsideInnerShadowTree = depth > currentDepthAllowed;
+ if (contextIsInsideInnerShadowTree)
+ return;
+ bool movedOutOfShadowTree = depth < currentDepthAllowed;
+ if (movedOutOfShadowTree)
+ currentDepthAllowed = depth;
+ path.append(*currentContext.currentTarget());
+ };
+
+ auto currentDepthAllowed = currentTargetDepth;
+ auto i = currentTargetIndex;
+ do {
+ appendTargetWithLesserDepth(m_path[i], currentDepthAllowed);
+ } while (i--);
+ path.reverse();
+
+ currentDepthAllowed = currentTargetDepth;
+ for (auto i = currentTargetIndex + 1; i < pathSize; ++i)
+ appendTargetWithLesserDepth(m_path[i], currentDepthAllowed);
+
+ return path;
+}
-EventPath::EventPath(const WTF::Vector<EventTarget*>& targets)
+EventPath::EventPath(const Vector<EventTarget*>& targets)
{
m_path = targets.map([&](auto* target) {
ASSERT(target);
+ // ASSERT(!is<Node>(target));
return EventContext { EventContext::Type::Normal, nullptr, target, *targets.begin(), 0 };
});
}
+EventPath::EventPath(EventTarget& target)
+{
+ m_path = { EventContext { EventContext::Type::Normal, nullptr, &target, &target, 0 } };
+}
+
// static Node* moveOutOfAllShadowRoots(Node& startingNode)
// {
// Node* node = &startingNode;
diff --git a/src/bun.js/bindings/webcore/EventPath.h b/src/bun.js/bindings/webcore/EventPath.h
index feb86fb7bf0570..2caee6588dd9c0 100644
--- a/src/bun.js/bindings/webcore/EventPath.h
+++ b/src/bun.js/bindings/webcore/EventPath.h
@@ -35,13 +35,14 @@ class EventPath {
public:
EventPath(Node& origin, Event&);
explicit EventPath(const Vector<EventTarget*>&);
+ explicit EventPath(EventTarget&);
bool isEmpty() const { return m_path.isEmpty(); }
size_t size() const { return m_path.size(); }
const EventContext& contextAt(size_t i) const { return m_path[i]; }
EventContext& contextAt(size_t i) { return m_path[i]; }
- Vector<EventTarget*> computePathUnclosedToTarget(const EventTarget&) const;
+ Vector<Ref<EventTarget>> computePathUnclosedToTarget(const EventTarget&) const;
static Node* eventTargetRespectingTargetRules(Node&);
diff --git a/src/bun.js/bindings/webcore/EventTarget.cpp b/src/bun.js/bindings/webcore/EventTarget.cpp
index 1a859de05f15af..0ba12bf3df5d7e 100644
--- a/src/bun.js/bindings/webcore/EventTarget.cpp
+++ b/src/bun.js/bindings/webcore/EventTarget.cpp
@@ -31,6 +31,7 @@
#include "config.h"
#include "Event.h"
+#include "EventPath.h"
#include "EventTarget.h"
@@ -248,10 +249,12 @@ void EventTarget::dispatchEvent(Event& event)
ASSERT(event.isInitialized());
ASSERT(!event.isBeingDispatched());
+ EventPath eventPath(*this);
event.setTarget(this);
event.setCurrentTarget(this);
event.setEventPhase(Event::AT_TARGET);
event.resetBeforeDispatch();
+ event.setEventPath(eventPath);
fireEventListeners(event, EventInvokePhase::Capturing);
fireEventListeners(event, EventInvokePhase::Bubbling);
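// With the stack-allocated EventPath above, a plain (non-DOM) EventTarget
// dispatch carries a single-entry path while its listeners run. A minimal
// JS-level sketch of the observable behavior (hedged: assumes composedPath()
// is exposed to script as declared in Event.h):
//
//   const target = new EventTarget();
//   target.addEventListener("ping", e => {
//     console.log(e.composedPath()); // expected: [ target ] during dispatch
//   });
//   target.dispatchEvent(new Event("ping"));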
event.resetAfterDispatch(); diff --git a/src/bun.js/bindings/webcore/JSCustomEvent.cpp b/src/bun.js/bindings/webcore/JSCustomEvent.cpp index dbc73d293d8d75..fef4d409a8d20b 100644 --- a/src/bun.js/bindings/webcore/JSCustomEvent.cpp +++ b/src/bun.js/bindings/webcore/JSCustomEvent.cpp @@ -47,6 +47,7 @@ #include #include #include +#include "../ErrorCode.h" namespace WebCore { using namespace JSC; @@ -58,7 +59,8 @@ template<> CustomEvent::Init convertDictionary(JSGlobalObject bool isNullOrUndefined = value.isUndefinedOrNull(); auto* object = isNullOrUndefined ? nullptr : value.getObject(); if (UNLIKELY(!isNullOrUndefined && !object)) { - throwTypeError(&lexicalGlobalObject, throwScope); + Bun::throwError(&lexicalGlobalObject, throwScope, Bun::ErrorCode::ERR_INVALID_ARG_TYPE, + "The \"options\" argument must be of type object."_s); return {}; } CustomEvent::Init result; diff --git a/src/bun.js/bindings/webcore/JSEventTargetNode.cpp b/src/bun.js/bindings/webcore/JSEventTargetNode.cpp index 7fc9eec1d340cc..314b64d55ef1c4 100644 --- a/src/bun.js/bindings/webcore/JSEventTargetNode.cpp +++ b/src/bun.js/bindings/webcore/JSEventTargetNode.cpp @@ -8,6 +8,7 @@ #include "JSEventTarget.h" #include "JavaScriptCore/JSArray.h" #include "wtf/text/MakeString.h" +#include "../ErrorCode.h" namespace Bun { @@ -29,8 +30,10 @@ JSC_DEFINE_HOST_FUNCTION(jsFunctionNodeEventsGetEventListeners, (JSGlobalObject auto eventType = callFrame->argument(1).toWTFString(globalObject); RETURN_IF_EXCEPTION(throwScope, {}); - if (UNLIKELY(!thisObject)) - return JSValue::encode(constructEmptyArray(globalObject, nullptr, 0)); + if (UNLIKELY(!thisObject)) { + return Bun::throwError(globalObject, throwScope, Bun::ErrorCode::ERR_INVALID_ARG_TYPE, + "ERR_INVALID_ARG_TYPE: first argument must be of type EventEmitter"_s); + } MarkedArgumentBuffer values; auto& listeners = thisObject->wrapped().eventListeners(WTF::makeAtomString(eventType)); diff --git a/src/bun.js/module_loader.zig b/src/bun.js/module_loader.zig index c8c6c19393647c..a7d419ee91ac04 100644 --- a/src/bun.js/module_loader.zig +++ b/src/bun.js/module_loader.zig @@ -184,9 +184,9 @@ fn dumpSourceStringFailiable(vm: *VirtualMachine, specifier: string, written: [] \\ "mappings": "{}" \\}} , .{ - bun.fmt.formatJSONStringUTF8(std.fs.path.basename(specifier)), - bun.fmt.formatJSONStringUTF8(specifier), - bun.fmt.formatJSONStringUTF8(source_file), + bun.fmt.formatJSONStringUTF8(std.fs.path.basename(specifier), .{}), + bun.fmt.formatJSONStringUTF8(specifier, .{}), + bun.fmt.formatJSONStringUTF8(source_file, .{}), mappings.formatVLQs(), }); try bufw.flush(); diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index 3059002802d328..66c40186343360 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -4018,7 +4018,8 @@ pub const ParseTask = struct { const OnBeforeParseResultWrapper = struct { original_source: ?[]const u8 = null, loader: Loader, - impl: OnBeforeParseResult, + check: if (bun.Environment.isDebug) u32 else u0 = if (bun.Environment.isDebug) 42069 else 0, // Value to ensure OnBeforeParseResult is wrapped in this struct + result: OnBeforeParseResult, }; const OnBeforeParseResult = extern struct { @@ -4027,7 +4028,7 @@ pub const ParseTask = struct { source_len: usize = 0, loader: Loader, - fetch_source_code_fn: *const fn (*const OnBeforeParseArguments, *OnBeforeParseResult) callconv(.C) i32 = &fetchSourceCode, + fetch_source_code_fn: *const fn (*OnBeforeParseArguments, *OnBeforeParseResult) callconv(.C) i32 = &fetchSourceCode, user_context: 
?*anyopaque = null, free_user_context: ?*const fn (?*anyopaque) callconv(.C) void = null, @@ -4036,9 +4037,15 @@ pub const ParseTask = struct { args_: ?*OnBeforeParseArguments, log_options_: ?*BunLogOptions, ) callconv(.C) void = &BunLogOptions.logFn, + + pub fn getWrapper(result: *OnBeforeParseResult) *OnBeforeParseResultWrapper { + const wrapper: *OnBeforeParseResultWrapper = @fieldParentPtr("result", result); + bun.debugAssert(wrapper.check == 42069); + return wrapper; + } }; - pub fn fetchSourceCode(args: *const OnBeforeParseArguments, result: *OnBeforeParseResult) callconv(.C) i32 { + pub fn fetchSourceCode(args: *OnBeforeParseArguments, result: *OnBeforeParseResult) callconv(.C) i32 { debug("fetchSourceCode", .{}); const this = args.context; if (this.log.errors > 0 or this.deferred_error != null or this.should_continue_running.* != 1) { @@ -4069,17 +4076,35 @@ pub const ParseTask = struct { result.source_len = entry.contents.len; result.free_user_context = null; result.user_context = null; + const wrapper: *OnBeforeParseResultWrapper = result.getWrapper(); + wrapper.original_source = entry.contents; return 0; } + pub export fn OnBeforeParseResult__reset(this: *OnBeforeParseResult) void { + const wrapper = this.getWrapper(); + this.loader = wrapper.loader; + if (wrapper.original_source) |src| { + this.source_ptr = src.ptr; + this.source_len = src.len; + } else { + this.source_ptr = null; + this.source_len = 0; + } + } + pub export fn OnBeforeParsePlugin__isDone(this: *OnBeforeParsePlugin) i32 { if (this.should_continue_running.* != 1) { return 1; } const result = this.result orelse return 1; + // The first plugin to set the source wins. + // But, we must check that they actually modified it + // since fetching the source stores it inside `result.source_ptr` if (result.source_ptr != null) { - return 1; + const wrapper: *OnBeforeParseResultWrapper = result.getWrapper(); + return @intFromBool(result.source_ptr.? != wrapper.original_source.?.ptr); } return 0; @@ -4096,10 +4121,14 @@ pub const ParseTask = struct { args.namespace_ptr = this.file_path.namespace.ptr; args.namespace_len = this.file_path.namespace.len; } - var result = OnBeforeParseResult{ + var wrapper = OnBeforeParseResultWrapper{ .loader = this.loader.*, + .result = OnBeforeParseResult{ + .loader = this.loader.*, + }, }; - this.result = &result; + + this.result = &wrapper.result; const count = plugin.callOnBeforeParsePlugins( this, if (bun.strings.eqlComptime(this.file_path.namespace, "file")) @@ -4109,15 +4138,15 @@ pub const ParseTask = struct { &bun.String.init(this.file_path.text), &args, - &result, + &wrapper.result, this.should_continue_running, ); if (comptime Environment.enable_logs) debug("callOnBeforeParsePlugins({s}:{s}) = {d}", .{ this.file_path.namespace, this.file_path.text, count }); if (count > 0) { if (this.deferred_error) |err| { - if (result.free_user_context) |free_user_context| { - free_user_context(result.user_context); + if (wrapper.result.free_user_context) |free_user_context| { + free_user_context(wrapper.result.user_context); } return err; @@ -4125,7 +4154,7 @@ pub const ParseTask = struct { // If the plugin sets the `free_user_context` function pointer, it _must_ set the `user_context` pointer. // Otherwise this is just invalid behavior. 
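// A sketch of that contract (the plugin side is hypothetical; the field names
// are the ones on OnBeforeParseResult above): a plugin that allocates
// per-parse state must set both fields together, e.g.
//
//   result.user_context = my_state; // plugin-owned allocation
//   result.free_user_context = &freeMyState; // called once the bundler is done
//
// Setting only the free function is reported with the error message built below.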
- if (result.user_context == null and result.free_user_context != null) { + if (wrapper.result.user_context == null and wrapper.result.free_user_context != null) { var msg = Logger.Msg{ .data = .{ .location = null, .text = bun.default_allocator.dupe( u8, "Native plugin set the `free_plugin_source_code_context` field without setting the `plugin_source_code_context` field.", @@ -4137,27 +4166,27 @@ pub const ParseTask = struct { } if (this.log.errors > 0) { - if (result.free_user_context) |free_user_context| { - free_user_context(result.user_context); + if (wrapper.result.free_user_context) |free_user_context| { + free_user_context(wrapper.result.user_context); } return error.SyntaxError; } - if (result.source_ptr) |ptr| { - if (result.free_user_context != null) { + if (wrapper.result.source_ptr) |ptr| { + if (wrapper.result.free_user_context != null) { this.task.external = CacheEntry.External{ - .ctx = result.user_context, - .function = result.free_user_context, + .ctx = wrapper.result.user_context, + .function = wrapper.result.free_user_context, }; } from_plugin.* = true; - this.loader.* = result.loader; + this.loader.* = wrapper.result.loader; return CacheEntry{ - .contents = ptr[0..result.source_len], + .contents = ptr[0..wrapper.result.source_len], .external = .{ - .ctx = result.user_context, - .function = result.free_user_context, + .ctx = wrapper.result.user_context, + .function = wrapper.result.free_user_context, }, }; } diff --git a/src/cli.zig b/src/cli.zig index 17adb70c0480b2..49a9124216e25f 100644 --- a/src/cli.zig +++ b/src/cli.zig @@ -21,6 +21,7 @@ const js_ast = bun.JSAst; const linker = @import("linker.zig"); const RegularExpression = bun.RegularExpression; const builtin = @import("builtin"); +const File = bun.sys.File; const debug = Output.scoped(.CLI, true); @@ -2133,7 +2134,15 @@ pub const Command = struct { if (strings.eqlComptime(extension, ".lockb")) { for (bun.argv) |arg| { if (strings.eqlComptime(arg, "--hash")) { - try PackageManagerCommand.printHash(ctx, ctx.args.entry_points[0]); + var path_buf: bun.PathBuffer = undefined; + @memcpy(path_buf[0..ctx.args.entry_points[0].len], ctx.args.entry_points[0]); + path_buf[ctx.args.entry_points[0].len] = 0; + const lockfile_path = path_buf[0..ctx.args.entry_points[0].len :0]; + const file = File.open(lockfile_path, bun.O.RDONLY, 0).unwrap() catch |err| { + Output.err(err, "failed to open lockfile", .{}); + Global.crash(); + }; + try PackageManagerCommand.printHash(ctx, file); return; } } diff --git a/src/cli/outdated_command.zig b/src/cli/outdated_command.zig index 0a951002ba0e3a..f24d333182bcb3 100644 --- a/src/cli/outdated_command.zig +++ b/src/cli/outdated_command.zig @@ -44,11 +44,10 @@ pub const OutdatedCommand = struct { } fn outdated(ctx: Command.Context, original_cwd: string, manager: *PackageManager, comptime log_level: PackageManager.Options.LogLevel) !void { - const load_lockfile_result = manager.lockfile.loadFromDisk( + const load_lockfile_result = manager.lockfile.loadFromCwd( manager, manager.allocator, manager.log, - manager.options.lockfile_path, true, ); diff --git a/src/cli/pack_command.zig b/src/cli/pack_command.zig index 1ad073b2654ae1..195a9c3edfad8c 100644 --- a/src/cli/pack_command.zig +++ b/src/cli/pack_command.zig @@ -102,11 +102,10 @@ pub const PackCommand = struct { Output.flush(); var lockfile: Lockfile = undefined; - const load_from_disk_result = lockfile.loadFromDisk( + const load_from_disk_result = lockfile.loadFromCwd( manager, manager.allocator, manager.log, - manager.options.lockfile_path, 
false, ); diff --git a/src/cli/package_manager_command.zig b/src/cli/package_manager_command.zig index 03080a509424f5..9151b94d6f6fd2 100644 --- a/src/cli/package_manager_command.zig +++ b/src/cli/package_manager_command.zig @@ -15,7 +15,7 @@ const PackageID = Install.PackageID; const DependencyID = Install.DependencyID; const PackageManager = Install.PackageManager; const Lockfile = @import("../install/lockfile.zig"); -const NodeModulesFolder = Lockfile.Tree.NodeModulesFolder; +const NodeModulesFolder = Lockfile.Tree.Iterator(.node_modules).Next; const Path = @import("../resolver/resolve_path.zig"); const String = @import("../install/semver.zig").String; const ArrayIdentityContext = bun.ArrayIdentityContext; @@ -26,6 +26,7 @@ const DefaultTrustedCommand = @import("./pm_trusted_command.zig").DefaultTrusted const Environment = bun.Environment; pub const PackCommand = @import("./pack_command.zig").PackCommand; const Npm = Install.Npm; +const File = bun.sys.File; const ByName = struct { dependencies: []const Dependency, @@ -41,7 +42,7 @@ const ByName = struct { }; pub const PackageManagerCommand = struct { - pub fn handleLoadLockfileErrors(load_lockfile: Lockfile.LoadFromDiskResult, pm: *PackageManager) void { + pub fn handleLoadLockfileErrors(load_lockfile: Lockfile.LoadResult, pm: *PackageManager) void { if (load_lockfile == .not_found) { if (pm.options.log_level != .silent) { Output.errGeneric("Lockfile not found", .{}); @@ -57,17 +58,20 @@ pub const PackageManagerCommand = struct { } } - pub fn printHash(ctx: Command.Context, lockfile_: []const u8) !void { + pub fn printHash(ctx: Command.Context, file: File) !void { @setCold(true); - var lockfile_buffer: bun.PathBuffer = undefined; - @memcpy(lockfile_buffer[0..lockfile_.len], lockfile_); - lockfile_buffer[lockfile_.len] = 0; - const lockfile = lockfile_buffer[0..lockfile_.len :0]; + const cli = try PackageManager.CommandLineArguments.parse(ctx.allocator, .pm); var pm, const cwd = try PackageManager.init(ctx, cli, PackageManager.Subcommand.pm); defer ctx.allocator.free(cwd); - const load_lockfile = pm.lockfile.loadFromDisk(pm, ctx.allocator, ctx.log, lockfile, true); + const bytes = file.readToEnd(ctx.allocator).unwrap() catch |err| { + Output.err(err, "failed to read lockfile", .{}); + Global.crash(); + }; + + const load_lockfile = pm.lockfile.loadFromBytes(pm, bytes, ctx.allocator, ctx.log); + handleLoadLockfileErrors(load_lockfile, pm); Output.flush(); @@ -198,7 +202,7 @@ pub const PackageManagerCommand = struct { Output.flush(); return; } else if (strings.eqlComptime(subcommand, "hash")) { - const load_lockfile = pm.lockfile.loadFromDisk(pm, ctx.allocator, ctx.log, "bun.lockb", true); + const load_lockfile = pm.lockfile.loadFromCwd(pm, ctx.allocator, ctx.log, true); handleLoadLockfileErrors(load_lockfile, pm); _ = try pm.lockfile.hasMetaHashChanged(false, pm.lockfile.packages.len); @@ -209,7 +213,7 @@ pub const PackageManagerCommand = struct { Output.enableBuffering(); Global.exit(0); } else if (strings.eqlComptime(subcommand, "hash-print")) { - const load_lockfile = pm.lockfile.loadFromDisk(pm, ctx.allocator, ctx.log, "bun.lockb", true); + const load_lockfile = pm.lockfile.loadFromCwd(pm, ctx.allocator, ctx.log, true); handleLoadLockfileErrors(load_lockfile, pm); Output.flush(); @@ -218,7 +222,7 @@ pub const PackageManagerCommand = struct { Output.enableBuffering(); Global.exit(0); } else if (strings.eqlComptime(subcommand, "hash-string")) { - const load_lockfile = pm.lockfile.loadFromDisk(pm, ctx.allocator, ctx.log, "bun.lockb", 
true); + const load_lockfile = pm.lockfile.loadFromCwd(pm, ctx.allocator, ctx.log, true); handleLoadLockfileErrors(load_lockfile, pm); _ = try pm.lockfile.hasMetaHashChanged(true, pm.lockfile.packages.len); @@ -291,19 +295,19 @@ pub const PackageManagerCommand = struct { try TrustCommand.exec(ctx, pm, args); Global.exit(0); } else if (strings.eqlComptime(subcommand, "ls")) { - const load_lockfile = pm.lockfile.loadFromDisk(pm, ctx.allocator, ctx.log, "bun.lockb", true); + const load_lockfile = pm.lockfile.loadFromCwd(pm, ctx.allocator, ctx.log, true); handleLoadLockfileErrors(load_lockfile, pm); Output.flush(); Output.disableBuffering(); const lockfile = load_lockfile.ok.lockfile; - var iterator = Lockfile.Tree.Iterator.init(lockfile); + var iterator = Lockfile.Tree.Iterator(.node_modules).init(lockfile); var max_depth: usize = 0; var directories = std.ArrayList(NodeModulesFolder).init(ctx.allocator); defer directories.deinit(); - while (iterator.nextNodeModulesFolder(null)) |node_modules| { + while (iterator.next(null)) |node_modules| { const path_len = node_modules.relative_path.len; const path = try ctx.allocator.alloc(u8, path_len + 1); bun.copy(u8, path, node_modules.relative_path); @@ -341,7 +345,7 @@ pub const PackageManagerCommand = struct { const resolutions = slice.items(.resolution); const root_deps = slice.items(.dependencies)[0]; - Output.println("{s} node_modules ({d})", .{ path, dependencies.len }); + Output.println("{s} node_modules ({d})", .{ path, lockfile.buffers.hoisted_dependencies.items.len }); const string_bytes = lockfile.buffers.string_bytes.items; const sorted_dependencies = try ctx.allocator.alloc(DependencyID, root_deps.len); defer ctx.allocator.free(sorted_dependencies); @@ -369,21 +373,29 @@ pub const PackageManagerCommand = struct { Global.exit(0); } else if (strings.eqlComptime(subcommand, "migrate")) { - if (!pm.options.enable.force_save_lockfile) try_load_bun: { - std.fs.cwd().accessZ("bun.lockb", .{ .mode = .read_only }) catch break :try_load_bun; + if (!pm.options.enable.force_save_lockfile) { + if (bun.sys.existsZ("bun.lock")) { + Output.prettyErrorln( + \\error: bun.lock already exists + \\run with --force to overwrite + , .{}); + Global.exit(1); + } - Output.prettyErrorln( - \\error: bun.lockb already exists - \\run with --force to overwrite - , .{}); - Global.exit(1); + if (bun.sys.existsZ("bun.lockb")) { + Output.prettyErrorln( + \\error: bun.lockb already exists + \\run with --force to overwrite + , .{}); + Global.exit(1); + } } const load_lockfile = @import("../install/migration.zig").detectAndLoadOtherLockfile( pm.lockfile, + bun.FD.cwd(), pm, ctx.allocator, pm.log, - pm.options.lockfile_path, ); if (load_lockfile == .not_found) { Output.prettyErrorln( @@ -393,7 +405,9 @@ pub const PackageManagerCommand = struct { } handleLoadLockfileErrors(load_lockfile, pm); const lockfile = load_lockfile.ok.lockfile; - lockfile.saveToDisk(pm.options.lockfile_path, pm.options.log_level.isVerbose()); + + const save_format: Lockfile.LoadResult.LockfileFormat = if (pm.options.save_text_lockfile) .text else .binary; + lockfile.saveToDisk(save_format, pm.options.log_level.isVerbose()); Global.exit(0); } diff --git a/src/cli/pm_trusted_command.zig b/src/cli/pm_trusted_command.zig index 4528ce8bfa4815..a26f46a4f08b1b 100644 --- a/src/cli/pm_trusted_command.zig +++ b/src/cli/pm_trusted_command.zig @@ -37,12 +37,11 @@ pub const UntrustedCommand = struct { Output.prettyError("bun pm untrusted v" ++ Global.package_json_version_with_sha ++ "\n\n", .{}); Output.flush(); - 
const load_lockfile = pm.lockfile.loadFromDisk(pm, ctx.allocator, ctx.log, "bun.lockb", true); + const load_lockfile = pm.lockfile.loadFromCwd(pm, ctx.allocator, ctx.log, true); PackageManagerCommand.handleLoadLockfileErrors(load_lockfile, pm); try pm.updateLockfileIfNeeded(load_lockfile); const packages = pm.lockfile.packages.slice(); - const metas: []Lockfile.Package.Meta = packages.items(.meta); const scripts: []Lockfile.Package.Scripts = packages.items(.scripts); const resolutions: []Install.Resolution = packages.items(.resolution); const buf = pm.lockfile.buffers.string_bytes.items; @@ -59,10 +58,8 @@ pub const UntrustedCommand = struct { // called alias because a dependency name is not always the package name const alias = dep.name.slice(buf); - if (metas[package_id].hasInstallScript()) { - if (!pm.lockfile.hasTrustedDependency(alias)) { - try untrusted_dep_ids.put(ctx.allocator, dep_id, {}); - } + if (!pm.lockfile.hasTrustedDependency(alias)) { + try untrusted_dep_ids.put(ctx.allocator, dep_id, {}); } } @@ -74,7 +71,7 @@ pub const UntrustedCommand = struct { var untrusted_deps: std.AutoArrayHashMapUnmanaged(DependencyID, Lockfile.Package.Scripts.List) = .{}; defer untrusted_deps.deinit(ctx.allocator); - var tree_iterator = Lockfile.Tree.Iterator.init(pm.lockfile); + var tree_iterator = Lockfile.Tree.Iterator(.node_modules).init(pm.lockfile); const top_level_without_trailing_slash = strings.withoutTrailingSlash(Fs.FileSystem.instance.top_level_dir); var abs_node_modules_path: std.ArrayListUnmanaged(u8) = .{}; @@ -82,7 +79,7 @@ pub const UntrustedCommand = struct { try abs_node_modules_path.appendSlice(ctx.allocator, top_level_without_trailing_slash); try abs_node_modules_path.append(ctx.allocator, std.fs.path.sep); - while (tree_iterator.nextNodeModulesFolder(null)) |node_modules| { + while (tree_iterator.next(null)) |node_modules| { // + 1 because we want to keep the path separator abs_node_modules_path.items.len = top_level_without_trailing_slash.len + 1; try abs_node_modules_path.appendSlice(ctx.allocator, node_modules.relative_path); @@ -187,7 +184,7 @@ pub const TrustCommand = struct { if (args.len == 2) errorExpectedArgs(); - const load_lockfile = pm.lockfile.loadFromDisk(pm, ctx.allocator, ctx.log, "bun.lockb", true); + const load_lockfile = pm.lockfile.loadFromCwd(pm, ctx.allocator, ctx.log, true); PackageManagerCommand.handleLoadLockfileErrors(load_lockfile, pm); try pm.updateLockfileIfNeeded(load_lockfile); @@ -203,7 +200,6 @@ pub const TrustCommand = struct { const buf = pm.lockfile.buffers.string_bytes.items; const packages = pm.lockfile.packages.slice(); - const metas: []Lockfile.Package.Meta = packages.items(.meta); const resolutions: []Install.Resolution = packages.items(.resolution); const scripts: []Lockfile.Package.Scripts = packages.items(.scripts); @@ -216,10 +212,8 @@ pub const TrustCommand = struct { const alias = dep.name.slice(buf); - if (metas[package_id].hasInstallScript()) { - if (!pm.lockfile.hasTrustedDependency(alias)) { - try untrusted_dep_ids.put(ctx.allocator, dep_id, {}); - } + if (!pm.lockfile.hasTrustedDependency(alias)) { + try untrusted_dep_ids.put(ctx.allocator, dep_id, {}); } } @@ -231,7 +225,7 @@ pub const TrustCommand = struct { // Instead of running them right away, we group scripts by depth in the node_modules // file structure, then run them starting at max depth. 
This ensures lifecycle scripts are run // in the correct order as they would during a normal install - var tree_iter = Lockfile.Tree.Iterator.init(pm.lockfile); + var tree_iter = Lockfile.Tree.Iterator(.node_modules).init(pm.lockfile); const top_level_without_trailing_slash = strings.withoutTrailingSlash(Fs.FileSystem.instance.top_level_dir); var abs_node_modules_path: std.ArrayListUnmanaged(u8) = .{}; @@ -248,7 +242,7 @@ pub const TrustCommand = struct { var scripts_count: usize = 0; - while (tree_iter.nextNodeModulesFolder(null)) |node_modules| { + while (tree_iter.next(null)) |node_modules| { abs_node_modules_path.items.len = top_level_without_trailing_slash.len + 1; try abs_node_modules_path.appendSlice(ctx.allocator, node_modules.relative_path); @@ -423,7 +417,14 @@ pub const TrustCommand = struct { try pm.lockfile.trusted_dependencies.?.put(ctx.allocator, @truncate(String.Builder.stringHash(name)), {}); } - pm.lockfile.saveToDisk(pm.options.lockfile_path, pm.options.log_level.isVerbose()); + const save_format: Lockfile.LoadResult.LockfileFormat = if (pm.options.save_text_lockfile) + .text + else switch (load_lockfile) { + .not_found => .binary, + .err => |err| err.format, + .ok => |ok| ok.format, + }; + pm.lockfile.saveToDisk(save_format, pm.options.log_level.isVerbose()); var buffer_writer = try bun.js_printer.BufferWriter.init(ctx.allocator); try buffer_writer.buffer.list.ensureTotalCapacity(ctx.allocator, package_json_contents.len + 1); diff --git a/src/cli/publish_command.zig b/src/cli/publish_command.zig index 466f7ef488cd06..b62ae823d1ee0c 100644 --- a/src/cli/publish_command.zig +++ b/src/cli/publish_command.zig @@ -281,11 +281,10 @@ pub const PublishCommand = struct { manager: *PackageManager, ) FromWorkspaceError!Context(directory_publish) { var lockfile: Lockfile = undefined; - const load_from_disk_result = lockfile.loadFromDisk( + const load_from_disk_result = lockfile.loadFromCwd( manager, manager.allocator, manager.log, - manager.options.lockfile_path, false, ); diff --git a/src/codegen/generate-node-errors.ts b/src/codegen/generate-node-errors.ts index 6dfcedb4e1f096..debbb07fc50da9 100644 --- a/src/codegen/generate-node-errors.ts +++ b/src/codegen/generate-node-errors.ts @@ -67,7 +67,8 @@ pub const Error = enum(u8) { let i = 0; let listForUsingNamespace = ""; -for (const [code, constructor, name] of NodeErrors) { +for (let [code, constructor, name] of NodeErrors) { + if (name == null) name = constructor.name; enumHeader += ` ${code} = ${i},\n`; listHeader += ` { JSC::ErrorType::${constructor.name}, "${name}"_s, "${code}"_s },\n`; zig += ` ${code} = ${i},\n`; diff --git a/src/fmt.zig b/src/fmt.zig index 73277701cc7f76..33d25bef1db440 100644 --- a/src/fmt.zig +++ b/src/fmt.zig @@ -234,9 +234,18 @@ const JSONFormatter = struct { const JSONFormatterUTF8 = struct { input: []const u8, + opts: Options, + + pub const Options = struct { + quote: bool = true, + }; pub fn format(self: JSONFormatterUTF8, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { - try bun.js_printer.writeJSONString(self.input, @TypeOf(writer), writer, .utf8); + if (self.opts.quote) { + try bun.js_printer.writeJSONString(self.input, @TypeOf(writer), writer, .utf8); + } else { + try bun.js_printer.writePreQuotedString(self.input, @TypeOf(writer), writer, '"', false, true, .utf8); + } } }; @@ -245,8 +254,8 @@ pub fn formatJSONString(text: []const u8) JSONFormatter { return .{ .input = text }; } -pub fn formatJSONStringUTF8(text: []const u8) JSONFormatterUTF8 { - return .{ .input = 
text }; +pub fn formatJSONStringUTF8(text: []const u8, opts: JSONFormatterUTF8.Options) JSONFormatterUTF8 { + return .{ .input = text, .opts = opts }; } const SharedTempBuffer = [32 * 1024]u8; diff --git a/src/install/bin.zig b/src/install/bin.zig index 1667455d2605ed..9837f80206cc29 100644 --- a/src/install/bin.zig +++ b/src/install/bin.zig @@ -17,6 +17,9 @@ const string = bun.string; const Install = @import("./install.zig"); const PackageInstall = Install.PackageInstall; const Dependency = @import("./dependency.zig"); +const OOM = bun.OOM; +const JSON = bun.JSON; +const Lockfile = Install.Lockfile; /// Normalized `bin` field in [package.json](https://docs.npmjs.com/cli/v8/configuring-npm/package-json#bin) /// Can be a: @@ -25,11 +28,16 @@ const Dependency = @import("./dependency.zig"); /// - map where keys are names of the binaries and values are file paths to the binaries pub const Bin = extern struct { tag: Tag = Tag.none, - _padding_tag: [3]u8 = .{0} ** 3, + unset: u8 = 0, + _padding_tag: [2]u8 = .{0} ** 2, // Largest member must be zero initialized value: Value = Value{ .map = ExternalStringList{} }, + pub fn isUnset(this: *const Bin) bool { + return this.unset != 0; + } + pub fn count(this: *const Bin, buf: []const u8, extern_strings: []const ExternalString, comptime StringBuilder: type, builder: StringBuilder) u32 { switch (this.tag) { .file => builder.count(this.value.file.slice(buf)), @@ -56,18 +64,21 @@ pub const Bin = extern struct { .none => { return Bin{ .tag = .none, + .unset = this.unset, .value = Value.init(.{ .none = {} }), }; }, .file => { return Bin{ .tag = .file, + .unset = this.unset, .value = Value.init(.{ .file = builder.append(String, this.value.file.slice(buf)) }), }; }, .named_file => { return Bin{ .tag = .named_file, + .unset = this.unset, .value = Value.init( .{ .named_file = [2]String{ @@ -81,6 +92,7 @@ pub const Bin = extern struct { .dir => { return Bin{ .tag = .dir, + .unset = this.unset, .value = Value.init(.{ .dir = builder.append(String, this.value.dir.slice(buf)) }), }; }, @@ -91,6 +103,7 @@ pub const Bin = extern struct { return Bin{ .tag = .map, + .unset = this.unset, .value = Value.init(.{ .map = ExternalStringList.init(all_extern_strings, extern_strings_slice) }), }; }, @@ -99,6 +112,130 @@ pub const Bin = extern struct { unreachable; } + pub fn cloneAppend(this: *const Bin, this_buf: string, this_extern_strings: []const ExternalString, lockfile: *Lockfile) OOM!Bin { + var string_buf = lockfile.stringBuf(); + defer string_buf.apply(lockfile); + + const cloned: Bin = .{ + .tag = this.tag, + .unset = this.unset, + + .value = switch (this.tag) { + .none => Value.init(.{ .none = {} }), + .file => Value.init(.{ + .file = try string_buf.append(this.value.file.slice(this_buf)), + }), + .named_file => Value.init(.{ .named_file = .{ + try string_buf.append(this.value.named_file[0].slice(this_buf)), + try string_buf.append(this.value.named_file[1].slice(this_buf)), + } }), + .dir => Value.init(.{ + .dir = try string_buf.append(this.value.dir.slice(this_buf)), + }), + .map => map: { + const off = lockfile.buffers.extern_strings.items.len; + for (this.value.map.get(this_extern_strings)) |extern_string| { + try lockfile.buffers.extern_strings.append( + lockfile.allocator, + try string_buf.appendExternal(extern_string.slice(this_buf)), + ); + } + const new = lockfile.buffers.extern_strings.items[off..]; + break :map Value.init(.{ + .map = ExternalStringList.init(lockfile.buffers.extern_strings.items, new), + }); + }, + }, + }; + + return cloned; + } + + /// Used 
for packages read from text lockfile. + pub fn parseAppend( + allocator: std.mem.Allocator, + bin_expr: JSON.Expr, + buf: *String.Buf, + extern_strings: *std.ArrayListUnmanaged(ExternalString), + ) OOM!Bin { + switch (bin_expr.data) { + .e_object => |obj| { + switch (obj.properties.len) { + 0 => {}, + 1 => { + const bin_name = obj.properties.ptr[0].key.?.asString(allocator) orelse return .{}; + const value = obj.properties.ptr[0].value.?.asString(allocator) orelse return .{}; + + return .{ + .tag = .named_file, + .value = .{ + .named_file = .{ + try buf.append(bin_name), + try buf.append(value), + }, + }, + }; + }, + else => { + const current_len = extern_strings.items.len; + const num_props: usize = obj.properties.len * 2; + try extern_strings.ensureTotalCapacityPrecise( + allocator, + current_len + num_props, + ); + var new = extern_strings.items.ptr[current_len .. current_len + num_props]; + extern_strings.items.len += num_props; + + var i: usize = 0; + for (obj.properties.slice()) |bin_prop| { + const key = bin_prop.key.?; + const value = bin_prop.value.?; + const key_str = key.asString(allocator) orelse return .{}; + const value_str = value.asString(allocator) orelse return .{}; + new[i] = try buf.appendExternal(key_str); + i += 1; + new[i] = try buf.appendExternal(value_str); + i += 1; + } + if (comptime Environment.allow_assert) { + bun.assert(i == new.len); + } + return .{ + .tag = .map, + .value = .{ + .map = ExternalStringList.init(extern_strings.items, new), + }, + }; + }, + } + }, + .e_string => |str| { + if (str.data.len > 0) { + return .{ + .tag = .file, + .value = .{ + .file = try buf.append(str.data), + }, + }; + } + }, + else => {}, + } + return .{}; + } + + pub fn parseAppendFromDirectories(allocator: std.mem.Allocator, bin_expr: JSON.Expr, buf: *String.Buf) OOM!Bin { + if (bin_expr.asString(allocator)) |bin_str| { + return .{ + .tag = .dir, + .value = .{ + .dir = try buf.append(bin_str), + }, + }; + } + return .{}; + } + pub fn init() Bin { return bun.serializable(.{ .tag = .none, .value = Value.init(.{ .none = {} }) }); } diff --git a/src/install/bun.lock.zig b/src/install/bun.lock.zig new file mode 100644 index 00000000000000..9e09dcc99a8717 --- /dev/null +++ b/src/install/bun.lock.zig @@ -0,0 +1,1596 @@ +const std = @import("std"); +const bun = @import("root").bun; +const string = bun.string; +const stringZ = bun.stringZ; +const strings = bun.strings; +const URL = bun.URL; +const PackageManager = bun.install.PackageManager; +const OOM = bun.OOM; +const logger = bun.logger; +const BinaryLockfile = bun.install.Lockfile; +const JSON = bun.JSON; +const Output = bun.Output; +const Expr = bun.js_parser.Expr; +const MutableString = bun.MutableString; +const DependencySlice = BinaryLockfile.DependencySlice; +const Install = bun.install; +const Dependency = Install.Dependency; +const PackageID = Install.PackageID; +const Semver = bun.Semver; +const String = Semver.String; +const Resolution = Install.Resolution; +const PackageNameHash = Install.PackageNameHash; +const NameHashMap = BinaryLockfile.NameHashMap; +const Repository = Install.Repository; +const Progress = bun.Progress; +const Environment = bun.Environment; +const Global = bun.Global; +const LoadResult = BinaryLockfile.LoadResult; +const TruncatedPackageNameHash = Install.TruncatedPackageNameHash; +const invalid_package_id = Install.invalid_package_id; +const Npm = Install.Npm; +const ExtractTarball = @import("./extract_tarball.zig"); +const Integrity = @import("./integrity.zig").Integrity; +const Meta = 
BinaryLockfile.Package.Meta;
+const Negatable = Npm.Negatable;
+const DependencyID = Install.DependencyID;
+const invalid_dependency_id = Install.invalid_dependency_id;
+
+/// A property key in the `packages` field of the lockfile
+pub const PkgPath = struct {
+ raw: string,
+ depth: u8,
+
+ /// raw must be valid
+ /// fills buf with the path to dependency in node_modules.
+ /// e.g. loose-envify/js-tokens@4.0.0 -> node_modules/loose-envify/node_modules/js-tokens
+ pub fn path(this: PkgPath, path_buf: []u8, comptime sep: u8) stringZ {
+ var buf = path_buf;
+ var remain = this.raw;
+
+ const end = loop: while (true) {
+ @memcpy(buf[0.."node_modules/".len], "node_modules" ++ [1]u8{sep});
+ buf = buf["node_modules/".len..];
+
+ var at = strings.indexOfChar(remain, '@') orelse unreachable;
+ var slash = strings.indexOfChar(remain, '/') orelse break :loop at;
+
+ if (at == 0) {
+ // scoped package, find next '@' and '/'
+ at += 1 + (strings.indexOfChar(remain[1..], '@') orelse unreachable);
+ slash += 1 + (strings.indexOfChar(remain[slash + 1 ..], '/') orelse {
+ break :loop at;
+ });
+ }
+
+ if (at < slash) {
+ // slash is in the version
+ break :loop at;
+ }
+
+ @memcpy(buf[0..slash], remain[0..slash]);
+ buf[slash] = sep;
+ buf = buf[slash + 1 ..];
+ remain = remain[slash + 1 ..];
+ };
+
+ @memcpy(buf[0..end], remain[0..end]);
+ buf = buf[end..];
+ buf[0] = 0;
+ return path_buf[0 .. @intFromPtr(buf.ptr) - @intFromPtr(path_buf.ptr) :0];
+ }
+
+ pub fn reverseIterator(input: string) ReverseIterator {
+ return .{
+ .input = input,
+ .i = @intCast(input.len),
+ };
+ }
+
+ pub const ReverseIterator = struct {
+ input: string,
+ i: u32,
+
+ pub fn next(this: *ReverseIterator) error{InvalidPackageKey}!?string {
+ if (this.i == 0) return null;
+
+ const remain = this.input[0..this.i];
+ if (remain.len == 0) return error.InvalidPackageKey;
+
+ const slash = strings.indexOfCharNeg(remain, '/') orelse {
+ // the end
+ const name = remain;
+ this.i = 0;
+ return name;
+ };
+
+ // if this is the second component of a scoped package an '@'
+ // will begin the next
+ const at = strings.indexOfCharNeg(remain, '@') orelse {
+ const name = this.input[slash + 1 .. this.i];
+ this.i = slash;
+ return name;
+ };
+
+ if (at < slash) {
+ return error.InvalidPackageKey;
+ }
+
+ const next_slash = strings.indexOfCharNeg(remain[0..slash], '/') orelse {
+ // if `@` exists there must be another slash unless the first package
+ // is a scoped package
+ if (at != 0) {
+ return error.InvalidPackageKey;
+ }
+
+ const name = remain;
+ this.i = 0;
+ return name;
+ };
+
+ if (next_slash + 1 != at) {
+ return error.InvalidPackageKey;
+ }
+
+ const name = this.input[next_slash + 1 .. this.i];
+ this.i = next_slash;
+ return name;
+ }
+
+ pub fn first(this: *ReverseIterator) error{InvalidPackageKey}!string {
+ bun.debugAssert(this.i == this.input.len);
+
+ return try this.next() orelse return error.InvalidPackageKey;
+ }
+ };
+
+ pub fn iterator(input: string) Iterator {
+ return .{
+ .input = input,
+ .i = 0,
+ };
+ }
+
+ pub const Iterator = struct {
+ input: string,
+ i: u32,
+ version_offset: ?u32 = null,
+
+ pub fn next(this: *Iterator) error{InvalidPackageKey}!?string {
+ if (this.i == this.input.len) return null;
+
+ var remain = this.input[this.i..];
+
+ var maybe_at = strings.indexOfChar(remain, '@');
+ var slash = strings.indexOfChar(remain, '/') orelse {
+ // no slashes left, it's the last dependency name.
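+ // (e.g. the trailing "js-tokens" in the key "loose-envify/js-tokens").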
+ // '@' will only exist if '/' exists (scoped package) + if (maybe_at != null) return error.InvalidPackageKey; + this.i = @intCast(this.input.len); + return remain; + }; + + if (maybe_at == null) { + if (slash + 1 == this.input.len) return error.InvalidPackageKey; + this.i += slash + 1; + return remain[0..slash]; + } + + if (maybe_at.? == 0) { + // scoped package, find next '/' and '@' if it exists + maybe_at = strings.indexOfChar(remain[1..], '@'); + slash += 1 + (strings.indexOfChar(remain[slash + 1 ..], '/') orelse { + if (maybe_at != null) return error.InvalidPackageKey; + this.i = @intCast(this.input.len); + return remain; + }); + } + + if (maybe_at) |at| { + if (at + 1 < slash) { + // both '@' and '/' exist and it's not a scoped package, so + // '@' must be greater than '/' + return error.InvalidPackageKey; + } + } + + this.i += slash + 1; + return remain[0..slash]; + } + + /// There will always be at least one component to this path. Return + /// an error if none is found (empty string) + pub fn first(this: *Iterator) error{InvalidPackageKey}!string { + bun.assertWithLocation(this.i == 0, @src()); + return try this.next() orelse error.InvalidPackageKey; + } + }; + + pub fn fromLockfile(input: string) PkgPath { + return .{ + .raw = input, + .depth = 0, + }; + } + + pub const Map = struct { + root: Node, + + const Nodes = bun.StringArrayHashMapUnmanaged(Node); + + pub const Node = struct { + pkg_id: PackageID, + dep_id: DependencyID, + parent: ?*Node, + nodes: Nodes, + + pub fn deinit(this: *Node, allocator: std.mem.Allocator) void { + for (this.nodes.values()) |*node| { + node.deinit(allocator); + } + + this.nodes.deinit(allocator); + } + }; + + pub fn init() Map { + return .{ + .root = .{ + .pkg_id = 0, + .dep_id = invalid_dependency_id, + .parent = null, + .nodes = .{}, + }, + }; + } + + pub fn deinit(this: *Map, allocator: std.mem.Allocator) void { + for (this.root.nodes.values()) |*node| { + node.deinit(allocator); + } + } + + const InsertError = OOM || error{ + InvalidPackageKey, + DuplicatePackagePath, + }; + + pub fn insert(this: *Map, allocator: std.mem.Allocator, pkg_path: string, id: PackageID) InsertError!void { + var iter = PkgPath.iterator(pkg_path); + + var parent: ?*Node = null; + var curr: *Node = &this.root; + while (try iter.next()) |name| { + const entry = try curr.nodes.getOrPut(allocator, name); + if (!entry.found_existing) { + // probably should use String.Buf for small strings and + // deduplication. 
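+ // For example (illustrative): inserting the key "loose-envify/js-tokens"
+ // visits or creates the chain root -> "loose-envify" -> "js-tokens";
+ // intermediate nodes keep invalid_package_id until their own key is
+ // inserted, and only the final node receives the real id (set at the
+ // end of this function).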
+ entry.key_ptr.* = try allocator.dupe(u8, name); + entry.value_ptr.* = .{ + .pkg_id = invalid_package_id, + .dep_id = invalid_dependency_id, + .parent = parent, + .nodes = .{}, + }; + } + + parent = curr; + curr = entry.value_ptr; + } + + if (parent == null) { + return error.InvalidPackageKey; + } + + if (curr.pkg_id != invalid_package_id) { + return error.DuplicatePackagePath; + } + + curr.pkg_id = id; + } + + pub fn get(this: *Map, pkg_path: string) error{InvalidPackageKey}!?*Node { + var iter = iterator(pkg_path); + var curr: *Node = &this.root; + while (try iter.next()) |name| { + curr = curr.nodes.getPtr(name) orelse return null; + } + + return curr; + } + + pub fn iterate(this: *const Map, allocator: std.mem.Allocator) OOM!Map.Iterator { + var tree_buf: std.ArrayListUnmanaged(Map.Iterator.TreeInfo) = .{}; + try tree_buf.append(allocator, .{ + .nodes = this.root.nodes, + .pkg_id = 0, + .dep_id = BinaryLockfile.Tree.root_dep_id, + .id = 0, + .parent_id = BinaryLockfile.Tree.invalid_id, + }); + return .{ + .tree_buf = tree_buf, + .deps_buf = .{}, + }; + } + + /// Breadth-first iterator + pub const Iterator = struct { + tree_buf: std.ArrayListUnmanaged(TreeInfo), + + deps_buf: std.ArrayListUnmanaged(DependencyID), + + pub const TreeInfo = struct { + // name: String, + nodes: Nodes, + pkg_id: PackageID, + dep_id: DependencyID, + id: BinaryLockfile.Tree.Id, + parent_id: BinaryLockfile.Tree.Id, + }; + + pub const Next = struct { + id: BinaryLockfile.Tree.Id, + parent_id: BinaryLockfile.Tree.Id, + tree_dep_id: DependencyID, + dep_ids: []const DependencyID, + }; + + pub fn deinit(this: *Map.Iterator, allocator: std.mem.Allocator) void { + this.tree_buf.deinit(allocator); + this.deps_buf.deinit(allocator); + } + + pub fn next(this: *Map.Iterator, allocator: std.mem.Allocator) OOM!?Next { + if (this.tree_buf.items.len == 0) { + return null; + } + + this.deps_buf.clearRetainingCapacity(); + + var next_id = this.tree_buf.getLast().id + 1; + + // TODO(dylan-conway): try doubly linked list + const tree = this.tree_buf.orderedRemove(0); + + for (tree.nodes.values()) |node| { + if (node.nodes.count() > 0) { + try this.tree_buf.append(allocator, .{ + .nodes = node.nodes, + .id = next_id, + .parent_id = tree.id, + .pkg_id = node.pkg_id, + .dep_id = node.dep_id, + }); + next_id += 1; + } + + try this.deps_buf.append(allocator, node.dep_id); + } + + return .{ + .id = tree.id, + .parent_id = tree.parent_id, + .tree_dep_id = tree.dep_id, + .dep_ids = this.deps_buf.items, + }; + + // return tree; + // .dep_id = tree.dep_id, + // .pkg_id = tree.pkg_id, + // .id = tree.tree_id, + // .parent_id = tree.parent_id, + // .nodes = tree.nodes, + // }; + } + }; + }; +}; + +pub const Version = enum(u32) { + v0 = 0, + + // probably bump when we support nested resolutions + // v1, + + pub const current: Version = .v0; +}; + +pub const Stringifier = struct { + const indent_scalar = 2; + + // pub fn save(this: *const Lockfile) void { + // _ = this; + // } + + pub fn saveFromBinary(allocator: std.mem.Allocator, lockfile: *const BinaryLockfile) OOM!string { + var writer_buf = MutableString.initEmpty(allocator); + var buffered_writer = writer_buf.bufferedWriter(); + var writer = buffered_writer.writer(); + + const buf = lockfile.buffers.string_bytes.items; + const deps_buf = lockfile.buffers.dependencies.items; + const resolution_buf = lockfile.buffers.resolutions.items; + const pkgs = lockfile.packages.slice(); + const pkg_dep_lists: []DependencySlice = pkgs.items(.dependencies); + const pkg_resolution: []Resolution = 
pkgs.items(.resolution); + const pkg_names: []String = pkgs.items(.name); + const pkg_name_hashes: []PackageNameHash = pkgs.items(.name_hash); + const pkg_metas: []BinaryLockfile.Package.Meta = pkgs.items(.meta); + + var temp_buf: std.ArrayListUnmanaged(u8) = .{}; + defer temp_buf.deinit(allocator); + const temp_writer = temp_buf.writer(allocator); + + var found_trusted_dependencies: std.AutoHashMapUnmanaged(u64, String) = .{}; + defer found_trusted_dependencies.deinit(allocator); + if (lockfile.trusted_dependencies) |trusted_dependencies| { + try found_trusted_dependencies.ensureTotalCapacity(allocator, @truncate(trusted_dependencies.count())); + } + + var found_patched_dependencies: std.AutoHashMapUnmanaged(u64, struct { string, String }) = .{}; + defer found_patched_dependencies.deinit(allocator); + try found_patched_dependencies.ensureTotalCapacity(allocator, @truncate(lockfile.patched_dependencies.count())); + + var found_overrides: std.AutoHashMapUnmanaged(u64, struct { String, Dependency.Version }) = .{}; + defer found_overrides.deinit(allocator); + try found_overrides.ensureTotalCapacity(allocator, @truncate(lockfile.overrides.map.count())); + + var _indent: u32 = 0; + const indent = &_indent; + try writer.writeAll("{\n"); + try incIndent(writer, indent); + { + try writer.print("\"lockfileVersion\": {d},\n", .{@intFromEnum(Version.current)}); + try writeIndent(writer, indent); + + try writer.writeAll("\"workspaces\": {\n"); + try incIndent(writer, indent); + { + try writeWorkspaceDeps( + writer, + indent, + 0, + .{}, + pkg_names, + pkg_name_hashes, + pkg_dep_lists, + buf, + deps_buf, + lockfile.workspace_versions, + ); + for (0..pkgs.len) |pkg_id| { + const res = pkg_resolution[pkg_id]; + if (res.tag != .workspace) continue; + try writer.writeAll(",\n"); + try writeIndent(writer, indent); + try writeWorkspaceDeps( + writer, + indent, + @intCast(pkg_id), + res.value.workspace, + pkg_names, + pkg_name_hashes, + pkg_dep_lists, + buf, + deps_buf, + lockfile.workspace_versions, + ); + } + } + try writer.writeByte('\n'); + try decIndent(writer, indent); + try writer.writeAll("},\n"); + + var pkgs_iter = BinaryLockfile.Tree.Iterator(.pkg_path).init(lockfile); + + // find trusted and patched dependencies. 
also overrides + while (pkgs_iter.next({})) |node| { + for (node.dependencies) |dep_id| { + const pkg_id = resolution_buf[dep_id]; + if (pkg_id == invalid_package_id) continue; + + const pkg_name = pkg_names[pkg_id]; + const pkg_name_hash = pkg_name_hashes[pkg_id]; + const res = pkg_resolution[pkg_id]; + const dep = deps_buf[dep_id]; + + if (lockfile.patched_dependencies.count() > 0) { + try temp_writer.print("{s}@", .{pkg_name.slice(buf)}); + switch (res.tag) { + .workspace => { + if (lockfile.workspace_versions.get(pkg_name_hash)) |workspace_version| { + try temp_writer.print("{}", .{workspace_version.fmt(buf)}); + } + }, + else => { + try temp_writer.print("{}", .{res.fmt(buf, .posix)}); + }, + } + defer temp_buf.clearRetainingCapacity(); + + const name_and_version = temp_buf.items; + const name_and_version_hash = String.Builder.stringHash(name_and_version); + + if (lockfile.patched_dependencies.get(name_and_version_hash)) |patch| { + try found_patched_dependencies.put(allocator, name_and_version_hash, .{ + try allocator.dupe(u8, name_and_version), + patch.path, + }); + } + } + + // intentionally not checking default trusted dependencies + if (lockfile.trusted_dependencies) |trusted_dependencies| { + if (trusted_dependencies.contains(@truncate(dep.name_hash))) { + try found_trusted_dependencies.put(allocator, dep.name_hash, dep.name); + } + } + + if (lockfile.overrides.map.count() > 0) { + if (lockfile.overrides.get(dep.name_hash)) |version| { + try found_overrides.put(allocator, dep.name_hash, .{ dep.name, version }); + } + } + } + } + + pkgs_iter.reset(); + + if (found_trusted_dependencies.count() > 0) { + try writeIndent(writer, indent); + try writer.writeAll( + \\"trustedDependencies": [ + \\ + ); + indent.* += 1; + var values_iter = found_trusted_dependencies.valueIterator(); + while (values_iter.next()) |dep_name| { + try writeIndent(writer, indent); + try writer.print( + \\"{s}", + \\ + , .{dep_name.slice(buf)}); + } + + try decIndent(writer, indent); + try writer.writeAll( + \\], + \\ + ); + } + + if (found_patched_dependencies.count() > 0) { + try writeIndent(writer, indent); + try writer.writeAll( + \\"patchedDependencies": { + \\ + ); + indent.* += 1; + var values_iter = found_patched_dependencies.valueIterator(); + while (values_iter.next()) |value| { + const name_and_version, const patch_path = value.*; + try writeIndent(writer, indent); + try writer.print( + \\"{s}": "{s}", + \\ + , .{ name_and_version, patch_path.slice(buf) }); + } + + try decIndent(writer, indent); + try writer.writeAll( + \\}, + \\ + ); + } + + if (found_overrides.count() > 0) { + try writeIndent(writer, indent); + try writer.writeAll( + \\"overrides": { + \\ + ); + indent.* += 1; + var values_iter = found_overrides.valueIterator(); + while (values_iter.next()) |value| { + const name, const version = value.*; + try writeIndent(writer, indent); + try writer.print( + \\"{s}": "{s}", + \\ + , .{ name.slice(buf), version.literal.slice(buf) }); + } + + try decIndent(writer, indent); + try writer.writeAll( + \\}, + \\ + ); + } + + try writeIndent(writer, indent); + try writer.writeAll("\"packages\": {"); + var first = true; + while (pkgs_iter.next({})) |node| { + for (node.dependencies) |dep_id| { + const pkg_id = resolution_buf[dep_id]; + if (pkg_id == invalid_package_id) continue; + + const res = pkg_resolution[pkg_id]; + switch (res.tag) { + .root, .npm, .folder, .local_tarball, .github, .git, .symlink, .workspace, .remote_tarball => {}, + .uninitialized => continue, + // should not be possible, just being 
safe + .single_file_module => continue, + else => continue, + } + + if (first) { + first = false; + try writer.writeByte('\n'); + try incIndent(writer, indent); + } else { + try writer.writeAll(",\n"); + try writeIndent(writer, indent); + } + + try writer.writeByte('"'); + // relative_path is empty string for root resolutions + try writer.writeAll(node.relative_path); + + if (node.depth != 0) { + try writer.writeByte('/'); + } + + const dep = deps_buf[dep_id]; + const dep_name = dep.name.slice(buf); + + try writer.print("{s}\": ", .{ + dep_name, + }); + + const pkg_name = pkg_names[pkg_id].slice(buf); + const pkg_meta = pkg_metas[pkg_id]; + const pkg_deps = pkg_dep_lists[pkg_id].get(deps_buf); + + // first index is resolution for all dependency types + // npm -> [ "name@version", registry or "" (default), deps..., integrity, ... ] + // symlink -> [ "name@link:path", deps..., ... ] + // folder -> [ "name@path", deps..., ... ] + // workspace -> [ "name@workspace:path", version or "", deps..., ... ] + // tarball -> [ "name@tarball", deps..., ... ] + // root -> [ "name@root:" ] + // git -> [ "name@git+repo", deps..., ... ] + // github -> [ "name@github:user/repo", deps..., ... ] + + var optional_peers_buf = std.ArrayList(String).init(allocator); + defer optional_peers_buf.deinit(); + + switch (res.tag) { + .root => { + try writer.print("[\"{}@root:\"]", .{ + bun.fmt.formatJSONStringUTF8(pkg_name, .{ .quote = false }), + // we don't read the root package version into the binary lockfile + }); + }, + .folder => { + try writer.print("[\"{s}@file:{}\", ", .{ + pkg_name, + bun.fmt.formatJSONStringUTF8(res.value.folder.slice(buf), .{ .quote = false }), + }); + + try writePackageDepsAndMeta(writer, pkg_deps, &pkg_meta, buf, &optional_peers_buf); + + try writer.print(", \"{}\"]", .{pkg_meta.integrity}); + }, + .local_tarball => { + try writer.print("[\"{s}@{}\", ", .{ + pkg_name, + bun.fmt.formatJSONStringUTF8(res.value.local_tarball.slice(buf), .{ .quote = false }), + }); + + try writePackageDepsAndMeta(writer, pkg_deps, &pkg_meta, buf, &optional_peers_buf); + + try writer.print(", \"{}\"]", .{pkg_meta.integrity}); + }, + .remote_tarball => { + try writer.print("[\"{s}@{}\", ", .{ + pkg_name, + bun.fmt.formatJSONStringUTF8(res.value.remote_tarball.slice(buf), .{ .quote = false }), + }); + + try writePackageDepsAndMeta(writer, pkg_deps, &pkg_meta, buf, &optional_peers_buf); + + try writer.print(", \"{}\"]", .{pkg_meta.integrity}); + }, + .symlink => { + try writer.print("[\"{s}@link:{}\", ", .{ + pkg_name, + bun.fmt.formatJSONStringUTF8(res.value.symlink.slice(buf), .{ .quote = false }), + }); + + try writePackageDepsAndMeta(writer, pkg_deps, &pkg_meta, buf, &optional_peers_buf); + + try writer.print(", \"{}\"]", .{pkg_meta.integrity}); + }, + .npm => { + try writer.print("[\"{s}@{}\", ", .{ + pkg_name, + res.value.npm.version.fmt(buf), + }); + + // only write the registry if it's not the default. 
empty string means default registry + try writer.print("\"{s}\", ", .{ + if (strings.hasPrefixComptime(res.value.npm.url.slice(buf), strings.withoutTrailingSlash(Npm.Registry.default_url))) + "" + else + res.value.npm.url.slice(buf), + }); + + try writePackageDepsAndMeta(writer, pkg_deps, &pkg_meta, buf, &optional_peers_buf); + + // TODO(dylan-conway): delete placeholder + try writer.print(", \"{}\"]", .{ + pkg_meta.integrity, + }); + }, + .workspace => { + const workspace_path = res.value.workspace.slice(buf); + + try writer.print("[\"{s}@workspace:{}\", ", .{ + pkg_name, + bun.fmt.formatJSONStringUTF8(workspace_path, .{ .quote = false }), + }); + + try writePackageDepsAndMeta(writer, pkg_deps, &pkg_meta, buf, &optional_peers_buf); + + try writer.writeByte(']'); + }, + inline .git, .github => |tag| { + const repo: Repository = @field(res.value, @tagName(tag)); + try writer.print("[\"{s}@{}\", ", .{ + pkg_name, + repo.fmt(if (comptime tag == .git) "git+" else "github:", buf), + }); + + try writePackageDepsAndMeta(writer, pkg_deps, &pkg_meta, buf, &optional_peers_buf); + + try writer.print(", \"{}\"]", .{pkg_meta.integrity}); + }, + else => unreachable, + } + } + } + + if (!first) { + try writer.writeByte('\n'); + try decIndent(writer, indent); + } + try writer.writeAll("}\n"); + } + try decIndent(writer, indent); + try writer.writeAll("}\n"); + + try buffered_writer.flush(); + return writer_buf.list.items; + } + + /// Writes a single line object. + /// { "devDependencies": { "one": "1.1.1", "two": "2.2.2" }, "os": "none" } + fn writePackageDepsAndMeta( + writer: anytype, + deps: []const Dependency, + meta: *const Meta, + buf: string, + optional_peers_buf: *std.ArrayList(String), + ) OOM!void { + defer optional_peers_buf.clearRetainingCapacity(); + + try writer.writeByte('{'); + + var any = false; + inline for (workspace_dependency_groups) |group| { + const group_name, const group_behavior = group; + + var first = true; + for (deps) |dep| { + if (!dep.behavior.includes(group_behavior)) continue; + + if (dep.behavior.isOptionalPeer()) { + // only write to "peerDependencies" + if (group_behavior.isOptional()) continue; + + try optional_peers_buf.append(dep.name); + } + + if (first) { + if (any) { + try writer.writeByte(','); + } + try writer.writeAll(" \"" ++ group_name ++ "\": { "); + first = false; + any = true; + } else { + try writer.writeAll(", "); + } + + try writer.print("\"{s}\": \"{s}\"", .{ + dep.name.slice(buf), + dep.version.literal.slice(buf), + }); + } + + if (!first) { + try writer.writeAll(" }"); + } + } + + if (optional_peers_buf.items.len > 0) { + bun.debugAssert(any); + try writer.writeAll( + \\, "optionalPeers": [ + ); + + for (optional_peers_buf.items, 0..) 
|optional_peer, i| {
+ // join entries with ", "; the final entry gets no trailing separator
+ try writer.print(
+ \\"{s}"{s}
+ , .{
+ optional_peer.slice(buf),
+ if (i != optional_peers_buf.items.len - 1) ", " else "",
+ });
+ }
+
+ try writer.writeByte(']');
+ }
+
+ // TODO(dylan-conway)
+ // if (meta.libc != .all) {
+ // try writer.writeAll(
+ // \\"libc": [
+ // );
+ // try Negatable(Npm.Libc).toJson(meta.libc, writer);
+ // try writer.writeAll("], ");
+ // }
+
+ if (meta.os != .all) {
+ if (any) {
+ try writer.writeByte(',');
+ } else {
+ any = true;
+ }
+ try writer.writeAll(
+ \\ "os":
+ );
+ try Negatable(Npm.OperatingSystem).toJson(meta.os, writer);
+ }
+
+ if (meta.arch != .all) {
+ if (any) {
+ try writer.writeByte(',');
+ } else {
+ any = true;
+ }
+ try writer.writeAll(
+ \\ "cpu":
+ );
+ try Negatable(Npm.Architecture).toJson(meta.arch, writer);
+ }
+
+ if (any) {
+ try writer.writeAll(" }");
+ } else {
+ try writer.writeByte('}');
+ }
+ }
+
+ fn writeWorkspaceDeps(
+ writer: anytype,
+ indent: *u32,
+ pkg_id: PackageID,
+ res: String,
+ pkg_names: []const String,
+ pkg_name_hashes: []const PackageNameHash,
+ pkg_deps: []const DependencySlice,
+ buf: string,
+ deps_buf: []const Dependency,
+ workspace_versions: BinaryLockfile.VersionHashMap,
+ ) OOM!void {
+ // any - have any properties been written
+ var any = false;
+
+ // always print the workspace key even if it doesn't have dependencies because we
+ // need a way to detect new/deleted workspaces
+ if (pkg_id == 0) {
+ try writer.writeAll("\"\": {");
+ } else {
+ try writer.print("{}: {{", .{
+ bun.fmt.formatJSONStringUTF8(res.slice(buf), .{}),
+ });
+ try writer.writeByte('\n');
+ try incIndent(writer, indent);
+ try writer.print("\"name\": \"{s}\"", .{
+ pkg_names[pkg_id].slice(buf),
+ });
+
+ if (workspace_versions.get(pkg_name_hashes[pkg_id])) |version| {
+ try writer.writeAll(",\n");
+ try writeIndent(writer, indent);
+ try writer.print("\"version\": \"{}\"", .{
+ version.fmt(buf),
+ });
+ }
+
+ any = true;
+ }
+
+ inline for (workspace_dependency_groups) |group| {
+ const group_name, const group_behavior = group;
+
+ var first = true;
+ for (pkg_deps[pkg_id].get(deps_buf)) |dep| {
+ if (!dep.behavior.includes(group_behavior)) continue;
+
+ if (first) {
+ if (any) {
+ try writer.writeByte(',');
+ }
+ try writer.writeByte('\n');
+ if (any) {
+ try writeIndent(writer, indent);
+ } else {
+ try incIndent(writer, indent);
+ }
+ try writer.writeAll("\"" ++ group_name ++ "\": {\n");
+ try incIndent(writer, indent);
+ any = true;
+ first = false;
+ } else {
+ try writer.writeAll(",\n");
+ try writeIndent(writer, indent);
+ }
+
+ const name = dep.name.slice(buf);
+ const version = dep.version.literal.slice(buf);
+
+ try writer.print("\"{s}\": \"{s}\"", .{ name, version });
+ }
+
+ if (!first) {
+ try writer.writeByte('\n');
+ try decIndent(writer, indent);
+ try writer.writeAll("}");
+ }
+ }
+ if (any) {
+ try writer.writeByte('\n');
+ try decIndent(writer, indent);
+ }
+ try writer.writeAll("}");
+ }
+
+ fn writeIndent(writer: anytype, indent: *const u32) OOM!void {
+ for (0..indent.*) |_| {
+ try writer.writeAll(" " ** indent_scalar);
+ }
+ }
+
+ fn incIndent(writer: anytype, indent: *u32) OOM!void {
+ indent.* += 1;
+ for (0..indent.*) |_| {
+ try writer.writeAll(" " ** indent_scalar);
+ }
+ }
+
+ fn decIndent(writer: anytype, indent: *u32) OOM!void {
+ indent.* -= 1;
+ for (0..indent.*) |_| {
+ try writer.writeAll(" " ** indent_scalar);
+ }
+ }
+};
+
+// each group name maps to its matching Dependency.Behavior flag
+const dependency_groups = [3]struct { []const u8, Dependency.Behavior }{
+ .{ "dependencies", 
Dependency.Behavior.normal },
+ .{ "peerDependencies", Dependency.Behavior.peer },
+ .{ "optionalDependencies", Dependency.Behavior.optional },
+};
+
+const workspace_dependency_groups = [4]struct { []const u8, Dependency.Behavior }{
+ .{ "dependencies", Dependency.Behavior.normal },
+ .{ "devDependencies", Dependency.Behavior.dev },
+ .{ "peerDependencies", Dependency.Behavior.peer },
+ .{ "optionalDependencies", Dependency.Behavior.optional },
+};
+
+const ParseError = OOM || error{
+ InvalidLockfileVersion,
+ InvalidOptionalValue,
+ InvalidPeerValue,
+ InvalidDefaultRegistry,
+ InvalidPatchedDependencies,
+ InvalidPatchedDependency,
+ InvalidWorkspaceObject,
+ InvalidPackagesObject,
+ InvalidPackagesProp,
+ InvalidPackageKey,
+ InvalidPackageInfo,
+ InvalidPackageSpecifier,
+ InvalidSemver,
+ InvalidPackagesTree,
+ InvalidTrustedDependenciesSet,
+ InvalidOverridesObject,
+ InvalidDependencyName,
+ InvalidDependencyVersion,
+ InvalidPackageResolution,
+ UnexpectedResolution,
+};
+
+pub fn parseIntoBinaryLockfile(
+ lockfile: *BinaryLockfile,
+ allocator: std.mem.Allocator,
+ root: JSON.Expr,
+ source: *const logger.Source,
+ log: *logger.Log,
+ manager: ?*PackageManager,
+) ParseError!void {
+ var temp_buf: std.ArrayListUnmanaged(u8) = .{};
+ defer temp_buf.deinit(allocator);
+
+ lockfile.initEmpty(allocator);
+
+ const lockfile_version_expr = root.get("lockfileVersion") orelse {
+ try log.addError(source, root.loc, "Missing lockfile version");
+ return error.InvalidLockfileVersion;
+ };
+
+ const lockfile_version: u32 = switch (lockfile_version_expr.data) {
+ .e_number => |num| @intFromFloat(std.math.divExact(f64, num.value, 1) catch return error.InvalidLockfileVersion),
+ else => return error.InvalidLockfileVersion,
+ };
+
+ lockfile.text_lockfile_version = std.meta.intToEnum(Version, lockfile_version) catch {
+ try log.addError(source, lockfile_version_expr.loc, "Unknown lockfile version");
+ return error.InvalidLockfileVersion;
+ };
+
+ var string_buf = String.Buf.init(allocator);
+
+ if (root.get("trustedDependencies")) |trusted_dependencies_expr| {
+ var trusted_dependencies: BinaryLockfile.TrustedDependenciesSet = .{};
+ if (!trusted_dependencies_expr.isArray()) {
+ try log.addError(source, trusted_dependencies_expr.loc, "Expected an array");
+ return error.InvalidTrustedDependenciesSet;
+ }
+
+ for (trusted_dependencies_expr.data.e_array.items.slice()) |dep| {
+ if (!dep.isString()) {
+ try log.addError(source, dep.loc, "Expected a string");
+ return error.InvalidTrustedDependenciesSet;
+ }
+ const name_hash: TruncatedPackageNameHash = @truncate((try dep.asStringHash(allocator, String.Builder.stringHash)).?);
+ try trusted_dependencies.put(allocator, name_hash, {});
+ }
+
+ lockfile.trusted_dependencies = trusted_dependencies;
+ }
+
+ if (root.get("patchedDependencies")) |patched_dependencies_expr| {
+ if (!patched_dependencies_expr.isObject()) {
+ try log.addError(source, patched_dependencies_expr.loc, "Expected an object");
+ return error.InvalidPatchedDependencies;
+ }
+
+ for (patched_dependencies_expr.data.e_object.properties.slice()) |prop| {
+ const key = prop.key.?;
+ const value = prop.value.?;
+ if (!key.isString()) {
+ try log.addError(source, key.loc, "Expected a string");
+ return error.InvalidPatchedDependencies;
+ }
+
+ if (!value.isString()) {
+ try log.addError(source, value.loc, "Expected a string");
+ return error.InvalidPatchedDependencies;
+ }
+
+ const key_hash = (try key.asStringHash(allocator, String.Builder.stringHash)).?;
+ try 
lockfile.patched_dependencies.put( + allocator, + key_hash, + .{ .path = try string_buf.append(value.asString(allocator).?) }, + ); + } + } + + if (root.get("overrides")) |overrides_expr| { + if (!overrides_expr.isObject()) { + try log.addError(source, overrides_expr.loc, "Expected an object"); + return error.InvalidOverridesObject; + } + + for (overrides_expr.data.e_object.properties.slice()) |prop| { + const key = prop.key.?; + const value = prop.value.?; + + if (!key.isString() or key.data.e_string.len() == 0) { + try log.addError(source, key.loc, "Expected a non-empty string"); + return error.InvalidOverridesObject; + } + + const name_str = key.asString(allocator).?; + const name_hash = String.Builder.stringHash(name_str); + const name = try string_buf.appendWithHash(name_str, name_hash); + + // TODO(dylan-conway) also accept object when supported + if (!value.isString()) { + try log.addError(source, value.loc, "Expected a string"); + return error.InvalidOverridesObject; + } + + const version_str = value.asString(allocator).?; + const version_hash = String.Builder.stringHash(version_str); + const version = try string_buf.appendWithHash(version_str, version_hash); + const version_sliced = version.sliced(string_buf.bytes.items); + + const dep: Dependency = .{ + .name = name, + .name_hash = name_hash, + .version = Dependency.parse( + allocator, + name, + name_hash, + version_sliced.slice, + &version_sliced, + log, + manager, + ) orelse { + try log.addError(source, value.loc, "Invalid override version"); + return error.InvalidOverridesObject; + }, + }; + + try lockfile.overrides.map.put(allocator, name_hash, dep); + } + } + + const workspaces = root.getObject("workspaces") orelse { + try log.addError(source, root.loc, "Missing a workspaces object property"); + return error.InvalidWorkspaceObject; + }; + + var maybe_root_pkg: ?Expr = null; + + for (workspaces.data.e_object.properties.slice()) |prop| { + const key = prop.key.?; + const value: Expr = prop.value.?; + if (!key.isString()) { + try log.addError(source, key.loc, "Expected a string"); + return error.InvalidWorkspaceObject; + } + if (!value.isObject()) { + try log.addError(source, value.loc, "Expected an object"); + return error.InvalidWorkspaceObject; + } + + const path = key.asString(allocator).?; + + if (path.len == 0) { + if (maybe_root_pkg != null) { + try log.addError(source, key.loc, "Duplicate root package"); + return error.InvalidWorkspaceObject; + } + + maybe_root_pkg = value; + continue; + } + + const name_expr: Expr = value.get("name") orelse { + try log.addError(source, value.loc, "Expected a string name property"); + return error.InvalidWorkspaceObject; + }; + + const name_hash = try name_expr.asStringHash(allocator, String.Builder.stringHash) orelse { + try log.addError(source, name_expr.loc, "Expected a string name property"); + return error.InvalidWorkspaceObject; + }; + + try lockfile.workspace_paths.put(allocator, name_hash, try string_buf.append(path)); + + // versions are optional + if (value.get("version")) |version_expr| { + if (!version_expr.isString()) { + try log.addError(source, version_expr.loc, "Expected a string version property"); + return error.InvalidWorkspaceObject; + } + + const version_str = try string_buf.append(version_expr.asString(allocator).?); + + const parsed = Semver.Version.parse(version_str.sliced(string_buf.bytes.items)); + if (!parsed.valid) { + try log.addError(source, version_expr.loc, "Invalid semver version"); + return error.InvalidSemver; + } + + try 
lockfile.workspace_versions.put(allocator, name_hash, parsed.version.min()); + } + } + + var optional_peers_buf: std.AutoHashMapUnmanaged(u64, void) = .{}; + defer optional_peers_buf.deinit(allocator); + + if (maybe_root_pkg) |root_pkg| { + // TODO(dylan-conway): maybe sort this. behavior is already sorted, but names are not + const maybe_name = if (root_pkg.get("name")) |name| name.asString(allocator) orelse { + try log.addError(source, name.loc, "Expected a string"); + return error.InvalidWorkspaceObject; + } else null; + + const off, const len = try parseAppendDependencies(lockfile, allocator, &root_pkg, &string_buf, log, source, &optional_peers_buf); + + var pkg: BinaryLockfile.Package = .{}; + pkg.meta.id = 0; + + if (maybe_name) |name| { + const name_hash = String.Builder.stringHash(name); + pkg.name = try string_buf.appendWithHash(name, name_hash); + pkg.name_hash = name_hash; + } + + pkg.dependencies = .{ .off = off, .len = len }; + pkg.resolutions = .{ .off = off, .len = len }; + + try lockfile.packages.append(allocator, pkg); + } else { + try log.addError(source, workspaces.loc, "Expected root package"); + return error.InvalidWorkspaceObject; + } + + var pkg_map = PkgPath.Map.init(); + defer pkg_map.deinit(allocator); + + if (root.get("packages")) |pkgs_expr| { + if (!pkgs_expr.isObject()) { + try log.addError(source, pkgs_expr.loc, "Expected an object"); + return error.InvalidPackagesObject; + } + + for (pkgs_expr.data.e_object.properties.slice()) |prop| { + const key = prop.key.?; + const value = prop.value.?; + + const pkg_path = key.asString(allocator) orelse { + try log.addError(source, key.loc, "Expected a string"); + return error.InvalidPackageKey; + }; + + if (!value.isArray()) { + try log.addError(source, value.loc, "Expected an array"); + return error.InvalidPackageInfo; + } + + var i: usize = 0; + const pkg_info = value.data.e_array.items; + + if (pkg_info.len == 0) { + try log.addError(source, value.loc, "Missing package info"); + return error.InvalidPackageInfo; + } + + const res_info = pkg_info.at(i); + i += 1; + + const res_info_str = res_info.asString(allocator) orelse { + try log.addError(source, res_info.loc, "Expected a string"); + return error.InvalidPackageResolution; + }; + + const name_str, const res_str = Dependency.splitNameAndVersion(res_info_str) catch { + try log.addError(source, res_info.loc, "Invalid package resolution"); + return error.InvalidPackageResolution; + }; + + const name_hash = String.Builder.stringHash(name_str); + const name = try string_buf.append(name_str); + + var res = Resolution.fromTextLockfile(res_str, &string_buf) catch |err| switch (err) { + error.OutOfMemory => return err, + error.UnexpectedResolution => { + try log.addErrorFmt(source, res_info.loc, allocator, "Unexpected resolution: {s}", .{res_str}); + return err; + }, + error.InvalidSemver => { + try log.addErrorFmt(source, res_info.loc, allocator, "Invalid package version: {s}", .{res_str}); + return err; + }, + }; + + if (res.tag == .npm) { + if (pkg_info.len < 2) { + try log.addError(source, value.loc, "Missing npm registry"); + return error.InvalidPackageInfo; + } + + const registry_expr = pkg_info.at(i); + i += 1; + + const registry_str = registry_expr.asString(allocator) orelse { + try log.addError(source, registry_expr.loc, "Expected a string"); + return error.InvalidPackageInfo; + }; + + if (registry_str.len == 0) { + const url = try ExtractTarball.buildURL( + Npm.Registry.default_url, + strings.StringOrTinyString.init(name.slice(string_buf.bytes.items)), + 
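+ // An empty registry string in the text lockfile means the default registry,
+ // so the canonical tarball URL is rebuilt here (illustrative example, assuming
+ // the standard npm layout: react@18.2.0 would resolve to
+ // https://registry.npmjs.org/react/-/react-18.2.0.tgz).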
res.value.npm.version, + string_buf.bytes.items, + ); + + res.value.npm.url = try string_buf.append(url); + } else { + res.value.npm.url = try string_buf.append(registry_str); + } + } + + var pkg: BinaryLockfile.Package = .{}; + + // dependencies, os, cpu, libc + switch (res.tag) { + .npm, .folder, .git, .github, .local_tarball, .remote_tarball, .symlink, .workspace => { + const deps_os_cpu_libc_obj = pkg_info.at(i); + i += 1; + if (!deps_os_cpu_libc_obj.isObject()) { + try log.addError(source, deps_os_cpu_libc_obj.loc, "Expected an object"); + return error.InvalidPackageInfo; + } + + // TODO(dylan-conway): maybe sort this. behavior is already sorted, but names are not + const off, const len = try parseAppendDependencies(lockfile, allocator, deps_os_cpu_libc_obj, &string_buf, log, source, &optional_peers_buf); + + pkg.dependencies = .{ .off = off, .len = len }; + pkg.resolutions = .{ .off = off, .len = len }; + + if (res.tag != .workspace) { + if (deps_os_cpu_libc_obj.get("os")) |os| { + pkg.meta.os = try Negatable(Npm.OperatingSystem).fromJson(allocator, os); + } + if (deps_os_cpu_libc_obj.get("cpu")) |arch| { + pkg.meta.arch = try Negatable(Npm.Architecture).fromJson(allocator, arch); + } + // TODO(dylan-conway) + // if (os_cpu_libc_obj.get("libc")) |libc| { + // pkg.meta.libc = Negatable(Npm.Libc).fromJson(allocator, libc); + // } + } + }, + else => {}, + } + + // integrity + switch (res.tag) { + .npm, .git, .github => { + const integrity_expr = pkg_info.at(i); + i += 1; + const integrity_str = integrity_expr.asString(allocator) orelse { + try log.addError(source, integrity_expr.loc, "Expected a string"); + return error.InvalidPackageInfo; + }; + + pkg.meta.integrity = Integrity.parse(integrity_str); + }, + else => {}, + } + + pkg.name = name; + pkg.name_hash = name_hash; + pkg.resolution = res; + + // set later + pkg.bin = .{ + .unset = 1, + }; + pkg.scripts = .{}; + + const pkg_id = try lockfile.appendPackageDedupe(&pkg, string_buf.bytes.items); + + pkg_map.insert(allocator, pkg_path, pkg_id) catch |err| { + switch (err) { + error.OutOfMemory => |oom| return oom, + error.DuplicatePackagePath => { + try log.addError(source, key.loc, "Duplicate package path"); + }, + error.InvalidPackageKey => { + try log.addError(source, key.loc, "Invalid package path"); + }, + } + return error.InvalidPackageKey; + }; + } + + try lockfile.buffers.resolutions.ensureTotalCapacityPrecise(allocator, lockfile.buffers.dependencies.items.len); + lockfile.buffers.resolutions.expandToCapacity(); + @memset(lockfile.buffers.resolutions.items, invalid_package_id); + + const pkgs = lockfile.packages.slice(); + const pkg_names = pkgs.items(.name); + _ = pkg_names; + const pkg_name_hashes = pkgs.items(.name_hash); + _ = pkg_name_hashes; + const pkg_deps = pkgs.items(.dependencies); + var pkg_metas = pkgs.items(.meta); + var pkg_resolutions = pkgs.items(.resolution); + const pkg_resolution_lists = pkgs.items(.resolutions); + _ = pkg_resolution_lists; + + { + // root pkg + pkg_resolutions[0] = Resolution.init(.{ .root = {} }); + pkg_metas[0].origin = .local; + + for (pkg_deps[0].begin()..pkg_deps[0].end()) |_dep_id| { + const dep_id: DependencyID = @intCast(_dep_id); + const dep = lockfile.buffers.dependencies.items[dep_id]; + + if (pkg_map.root.nodes.getPtr(dep.name.slice(string_buf.bytes.items))) |dep_node| { + dep_node.dep_id = dep_id; + lockfile.buffers.resolutions.items[dep_id] = dep_node.pkg_id; + } + } + } + + for (pkgs_expr.data.e_object.properties.slice()) |prop| { + const key = prop.key.?; + const value = 
prop.value.?; + + const pkg_path = key.asString(allocator).?; + const i: usize = 0; + _ = i; + const pkg_info = value.data.e_array.items; + _ = pkg_info; + + const pkg_map_entry = try pkg_map.get(pkg_path) orelse { + return error.InvalidPackagesObject; + }; + + const pkg_id = pkg_map_entry.pkg_id; + + // find resolutions. iterate up to root through the pkg path. + deps: for (pkg_deps[pkg_id].begin()..pkg_deps[pkg_id].end()) |_dep_id| { + const dep_id: DependencyID = @intCast(_dep_id); + const dep = lockfile.buffers.dependencies.items[dep_id]; + + var curr: ?*PkgPath.Map.Node = pkg_map_entry; + while (curr) |node| { + if (node.nodes.getPtr(dep.name.slice(string_buf.bytes.items))) |dep_node| { + + // it doesn't matter which dependency is assigned to this node. the dependency + // id will only be used for getting the dependency name + dep_node.dep_id = dep_id; + lockfile.buffers.resolutions.items[dep_id] = dep_node.pkg_id; + + continue :deps; + } + curr = node.parent orelse if (curr != &pkg_map.root) &pkg_map.root else null; + } + } + } + + { + // ids are assigned, now flatten into `lockfile.buffers.trees` and `lockfile.buffers.hoisted_dependencies` + var tree_iter = try pkg_map.iterate(allocator); + defer tree_iter.deinit(allocator); + var tree_id: BinaryLockfile.Tree.Id = 0; + while (try tree_iter.next(allocator)) |tree| { + bun.debugAssert(tree_id == tree.id); + const deps_off: u32 = @intCast(lockfile.buffers.hoisted_dependencies.items.len); + const deps_len: u32 = @intCast(tree.dep_ids.len); + try lockfile.buffers.hoisted_dependencies.appendSlice(allocator, tree.dep_ids); + try lockfile.buffers.trees.append( + allocator, + .{ + .dependency_id = tree.tree_dep_id, + .id = tree_id, + .parent = tree.parent_id, + .dependencies = .{ + .off = deps_off, + .len = deps_len, + }, + }, + ); + + tree_id += 1; + } + } + } + + lockfile.buffers.string_bytes = string_buf.bytes.moveToUnmanaged(); + lockfile.string_pool = string_buf.pool; +} + +fn parseAppendDependencies( + lockfile: *BinaryLockfile, + allocator: std.mem.Allocator, + obj: *const Expr, + buf: *String.Buf, + log: *logger.Log, + source: *const logger.Source, + optional_peers_buf: *std.AutoHashMapUnmanaged(u64, void), +) ParseError!struct { u32, u32 } { + defer optional_peers_buf.clearRetainingCapacity(); + + if (obj.get("optionalPeers")) |optional_peers| { + if (!optional_peers.isArray()) { + try log.addError(source, optional_peers.loc, "Expected an array"); + return error.InvalidPackageInfo; + } + + for (optional_peers.data.e_array.items.slice()) |item| { + const name_hash = try item.asStringHash(allocator, String.Builder.stringHash) orelse { + try log.addError(source, item.loc, "Expected a string"); + return error.InvalidPackageInfo; + }; + + try optional_peers_buf.put(allocator, name_hash, {}); + } + } + + const off = lockfile.buffers.dependencies.items.len; + inline for (workspace_dependency_groups) |dependency_group| { + const group_name, const group_behavior = dependency_group; + if (obj.get(group_name)) |deps| { + if (!deps.isObject()) { + try log.addError(source, deps.loc, "Expected an object"); + return error.InvalidPackagesTree; + } + + for (deps.data.e_object.properties.slice()) |prop| { + const key = prop.key.?; + const value = prop.value.?; + + const name_str = key.asString(allocator) orelse { + try log.addError(source, key.loc, "Expected a string"); + return error.InvalidDependencyName; + }; + + const name_hash = String.Builder.stringHash(name_str); + const name = try buf.appendExternalWithHash(name_str, name_hash); + + const 
version_str = value.asString(allocator) orelse { + try log.addError(source, value.loc, "Expected a string"); + return error.InvalidDependencyVersion; + }; + + const version = try buf.append(version_str); + const version_sliced = version.sliced(buf.bytes.items); + + var dep: Dependency = .{ + .name = name.value, + .name_hash = name.hash, + .behavior = group_behavior, + .version = Dependency.parse( + allocator, + name.value, + name.hash, + version_sliced.slice, + &version_sliced, + log, + null, + ) orelse { + try log.addError(source, value.loc, "Invalid dependency version"); + return error.InvalidDependencyVersion; + }, + }; + + if (dep.behavior.isPeer() and optional_peers_buf.contains(name.hash)) { + dep.behavior.optional = true; + } + + try lockfile.buffers.dependencies.append(allocator, dep); + } + } + } + const end = lockfile.buffers.dependencies.items.len; + + return .{ @intCast(off), @intCast(end - off) }; +} diff --git a/src/install/dependency.zig b/src/install/dependency.zig index 6f3667988e4f7a..faeb476d2d128d 100644 --- a/src/install/dependency.zig +++ b/src/install/dependency.zig @@ -265,7 +265,7 @@ pub inline fn isRemoteTarball(dependency: string) bool { } /// Turns `foo@1.1.1` into `foo`, `1.1.1`, or `@foo/bar@1.1.1` into `@foo/bar`, `1.1.1`, or `foo` into `foo`, `null`. -pub fn splitNameAndVersion(str: string) struct { string, ?string } { +pub fn splitNameAndMaybeVersion(str: string) struct { string, ?string } { if (strings.indexOfChar(str, '@')) |at_index| { if (at_index != 0) { return .{ str[0..at_index], if (at_index + 1 < str.len) str[at_index + 1 ..] else null }; @@ -279,6 +279,14 @@ pub fn splitNameAndVersion(str: string) struct { string, ?string } { return .{ str, null }; } +pub fn splitNameAndVersion(str: string) error{MissingVersion}!struct { string, string } { + const name, const version = splitNameAndMaybeVersion(str); + return .{ + name, + version orelse return error.MissingVersion, + }; +} + pub fn unscopedPackageName(name: []const u8) []const u8 { if (name[0] != '@') return name; var name_ = name; @@ -1375,6 +1383,10 @@ pub const Behavior = packed struct(u8) { return @as(u8, @bitCast(lhs)) == @as(u8, @bitCast(rhs)); } + pub inline fn includes(lhs: Behavior, rhs: Behavior) bool { + return @as(u8, @bitCast(lhs)) & @as(u8, @bitCast(rhs)) != 0; + } + pub inline fn cmp(lhs: Behavior, rhs: Behavior) std.math.Order { if (eq(lhs, rhs)) { return .eq; diff --git a/src/install/extract_tarball.zig b/src/install/extract_tarball.zig index 8ca72a1fc85472..8959383769515d 100644 --- a/src/install/extract_tarball.zig +++ b/src/install/extract_tarball.zig @@ -18,6 +18,7 @@ const strings = @import("../string_immutable.zig"); const Path = @import("../resolver/resolve_path.zig"); const Environment = bun.Environment; const w = std.os.windows; +const OOM = bun.OOM; const ExtractTarball = @This(); @@ -60,43 +61,12 @@ pub fn buildURL( string_buf, @TypeOf(FileSystem.instance.dirname_store), string, - anyerror, + OOM, FileSystem.instance.dirname_store, FileSystem.DirnameStore.print, ); } -pub fn buildURLWithWriter( - comptime Writer: type, - writer: Writer, - registry_: string, - full_name_: strings.StringOrTinyString, - version: Semver.Version, - string_buf: []const u8, -) !void { - const Printer = struct { - writer: Writer, - - pub fn print(this: @This(), comptime fmt: string, args: anytype) Writer.Error!void { - return try std.fmt.format(this.writer, fmt, args); - } - }; - - return try buildURLWithPrinter( - registry_, - full_name_, - version, - string_buf, - Printer, - void, - 
Writer.Error, - Printer{ - .writer = writer, - }, - Printer.print, - ); -} - pub fn buildURLWithPrinter( registry_: string, full_name_: strings.StringOrTinyString, diff --git a/src/install/install.zig b/src/install/install.zig index d1b14b2ae6fe31..2ff59a506036b2 100644 --- a/src/install/install.zig +++ b/src/install/install.zig @@ -24,6 +24,7 @@ const DirInfo = @import("../resolver/dir_info.zig"); const File = bun.sys.File; const JSLexer = bun.js_lexer; const logger = bun.logger; +const OOM = bun.OOM; const js_parser = bun.js_parser; const JSON = bun.JSON; @@ -147,7 +148,7 @@ const ExternalString = Semver.ExternalString; const String = Semver.String; const GlobalStringBuilder = @import("../string_builder.zig"); const SlicedString = Semver.SlicedString; -const Repository = @import("./repository.zig").Repository; +pub const Repository = @import("./repository.zig").Repository; pub const Bin = @import("./bin.zig").Bin; pub const Dependency = @import("./dependency.zig"); const Behavior = @import("./dependency.zig").Behavior; @@ -211,6 +212,7 @@ pub fn ExternalSliceAligned(comptime Type: type, comptime alignment_: ?u29) type pub const PackageID = u32; pub const DependencyID = u32; pub const invalid_package_id = std.math.maxInt(PackageID); +pub const invalid_dependency_id = std.math.maxInt(DependencyID); pub const ExternalStringList = ExternalSlice(ExternalString); pub const VersionSlice = ExternalSlice(Semver.Version); @@ -988,12 +990,12 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type { progress: ProgressT, - package_name: string, + package_name: String, package_version: string, patch: Patch = .{}, file_count: u32 = 0, node_modules: *const PackageManager.NodeModulesFolder, - lockfile: *const Lockfile, + lockfile: *Lockfile, const ThisPackageInstall = @This(); @@ -1093,7 +1095,7 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type { fn verifyPatchHash( this: *@This(), root_node_modules_dir: std.fs.Dir, - ) bool { + ) VerifyResult { bun.debugAssert(!this.patch.isNull()); // hash from the .patch file, to be checked against bun tag @@ -1106,20 +1108,22 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type { bunhashtag, }, .posix); - var destination_dir = this.node_modules.openDir(root_node_modules_dir) catch return false; + var destination_dir = this.node_modules.openDir(root_node_modules_dir) catch return .{}; defer { if (std.fs.cwd().fd != destination_dir.fd) destination_dir.close(); } if (comptime bun.Environment.isPosix) { - _ = bun.sys.fstatat(bun.toFD(destination_dir.fd), patch_tag_path).unwrap() catch return false; + _ = bun.sys.fstatat(bun.toFD(destination_dir.fd), patch_tag_path).unwrap() catch return .{}; } else { switch (bun.sys.openat(bun.toFD(destination_dir.fd), patch_tag_path, bun.O.RDONLY, 0)) { - .err => return false, + .err => return .{}, .result => |fd| _ = bun.sys.close(fd), } } - return true; + return .{ + .valid = true, + }; } // 1. verify that .bun-tag exists (was it installed from bun?) @@ -1127,9 +1131,8 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type { fn verifyGitResolution( this: *@This(), repo: *const Repository, - buf: []const u8, root_node_modules_dir: std.fs.Dir, - ) bool { + ) VerifyResult { bun.copy(u8, this.destination_dir_subpath_buf[this.destination_dir_subpath.len..], std.fs.path.sep_str ++ ".bun-tag"); this.destination_dir_subpath_buf[this.destination_dir_subpath.len + std.fs.path.sep_str.len + ".bun-tag".len] = 0; const bun_tag_path: [:0]u8 = this.destination_dir_subpath_buf[0 .. 
this.destination_dir_subpath.len + std.fs.path.sep_str.len + ".bun-tag".len :0]; @@ -1137,7 +1140,7 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type { var git_tag_stack_fallback = std.heap.stackFallback(2048, bun.default_allocator); const allocator = git_tag_stack_fallback.get(); - var destination_dir = this.node_modules.openDir(root_node_modules_dir) catch return false; + var destination_dir = this.node_modules.openDir(root_node_modules_dir) catch return .{}; defer { if (std.fs.cwd().fd != destination_dir.fd) destination_dir.close(); } @@ -1146,120 +1149,138 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type { destination_dir, bun_tag_path, allocator, - ).unwrap() catch return false; + ).unwrap() catch return .{}; defer allocator.free(bun_tag_file); - return strings.eqlLong(repo.resolved.slice(buf), bun_tag_file, true); + return .{ + .valid = strings.eqlLong(repo.resolved.slice(this.lockfile.buffers.string_bytes.items), bun_tag_file, true), + }; } pub fn verify( this: *@This(), resolution: *const Resolution, - buf: []const u8, root_node_modules_dir: std.fs.Dir, - ) bool { + bin: *Bin, + ) VerifyResult { const verified = switch (resolution.tag) { - .git => this.verifyGitResolution(&resolution.value.git, buf, root_node_modules_dir), - .github => this.verifyGitResolution(&resolution.value.github, buf, root_node_modules_dir), + .git => this.verifyGitResolution(&resolution.value.git, root_node_modules_dir), + .github => this.verifyGitResolution(&resolution.value.github, root_node_modules_dir), .root => this.verifyTransitiveSymlinkedFolder(root_node_modules_dir), .folder => if (this.lockfile.isWorkspaceTreeId(this.node_modules.tree_id)) - this.verifyPackageJSONNameAndVersion(root_node_modules_dir, resolution.tag) + this.verifyPackageJSONNameAndVersion(root_node_modules_dir, resolution.tag, bin) else this.verifyTransitiveSymlinkedFolder(root_node_modules_dir), - else => this.verifyPackageJSONNameAndVersion(root_node_modules_dir, resolution.tag), + else => this.verifyPackageJSONNameAndVersion(root_node_modules_dir, resolution.tag, bin), }; if (comptime kind == .patch) return verified; if (this.patch.isNull()) return verified; - if (!verified) return false; + if (!verified.valid) return verified; return this.verifyPatchHash(root_node_modules_dir); } // Only check for destination directory in node_modules. 
We can't use package.json because // it might not exist - fn verifyTransitiveSymlinkedFolder(this: *@This(), root_node_modules_dir: std.fs.Dir) bool { - var destination_dir = this.node_modules.openDir(root_node_modules_dir) catch return false; + fn verifyTransitiveSymlinkedFolder(this: *@This(), root_node_modules_dir: std.fs.Dir) VerifyResult { + var destination_dir = this.node_modules.openDir(root_node_modules_dir) catch return .{}; defer destination_dir.close(); - return bun.sys.directoryExistsAt(destination_dir.fd, this.destination_dir_subpath).unwrap() catch false; + const exists = bun.sys.directoryExistsAt(destination_dir.fd, this.destination_dir_subpath).unwrap() catch return .{}; + return if (exists) .{ .valid = true } else .{}; } - fn verifyPackageJSONNameAndVersion(this: *PackageInstall, root_node_modules_dir: std.fs.Dir, resolution_tag: Resolution.Tag) bool { - const allocator = this.allocator; + const VerifyResult = struct { + valid: bool = false, + update_lockfile_pointers: bool = false, + }; + + fn getInstalledPackageJsonSource( + this: *PackageInstall, + root_node_modules_dir: std.fs.Dir, + mutable: *MutableString, + resolution_tag: Resolution.Tag, + ) ?logger.Source { var total: usize = 0; var read: usize = 0; + mutable.reset(); + mutable.list.expandToCapacity(); + bun.copy(u8, this.destination_dir_subpath_buf[this.destination_dir_subpath.len..], std.fs.path.sep_str ++ "package.json"); + this.destination_dir_subpath_buf[this.destination_dir_subpath.len + std.fs.path.sep_str.len + "package.json".len] = 0; + const package_json_path: [:0]u8 = this.destination_dir_subpath_buf[0 .. this.destination_dir_subpath.len + std.fs.path.sep_str.len + "package.json".len :0]; + defer this.destination_dir_subpath_buf[this.destination_dir_subpath.len] = 0; - var body_pool = Npm.Registry.BodyPool.get(allocator); - var mutable: MutableString = body_pool.data; + var destination_dir = this.node_modules.openDir(root_node_modules_dir) catch return null; defer { - body_pool.data = mutable; - Npm.Registry.BodyPool.release(body_pool); + if (std.fs.cwd().fd != destination_dir.fd) destination_dir.close(); } - // Read the file - // Return false on any error. - // Don't keep it open while we're parsing the JSON. - // The longer the file stays open, the more likely it causes issues for - // other processes on Windows. - const source = brk: { - mutable.reset(); + var package_json_file = File.openat(destination_dir, package_json_path, bun.O.RDONLY, 0).unwrap() catch return null; + defer package_json_file.close(); + + // Heuristic: most package.jsons will be less than 2048 bytes. + read = package_json_file.read(mutable.list.items[total..]).unwrap() catch return null; + var remain = mutable.list.items[@min(total, read)..]; + if (read > 0 and remain.len < 1024) { + mutable.growBy(4096) catch return null; mutable.list.expandToCapacity(); - bun.copy(u8, this.destination_dir_subpath_buf[this.destination_dir_subpath.len..], std.fs.path.sep_str ++ "package.json"); - this.destination_dir_subpath_buf[this.destination_dir_subpath.len + std.fs.path.sep_str.len + "package.json".len] = 0; - const package_json_path: [:0]u8 = this.destination_dir_subpath_buf[0 .. 
this.destination_dir_subpath.len + std.fs.path.sep_str.len + "package.json".len :0]; - defer this.destination_dir_subpath_buf[this.destination_dir_subpath.len] = 0; + } - var destination_dir = this.node_modules.openDir(root_node_modules_dir) catch return false; - defer { - if (std.fs.cwd().fd != destination_dir.fd) destination_dir.close(); - } + while (read > 0) : (read = package_json_file.read(remain).unwrap() catch return null) { + total += read; - var package_json_file = File.openat(destination_dir, package_json_path, bun.O.RDONLY, 0).unwrap() catch return false; - defer package_json_file.close(); + mutable.list.expandToCapacity(); + remain = mutable.list.items[total..]; - // Heuristic: most package.jsons will be less than 2048 bytes. - read = package_json_file.read(mutable.list.items[total..]).unwrap() catch return false; - var remain = mutable.list.items[@min(total, read)..]; - if (read > 0 and remain.len < 1024) { - mutable.growBy(4096) catch return false; - mutable.list.expandToCapacity(); + if (remain.len < 1024) { + mutable.growBy(4096) catch return null; } + mutable.list.expandToCapacity(); + remain = mutable.list.items[total..]; + } - while (read > 0) : (read = package_json_file.read(remain).unwrap() catch return false) { - total += read; - - mutable.list.expandToCapacity(); - remain = mutable.list.items[total..]; + // If it's not long enough to have {"name": "foo", "version": "1.2.0"}, there's no way it's valid + const minimum = if (resolution_tag == .workspace and this.package_version.len == 0) + // workspaces aren't required to have a version + "{\"name\":\"\"}".len + this.package_name.len() + else + "{\"name\":\"\",\"version\":\"\"}".len + this.package_name.len() + this.package_version.len; - if (remain.len < 1024) { - mutable.growBy(4096) catch return false; - } - mutable.list.expandToCapacity(); - remain = mutable.list.items[total..]; - } + if (total < minimum) return null; - // If it's not long enough to have {"name": "foo", "version": "1.2.0"}, there's no way it's valid - const minimum = if (resolution_tag == .workspace and this.package_version.len == 0) - // workspaces aren't required to have a version - "{\"name\":\"\"}".len + this.package_name.len - else - "{\"name\":\"\",\"version\":\"\"}".len + this.package_name.len + this.package_version.len; + return logger.Source.initPathString(bun.span(package_json_path), mutable.list.items[0..total]); + } - if (total < minimum) return false; + fn verifyPackageJSONNameAndVersion(this: *PackageInstall, root_node_modules_dir: std.fs.Dir, resolution_tag: Resolution.Tag, bin: *Bin) VerifyResult { + var body_pool = Npm.Registry.BodyPool.get(this.allocator); + var mutable: MutableString = body_pool.data; + defer { + body_pool.data = mutable; + Npm.Registry.BodyPool.release(body_pool); + } - break :brk logger.Source.initPathString(bun.span(package_json_path), mutable.list.items[0..total]); - }; + // Read the file + // Return false on any error. + // Don't keep it open while we're parsing the JSON. + // The longer the file stays open, the more likely it causes issues for + // other processes on Windows. 
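+ // `getInstalledPackageJsonSource` (above) performs that read: it fills the
+ // pooled `mutable` buffer and returns a `logger.Source`, or null on any
+ // failure, which the `orelse` below maps to `.{}` (a `VerifyResult` with
+ // `valid = false`).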
+ const source = this.getInstalledPackageJsonSource(root_node_modules_dir, &mutable, resolution_tag) orelse return .{}; - var log = logger.Log.init(allocator); + var log = logger.Log.init(this.allocator); defer log.deinit(); initializeStore(); - var package_json_checker = JSON.PackageJSONVersionChecker.init(allocator, &source, &log) catch return false; - _ = package_json_checker.parseExpr() catch return false; - if (log.errors > 0 or !package_json_checker.has_found_name) return false; + var package_json_checker = JSON.PackageJSONVersionChecker.init( + this.allocator, + &source, + &log, + if (bin.isUnset()) .check_for_bin else .ignore_bin, + ) catch return .{}; + _ = package_json_checker.parseExpr(false, false) catch return .{}; + if (log.errors > 0 or !package_json_checker.has_found_name) return .{}; // workspaces aren't required to have a version - if (!package_json_checker.has_found_version and resolution_tag != .workspace) return false; + if (!package_json_checker.has_found_version and resolution_tag != .workspace) return .{}; const found_version = package_json_checker.found_version; @@ -1292,14 +1313,40 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type { } // If we didn't find any of these characters, there's no point in checking the version again. // it will never match. - return false; + return .{}; }; - if (!strings.eql(found_version[offset..], this.package_version)) return false; + if (!strings.eql(found_version[offset..], this.package_version)) return .{}; } // lastly, check the name. - return strings.eql(package_json_checker.found_name, this.package_name); + if (strings.eql(package_json_checker.found_name, this.package_name.slice(this.lockfile.buffers.string_bytes.items))) { + // only want to set bins if up-to-date + if (bin.isUnset() and package_json_checker.has_found_bin) { + var string_buf = this.lockfile.stringBuf(); + defer string_buf.apply(this.lockfile); + + switch (package_json_checker.found_bin) { + .bin => |expr| { + bin.* = Bin.parseAppend(this.lockfile.allocator, expr, &string_buf, &this.lockfile.buffers.extern_strings) catch bun.outOfMemory(); + }, + .dir => |expr| { + bin.* = Bin.parseAppendFromDirectories(this.lockfile.allocator, expr, &string_buf) catch bun.outOfMemory(); + }, + } + + return .{ + .valid = true, + .update_lockfile_pointers = true, + }; + } + + return .{ + .valid = true, + }; + } + + return .{}; } pub const Result = union(Tag) { @@ -2952,7 +2999,7 @@ pub const PackageManager = struct { pub const LifecycleScriptTimeLog = struct { const Entry = struct { - package_name: []const u8, + package_name: string, script_id: u8, // nanosecond duration @@ -4500,7 +4547,7 @@ pub const PackageManager = struct { if (this.lockfile.package_index.get(name_hash)) |index| { const resolutions: []Resolution = this.lockfile.packages.items(.resolution); switch (index) { - .PackageID => |existing_id| { + .id => |existing_id| { if (existing_id < resolutions.len) { const existing_resolution = resolutions[existing_id]; if (this.resolutionSatisfiesDependency(existing_resolution, version)) { @@ -4533,7 +4580,7 @@ pub const PackageManager = struct { } } }, - .PackageIDMultiple => |list| { + .ids => |list| { for (list.items) |existing_id| { if (existing_id < resolutions.len) { const existing_resolution = resolutions[existing_id]; @@ -4869,8 +4916,8 @@ pub const PackageManager = struct { .apply_patch_task = if (patch_name_and_version_hash) |h| brk: { const dep = dependency; const pkg_id = switch (this.lockfile.package_index.get(dep.name_hash) orelse @panic("Package not 
found")) { - .PackageID => |p| p, - .PackageIDMultiple => |ps| ps.items[0], // TODO is this correct + .id => |p| p, + .ids => |ps| ps.items[0], // TODO is this correct }; const patch_hash = this.lockfile.patched_dependencies.get(h).?.patchfileHash().?; const pt = PatchTask.newApplyPatchHash(this, pkg_id, patch_hash, h); @@ -4924,8 +4971,8 @@ pub const PackageManager = struct { .apply_patch_task = if (patch_name_and_version_hash) |h| brk: { const dep = this.lockfile.buffers.dependencies.items[dependency_id]; const pkg_id = switch (this.lockfile.package_index.get(dep.name_hash) orelse @panic("Package not found")) { - .PackageID => |p| p, - .PackageIDMultiple => |ps| ps.items[0], // TODO is this correct + .id => |p| p, + .ids => |ps| ps.items[0], // TODO is this correct }; const patch_hash = this.lockfile.patched_dependencies.get(h).?.patchfileHash().?; const pt = PatchTask.newApplyPatchHash(this, pkg_id, patch_hash, h); @@ -4980,9 +5027,9 @@ pub const PackageManager = struct { pub fn updateLockfileIfNeeded( manager: *PackageManager, - load_lockfile_result: Lockfile.LoadFromDiskResult, + load_result: Lockfile.LoadResult, ) !void { - if (load_lockfile_result == .ok and load_lockfile_result.ok.serializer_result.packages_need_update) { + if (load_result == .ok and load_result.ok.serializer_result.packages_need_update) { const slice = manager.lockfile.packages.slice(); for (slice.items(.meta)) |*meta| { // these are possibly updated later, but need to make sure non are zero @@ -6970,7 +7017,6 @@ pub const PackageManager = struct { // must be a variable due to global installs and bunx bin_path: stringZ = bun.pathLiteral("node_modules/.bin"), - lockfile_path: stringZ = Lockfile.default_filename, did_override_default_scope: bool = false, scope: Npm.Registry.Scope = undefined, @@ -7011,6 +7057,8 @@ pub const PackageManager = struct { ca: []const string = &.{}, ca_file_name: string = &.{}, + save_text_lockfile: bool = false, + pub const PublishConfig = struct { access: ?Access = null, tag: string = "", @@ -7397,6 +7445,8 @@ pub const PackageManager = struct { this.do.trust_dependencies_from_args = true; } + this.save_text_lockfile = cli.save_text_lockfile; + this.local_package_features.optional_dependencies = !cli.omit.optional; const disable_progress_bar = default_disable_progress_bar or cli.no_progress; @@ -8959,24 +9009,10 @@ pub const PackageManager = struct { ) -| std.time.s_per_day; if (root_dir.entries.hasComptimeQuery("bun.lockb")) { - var buf: bun.PathBuffer = undefined; - var parts = [_]string{ - "./bun.lockb", - }; - const lockfile_path = Path.joinAbsStringBuf( - Fs.FileSystem.instance.top_level_dir, - &buf, - &parts, - .auto, - ); - buf[lockfile_path.len] = 0; - const lockfile_path_z = buf[0..lockfile_path.len :0]; - - switch (manager.lockfile.loadFromDisk( + switch (manager.lockfile.loadFromCwd( manager, allocator, log, - lockfile_path_z, true, )) { .ok => |load| manager.lockfile = load.lockfile, @@ -9383,6 +9419,7 @@ pub const PackageManager = struct { clap.parseParam("--registry Use a specific registry by default, overriding .npmrc, bunfig.toml and environment variables") catch unreachable, clap.parseParam("--concurrent-scripts Maximum number of concurrent jobs for lifecycle scripts (default 5)") catch unreachable, clap.parseParam("--network-concurrency Maximum number of concurrent network requests (default 48)") catch unreachable, + clap.parseParam("--save-text-lockfile Save a text-based lockfile") catch unreachable, clap.parseParam("-h, --help Print this help menu") catch unreachable, 
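+ // `--save-text-lockfile` is read via `args.flag(...)` below into
+ // `cli.save_text_lockfile`, which the options loader copies into
+ // `Options.save_text_lockfile`.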
}; @@ -9511,6 +9548,8 @@ pub const PackageManager = struct { ca: []const string = &.{}, ca_file_name: string = "", + save_text_lockfile: bool = false, + const PatchOpts = union(enum) { nothing: struct {}, patch: struct {}, @@ -9863,6 +9902,10 @@ pub const PackageManager = struct { }; } + if (args.flag("--save-text-lockfile")) { + cli.save_text_lockfile = true; + } + // commands that support --filter if (comptime subcommand.supportsWorkspaceFiltering()) { cli.filters = args.options("--filter"); @@ -10874,8 +10917,8 @@ pub const PackageManager = struct { } } - fn nodeModulesFolderForDependencyIDs(iterator: *Lockfile.Tree.Iterator, ids: []const IdPair) !?Lockfile.Tree.NodeModulesFolder { - while (iterator.nextNodeModulesFolder(null)) |node_modules| { + fn nodeModulesFolderForDependencyIDs(iterator: *Lockfile.Tree.Iterator(.node_modules), ids: []const IdPair) !?Lockfile.Tree.Iterator(.node_modules).Next { + while (iterator.next(null)) |node_modules| { for (ids) |id| { _ = std.mem.indexOfScalar(DependencyID, node_modules.dependencies, id[0]) orelse continue; return node_modules; @@ -10884,8 +10927,8 @@ pub const PackageManager = struct { return null; } - fn nodeModulesFolderForDependencyID(iterator: *Lockfile.Tree.Iterator, dependency_id: DependencyID) !?Lockfile.Tree.NodeModulesFolder { - while (iterator.nextNodeModulesFolder(null)) |node_modules| { + fn nodeModulesFolderForDependencyID(iterator: *Lockfile.Tree.Iterator(.node_modules), dependency_id: DependencyID) !?Lockfile.Tree.Iterator(.node_modules).Next { + while (iterator.next(null)) |node_modules| { _ = std.mem.indexOfScalar(DependencyID, node_modules.dependencies, dependency_id) orelse continue; return node_modules; } @@ -10897,11 +10940,11 @@ pub const PackageManager = struct { fn pkgInfoForNameAndVersion( lockfile: *Lockfile, - iterator: *Lockfile.Tree.Iterator, + iterator: *Lockfile.Tree.Iterator(.node_modules), pkg_maybe_version_to_patch: []const u8, name: []const u8, version: ?[]const u8, - ) struct { PackageID, Lockfile.Tree.NodeModulesFolder } { + ) struct { PackageID, Lockfile.Tree.Iterator(.node_modules).Next } { var sfb = std.heap.stackFallback(@sizeOf(IdPair) * 4, lockfile.allocator); var pairs = std.ArrayList(IdPair).initCapacity(sfb.get(), 8) catch bun.outOfMemory(); defer pairs.deinit(); @@ -11075,7 +11118,7 @@ pub const PackageManager = struct { const arg_kind: PatchArgKind = PatchArgKind.fromArg(argument); var folder_path_buf: bun.PathBuffer = undefined; - var iterator = Lockfile.Tree.Iterator.init(manager.lockfile); + var iterator = Lockfile.Tree.Iterator(.node_modules).init(manager.lockfile); var resolution_buf: [1024]u8 = undefined; var win_normalizer: if (bun.Environment.isWindows) bun.PathBuffer else struct {} = undefined; @@ -11115,7 +11158,7 @@ pub const PackageManager = struct { defer manager.allocator.free(package_json_source.contents); initializeStore(); - const json = JSON.parsePackageJSONUTF8AlwaysDecode(&package_json_source, manager.log, manager.allocator) catch |err| { + const json = JSON.parsePackageJSONUTF8(&package_json_source, manager.log, manager.allocator) catch |err| { manager.log.print(Output.errorWriter()) catch {}; Output.prettyErrorln("{s} parsing package.json in \"{s}\"", .{ @errorName(err), package_json_source.path.prettyDir() }); Global.crash(); @@ -11143,8 +11186,8 @@ pub const PackageManager = struct { ); Global.crash(); }) { - .PackageID => |id| lockfile.packages.get(id), - .PackageIDMultiple => |ids| id: { + .id => |id| lockfile.packages.get(id), + .ids => |ids| id: { for (ids.items) |id| 
{ const pkg = lockfile.packages.get(id); const resolution_label = std.fmt.bufPrint(&resolution_buf, "{}", .{pkg.resolution.fmt(lockfile.buffers.string_bytes.items, .posix)}) catch unreachable; @@ -11191,7 +11234,7 @@ pub const PackageManager = struct { }, .name_and_version => brk: { const pkg_maybe_version_to_patch = argument; - const name, const version = Dependency.splitNameAndVersion(pkg_maybe_version_to_patch); + const name, const version = Dependency.splitNameAndMaybeVersion(pkg_maybe_version_to_patch); const pkg_id, const folder = pkgInfoForNameAndVersion(manager.lockfile, &iterator, pkg_maybe_version_to_patch, name, version); const pkg = manager.lockfile.packages.get(pkg_id); @@ -11442,7 +11485,7 @@ pub const PackageManager = struct { var folder_path_buf: bun.PathBuffer = undefined; var lockfile: *Lockfile = try manager.allocator.create(Lockfile); defer lockfile.deinit(); - switch (lockfile.loadFromDisk(manager, manager.allocator, manager.log, manager.options.lockfile_path, true)) { + switch (lockfile.loadFromCwd(manager, manager.allocator, manager.log, true)) { .not_found => { Output.errGeneric("Cannot find lockfile. Install packages with `bun install` before patching them.", .{}); Global.crash(); @@ -11507,7 +11550,7 @@ pub const PackageManager = struct { }; defer root_node_modules.close(); - var iterator = Lockfile.Tree.Iterator.init(lockfile); + var iterator = Lockfile.Tree.Iterator(.node_modules).init(lockfile); var resolution_buf: [1024]u8 = undefined; const _cache_dir: std.fs.Dir, const _cache_dir_subpath: stringZ, const _changes_dir: []const u8, const _pkg: Package = switch (arg_kind) { .path => result: { @@ -11528,7 +11571,7 @@ pub const PackageManager = struct { defer manager.allocator.free(package_json_source.contents); initializeStore(); - const json = JSON.parsePackageJSONUTF8AlwaysDecode(&package_json_source, manager.log, manager.allocator) catch |err| { + const json = JSON.parsePackageJSONUTF8(&package_json_source, manager.log, manager.allocator) catch |err| { manager.log.print(Output.errorWriter()) catch {}; Output.prettyErrorln("{s} parsing package.json in \"{s}\"", .{ @errorName(err), package_json_source.path.prettyDir() }); Global.crash(); @@ -11556,8 +11599,8 @@ pub const PackageManager = struct { ); Global.crash(); }) { - .PackageID => |id| lockfile.packages.get(id), - .PackageIDMultiple => |ids| brk: { + .id => |id| lockfile.packages.get(id), + .ids => |ids| brk: { for (ids.items) |id| { const pkg = lockfile.packages.get(id); const resolution_label = std.fmt.bufPrint(&resolution_buf, "{}", .{pkg.resolution.fmt(lockfile.buffers.string_bytes.items, .posix)}) catch unreachable; @@ -11586,7 +11629,7 @@ pub const PackageManager = struct { break :result .{ cache_dir, cache_dir_subpath, changes_dir, actual_package }; }, .name_and_version => brk: { - const name, const version = Dependency.splitNameAndVersion(argument); + const name, const version = Dependency.splitNameAndMaybeVersion(argument); const pkg_id, const node_modules = pkgInfoForNameAndVersion(lockfile, &iterator, argument, name, version); const changes_dir = bun.path.joinZBuf(pathbuf[0..], &[_][]const u8{ @@ -12105,7 +12148,7 @@ pub const PackageManager = struct { lockfile: *Lockfile, progress: *Progress, - // relative paths from `nextNodeModulesFolder` will be copied into this list. + // relative paths from `next` will be copied into this list. 
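+ // (`next` here is `Lockfile.Tree.Iterator(.node_modules).next`, the renamed
+ // `nextNodeModulesFolder` from earlier in this diff.)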
node_modules: NodeModulesFolder, skip_verify_installed_version_number: bool, @@ -12116,13 +12159,14 @@ pub const PackageManager = struct { options: *const PackageManager.Options, metas: []const Lockfile.Package.Meta, names: []const String, - bins: []const Bin, + pkg_name_hashes: []const PackageNameHash, + bins: []Bin, resolutions: []Resolution, node: *Progress.Node, destination_dir_subpath_buf: bun.PathBuffer = undefined, folder_path_buf: bun.PathBuffer = undefined, successfully_installed: Bitset, - tree_iterator: *Lockfile.Tree.Iterator, + tree_iterator: *Lockfile.Tree.Iterator(.node_modules), command_ctx: Command.Context, current_tree_id: Lockfile.Tree.Id = Lockfile.Tree.invalid_id, @@ -12273,7 +12317,7 @@ pub const PackageManager = struct { } pub fn linkRemainingBins(this: *PackageInstaller, comptime log_level: Options.LogLevel) void { - var depth_buf: Lockfile.Tree.Iterator.DepthBuf = undefined; + var depth_buf: Lockfile.Tree.DepthBuf = undefined; var node_modules_rel_path_buf: bun.PathBuffer = undefined; @memcpy(node_modules_rel_path_buf[0.."node_modules".len], "node_modules"); @@ -12291,6 +12335,7 @@ pub const PackageManager = struct { @intCast(tree_id), &node_modules_rel_path_buf, &depth_buf, + .node_modules, ); this.node_modules.path.appendSlice(rel_path) catch bun.outOfMemory(); @@ -12374,7 +12419,7 @@ pub const PackageManager = struct { // packages upon completing the current tree for (tree.pending_installs.items) |context| { const package_id = resolutions[context.dependency_id]; - const name = lockfile.str(&this.names[package_id]); + const name = this.names[package_id]; const resolution = &this.resolutions[package_id]; this.node_modules.tree_id = context.tree_id; this.node_modules.path = context.path; @@ -12492,6 +12537,7 @@ pub const PackageManager = struct { var packages = this.lockfile.packages.slice(); this.metas = packages.items(.meta); this.names = packages.items(.name); + this.pkg_name_hashes = packages.items(.name_hash); this.bins = packages.items(.bin); this.resolutions = packages.items(.resolution); @@ -12514,21 +12560,21 @@ pub const PackageManager = struct { comptime log_level: Options.LogLevel, ) void { const package_id = this.lockfile.buffers.resolutions.items[dependency_id]; - const name = this.lockfile.str(&this.names[package_id]); + const name = this.names[package_id]; const resolution = &this.resolutions[package_id]; const task_id = switch (resolution.tag) { .git => Task.Id.forGitCheckout(data.url, data.resolved), .github => Task.Id.forTarball(data.url), .local_tarball => Task.Id.forTarball(this.lockfile.str(&resolution.value.local_tarball)), .remote_tarball => Task.Id.forTarball(this.lockfile.str(&resolution.value.remote_tarball)), - .npm => Task.Id.forNPMPackage(name, resolution.value.npm.version), + .npm => Task.Id.forNPMPackage(name.slice(this.lockfile.buffers.string_bytes.items), resolution.value.npm.version), else => unreachable, }; if (!this.installEnqueuedPackagesImpl(name, task_id, log_level)) { if (comptime Environment.allow_assert) { Output.panic("Ran callback to install enqueued packages, but there was no task associated with it. 
{}:{} (dependency_id: {d})", .{ - bun.fmt.quote(name), + bun.fmt.quote(name.slice(this.lockfile.buffers.string_bytes.items)), bun.fmt.quote(data.url), dependency_id, }); @@ -12538,7 +12584,7 @@ pub const PackageManager = struct { pub fn installEnqueuedPackagesImpl( this: *PackageInstaller, - name: []const u8, + name: String, task_id: Task.Id.Type, comptime log_level: Options.LogLevel, ) bool { @@ -12635,7 +12681,7 @@ pub const PackageManager = struct { } switch (resolution_tag) { - .git, .github, .gitlab, .root => { + .git, .github, .root => { inline for (Lockfile.Scripts.names) |script_name| { count += @intFromBool(!@field(scripts, script_name).isEmpty()); } @@ -12676,7 +12722,7 @@ pub const PackageManager = struct { dependency_id: DependencyID, package_id: PackageID, comptime log_level: Options.LogLevel, - name: string, + pkg_name: String, resolution: *const Resolution, // false when coming from download. if the package was downloaded @@ -12687,30 +12733,34 @@ pub const PackageManager = struct { // pending packages if we're already draining them. comptime is_pending_package_install: bool, ) void { - const buf = this.lockfile.buffers.string_bytes.items; - - const alias = this.lockfile.buffers.dependencies.items[dependency_id].name.slice(buf); + const alias = this.lockfile.buffers.dependencies.items[dependency_id].name; const destination_dir_subpath: [:0]u8 = brk: { - bun.copy(u8, &this.destination_dir_subpath_buf, alias); - this.destination_dir_subpath_buf[alias.len] = 0; - break :brk this.destination_dir_subpath_buf[0..alias.len :0]; + const alias_slice = alias.slice(this.lockfile.buffers.string_bytes.items); + bun.copy(u8, &this.destination_dir_subpath_buf, alias_slice); + this.destination_dir_subpath_buf[alias_slice.len] = 0; + break :brk this.destination_dir_subpath_buf[0..alias_slice.len :0]; }; + const pkg_name_hash = this.pkg_name_hashes[package_id]; + var resolution_buf: [512]u8 = undefined; const package_version = if (resolution.tag == .workspace) brk: { - if (this.manager.lockfile.workspace_versions.get(String.Builder.stringHash(name))) |workspace_version| { - break :brk std.fmt.bufPrint(&resolution_buf, "{}", .{workspace_version.fmt(buf)}) catch unreachable; + if (this.manager.lockfile.workspace_versions.get(pkg_name_hash)) |workspace_version| { + break :brk std.fmt.bufPrint(&resolution_buf, "{}", .{workspace_version.fmt(this.lockfile.buffers.string_bytes.items)}) catch unreachable; } // no version break :brk ""; - } else std.fmt.bufPrint(&resolution_buf, "{}", .{resolution.fmt(buf, .posix)}) catch unreachable; + } else std.fmt.bufPrint(&resolution_buf, "{}", .{resolution.fmt(this.lockfile.buffers.string_bytes.items, .posix)}) catch unreachable; const patch_patch, const patch_contents_hash, const patch_name_and_version_hash, const remove_patch = brk: { if (this.manager.lockfile.patched_dependencies.entries.len == 0 and this.manager.patched_dependencies_to_remove.entries.len == 0) break :brk .{ null, null, null, false }; var sfa = std.heap.stackFallback(1024, this.lockfile.allocator); const alloc = sfa.get(); - const name_and_version = std.fmt.allocPrint(alloc, "{s}@{s}", .{ name, package_version }) catch unreachable; + const name_and_version = std.fmt.allocPrint(alloc, "{s}@{s}", .{ + pkg_name.slice(this.lockfile.buffers.string_bytes.items), + package_version, + }) catch unreachable; defer alloc.free(name_and_version); const name_and_version_hash = String.Builder.stringHash(name_and_version); @@ -12745,7 +12795,7 @@ pub const PackageManager = struct { .destination_dir_subpath = 
destination_dir_subpath, .destination_dir_subpath_buf = &this.destination_dir_subpath_buf, .allocator = this.lockfile.allocator, - .package_name = name, + .package_name = pkg_name, .patch = if (patch_patch) |p| PackageInstall.Patch{ .patch_contents_hash = patch_contents_hash.?, .patch_path = p, @@ -12755,12 +12805,19 @@ pub const PackageManager = struct { .node_modules = &this.node_modules, .lockfile = this.lockfile, }; - debug("Installing {s}@{s}", .{ name, resolution.fmt(buf, .posix) }); + debug("Installing {s}@{s}", .{ + pkg_name.slice(this.lockfile.buffers.string_bytes.items), + resolution.fmt(this.lockfile.buffers.string_bytes.items, .posix), + }); const pkg_has_patch = !installer.patch.isNull(); switch (resolution.tag) { .npm => { - installer.cache_dir_subpath = this.manager.cachedNPMPackageFolderName(name, resolution.value.npm.version, patch_contents_hash); + installer.cache_dir_subpath = this.manager.cachedNPMPackageFolderName( + pkg_name.slice(this.lockfile.buffers.string_bytes.items), + resolution.value.npm.version, + patch_contents_hash, + ); installer.cache_dir = this.manager.getCacheDirectory(); }, .git => { @@ -12772,7 +12829,7 @@ pub const PackageManager = struct { installer.cache_dir = this.manager.getCacheDirectory(); }, .folder => { - const folder = resolution.value.folder.slice(buf); + const folder = resolution.value.folder.slice(this.lockfile.buffers.string_bytes.items); if (this.lockfile.isWorkspaceTreeId(this.current_tree_id)) { // Handle when a package depends on itself via file: @@ -12805,7 +12862,7 @@ pub const PackageManager = struct { installer.cache_dir = this.manager.getCacheDirectory(); }, .workspace => { - const folder = resolution.value.workspace.slice(buf); + const folder = resolution.value.workspace.slice(this.lockfile.buffers.string_bytes.items); // Handle when a package depends on itself if (folder.len == 0 or (folder.len == 1 and folder[0] == '.')) { installer.cache_dir_subpath = "."; @@ -12824,7 +12881,7 @@ pub const PackageManager = struct { const directory = this.manager.globalLinkDir() catch |err| { if (comptime log_level != .silent) { const fmt = "\nerror: unable to access global directory while installing {s}: {s}\n"; - const args = .{ name, @errorName(err) }; + const args = .{ pkg_name.slice(this.lockfile.buffers.string_bytes.items), @errorName(err) }; if (comptime log_level.showProgress()) { switch (Output.enable_ansi_colors) { @@ -12848,7 +12905,7 @@ pub const PackageManager = struct { return; }; - const folder = resolution.value.symlink.slice(buf); + const folder = resolution.value.symlink.slice(this.lockfile.buffers.string_bytes.items); if (folder.len == 0 or (folder.len == 1 and folder[0] == '.')) { installer.cache_dir_subpath = "."; @@ -12880,11 +12937,19 @@ pub const PackageManager = struct { }, } - const needs_install = this.force_install or this.skip_verify_installed_version_number or !needs_verify or remove_patch or !installer.verify( - resolution, - buf, - this.root_node_modules_folder, - ); + const needs_install = this.force_install or this.skip_verify_installed_version_number or !needs_verify or remove_patch or verify: { + const verified = installer.verify( + resolution, + this.root_node_modules_folder, + &this.bins[package_id], + ); + + if (verified.update_lockfile_pointers) { + this.fixCachedLockfilePackageSlices(); + } + + break :verify !verified.valid; + }; this.summary.skipped += @intFromBool(!needs_install); if (needs_install) { @@ -12904,7 +12969,7 @@ pub const PackageManager = struct { .git => { 
this.manager.enqueueGitForCheckout( dependency_id, - alias, + alias.slice(this.lockfile.buffers.string_bytes.items), resolution, context, patch_name_and_version_hash, @@ -12924,7 +12989,7 @@ pub const PackageManager = struct { .local_tarball => { this.manager.enqueueTarballForReading( dependency_id, - alias, + alias.slice(this.lockfile.buffers.string_bytes.items), resolution, context, ); @@ -12933,7 +12998,7 @@ pub const PackageManager = struct { this.manager.enqueueTarballForDownload( dependency_id, package_id, - resolution.value.remote_tarball.slice(buf), + resolution.value.remote_tarball.slice(this.lockfile.buffers.string_bytes.items), context, patch_name_and_version_hash, ); @@ -12943,16 +13008,19 @@ pub const PackageManager = struct { // Very old versions of Bun didn't store the tarball url when it didn't seem necessary // This caused bugs. We can't assert on it because they could come from old lockfiles if (resolution.value.npm.url.isEmpty()) { - Output.debugWarn("package {s}@{} missing tarball_url", .{ name, resolution.fmt(buf, .posix) }); + Output.debugWarn("package {s}@{} missing tarball_url", .{ + pkg_name.slice(this.lockfile.buffers.string_bytes.items), + resolution.fmt(this.lockfile.buffers.string_bytes.items, .posix), + }); } } this.manager.enqueuePackageForDownload( - name, + pkg_name.slice(this.lockfile.buffers.string_bytes.items), dependency_id, package_id, resolution.value.npm.version, - resolution.value.npm.url.slice(buf), + resolution.value.npm.url.slice(this.lockfile.buffers.string_bytes.items), context, patch_name_and_version_hash, ); @@ -13002,7 +13070,7 @@ pub const PackageManager = struct { var destination_dir = this.node_modules.makeAndOpenDir(this.root_node_modules_folder) catch |err| { if (log_level != .silent) { Output.err(err, "Failed to open node_modules folder for {s} in {s}", .{ - name, + pkg_name.slice(this.lockfile.buffers.string_bytes.items), bun.fmt.fmtPath(u8, this.node_modules.path.items, .{}), }); } @@ -13051,21 +13119,32 @@ pub const PackageManager = struct { this.node.completeOne(); } + if (this.bins[package_id].isUnset()) { + this.bins[package_id] = this.getPackageBin( + &installer, + pkg_name.slice(this.lockfile.buffers.string_bytes.items), + pkg_name_hash, + resolution, + ) catch |err| switch (err) { + error.OutOfMemory => bun.outOfMemory(), + }; + } + if (this.bins[package_id].tag != .none) { this.trees[this.current_tree_id].binaries.add(dependency_id) catch bun.outOfMemory(); } const dep = this.lockfile.buffers.dependencies.items[dependency_id]; - const name_hash: TruncatedPackageNameHash = @truncate(dep.name_hash); + const truncated_dep_name_hash: TruncatedPackageNameHash = @truncate(dep.name_hash); const is_trusted, const is_trusted_through_update_request = brk: { - if (this.trusted_dependencies_from_update_requests.contains(name_hash)) break :brk .{ true, true }; - if (this.lockfile.hasTrustedDependency(alias)) break :brk .{ true, false }; + if (this.trusted_dependencies_from_update_requests.contains(truncated_dep_name_hash)) break :brk .{ true, true }; + if (this.lockfile.hasTrustedDependency(alias.slice(this.lockfile.buffers.string_bytes.items))) break :brk .{ true, false }; break :brk .{ false, false }; }; if (resolution.tag != .root and (resolution.tag == .workspace or is_trusted)) { if (this.enqueueLifecycleScripts( - alias, + alias.slice(this.lockfile.buffers.string_bytes.items), log_level, destination_dir, package_id, @@ -13075,11 +13154,11 @@ pub const PackageManager = struct { if (is_trusted_through_update_request) { 
this.manager.trusted_deps_to_add_to_package_json.append( this.manager.allocator, - this.manager.allocator.dupe(u8, alias) catch bun.outOfMemory(), + this.manager.allocator.dupe(u8, alias.slice(this.lockfile.buffers.string_bytes.items)) catch bun.outOfMemory(), ) catch bun.outOfMemory(); if (this.lockfile.trusted_dependencies == null) this.lockfile.trusted_dependencies = .{}; - this.lockfile.trusted_dependencies.?.put(this.manager.allocator, name_hash, {}) catch bun.outOfMemory(); + this.lockfile.trusted_dependencies.?.put(this.manager.allocator, truncated_dep_name_hash, {}) catch bun.outOfMemory(); } } } @@ -13091,16 +13170,22 @@ pub const PackageManager = struct { else => if (!is_trusted and this.metas[package_id].hasInstallScript()) { // Check if the package actually has scripts. `hasInstallScript` can be false positive if a package is published with // an auto binding.gyp rebuild script but binding.gyp is excluded from the published files. - const count = this.getInstalledPackageScriptsCount(alias, package_id, resolution.tag, destination_dir, log_level); + const count = this.getInstalledPackageScriptsCount( + alias.slice(this.lockfile.buffers.string_bytes.items), + package_id, + resolution.tag, + destination_dir, + log_level, + ); if (count > 0) { if (comptime log_level.isVerbose()) { Output.prettyError("Blocked {d} scripts for: {s}@{}\n", .{ count, - alias, + alias.slice(this.lockfile.buffers.string_bytes.items), resolution.fmt(this.lockfile.buffers.string_bytes.items, .posix), }); } - const entry = this.summary.packages_with_blocked_scripts.getOrPut(this.manager.allocator, name_hash) catch bun.outOfMemory(); + const entry = this.summary.packages_with_blocked_scripts.getOrPut(this.manager.allocator, truncated_dep_name_hash) catch bun.outOfMemory(); if (!entry.found_existing) entry.value_ptr.* = 0; entry.value_ptr.* += count; } @@ -13121,7 +13206,7 @@ pub const PackageManager = struct { if (cause.err == error.DanglingSymlink) { Output.prettyErrorln( "error: {s} \"link:{s}\" not found (try running 'bun link' in the intended package's folder)", - .{ @errorName(cause.err), this.names[package_id].slice(buf) }, + .{ @errorName(cause.err), this.names[package_id].slice(this.lockfile.buffers.string_bytes.items) }, ); this.summary.fail += 1; } else if (cause.err == error.AccessDenied) { @@ -13137,7 +13222,7 @@ pub const PackageManager = struct { if (!Environment.isWindows) { const stat = bun.sys.fstat(bun.toFD(destination_dir)).unwrap() catch |err| { Output.err("EACCES", "Permission denied while installing {s}", .{ - this.names[package_id].slice(buf), + this.names[package_id].slice(this.lockfile.buffers.string_bytes.items), }); if (Environment.isDebug) { Output.err(err, "Failed to stat node_modules", .{}); @@ -13161,20 +13246,30 @@ pub const PackageManager = struct { } Output.err("EACCES", "Permission denied while installing {s}", .{ - this.names[package_id].slice(buf), + this.names[package_id].slice(this.lockfile.buffers.string_bytes.items), }); this.summary.fail += 1; } else { Output.prettyErrorln( "error: {s} installing {s} ({s})", - .{ @errorName(cause.err), this.names[package_id].slice(buf), install_result.fail.step.name() }, + .{ @errorName(cause.err), this.names[package_id].slice(this.lockfile.buffers.string_bytes.items), install_result.fail.step.name() }, ); this.summary.fail += 1; } }, } } else { + if (this.bins[package_id].isUnset()) { + this.bins[package_id] = this.getPackageBin( + &installer, + pkg_name.slice(this.lockfile.buffers.string_bytes.items), + pkg_name_hash, + resolution, + 
) catch |err| switch (err) { + error.OutOfMemory => bun.outOfMemory(), + }; + } if (this.bins[package_id].tag != .none) { this.trees[this.current_tree_id].binaries.add(dependency_id) catch bun.outOfMemory(); } @@ -13182,7 +13277,7 @@ pub const PackageManager = struct { var destination_dir = this.node_modules.makeAndOpenDir(this.root_node_modules_folder) catch |err| { if (log_level != .silent) { Output.err(err, "Failed to open node_modules folder for {s} in {s}", .{ - name, + pkg_name.slice(this.lockfile.buffers.string_bytes.items), bun.fmt.fmtPath(u8, this.node_modules.path.items, .{}), }); } @@ -13198,12 +13293,12 @@ pub const PackageManager = struct { defer if (!pkg_has_patch) this.incrementTreeInstallCount(this.current_tree_id, destination_dir, !is_pending_package_install, log_level); const dep = this.lockfile.buffers.dependencies.items[dependency_id]; - const name_hash: TruncatedPackageNameHash = @truncate(dep.name_hash); + const truncated_dep_name_hash: TruncatedPackageNameHash = @truncate(dep.name_hash); const is_trusted, const is_trusted_through_update_request, const add_to_lockfile = brk: { // trusted through a --trust dependency. need to enqueue scripts, write to package.json, and add to lockfile - if (this.trusted_dependencies_from_update_requests.contains(name_hash)) break :brk .{ true, true, true }; + if (this.trusted_dependencies_from_update_requests.contains(truncated_dep_name_hash)) break :brk .{ true, true, true }; - if (this.manager.summary.added_trusted_dependencies.get(name_hash)) |should_add_to_lockfile| { + if (this.manager.summary.added_trusted_dependencies.get(truncated_dep_name_hash)) |should_add_to_lockfile| { // is a new trusted dependency. need to enqueue scripts and maybe add to lockfile break :brk .{ true, false, should_add_to_lockfile }; } @@ -13212,7 +13307,7 @@ pub const PackageManager = struct { if (resolution.tag != .root and is_trusted) { if (this.enqueueLifecycleScripts( - alias, + alias.slice(this.lockfile.buffers.string_bytes.items), log_level, destination_dir, package_id, @@ -13222,19 +13317,82 @@ pub const PackageManager = struct { if (is_trusted_through_update_request) { this.manager.trusted_deps_to_add_to_package_json.append( this.manager.allocator, - this.manager.allocator.dupe(u8, alias) catch bun.outOfMemory(), + this.manager.allocator.dupe(u8, alias.slice(this.lockfile.buffers.string_bytes.items)) catch bun.outOfMemory(), ) catch bun.outOfMemory(); } if (add_to_lockfile) { if (this.lockfile.trusted_dependencies == null) this.lockfile.trusted_dependencies = .{}; - this.lockfile.trusted_dependencies.?.put(this.manager.allocator, name_hash, {}) catch bun.outOfMemory(); + this.lockfile.trusted_dependencies.?.put(this.manager.allocator, truncated_dep_name_hash, {}) catch bun.outOfMemory(); } } } } } + fn getPackageBin( + this: *PackageInstaller, + installer: *PackageInstall, + pkg_name: string, + pkg_name_hash: PackageNameHash, + resolution: *const Resolution, + ) OOM!Bin { + defer this.fixCachedLockfilePackageSlices(); + + if (resolution.tag == .npm) { + var expired = false; + if (this.manager.manifests.byNameHashAllowExpired( + this.manager, + this.manager.scopeForPackageName(pkg_name), + pkg_name_hash, + &expired, + .load_from_memory_fallback_to_disk, + )) |manifest| { + if (manifest.findByVersion(resolution.value.npm.version)) |find| { + return find.package.bin.cloneAppend(manifest.string_buf, manifest.extern_strings_bin_entries, this.lockfile); + } + } + } + + // get it from package.json + var body_pool = 
Npm.Registry.BodyPool.get(this.lockfile.allocator); + var mutable = body_pool.data; + defer { + body_pool.data = mutable; + Npm.Registry.BodyPool.release(body_pool); + } + + const source = installer.getInstalledPackageJsonSource(this.root_node_modules_folder, &mutable, resolution.tag) orelse return .{}; + + initializeStore(); + + var log = logger.Log.init(this.lockfile.allocator); + defer log.deinit(); + + var bin_finder = JSON.PackageJSONVersionChecker.init( + this.lockfile.allocator, + &source, + &log, + .only_bin, + ) catch return .{}; + _ = bin_finder.parseExpr(false, false) catch return .{}; + + if (bin_finder.has_found_bin) { + var string_buf = this.lockfile.stringBuf(); + defer { + string_buf.apply(this.lockfile); + this.fixCachedLockfilePackageSlices(); + } + + return switch (bin_finder.found_bin) { + .bin => |bin| try Bin.parseAppend(this.lockfile.allocator, bin, &string_buf, &this.lockfile.buffers.extern_strings), + .dir => |dir| try Bin.parseAppendFromDirectories(this.lockfile.allocator, dir, &string_buf), + }; + } + + return .{}; + } + // returns true if scripts are enqueued fn enqueueLifecycleScripts( this: *PackageInstaller, @@ -13340,7 +13498,7 @@ pub const PackageManager = struct { return; } - const name = this.lockfile.str(&this.names[package_id]); + const name = this.names[package_id]; const resolution = &this.resolutions[package_id]; const needs_verify = true; @@ -13613,7 +13771,7 @@ pub const PackageManager = struct { }; { - var iterator = Lockfile.Tree.Iterator.init(this.lockfile); + var iterator = Lockfile.Tree.Iterator(.node_modules).init(this.lockfile); if (comptime Environment.isPosix) { Bin.Linker.ensureUmask(); } @@ -13706,6 +13864,7 @@ pub const PackageManager = struct { .bins = parts.items(.bin), .root_node_modules_folder = node_modules_folder, .names = parts.items(.name), + .pkg_name_hashes = parts.items(.name_hash), .resolutions = parts.items(.resolution), .lockfile = this.lockfile, .node = &install_node, @@ -13753,7 +13912,7 @@ pub const PackageManager = struct { defer installer.deinit(); - while (iterator.nextNodeModulesFolder(&installer.completed_trees)) |node_modules| { + while (iterator.next(&installer.completed_trees)) |node_modules| { installer.node_modules.path.items.len = strings.withoutTrailingSlash(FileSystem.instance.top_level_dir).len + 1; try installer.node_modules.path.appendSlice(node_modules.relative_path); installer.node_modules.tree_id = node_modules.tree_id; @@ -13967,7 +14126,7 @@ pub const PackageManager = struct { const buf = this.lockfile.buffers.string_bytes.items; // need to clone because this is a copy before Lockfile.cleanWithLogger - const name = this.allocator.dupe(u8, root_package.name.slice(buf)) catch bun.outOfMemory(); + const name = root_package.name.slice(buf); const top_level_dir_without_trailing_slash = strings.withoutTrailingSlash(FileSystem.instance.top_level_dir); if (root_package.scripts.hasAny()) { @@ -14012,24 +14171,24 @@ pub const PackageManager = struct { bun.dns.internal.prefetch(manager.event_loop.loop(), hostname); } - var load_lockfile_result: Lockfile.LoadFromDiskResult = if (manager.options.do.load_lockfile) - manager.lockfile.loadFromDisk( + var load_result: Lockfile.LoadResult = if (manager.options.do.load_lockfile) + manager.lockfile.loadFromCwd( manager, manager.allocator, manager.log, - manager.options.lockfile_path, true, ) else .{ .not_found = {} }; - try manager.updateLockfileIfNeeded(load_lockfile_result); + try manager.updateLockfileIfNeeded(load_result); var root = Lockfile.Package{}; - var 
needs_new_lockfile = load_lockfile_result != .ok or - (load_lockfile_result.ok.lockfile.buffers.dependencies.items.len == 0 and manager.update_requests.len > 0); + var needs_new_lockfile = load_result != .ok or + (load_result.ok.lockfile.buffers.dependencies.items.len == 0 and manager.update_requests.len > 0); - manager.options.enable.force_save_lockfile = manager.options.enable.force_save_lockfile or (load_lockfile_result == .ok and load_lockfile_result.ok.was_migrated); + manager.options.enable.force_save_lockfile = manager.options.enable.force_save_lockfile or + (load_result == .ok and (load_result.ok.was_migrated or (load_result.ok.format == .binary and manager.options.save_text_lockfile))); // this defaults to false // but we force allowing updates to the lockfile when you do bun add @@ -14039,32 +14198,32 @@ pub const PackageManager = struct { // Step 2. Parse the package.json file const root_package_json_source = logger.Source.initPathString(package_json_cwd, root_package_json_contents); - switch (load_lockfile_result) { + switch (load_result) { .err => |cause| { if (log_level != .silent) { switch (cause.step) { - .open_file => Output.prettyError("error opening lockfile: {s}\n", .{ - @errorName(cause.value), + .open_file => Output.err(cause.value, "failed to open lockfile: '{s}'", .{ + cause.lockfile_path, }), - .parse_file => Output.prettyError("error parsing lockfile: {s}\n", .{ - @errorName(cause.value), + .parse_file => Output.err(cause.value, "failed to parse lockfile: '{s}'", .{ + cause.lockfile_path, }), - .read_file => Output.prettyError("error reading lockfile: {s}\n", .{ - @errorName(cause.value), + .read_file => Output.err(cause.value, "failed to read lockfile: '{s}'", .{ + cause.lockfile_path, }), - .migrating => Output.prettyError("error migrating lockfile: {s}\n", .{ - @errorName(cause.value), + .migrating => Output.err(cause.value, "failed to migrate lockfile: '{s}'", .{ + cause.lockfile_path, }), } - if (manager.options.enable.fail_early) { - Output.prettyError("failed to load lockfile\n", .{}); - } else { - Output.prettyError("ignoring lockfile\n", .{}); + if (!manager.options.enable.fail_early) { + Output.printErrorln("", .{}); + Output.warn("Ignoring lockfile", .{}); } if (ctx.log.errors > 0) { try manager.log.print(Output.errorWriter()); + manager.log.reset(); } Output.flush(); } @@ -14111,7 +14270,7 @@ pub const PackageManager = struct { } } differ: { - root = load_lockfile_result.ok.lockfile.rootPackage() orelse { + root = load_result.ok.lockfile.rootPackage() orelse { needs_new_lockfile = true; break :differ; }; @@ -14352,7 +14511,7 @@ pub const PackageManager = struct { root = .{}; manager.lockfile.initEmpty(manager.allocator); - if (manager.options.enable.frozen_lockfile and load_lockfile_result != .not_found) { + if (manager.options.enable.frozen_lockfile and load_result != .not_found) { if (comptime log_level != .silent) { Output.prettyErrorln("error: lockfile had changes, but lockfile is frozen", .{}); } @@ -14592,7 +14751,7 @@ pub const PackageManager = struct { const packages_len_before_install = manager.lockfile.packages.len; - if (manager.options.enable.frozen_lockfile and load_lockfile_result != .not_found) { + if (manager.options.enable.frozen_lockfile and load_result != .not_found) { if (manager.lockfile.hasMetaHashChanged(PackageManager.verbose_install or manager.options.do.print_meta_hash_string, packages_len_before_install) catch false) { if (comptime log_level != .silent) { Output.prettyErrorln("error: lockfile had changes, but lockfile is 
frozen", .{}); @@ -14631,15 +14790,21 @@ pub const PackageManager = struct { // this will handle new trusted dependencies added through --trust manager.update_requests.len > 0 or - (load_lockfile_result == .ok and load_lockfile_result.ok.serializer_result.packages_need_update); + (load_result == .ok and load_result.ok.serializer_result.packages_need_update); // It's unnecessary work to re-save the lockfile if there are no changes if (manager.options.do.save_lockfile and (should_save_lockfile or manager.lockfile.isEmpty() or manager.options.enable.force_save_lockfile)) save: { if (manager.lockfile.isEmpty()) { - if (!manager.options.dry_run) { - std.fs.cwd().deleteFileZ(manager.options.lockfile_path) catch |err| brk: { + if (!manager.options.dry_run) delete: { + const delete_format = switch (load_result) { + .not_found => break :delete, + .err => |err| err.format, + .ok => |ok| ok.format, + }; + + std.fs.cwd().deleteFileZ(if (delete_format == .text) "bun.lock" else "bun.lockb") catch |err| brk: { // we don't care if (err == error.FileNotFound) { if (had_any_diffs) break :save; @@ -14672,7 +14837,15 @@ pub const PackageManager = struct { manager.progress.refresh(); } - manager.lockfile.saveToDisk(manager.options.lockfile_path, manager.options.log_level.isVerbose()); + const save_format: Lockfile.LoadResult.LockfileFormat = if (manager.options.save_text_lockfile) + .text + else switch (load_result) { + .not_found => .binary, + .err => |err| err.format, + .ok => |ok| ok.format, + }; + + manager.lockfile.saveToDisk(save_format, manager.options.log_level.isVerbose()); if (comptime Environment.allow_assert) { if (manager.lockfile.hasMetaHashChanged(false, packages_len_before_install) catch false) { @@ -15001,6 +15174,11 @@ pub const bun_install_js_bindings = struct { const cwd = try args[0].toSliceOrNull(globalObject); defer cwd.deinit(); + var dir = bun.openDirAbsolute(cwd.slice()) catch |err| { + return globalObject.throw("failed to open: {s}, '{s}'", .{ @errorName(err), cwd.slice() }); + }; + defer dir.close(); + const lockfile_path = Path.joinAbsStringZ(cwd.slice(), &[_]string{"bun.lockb"}, .auto); var lockfile: Lockfile = undefined; @@ -15012,14 +15190,14 @@ pub const bun_install_js_bindings = struct { // as long as we aren't migration from `package-lock.json`, leaving this undefined is okay const manager = globalObject.bunVM().bundler.resolver.getPackageManager(); - const load_result: Lockfile.LoadFromDiskResult = lockfile.loadFromDisk(manager, allocator, &log, lockfile_path, true); + const load_result: Lockfile.LoadResult = lockfile.loadFromDir(bun.toFD(dir), manager, allocator, &log, true); switch (load_result) { .err => |err| { - return globalObject.throw("failed to load lockfile: {s}, \"{s}\"", .{ @errorName(err.value), lockfile_path }); + return globalObject.throw("failed to load lockfile: {s}, '{s}'", .{ @errorName(err.value), lockfile_path }); }, .not_found => { - return globalObject.throw("lockfile not found: \"{s}\"", .{lockfile_path}); + return globalObject.throw("lockfile not found: '{s}'", .{lockfile_path}); }, .ok => {}, } diff --git a/src/install/integrity.zig b/src/install/integrity.zig index 726f824ee4a537..4e8f47d90e543b 100644 --- a/src/install/integrity.zig +++ b/src/install/integrity.zig @@ -66,7 +66,7 @@ pub const Integrity = extern struct { return integrity; } - pub fn parse(buf: []const u8) !Integrity { + pub fn parse(buf: []const u8) Integrity { if (buf.len < "sha256-".len) { return Integrity{ .tag = Tag.unknown, diff --git a/src/install/lifecycle_script_runner.zig 
b/src/install/lifecycle_script_runner.zig index 3900e790e8d35c..c2b2a089e5c1d7 100644 --- a/src/install/lifecycle_script_runner.zig +++ b/src/install/lifecycle_script_runner.zig @@ -10,11 +10,13 @@ const Global = bun.Global; const JSC = bun.JSC; const WaiterThread = bun.spawn.WaiterThread; const Timer = std.time.Timer; +const String = bun.Semver.String; +const string = bun.string; const Process = bun.spawn.Process; const log = Output.scoped(.Script, false); pub const LifecycleScriptSubprocess = struct { - package_name: []const u8, + package_name: string, scripts: Lockfile.Package.Scripts.List, current_script_index: u8 = 0, diff --git a/src/install/lockfile.zig b/src/install/lockfile.zig index bf93c609326211..eeb196159f6660 100644 --- a/src/install/lockfile.zig +++ b/src/install/lockfile.zig @@ -12,6 +12,8 @@ const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; const C = bun.C; const JSAst = bun.JSAst; +const TextLockfile = @import("./bun.lock.zig"); +const OOM = bun.OOM; const JSLexer = bun.js_lexer; const logger = bun.logger; @@ -75,6 +77,7 @@ const ExternalStringMap = Install.ExternalStringMap; const Features = Install.Features; const initializeStore = Install.initializeStore; const invalid_package_id = Install.invalid_package_id; +const invalid_dependency_id = Install.invalid_dependency_id; const Origin = Install.Origin; const PackageID = Install.PackageID; const PackageInstall = Install.PackageInstall; @@ -129,6 +132,8 @@ const GlobWalker = bun.glob.GlobWalker_(ignoredWorkspacePaths, bun.glob.SyscallA /// The version of the lockfile format, intended to prevent data corruption for format changes. format: FormatVersion = FormatVersion.current, +text_lockfile_version: TextLockfile.Version = .v0, + meta_hash: MetaHash = zero_hash, packages: Lockfile.Package.List = .{}, @@ -209,63 +214,180 @@ pub fn isEmpty(this: *const Lockfile) bool { return this.packages.len == 0 or (this.packages.len == 1 and this.packages.get(0).resolutions.len == 0); } -pub const LoadFromDiskResult = union(enum) { +pub const LoadResult = union(enum) { not_found: void, err: struct { step: Step, value: anyerror, + lockfile_path: stringZ, + format: LockfileFormat, }, ok: struct { lockfile: *Lockfile, + loaded_from_binary_lockfile: bool, was_migrated: bool = false, serializer_result: Serializer.SerializerLoadResult, + format: LockfileFormat, }, + pub const LockfileFormat = enum { + text, + binary, + + pub fn filename(this: LockfileFormat) stringZ { + return switch (this) { + .text => "bun.lock", + .binary => "bun.lockb", + }; + } + }; + pub const Step = enum { open_file, read_file, parse_file, migrating }; }; -pub fn loadFromDisk( +pub fn loadFromCwd( this: *Lockfile, manager: ?*PackageManager, allocator: Allocator, log: *logger.Log, - filename: stringZ, comptime attempt_loading_from_other_lockfile: bool, -) LoadFromDiskResult { +) LoadResult { + return loadFromDir(this, bun.FD.cwd(), manager, allocator, log, attempt_loading_from_other_lockfile); +} + +pub fn loadFromDir( + this: *Lockfile, + dir: bun.FD, + manager: ?*PackageManager, + allocator: Allocator, + log: *logger.Log, + comptime attempt_loading_from_other_lockfile: bool, +) LoadResult { if (comptime Environment.allow_assert) assert(FileSystem.instance_loaded); - const buf = (if (filename.len > 0) - File.readFrom(std.fs.cwd(), filename, allocator).unwrap() - else - File.from(std.io.getStdIn()).readToEnd(allocator).unwrap()) catch |err| { - return switch (err) { - error.EACCESS, error.EPERM, error.ENOENT => { - if (comptime 
attempt_loading_from_other_lockfile) { - if (manager) |pm| { - // Attempt to load from "package-lock.json", "yarn.lock", etc. - return migration.detectAndLoadOtherLockfile( - this, - pm, - allocator, - log, - filename, - ); + var lockfile_format: LoadResult.LockfileFormat = .text; + const file = File.openat(dir, "bun.lock", bun.O.RDONLY, 0).unwrap() catch |text_open_err| file: { + if (text_open_err != error.ENOENT) { + return .{ .err = .{ + .step = .open_file, + .value = text_open_err, + .lockfile_path = "bun.lock", + .format = .text, + } }; + } + + lockfile_format = .binary; + + break :file File.openat(dir, "bun.lockb", bun.O.RDONLY, 0).unwrap() catch |binary_open_err| { + if (binary_open_err != error.ENOENT) { + return .{ .err = .{ + .step = .open_file, + .value = binary_open_err, + .lockfile_path = "bun.lockb", + .format = .binary, + } }; + } + + if (comptime attempt_loading_from_other_lockfile) { + if (manager) |pm| { + const migrate_result = migration.detectAndLoadOtherLockfile( + this, + dir, + pm, + allocator, + log, + ); + + if (migrate_result == .ok) { + lockfile_format = .text; } + + return migrate_result; } + } - return LoadFromDiskResult{ - .err = .{ .step = .open_file, .value = err }, - }; - }, - error.EINVAL, error.ENOTDIR, error.EISDIR => LoadFromDiskResult{ .not_found = {} }, - else => LoadFromDiskResult{ .err = .{ .step = .open_file, .value = err } }, + return .not_found; }; }; - return this.loadFromBytes(manager, buf, allocator, log); + const buf = file.readToEnd(allocator).unwrap() catch |err| { + return .{ .err = .{ + .step = .read_file, + .value = err, + .lockfile_path = if (lockfile_format == .text) "bun.lock" else "bun.lockb", + .format = lockfile_format, + } }; + }; + + if (lockfile_format == .text) { + const source = logger.Source.initPathString("bun.lock", buf); + const json = JSON.parsePackageJSONUTF8(&source, log, allocator) catch |err| { + return .{ + .err = .{ + .step = .parse_file, + .value = err, + .lockfile_path = "bun.lock", + .format = lockfile_format, + }, + }; + }; + + TextLockfile.parseIntoBinaryLockfile(this, allocator, json, &source, log, manager) catch |err| { + switch (err) { + error.OutOfMemory => bun.outOfMemory(), + else => { + return .{ + .err = .{ + .step = .parse_file, + .value = err, + .lockfile_path = "bun.lock", + .format = lockfile_format, + }, + }; + }, + } + }; + + return .{ + .ok = .{ + .lockfile = this, + .serializer_result = .{}, + .loaded_from_binary_lockfile = false, + .format = lockfile_format, + }, + }; + } + + const result = this.loadFromBytes(manager, buf, allocator, log); + + switch (result) { + .ok => { + if (bun.getenvZ("BUN_DEBUG_TEST_TEXT_LOCKFILE") != null) { + + // Convert the loaded binary lockfile into a text lockfile in memory, then + // parse it back into a binary lockfile. 
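The debug hook that follows converts the freshly loaded binary lockfile to text and immediately parses it back, panicking if any step fails: with `BUN_DEBUG_TEST_TEXT_LOCKFILE` set, every test that loads a `bun.lockb` also exercises the text stringifier and parser, so a lossy or unparseable conversion surfaces right away. A minimal sketch of the same round-trip check, with hypothetical `Toy*` stand-ins for the real `Lockfile`/`TextLockfile.Stringifier` machinery:

```zig
const std = @import("std");

// Toy stand-in (hypothetical): the real saveFromBinary/parseIntoBinaryLockfile
// pair operates on the full Lockfile, not a two-field struct.
const ToyLockfile = struct {
    name: []const u8,
    version: []const u8,

    // "Stringify": render the in-memory form as text.
    fn toText(self: ToyLockfile, buf: []u8) ![]const u8 {
        return std.fmt.bufPrint(buf, "{s}@{s}", .{ self.name, self.version });
    }

    // "Parse": rebuild the in-memory form from that text.
    fn fromText(text: []const u8) !ToyLockfile {
        const at = std.mem.indexOfScalar(u8, text, '@') orelse return error.Malformed;
        return ToyLockfile{ .name = text[0..at], .version = text[at + 1 ..] };
    }
};

pub fn main() !void {
    const original = ToyLockfile{ .name = "jquery", .version = "3.7.1" };

    // Round-trip: in-memory -> text -> in-memory, then verify nothing was lost.
    var buf: [64]u8 = undefined;
    const text = try original.toText(&buf);
    const reparsed = try ToyLockfile.fromText(text);

    std.debug.assert(std.mem.eql(u8, original.name, reparsed.name));
    std.debug.assert(std.mem.eql(u8, original.version, reparsed.version));
    std.debug.print("round-trip ok: {s}\n", .{text});
}
```

Note that in the hunk below, `parseIntoBinaryLockfile` writes back into `this`, so when the variable is set, the lockfile the install proceeds with is the one that survived the text round-trip.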
+ + const text_lockfile_bytes = TextLockfile.Stringifier.saveFromBinary(allocator, result.ok.lockfile) catch |err| { + Output.panic("failed to convert binary lockfile to text lockfile: {s}", .{@errorName(err)}); + }; + + const source = logger.Source.initPathString("bun.lock", text_lockfile_bytes); + const json = JSON.parsePackageJSONUTF8(&source, log, allocator) catch |err| { + Output.panic("failed to print valid json from binary lockfile: {s}", .{@errorName(err)}); + }; + + TextLockfile.parseIntoBinaryLockfile(this, allocator, json, &source, log, manager) catch |err| { + Output.panic("failed to parse text lockfile converted from binary lockfile: {s}", .{@errorName(err)}); + }; + } + }, + else => {}, + } + + return result; } -pub fn loadFromBytes(this: *Lockfile, pm: ?*PackageManager, buf: []u8, allocator: Allocator, log: *logger.Log) LoadFromDiskResult { +pub fn loadFromBytes(this: *Lockfile, pm: ?*PackageManager, buf: []u8, allocator: Allocator, log: *logger.Log) LoadResult { var stream = Stream{ .buffer = buf, .pos = 0 }; this.format = FormatVersion.current; @@ -277,17 +399,19 @@ pub fn loadFromBytes(this: *Lockfile, pm: ?*PackageManager, buf: []u8, allocator this.patched_dependencies = .{}; const load_result = Lockfile.Serializer.load(this, &stream, allocator, log, pm) catch |err| { - return LoadFromDiskResult{ .err = .{ .step = .parse_file, .value = err } }; + return LoadResult{ .err = .{ .step = .parse_file, .value = err, .lockfile_path = "bun.lockb", .format = .binary } }; }; if (Environment.allow_assert) { this.verifyData() catch @panic("lockfile data is corrupt"); } - return LoadFromDiskResult{ + return LoadResult{ .ok = .{ .lockfile = this, .serializer_result = load_result, + .loaded_from_binary_lockfile = true, + .format = .binary, }, }; } @@ -299,7 +423,13 @@ pub const InstallResult = struct { pub const Tree = struct { id: Id = invalid_id, - dependency_id: DependencyID = invalid_package_id, + + // Should not be used for anything other than the folder name, + // through `folderName()`. There is no guarantee that the dependency + // id chosen for a tree node has the same behavior or the same + // version literal as the other dependencies hoisted to that node. + dependency_id: DependencyID = invalid_dependency_id, + parent: Id = invalid_id, dependencies: Lockfile.DependencyIDSlice = .{}, @@ -309,6 +439,12 @@ pub const Tree = struct { pub const List = std.ArrayListUnmanaged(Tree); pub const Id = u32; + pub fn folderName(this: *const Tree, deps: []const Dependency, buf: string) string { + const dep_id = this.dependency_id; + if (dep_id == invalid_dependency_id) return ""; + return deps[dep_id].name.slice(buf); + } + pub fn toExternal(this: Tree) External { var out = External{}; out[0..4].* = @as(Id, @bitCast(this.id)); @@ -340,89 +476,105 @@ pub const Tree = struct { const SubtreeError = error{ OutOfMemory, DependencyLoop }; - pub const NodeModulesFolder = struct { - relative_path: stringZ, - dependencies: []const DependencyID, - tree_id: Tree.Id, + // max number of node_modules folders + pub const max_depth = (bun.MAX_PATH_BYTES / "node_modules".len) + 1; - /// depth of the node_modules folder in the tree - /// - /// 0 (./node_modules) - /// / \ - /// 1 1 - /// / - /// 2 - depth: usize, + pub const DepthBuf = [max_depth]Id; + + const IteratorPathStyle = enum { + /// `relative_path` will have the form `node_modules/jquery/node_modules/zod`. + /// Path separators are platform-specific. + node_modules, + /// `relative_path` will have the form `jquery/zod`. Path separators are always + /// posix separators.
+ pkg_path, }; - // max number of node_modules folders - pub const max_depth = (bun.MAX_PATH_BYTES / "node_modules".len) + 1; + pub fn Iterator(comptime path_style: IteratorPathStyle) type { + return struct { + tree_id: Id, + path_buf: bun.PathBuffer = undefined, - pub const Iterator = struct { - tree_id: Id, - path_buf: bun.PathBuffer = undefined, - last_parent: Id = invalid_id, + lockfile: *const Lockfile, - lockfile: *const Lockfile, + depth_stack: DepthBuf = undefined, - depth_stack: DepthBuf = undefined, + pub fn init(lockfile: *const Lockfile) @This() { + var iter: @This() = .{ + .tree_id = 0, + .lockfile = lockfile, + }; + if (comptime path_style == .node_modules) { + @memcpy(iter.path_buf[0.."node_modules".len], "node_modules"); + } + return iter; + } - pub const DepthBuf = [max_depth]Id; + pub fn reset(this: *@This()) void { + this.tree_id = 0; + } - pub fn init(lockfile: *const Lockfile) Iterator { - var iter = Iterator{ - .tree_id = 0, - .lockfile = lockfile, - }; - @memcpy(iter.path_buf[0.."node_modules".len], "node_modules"); - return iter; - } + pub const Next = struct { + relative_path: stringZ, + dependencies: []const DependencyID, + tree_id: Tree.Id, - pub fn reset(this: *Iterator) void { - this.tree_id = 0; - } + /// depth of the node_modules folder in the tree + /// + /// 0 (./node_modules) + /// / \ + /// 1 1 + /// / + /// 2 + depth: usize, + }; - pub fn nextNodeModulesFolder(this: *Iterator, completed_trees: ?*Bitset) ?NodeModulesFolder { - const trees = this.lockfile.buffers.trees.items; + pub fn next(this: *@This(), completed_trees: if (path_style == .node_modules) ?*Bitset else void) ?Next { + const trees = this.lockfile.buffers.trees.items; - if (this.tree_id >= trees.len) return null; + if (this.tree_id >= trees.len) return null; - while (trees[this.tree_id].dependencies.len == 0) { - if (completed_trees) |_completed_trees| { - _completed_trees.set(this.tree_id); + while (trees[this.tree_id].dependencies.len == 0) { + if (comptime path_style == .node_modules) { + if (completed_trees) |_completed_trees| { + _completed_trees.set(this.tree_id); + } + } + this.tree_id += 1; + if (this.tree_id >= trees.len) return null; } - this.tree_id += 1; - if (this.tree_id >= trees.len) return null; - } - const current_tree_id = this.tree_id; - const tree = trees[current_tree_id]; - const tree_dependencies = tree.dependencies.get(this.lockfile.buffers.hoisted_dependencies.items); + const current_tree_id = this.tree_id; + const tree = trees[current_tree_id]; + const tree_dependencies = tree.dependencies.get(this.lockfile.buffers.hoisted_dependencies.items); - const relative_path, const depth = relativePathAndDepth( - this.lockfile, - current_tree_id, - &this.path_buf, - &this.depth_stack, - ); + const relative_path, const depth = relativePathAndDepth( + this.lockfile, + current_tree_id, + &this.path_buf, + &this.depth_stack, + path_style, + ); - this.tree_id += 1; + this.tree_id += 1; - return .{ - .relative_path = relative_path, - .dependencies = tree_dependencies, - .tree_id = current_tree_id, - .depth = depth, - }; - } - }; + return .{ + .relative_path = relative_path, + .dependencies = tree_dependencies, + .tree_id = current_tree_id, + .depth = depth, + }; + } + }; + } /// Returns relative path and the depth of the tree pub fn relativePathAndDepth( lockfile: *const Lockfile, tree_id: Id, path_buf: *bun.PathBuffer, - depth_buf: *Iterator.DepthBuf, + depth_buf: *DepthBuf, + comptime path_style: IteratorPathStyle, ) struct { stringZ, usize } { const trees = 
lockfile.buffers.trees.items; var depth: usize = 0; @@ -430,7 +582,10 @@ pub const Tree = struct { const tree = trees[tree_id]; var parent_id = tree.id; - var path_written: usize = "node_modules".len; + var path_written: usize = switch (comptime path_style) { + .node_modules => "node_modules".len, + .pkg_path => 0, + }; depth_buf[0] = 0; @@ -449,16 +604,25 @@ pub const Tree = struct { depth = depth_buf_len; while (depth_buf_len > 0) : (depth_buf_len -= 1) { - path_buf[path_written] = std.fs.path.sep; - path_written += 1; + if (comptime path_style == .pkg_path) { + if (depth_buf_len != depth) { + path_buf[path_written] = '/'; + path_written += 1; + } + } else { + path_buf[path_written] = std.fs.path.sep; + path_written += 1; + } const id = depth_buf[depth_buf_len]; - const name = dependencies[trees[id].dependency_id].name.slice(buf); + const name = trees[id].folderName(dependencies, buf); @memcpy(path_buf[path_written..][0..name.len], name); path_written += name.len; - @memcpy(path_buf[path_written..][0.."/node_modules".len], std.fs.path.sep_str ++ "node_modules"); - path_written += "/node_modules".len; + if (comptime path_style == .node_modules) { + @memcpy(path_buf[path_written..][0.."/node_modules".len], std.fs.path.sep_str ++ "node_modules"); + path_written += "/node_modules".len; + } } } path_buf[path_written] = 0; @@ -577,7 +741,6 @@ pub const Tree = struct { try next.hoistDependency( true, pid, - dep_id, &dependency, dependency_lists, trees, @@ -613,7 +776,6 @@ pub const Tree = struct { this: *Tree, comptime as_defined: bool, package_id: PackageID, - dependency_id: DependencyID, dependency: *const Dependency, dependency_lists: []Lockfile.DependencyIDList, trees: []Tree, @@ -681,7 +843,6 @@ pub const Tree = struct { const id = trees[this.parent].hoistDependency( false, package_id, - dependency_id, dependency, dependency_lists, trees, @@ -695,7 +856,7 @@ pub const Tree = struct { } }; -/// This conditonally clones the lockfile with root packages marked as non-resolved +/// This conditionally clones the lockfile with root packages marked as non-resolved /// that do not satisfy `Features`. The package may still end up installed even /// if it was e.g. in "devDependencies" and its a production install. In that case, /// it would be installed because another dependency or transient dependency needed it. 
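The hunks above make the tree walk generic over how `relative_path` is rendered: `Iterator(.node_modules)` produces paths like `node_modules/jquery/node_modules/zod` using the platform separator (and keeps the `completed_trees` bookkeeping), while the new `Iterator(.pkg_path)` produces `jquery/zod` with posix separators and no `node_modules` segments. Both boil down to the same parent-chain walk in `relativePathAndDepth`. A self-contained sketch of that walk (the `Node` type and helper below are hypothetical simplifications of `Tree.parent`, `DepthBuf`, and `folderName()`; bounds checks are omitted):

```zig
const std = @import("std");

// Hypothetical, simplified tree node: a parent index plus the folder name that
// `folderName()` would return. invalid_id marks the root, as in the lockfile.
const invalid_id: u32 = std.math.maxInt(u32);

const Node = struct { parent: u32, name: []const u8 };

const PathStyle = enum { node_modules, pkg_path };

fn relativePath(nodes: []const Node, id: u32, comptime style: PathStyle, buf: []u8) []const u8 {
    // Record the ancestor chain, deepest node first (this is what DepthBuf holds).
    var chain: [16]u32 = undefined;
    var chain_len: usize = 0;
    var cur = id;
    while (cur != invalid_id) : (cur = nodes[cur].parent) {
        chain[chain_len] = cur;
        chain_len += 1;
    }

    // Emit segments root-first, prefixing each with "node_modules/" only in
    // .node_modules style. The real code uses the platform separator there;
    // '/' keeps this sketch portable.
    var written: usize = 0;
    while (chain_len > 0) {
        chain_len -= 1;
        if (style == .node_modules) {
            const prefix = "node_modules/";
            @memcpy(buf[written..][0..prefix.len], prefix);
            written += prefix.len;
        }
        const name = nodes[chain[chain_len]].name;
        @memcpy(buf[written..][0..name.len], name);
        written += name.len;
        if (chain_len > 0) {
            buf[written] = '/';
            written += 1;
        }
    }
    return buf[0..written];
}

pub fn main() void {
    // zod hoisted beneath jquery.
    const nodes = [_]Node{
        .{ .parent = invalid_id, .name = "jquery" },
        .{ .parent = 0, .name = "zod" },
    };
    var buf: [128]u8 = undefined;
    std.debug.print("{s}\n", .{relativePath(&nodes, 1, .node_modules, &buf)}); // node_modules/jquery/node_modules/zod
    std.debug.print("{s}\n", .{relativePath(&nodes, 1, .pkg_path, &buf)}); // jquery/zod
}
```

Making the style a `comptime` parameter, as the real iterator does, lets each instantiation compile away the branch for the path flavor it doesn't use.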
@@ -758,8 +919,8 @@ fn preprocessUpdateRequests(old: *Lockfile, manager: *PackageManager, updates: [ if (old_resolution > old.packages.len) continue; const res = resolutions_of_yore[old_resolution]; const len = switch (exact_versions) { - false => std.fmt.count("^{}", .{res.value.npm.fmt(old.buffers.string_bytes.items)}), - true => std.fmt.count("{}", .{res.value.npm.fmt(old.buffers.string_bytes.items)}), + false => std.fmt.count("^{}", .{res.value.npm.version.fmt(old.buffers.string_bytes.items)}), + true => std.fmt.count("{}", .{res.value.npm.version.fmt(old.buffers.string_bytes.items)}), }; if (len >= String.max_inline_len) { string_builder.cap += len; @@ -789,8 +950,8 @@ fn preprocessUpdateRequests(old: *Lockfile, manager: *PackageManager, updates: [ if (old_resolution > old.packages.len) continue; const res = resolutions_of_yore[old_resolution]; const buf = switch (exact_versions) { - false => std.fmt.bufPrint(&temp_buf, "^{}", .{res.value.npm.fmt(old.buffers.string_bytes.items)}) catch break, - true => std.fmt.bufPrint(&temp_buf, "{}", .{res.value.npm.fmt(old.buffers.string_bytes.items)}) catch break, + false => std.fmt.bufPrint(&temp_buf, "^{}", .{res.value.npm.version.fmt(old.buffers.string_bytes.items)}) catch break, + true => std.fmt.bufPrint(&temp_buf, "{}", .{res.value.npm.version.fmt(old.buffers.string_bytes.items)}) catch break, }; const external_version = string_builder.append(ExternalString, buf); const sliced = external_version.value.sliced(old.buffers.string_bytes.items); @@ -860,6 +1021,7 @@ pub fn getWorkspacePkgIfWorkspaceDep(this: *const Lockfile, id: DependencyID) Pa } /// Does this tree id belong to a workspace (including workspace root)? +/// TODO(dylan-conway) fix! pub fn isWorkspaceTreeId(this: *const Lockfile, id: Tree.Id) bool { return id == 0 or this.buffers.dependencies.items[this.buffers.trees.items[id].dependency_id].behavior.isWorkspaceOnly(); } @@ -1235,7 +1397,7 @@ pub const Printer = struct { var lockfile = try allocator.create(Lockfile); - const load_from_disk = lockfile.loadFromDisk(null, allocator, log, lockfile_path, false); + const load_from_disk = lockfile.loadFromCwd(null, allocator, log, false); switch (load_from_disk) { .err => |cause| { switch (cause.step) { @@ -1992,7 +2154,7 @@ pub fn verifyData(this: *const Lockfile) !void { } } -pub fn saveToDisk(this: *Lockfile, filename: stringZ, verbose_log: bool) void { +pub fn saveToDisk(this: *Lockfile, save_format: LoadResult.LockfileFormat, verbose_log: bool) void { if (comptime Environment.allow_assert) { this.verifyData() catch |err| { Output.prettyErrorln("error: failed to verify lockfile: {s}", .{@errorName(err)}); @@ -2001,10 +2163,16 @@ pub fn saveToDisk(this: *Lockfile, filename: stringZ, verbose_log: bool) void { assert(FileSystem.instance_loaded); } - var bytes = std.ArrayList(u8).init(bun.default_allocator); - defer bytes.deinit(); + const timer = std.time.Timer.start() catch unreachable; + const bytes = if (save_format == .text) + TextLockfile.Stringifier.saveFromBinary(bun.default_allocator, this) catch |err| { + switch (err) { + error.OutOfMemory => bun.outOfMemory(), + } + } + else bytes: { + var bytes = std.ArrayList(u8).init(bun.default_allocator); - { var total_size: usize = 0; var end_pos: usize = 0; Lockfile.Serializer.save(this, verbose_log, &bytes, &total_size, &end_pos) catch |err| { @@ -2013,12 +2181,19 @@ pub fn saveToDisk(this: *Lockfile, filename: stringZ, verbose_log: bool) void { }; if (bytes.items.len >= end_pos) bytes.items[end_pos..][0..@sizeOf(usize)].* = 
@bitCast(total_size); - } + break :bytes bytes.items; + }; + defer bun.default_allocator.free(bytes); + _ = timer; + // std.debug.print("time to write {s}: {}\n", .{ @tagName(save_format), bun.fmt.fmtDuration(timer.read()) }); var tmpname_buf: [512]u8 = undefined; var base64_bytes: [8]u8 = undefined; bun.rand(&base64_bytes); - const tmpname = std.fmt.bufPrintZ(&tmpname_buf, ".lockb-{s}.tmp", .{bun.fmt.fmtSliceHexLower(&base64_bytes)}) catch unreachable; + const tmpname = if (save_format == .text) + std.fmt.bufPrintZ(&tmpname_buf, ".lock-{s}.tmp", .{bun.fmt.fmtSliceHexLower(&base64_bytes)}) catch unreachable + else + std.fmt.bufPrintZ(&tmpname_buf, ".lockb-{s}.tmp", .{bun.fmt.fmtSliceHexLower(&base64_bytes)}) catch unreachable; const file = switch (File.openat(std.fs.cwd(), tmpname, bun.O.CREAT | bun.O.WRONLY, 0o777)) { .err => |err| { @@ -2028,7 +2203,7 @@ pub fn saveToDisk(this: *Lockfile, filename: stringZ, verbose_log: bool) void { .result => |f| f, }; - switch (file.writeAll(bytes.items)) { + switch (file.writeAll(bytes)) { .err => |e| { file.close(); _ = bun.sys.unlink(tmpname); @@ -2051,7 +2226,7 @@ pub fn saveToDisk(this: *Lockfile, filename: stringZ, verbose_log: bool) void { } } - file.closeAndMoveTo(tmpname, filename) catch |err| { + file.closeAndMoveTo(tmpname, save_format.filename()) catch |err| { bun.handleErrorReturnTrace(err, @errorReturnTrace()); // note: file is already closed here. @@ -2121,7 +2296,7 @@ pub fn getPackageID( const buf = this.buffers.string_bytes.items; switch (entry) { - .PackageID => |id| { + .id => |id| { if (comptime Environment.allow_assert) assert(id < resolutions.len); if (resolutions[id].eql(resolution, buf, buf)) { @@ -2132,7 +2307,7 @@ pub fn getPackageID( if (npm_version.?.satisfies(resolutions[id].value.npm.version, buf, buf)) return id; } }, - .PackageIDMultiple => |ids| { + .ids => |ids| { for (ids.items) |id| { if (comptime Environment.allow_assert) assert(id < resolutions.len); @@ -2150,14 +2325,79 @@ pub fn getPackageID( return null; } -pub fn getOrPutID(this: *Lockfile, id: PackageID, name_hash: PackageNameHash) !void { +/// Appends `pkg` to `this.packages` if a duplicate isn't found +pub fn appendPackageDedupe(this: *Lockfile, pkg: *Package, buf: string) OOM!PackageID { + const entry = try this.package_index.getOrPut(pkg.name_hash); + + if (!entry.found_existing) { + const new_id: PackageID = @intCast(this.packages.len); + pkg.meta.id = new_id; + try this.packages.append(this.allocator, pkg.*); + entry.value_ptr.* = .{ .id = new_id }; + return new_id; + } + + const resolutions = this.packages.items(.resolution); + + return switch (entry.value_ptr.*) { + .id => |existing_id| { + if (pkg.resolution.eql(&resolutions[existing_id], buf, buf)) { + pkg.meta.id = existing_id; + return existing_id; + } + + const new_id: PackageID = @intCast(this.packages.len); + pkg.meta.id = new_id; + try this.packages.append(this.allocator, pkg.*); + + var ids = try PackageIDList.initCapacity(this.allocator, 8); + ids.items.len = 2; + + ids.items[0..2].* = if (pkg.resolution.order(&resolutions[existing_id], buf, buf) == .gt) + .{ new_id, existing_id } + else + .{ existing_id, new_id }; + + entry.value_ptr.* = .{ + .ids = ids, + }; + + return new_id; + }, + .ids => |*existing_ids| { + for (existing_ids.items) |existing_id| { + if (pkg.resolution.eql(&resolutions[existing_id], buf, buf)) { + pkg.meta.id = existing_id; + return existing_id; + } + } + + const new_id: PackageID = @intCast(this.packages.len); + pkg.meta.id = new_id; + try 
this.packages.append(this.allocator, pkg.*); + + for (existing_ids.items, 0..) |existing_id, i| { + if (pkg.resolution.order(&resolutions[existing_id], buf, buf) == .gt) { + try existing_ids.insert(this.allocator, i, new_id); + return new_id; + } + } + + try existing_ids.append(this.allocator, new_id); + + return new_id; + }, + }; +} + +pub fn getOrPutID(this: *Lockfile, id: PackageID, name_hash: PackageNameHash) OOM!void { const gpe = try this.package_index.getOrPut(name_hash); if (gpe.found_existing) { const index: *PackageIndex.Entry = gpe.value_ptr; switch (index.*) { - .PackageID => |existing_id| { + .id => |existing_id| { var ids = try PackageIDList.initCapacity(this.allocator, 8); ids.items.len = 2; @@ -2170,10 +2410,10 @@ pub fn getOrPutID(this: *Lockfile, id: PackageID, name_hash: PackageNameHash) !v .{ existing_id, id }; index.* = .{ - .PackageIDMultiple = ids, + .ids = ids, }; }, - .PackageIDMultiple => |*existing_ids| { + .ids => |*existing_ids| { const resolutions = this.packages.items(.resolution); const buf = this.buffers.string_bytes.items; @@ -2189,16 +2429,16 @@ pub fn getOrPutID(this: *Lockfile, id: PackageID, name_hash: PackageNameHash) !v }, } } else { - gpe.value_ptr.* = .{ .PackageID = id }; + gpe.value_ptr.* = .{ .id = id }; } } -pub fn appendPackage(this: *Lockfile, package_: Lockfile.Package) !Lockfile.Package { +pub fn appendPackage(this: *Lockfile, package_: Lockfile.Package) OOM!Lockfile.Package { const id: PackageID = @truncate(this.packages.len); return try appendPackageWithID(this, package_, id); } -fn appendPackageWithID(this: *Lockfile, package_: Lockfile.Package, id: PackageID) !Lockfile.Package { +fn appendPackageWithID(this: *Lockfile, package_: Lockfile.Package, id: PackageID) OOM!Lockfile.Package { defer { if (comptime Environment.allow_assert) { assert(this.getPackageID(package_.name_hash, null, &package_.resolution) != null); @@ -2220,6 +2460,13 @@ pub inline fn stringBuilder(this: *Lockfile) Lockfile.StringBuilder { }; } +pub fn stringBuf(this: *Lockfile) String.Buf { + return .{ + .bytes = this.buffers.string_bytes.toManaged(this.allocator), + .pool = this.string_pool, + }; +} + pub const Scratch = struct { pub const DuplicateCheckerMap = std.HashMap(PackageNameHash, logger.Loc, IdentityContext(PackageNameHash), 80); pub const DependencyQueue = std.fifo.LinearFifo(DependencySlice, .Dynamic); @@ -2369,12 +2616,12 @@ pub const StringBuilder = struct { pub const PackageIndex = struct { pub const Map = std.HashMap(PackageNameHash, PackageIndex.Entry, IdentityContext(PackageNameHash), 80); pub const Entry = union(Tag) { - PackageID: PackageID, - PackageIDMultiple: PackageIDList, + id: PackageID, + ids: PackageIDList, pub const Tag = enum(u8) { - PackageID = 0, - PackageIDMultiple = 1, + id = 0, + ids = 1, }; }; }; @@ -2389,7 +2636,7 @@ pub const OverrideMap = struct { map: std.ArrayHashMapUnmanaged(PackageNameHash, Dependency, ArrayIdentityContext.U64, false) = .{}, /// In the future, this `get` function should handle multi-level resolutions. This is difficult right - /// now because given a Dependency ID, there is no fast way to trace it to it's package. + /// now because given a Dependency ID, there is no fast way to trace it to its package. 
/// /// A potential approach is to add another buffer to the lockfile that maps Dependency ID to Package ID, /// and from there `OverrideMap.map` can have a union as the value, where the union is between "override all" @@ -2918,7 +3165,7 @@ pub const Package = extern struct { } switch (resolution_tag) { - .git, .github, .gitlab, .root => { + .git, .github, .root => { const prepare_scripts = .{ "preprepare", "prepare", @@ -2988,7 +3235,7 @@ pub const Package = extern struct { .first_index = @intCast(first_index), .total = total, .cwd = allocator.dupeZ(u8, cwd) catch bun.outOfMemory(), - .package_name = package_name, + .package_name = lockfile.allocator.dupe(u8, package_name) catch bun.outOfMemory(), }; } @@ -3954,7 +4201,7 @@ pub const Package = extern struct { comptime features: Features, ) !void { initializeStore(); - const json = JSON.parsePackageJSONUTF8AlwaysDecode(&source, log, allocator) catch |err| { + const json = JSON.parsePackageJSONUTF8(&source, log, allocator) catch |err| { log.print(Output.errorWriter()) catch {}; Output.prettyErrorln("{s} parsing package.json in \"{s}\"", .{ @errorName(err), source.path.prettyDir() }); Global.crash(); @@ -6324,7 +6571,7 @@ pub const Serializer = struct { lockfile.scratch = Lockfile.Scratch.init(allocator); lockfile.package_index = PackageIndex.Map.initContext(allocator, .{}); - lockfile.string_pool = StringPool.initContext(allocator, .{}); + lockfile.string_pool = StringPool.init(allocator); try lockfile.package_index.ensureTotalCapacity(@as(u32, @truncate(lockfile.packages.len))); if (!has_workspace_name_hashes) { @@ -6471,7 +6718,7 @@ pub fn resolve(this: *Lockfile, package_name: []const u8, version: Dependency.Ve switch (version.tag) { .npm => switch (entry) { - .PackageID => |id| { + .id => |id| { const resolutions = this.packages.items(.resolution); if (comptime Environment.allow_assert) assert(id < resolutions.len); @@ -6479,7 +6726,7 @@ pub fn resolve(this: *Lockfile, package_name: []const u8, version: Dependency.Ve return id; } }, - .PackageIDMultiple => |ids| { + .ids => |ids| { const resolutions = this.packages.items(.resolution); for (ids.items) |id| { @@ -6569,7 +6816,6 @@ pub fn hasTrustedDependency(this: *Lockfile, name: []const u8) bool { pub fn jsonStringifyDependency(this: *const Lockfile, w: anytype, dep_id: DependencyID, dep: Dependency, res: PackageID) !void { const sb = this.buffers.string_bytes.items; - var buf: [2048]u8 = undefined; try w.beginObject(); defer w.endObject() catch {}; @@ -6598,7 +6844,7 @@ pub fn jsonStringifyDependency(this: *const Lockfile, w: anytype, dep_id: Depend try w.write(info.name.slice(sb)); try w.objectField("version"); - try w.write(try std.fmt.bufPrint(&buf, "{}", .{info.version.fmt(sb)})); + try w.print("\"{}\"", .{info.version.fmt(sb)}); }, .dist_tag => { try w.beginObject(); @@ -6692,7 +6938,6 @@ pub fn jsonStringifyDependency(this: *const Lockfile, w: anytype, dep_id: Depend } pub fn jsonStringify(this: *const Lockfile, w: anytype) !void { - var buf: [2048]u8 = undefined; const sb = this.buffers.string_bytes.items; try w.beginObject(); defer w.endObject() catch {}; @@ -6711,14 +6956,14 @@ pub fn jsonStringify(this: *const Lockfile, w: anytype) !void { while (iter.next()) |it| { const entry: PackageIndex.Entry = it.value_ptr.*; const first_id = switch (entry) { - .PackageID => |id| id, - .PackageIDMultiple => |ids| ids.items[0], + .id => |id| id, + .ids => |ids| ids.items[0], }; const name = this.packages.items(.name)[first_id].slice(sb); try w.objectField(name); switch (entry) { - 
.PackageID => |id| try w.write(id), - .PackageIDMultiple => |ids| { + .id => |id| try w.write(id), + .ids => |ids| { try w.beginArray(); for (ids.items) |id| { try w.write(id); @@ -6736,7 +6981,7 @@ pub fn jsonStringify(this: *const Lockfile, w: anytype) !void { const dependencies = this.buffers.dependencies.items; const hoisted_deps = this.buffers.hoisted_dependencies.items; const resolutions = this.buffers.resolutions.items; - var depth_buf: Tree.Iterator.DepthBuf = undefined; + var depth_buf: Tree.DepthBuf = undefined; var path_buf: bun.PathBuffer = undefined; @memcpy(path_buf[0.."node_modules".len], "node_modules"); @@ -6754,11 +6999,11 @@ pub fn jsonStringify(this: *const Lockfile, w: anytype) !void { @intCast(tree_id), &path_buf, &depth_buf, + .node_modules, ); try w.objectField("path"); - const formatted = try std.fmt.bufPrint(&buf, "{}", .{bun.fmt.fmtPath(u8, relative_path, .{ .path_sep = .posix })}); - try w.write(formatted); + try w.print("\"{}\"", .{bun.fmt.fmtPath(u8, relative_path, .{ .path_sep = .posix })}); try w.objectField("depth"); try w.write(depth); @@ -6832,12 +7077,10 @@ pub fn jsonStringify(this: *const Lockfile, w: anytype) !void { try w.write(@tagName(res.tag)); try w.objectField("value"); - const formatted = try std.fmt.bufPrint(&buf, "{s}", .{res.fmt(sb, .posix)}); - try w.write(formatted); + try w.print("\"{s}\"", .{res.fmt(sb, .posix)}); try w.objectField("resolved"); - const formatted_url = try std.fmt.bufPrint(&buf, "{}", .{res.fmtURL(sb)}); - try w.write(formatted_url); + try w.print("\"{}\"", .{res.fmtURL(sb)}); } try w.objectField("dependencies"); @@ -6876,7 +7119,7 @@ pub fn jsonStringify(this: *const Lockfile, w: anytype) !void { try w.objectField("integrity"); if (pkg.meta.integrity.tag != .unknown) { - try w.write(try std.fmt.bufPrint(&buf, "{}", .{pkg.meta.integrity})); + try w.print("\"{}\"", .{pkg.meta.integrity}); } else { try w.write(null); } @@ -6940,13 +7183,15 @@ pub fn jsonStringify(this: *const Lockfile, w: anytype) !void { } } + var buf: [100]u8 = undefined; + try w.objectField("workspace_paths"); { try w.beginObject(); defer w.endObject() catch {}; for (this.workspace_paths.keys(), this.workspace_paths.values()) |k, v| { - try w.objectField(try std.fmt.bufPrint(&buf, "{d}", .{k})); + try w.objectField(std.fmt.bufPrintIntToSlice(&buf, k, 10, .lower, .{})); try w.write(v.slice(sb)); } } @@ -6956,8 +7201,8 @@ pub fn jsonStringify(this: *const Lockfile, w: anytype) !void { defer w.endObject() catch {}; for (this.workspace_versions.keys(), this.workspace_versions.values()) |k, v| { - try w.objectField(try std.fmt.bufPrint(&buf, "{d}", .{k})); - try w.write(try std.fmt.bufPrint(&buf, "{}", .{v.fmt(sb)})); + try w.objectField(std.fmt.bufPrintIntToSlice(&buf, k, 10, .lower, .{})); + try w.print("\"{}\"", .{v.fmt(sb)}); } } } diff --git a/src/install/migration.zig b/src/install/migration.zig index 87dfc373cab4cc..ca04d48732f4b1 100644 --- a/src/install/migration.zig +++ b/src/install/migration.zig @@ -10,6 +10,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const logger = bun.logger; +const File = bun.sys.File; const Install = @import("./install.zig"); const Resolution = @import("./resolution.zig").Resolution; @@ -25,7 +26,7 @@ const ExternalString = Semver.ExternalString; const stringHash = String.Builder.stringHash; const Lockfile = @import("./lockfile.zig"); -const LoadFromDiskResult = Lockfile.LoadFromDiskResult; +const LoadResult = Lockfile.LoadResult; const JSAst = bun.JSAst; const Expr = 
JSAst.Expr; @@ -38,32 +39,21 @@ const debug = Output.scoped(.migrate, false); pub fn detectAndLoadOtherLockfile( this: *Lockfile, + dir: bun.FD, manager: *Install.PackageManager, allocator: Allocator, log: *logger.Log, - bun_lockfile_path: stringZ, -) LoadFromDiskResult { - const dirname = bun_lockfile_path[0 .. strings.lastIndexOfChar(bun_lockfile_path, '/') orelse 0]; +) LoadResult { // check for package-lock.json, yarn.lock, etc... // if it exists, do an in-memory migration - var buf: bun.PathBuffer = undefined; - @memcpy(buf[0..dirname.len], dirname); npm: { - const npm_lockfile_name = "package-lock.json"; - @memcpy(buf[dirname.len .. dirname.len + npm_lockfile_name.len], npm_lockfile_name); - buf[dirname.len + npm_lockfile_name.len] = 0; var timer = std.time.Timer.start() catch unreachable; - const lockfile = bun.sys.openat( - bun.FD.cwd(), - buf[0 .. dirname.len + npm_lockfile_name.len :0], - bun.O.RDONLY, - 0, - ).unwrap() catch break :npm; - defer _ = bun.sys.close(lockfile); + const lockfile = File.openat(dir, "package-lock.json", bun.O.RDONLY, 0).unwrap() catch break :npm; + defer lockfile.close(); var lockfile_path_buf: bun.PathBuffer = undefined; - const lockfile_path = bun.getFdPathZ(lockfile, &lockfile_path_buf) catch break :npm; - const data = bun.sys.File.from(lockfile).readToEnd(allocator).unwrap() catch break :npm; + const lockfile_path = bun.getFdPathZ(lockfile.handle, &lockfile_path_buf) catch break :npm; + const data = lockfile.readToEnd(allocator).unwrap() catch break :npm; const migrate_result = migrateNPMLockfile(this, manager, allocator, log, data, lockfile_path) catch |err| { if (err == error.NPMLockfileVersionMismatch) { Output.prettyErrorln( @@ -81,7 +71,12 @@ pub fn detectAndLoadOtherLockfile( Output.prettyErrorln("Invalid NPM package-lock.json\nIn a release build, this would ignore and do a fresh install.\nAborting", .{}); Global.exit(1); } - return LoadFromDiskResult{ .err = .{ .step = .migrating, .value = err } }; + return LoadResult{ .err = .{ + .step = .migrating, + .value = err, + .lockfile_path = "package-lock.json", + .format = .binary, + } }; }; if (migrate_result == .ok) { @@ -94,7 +89,7 @@ pub fn detectAndLoadOtherLockfile( return migrate_result; } - return LoadFromDiskResult{ .not_found = {} }; + return LoadResult{ .not_found = {} }; } const ResolvedURLsMap = bun.StringHashMapUnmanaged(string); @@ -130,7 +125,7 @@ pub fn migrateNPMLockfile( log: *logger.Log, data: string, abs_path: string, -) !LoadFromDiskResult { +) !LoadResult { debug("begin lockfile migration", .{}); this.initEmpty(allocator); @@ -553,7 +548,7 @@ pub fn migrateNPMLockfile( } else .false, .integrity = if (pkg.get("integrity")) |integrity| - try Integrity.parse( + Integrity.parse( integrity.asString(this.allocator) orelse return error.InvalidNPMLockfile, ) @@ -1068,7 +1063,7 @@ pub fn migrateNPMLockfile( // } // This is definitely a memory leak, but it's fine because there is no install api, so this can only be leaked once per process. - // This operation is neccecary because callers of `loadFromDisk` assume the data is written into the passed `this`. + // This operation is necessary because callers of `loadFromCwd` assume the data is written into the passed `this`. // You'll find that not cleaning the lockfile will cause `bun install` to not actually install anything since it doesnt have any hoisted trees. 
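+ // Cleaning rebuilds the hoisted dependency trees in place, which is what makes the migrated lockfile installable.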
this.* = (try this.cleanWithLogger(manager, &.{}, log, false, .silent)).*; @@ -1084,11 +1079,13 @@ pub fn migrateNPMLockfile( this.meta_hash = try this.generateMetaHash(false, this.packages.len); - return LoadFromDiskResult{ + return LoadResult{ .ok = .{ .lockfile = this, .was_migrated = true, + .loaded_from_binary_lockfile = false, .serializer_result = .{}, + .format = .text, }, }; } diff --git a/src/install/npm.zig b/src/install/npm.zig index 4c5210f86d9440..3eeb230c780ee9 100644 --- a/src/install/npm.zig +++ b/src/install/npm.zig @@ -530,7 +530,7 @@ const ExternVersionMap = extern struct { } }; -fn Negatable(comptime T: type) type { +pub fn Negatable(comptime T: type) type { return struct { added: T = T.none, removed: T = T.none, @@ -578,6 +578,11 @@ fn Negatable(comptime T: type) type { return; } + if (strings.eqlComptime(str, "none")) { + this.had_unrecognized_values = true; + return; + } + const is_not = str[0] == '!'; const offset: usize = @intFromBool(is_not); @@ -593,6 +598,74 @@ fn Negatable(comptime T: type) type { this.* = .{ .added = @enumFromInt(@intFromEnum(this.added) | field), .removed = this.removed }; } } + + pub fn fromJson(allocator: std.mem.Allocator, expr: JSON.Expr) OOM!T { + var this = T.none.negatable(); + switch (expr.data) { + .e_array => |arr| { + const items = arr.slice(); + if (items.len > 0) { + for (items) |item| { + if (item.asString(allocator)) |value| { + this.apply(value); + } + } + } + }, + .e_string => |str| { + this.apply(str.data); + }, + else => {}, + } + + return this.combine(); + } + + /// Writes either a single string, or a one-line JSON array in which each item is followed by a trailing comma and space + pub fn toJson(field: T, writer: anytype) @TypeOf(writer).Error!void { + if (field == .none) { + // [] would mean "everything", so serialize .none as the unrecognized value "none" + try writer.writeAll( + \\"none" + ); + return; + } + + const kvs = T.NameMap.kvs; + var removed: u8 = 0; + for (kvs) |kv| { + if (!field.has(kv.value)) { + removed += 1; + } + } + const included = kvs.len - removed; + const print_included = removed > kvs.len - removed; + + const one = (print_included and included == 1) or (!print_included and removed == 1); + + if (!one) { + try writer.writeAll("[ "); + } + + for (kvs) |kv| { + const has = field.has(kv.value); + if (has and print_included) { + try writer.print( + \\"{s}" + , .{kv.key}); + if (one) return; + try writer.writeAll(", "); + } else if (!has and !print_included) { + try writer.print( + \\"!{s}" + , .{kv.key}); + if (one) return; + try writer.writeAll(", "); + } + } + + try writer.writeByte(']'); + } }; } @@ -1759,69 +1832,15 @@ pub const PackageManifest = struct { var package_version: PackageVersion = empty_version; if (prop.value.?.asProperty("cpu")) |cpu_q| { - var cpu = Architecture.none.negatable(); - - switch (cpu_q.expr.data) { - .e_array => |arr| { - const items = arr.slice(); - if (items.len > 0) { - for (items) |item| { - if (item.asString(allocator)) |cpu_str_| { - cpu.apply(cpu_str_); - } - } - } - }, - .e_string => |stri| { - cpu.apply(stri.data); - }, - else => {}, - } - package_version.cpu = cpu.combine(); + package_version.cpu = try Negatable(Architecture).fromJson(allocator, cpu_q.expr); } if (prop.value.?.asProperty("os")) |os_q| { - var os = OperatingSystem.none.negatable(); - - switch (os_q.expr.data) { - .e_array => |arr| { - const items = arr.slice(); - if (items.len > 0) { - for (items) |item| { - if (item.asString(allocator)) |cpu_str_| { - os.apply(cpu_str_); - } - } - } - }, - .e_string => |stri| { - os.apply(stri.data); - }, - else => {}, - } - 
package_version.os = os.combine(); + package_version.os = try Negatable(OperatingSystem).fromJson(allocator, os_q.expr); } if (prop.value.?.asProperty("libc")) |libc| { - var libc_ = Libc.none.negatable(); - - switch (libc.expr.data) { - .e_array => |arr| { - const items = arr.slice(); - if (items.len > 0) { - for (items) |item| { - if (item.asString(allocator)) |libc_str_| { - libc_.apply(libc_str_); - } - } - } - }, - .e_string => |stri| { - libc_.apply(stri.data); - }, - else => {}, - } - package_version.libc = libc_.combine(); + package_version.libc = try Negatable(Libc).fromJson(allocator, libc.expr); } if (prop.value.?.asProperty("hasInstallScript")) |has_install_script| { @@ -1973,7 +1992,7 @@ pub const PackageManifest = struct { if (dist.expr.asProperty("integrity")) |shasum| { if (shasum.expr.asString(allocator)) |shasum_str| { - package_version.integrity = Integrity.parse(shasum_str) catch Integrity{}; + package_version.integrity = Integrity.parse(shasum_str); if (package_version.integrity.tag.isSupported()) break :integrity; } } diff --git a/src/install/patch_install.zig b/src/install/patch_install.zig index 3005a0c548e41c..cb8b932aa12960 100644 --- a/src/install/patch_install.zig +++ b/src/install/patch_install.zig @@ -9,6 +9,7 @@ const Environment = bun.Environment; const strings = bun.strings; const MutableString = bun.MutableString; const Progress = bun.Progress; +const String = bun.Semver.String; const logger = bun.logger; const Loc = logger.Loc; @@ -80,7 +81,7 @@ pub const PatchTask = struct { name_and_version_hash: u64, resolution: *const Resolution, patchfilepath: []const u8, - pkgname: []const u8, + pkgname: String, cache_dir: std.fs.Dir, cache_dir_subpath: stringZ, @@ -103,7 +104,6 @@ pub const PatchTask = struct { .apply => { this.manager.allocator.free(this.callback.apply.patchfilepath); this.manager.allocator.free(this.callback.apply.cache_dir_subpath); - this.manager.allocator.free(this.callback.apply.pkgname); if (this.callback.apply.install_context) |ictx| ictx.path.deinit(); this.callback.apply.logger.deinit(); }, @@ -564,7 +564,7 @@ pub const PatchTask = struct { .name_and_version_hash = name_and_version_hash, .cache_dir = stuff.cache_dir, .patchfilepath = patchfilepath, - .pkgname = pkg_manager.allocator.dupe(u8, pkg_name.slice(pkg_manager.lockfile.buffers.string_bytes.items)) catch bun.outOfMemory(), + .pkgname = pkg_name, .logger = logger.Log.init(pkg_manager.allocator), // need to dupe this as it's calculated using // `PackageManager.cached_package_folder_name_buf` which may be diff --git a/src/install/repository.zig b/src/install/repository.zig index 848731b7cbd6ee..8e016bf9656589 100644 --- a/src/install/repository.zig +++ b/src/install/repository.zig @@ -17,6 +17,7 @@ const strings = @import("../string_immutable.zig"); const GitSHA = String; const Path = bun.path; const File = bun.sys.File; +const OOM = bun.OOM; threadlocal var final_path_buf: bun.PathBuffer = undefined; threadlocal var ssh_path_buf: bun.PathBuffer = undefined; @@ -181,6 +182,51 @@ pub const Repository = extern struct { .{ "gitlab", ".com" }, }); + pub fn parseAppendGit(input: string, buf: *String.Buf) OOM!Repository { + var remain = input; + if (strings.hasPrefixComptime(remain, "git+")) { + remain = remain["git+".len..]; + } + if (strings.lastIndexOfChar(remain, '#')) |hash| { + return .{ + .repo = try buf.append(remain[0..hash]), + .committish = try buf.append(remain[hash + 1 ..]), + }; + } + return .{ + .repo = try buf.append(remain), + }; + } + + pub fn parseAppendGithub(input: 
string, buf: *String.Buf) OOM!Repository { + var remain = input; + if (strings.hasPrefixComptime(remain, "github:")) { + remain = remain["github:".len..]; + } + var hash: usize = 0; + var slash: usize = 0; + for (remain, 0..) |c, i| { + switch (c) { + '/' => slash = i, + '#' => hash = i, + else => {}, + } + } + + const repo = if (hash == 0) remain[slash + 1 ..] else remain[slash + 1 .. hash]; + + var result: Repository = .{ + .owner = try buf.append(remain[0..slash]), + .repo = try buf.append(repo), + }; + + if (hash != 0) { + result.committish = try buf.append(remain[hash + 1 ..]); + } + + return result; + } + pub fn createDependencyNameFromVersionLiteral( allocator: std.mem.Allocator, repository: *const Repository, @@ -260,6 +306,14 @@ pub const Repository = extern struct { return try formatter.format(layout, opts, writer); } + pub fn fmt(this: *const Repository, label: string, buf: []const u8) Formatter { + return .{ + .repository = this, + .buf = buf, + .label = label, + }; + } + pub const Formatter = struct { label: []const u8 = "", buf: []const u8, diff --git a/src/install/resolution.zig b/src/install/resolution.zig index 2fecc21aff94d2..d51829b1d40dbd 100644 --- a/src/install/resolution.zig +++ b/src/install/resolution.zig @@ -10,6 +10,9 @@ const strings = @import("../string_immutable.zig"); const VersionedURL = @import("./versioned_url.zig").VersionedURL; const bun = @import("root").bun; const Path = bun.path; +const JSON = bun.JSON; +const OOM = bun.OOM; +const Dependency = bun.install.Dependency; pub const Resolution = extern struct { tag: Tag = .uninitialized, @@ -32,6 +35,74 @@ pub const Resolution = extern struct { return this.tag.canEnqueueInstallTask(); } + const FromTextLockfileError = OOM || error{ + UnexpectedResolution, + InvalidSemver, + }; + + pub fn fromTextLockfile(res_str: string, string_buf: *String.Buf) FromTextLockfileError!Resolution { + if (strings.hasPrefixComptime(res_str, "root:")) { + return Resolution.init(.{ .root = {} }); + } + + if (strings.withoutPrefixIfPossibleComptime(res_str, "link:")) |link| { + return Resolution.init(.{ .symlink = try string_buf.append(link) }); + } + + if (strings.withoutPrefixIfPossibleComptime(res_str, "workspace:")) |workspace| { + return Resolution.init(.{ .workspace = try string_buf.append(workspace) }); + } + + if (strings.withoutPrefixIfPossibleComptime(res_str, "file:")) |folder| { + return Resolution.init(.{ .folder = try string_buf.append(folder) }); + } + + return switch (Dependency.Version.Tag.infer(res_str)) { + .git => Resolution.init(.{ .git = try Repository.parseAppendGit(res_str, string_buf) }), + .github => Resolution.init(.{ .github = try Repository.parseAppendGithub(res_str, string_buf) }), + .tarball => { + if (Dependency.isRemoteTarball(res_str)) { + return Resolution.init(.{ .remote_tarball = try string_buf.append(res_str) }); + } + + return Resolution.init(.{ .local_tarball = try string_buf.append(res_str) }); + }, + .npm => { + const version_literal = try string_buf.append(res_str); + const parsed = Semver.Version.parse(version_literal.sliced(string_buf.bytes.items)); + + if (!parsed.valid) { + return error.UnexpectedResolution; + } + + if (parsed.version.major == null or parsed.version.minor == null or parsed.version.patch == null) { + return error.UnexpectedResolution; + } + + return .{ + .tag = .npm, + .value = .{ + .npm = .{ + .version = parsed.version.min(), + + // will fill this later + .url = .{}, + }, + }, + }; + }, + + // covered above + .workspace => error.UnexpectedResolution, + .symlink => 
error.UnexpectedResolution, + .folder => error.UnexpectedResolution, + + // should not happen + .dist_tag => error.UnexpectedResolution, + .uninitialized => error.UnexpectedResolution, + }; + } + pub fn order( lhs: *const Resolution, rhs: *const Resolution, @@ -52,7 +123,6 @@ pub const Resolution = extern struct { .single_file_module => lhs.value.single_file_module.order(&rhs.value.single_file_module, lhs_buf, rhs_buf), .git => lhs.value.git.order(&rhs.value.git, lhs_buf, rhs_buf), .github => lhs.value.github.order(&rhs.value.github, lhs_buf, rhs_buf), - .gitlab => lhs.value.gitlab.order(&rhs.value.gitlab, lhs_buf, rhs_buf), else => .eq, }; } @@ -68,7 +138,6 @@ pub const Resolution = extern struct { .single_file_module => builder.count(this.value.single_file_module.slice(buf)), .git => this.value.git.count(buf, Builder, builder), .github => this.value.github.count(buf, Builder, builder), - .gitlab => this.value.gitlab.count(buf, Builder, builder), else => {}, } } @@ -102,9 +171,6 @@ pub const Resolution = extern struct { .github => Value.init(.{ .github = this.value.github.clone(buf, Builder, builder), }), - .gitlab => Value.init(.{ - .gitlab = this.value.gitlab.clone(buf, Builder, builder), - }), .root => Value.init(.{ .root = {} }), else => { std.debug.panic("Internal error: unexpected resolution tag: {}", .{this.tag}); @@ -180,11 +246,6 @@ pub const Resolution = extern struct { lhs_string_buf, rhs_string_buf, ), - .gitlab => lhs.value.gitlab.eql( - &rhs.value.gitlab, - lhs_string_buf, - rhs_string_buf, - ), else => unreachable, }; } @@ -204,7 +265,6 @@ pub const Resolution = extern struct { .remote_tarball => try writer.writeAll(value.remote_tarball.slice(formatter.buf)), .git => try value.git.formatAs("git+", formatter.buf, layout, opts, writer), .github => try value.github.formatAs("github:", formatter.buf, layout, opts, writer), - .gitlab => try value.gitlab.formatAs("gitlab:", formatter.buf, layout, opts, writer), .workspace => try std.fmt.format(writer, "workspace:{s}", .{value.workspace.slice(formatter.buf)}), .symlink => try std.fmt.format(writer, "link:{s}", .{value.symlink.slice(formatter.buf)}), .single_file_module => try std.fmt.format(writer, "module:{s}", .{value.single_file_module.slice(formatter.buf)}), @@ -228,7 +288,6 @@ pub const Resolution = extern struct { .remote_tarball => try writer.writeAll(value.remote_tarball.slice(buf)), .git => try value.git.formatAs("git+", buf, layout, opts, writer), .github => try value.github.formatAs("github:", buf, layout, opts, writer), - .gitlab => try value.gitlab.formatAs("gitlab:", buf, layout, opts, writer), .workspace => try std.fmt.format(writer, "workspace:{s}", .{bun.fmt.fmtPath(u8, value.workspace.slice(buf), .{ .path_sep = formatter.path_sep, })}), @@ -256,7 +315,6 @@ pub const Resolution = extern struct { .remote_tarball => try writer.writeAll(formatter.resolution.value.remote_tarball.slice(formatter.buf)), .git => try formatter.resolution.value.git.formatAs("git+", formatter.buf, layout, opts, writer), .github => try formatter.resolution.value.github.formatAs("github:", formatter.buf, layout, opts, writer), - .gitlab => try formatter.resolution.value.gitlab.formatAs("gitlab:", formatter.buf, layout, opts, writer), .workspace => try std.fmt.format(writer, "workspace:{s}", .{formatter.resolution.value.workspace.slice(formatter.buf)}), .symlink => try std.fmt.format(writer, "link:{s}", .{formatter.resolution.value.symlink.slice(formatter.buf)}), .single_file_module => try std.fmt.format(writer, "module:{s}", 
.{formatter.resolution.value.single_file_module.slice(formatter.buf)}), @@ -282,7 +340,6 @@ pub const Resolution = extern struct { git: Repository, github: Repository, - gitlab: Repository, workspace: String, @@ -306,7 +363,6 @@ pub const Resolution = extern struct { local_tarball = 8, github = 16, - gitlab = 24, git = 32, @@ -338,7 +394,7 @@ pub const Resolution = extern struct { _, pub fn isGit(this: Tag) bool { - return this == .git or this == .github or this == .gitlab; + return this == .git or this == .github; } pub fn canEnqueueInstallTask(this: Tag) bool { diff --git a/src/install/semver.zig b/src/install/semver.zig index fc90129b67d560..84c7fdba0d9cdb 100644 --- a/src/install/semver.zig +++ b/src/install/semver.zig @@ -12,6 +12,9 @@ const default_allocator = bun.default_allocator; const C = bun.C; const JSC = bun.JSC; const IdentityContext = @import("../identity_context.zig").IdentityContext; +const OOM = bun.OOM; +const TruncatedPackageNameHash = bun.install.TruncatedPackageNameHash; +const Lockfile = bun.install.Lockfile; /// String type that stores either an offset/length into an external buffer or a string inline directly pub const String = extern struct { @@ -35,6 +38,106 @@ pub const String = extern struct { return String.init(inlinable_buffer, inlinable_buffer); } + pub const Buf = struct { + bytes: std.ArrayList(u8), + pool: Builder.StringPool, + + pub fn init(allocator: std.mem.Allocator) Buf { + return .{ + .bytes = std.ArrayList(u8).init(allocator), + .pool = Builder.StringPool.init(allocator), + }; + } + + pub fn apply(this: *Buf, lockfile: *Lockfile) void { + lockfile.buffers.string_bytes = this.bytes.moveToUnmanaged(); + lockfile.string_pool = this.pool; + } + + pub fn append(this: *Buf, str: string) OOM!String { + if (canInline(str)) { + return String.initInline(str); + } + + const hash = Builder.stringHash(str); + const entry = try this.pool.getOrPut(hash); + if (entry.found_existing) { + return entry.value_ptr.*; + } + + // new entry + const new = try String.initAppend(&this.bytes, str); + entry.value_ptr.* = new; + return new; + } + + pub fn appendWithHash(this: *Buf, str: string, hash: u64) OOM!String { + if (canInline(str)) { + return initInline(str); + } + + const entry = try this.pool.getOrPut(hash); + if (entry.found_existing) { + return entry.value_ptr.*; + } + + // new entry + const new = try String.initAppend(&this.bytes, str); + entry.value_ptr.* = new; + return new; + } + + pub fn appendExternal(this: *Buf, str: string) OOM!ExternalString { + const hash = Builder.stringHash(str); + + if (canInline(str)) { + return .{ + .value = String.initInline(str), + .hash = hash, + }; + } + + const entry = try this.pool.getOrPut(hash); + if (entry.found_existing) { + return .{ + .value = entry.value_ptr.*, + .hash = hash, + }; + } + + const new = try String.initAppend(&this.bytes, str); + entry.value_ptr.* = new; + return .{ + .value = new, + .hash = hash, + }; + } + + pub fn appendExternalWithHash(this: *Buf, str: string, hash: u64) OOM!ExternalString { + if (canInline(str)) { + return .{ + .value = initInline(str), + .hash = hash, + }; + } + + const entry = try this.pool.getOrPut(hash); + if (entry.found_existing) { + return .{ + .value = entry.value_ptr.*, + .hash = hash, + }; + } + + const new = try String.initAppend(&this.bytes, str); + entry.value_ptr.* = new; + return .{ + .value = new, + .hash = hash, + }; + } + }; + pub const Tag = enum { small, big, @@ -187,6 +290,60 @@ pub const String = extern struct { }; } + pub fn initInline( + in: string, + ) String { 
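+ // Fast path: the whole string fits in the 8 inline bytes, so nothing is appended to a string buffer.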
+ bun.assertWithLocation(canInline(in), @src()); + return switch (in.len) { + 0 => .{}, + 1 => .{ .bytes = .{ in[0], 0, 0, 0, 0, 0, 0, 0 } }, + 2 => .{ .bytes = .{ in[0], in[1], 0, 0, 0, 0, 0, 0 } }, + 3 => .{ .bytes = .{ in[0], in[1], in[2], 0, 0, 0, 0, 0 } }, + 4 => .{ .bytes = .{ in[0], in[1], in[2], in[3], 0, 0, 0, 0 } }, + 5 => .{ .bytes = .{ in[0], in[1], in[2], in[3], in[4], 0, 0, 0 } }, + 6 => .{ .bytes = .{ in[0], in[1], in[2], in[3], in[4], in[5], 0, 0 } }, + 7 => .{ .bytes = .{ in[0], in[1], in[2], in[3], in[4], in[5], in[6], 0 } }, + 8 => .{ .bytes = .{ in[0], in[1], in[2], in[3], in[4], in[5], in[6], in[7] } }, + else => unreachable, + }; + } + + pub fn initAppendIfNeeded( + buf: *std.ArrayList(u8), + in: string, + ) OOM!String { + return switch (in.len) { + 0 => .{}, + 1 => .{ .bytes = .{ in[0], 0, 0, 0, 0, 0, 0, 0 } }, + 2 => .{ .bytes = .{ in[0], in[1], 0, 0, 0, 0, 0, 0 } }, + 3 => .{ .bytes = .{ in[0], in[1], in[2], 0, 0, 0, 0, 0 } }, + 4 => .{ .bytes = .{ in[0], in[1], in[2], in[3], 0, 0, 0, 0 } }, + 5 => .{ .bytes = .{ in[0], in[1], in[2], in[3], in[4], 0, 0, 0 } }, + 6 => .{ .bytes = .{ in[0], in[1], in[2], in[3], in[4], in[5], 0, 0 } }, + 7 => .{ .bytes = .{ in[0], in[1], in[2], in[3], in[4], in[5], in[6], 0 } }, + + max_inline_len => + // If they use the final bit, then it's a big string. + // This should only happen for non-ascii strings that are exactly 8 bytes. + // so that's an edge-case + if ((in[max_inline_len - 1]) >= 128) + try initAppend(buf, in) + else + .{ .bytes = .{ in[0], in[1], in[2], in[3], in[4], in[5], in[6], in[7] } }, + + else => try initAppend(buf, in), + }; + } + + pub fn initAppend( + buf: *std.ArrayList(u8), + in: string, + ) OOM!String { + try buf.appendSlice(in); + const in_buf = buf.items[buf.items.len - in.len ..]; + return @bitCast((@as(u64, 0) | @as(u64, @as(max_addressable_space, @truncate(@as(u64, @bitCast(Pointer.init(buf.items, in_buf))))))) | 1 << 63); + } + pub fn eql(this: String, that: String, this_buf: []const u8, that_buf: []const u8) bool { if (this.isInline() and that.isInline()) { return @as(u64, @bitCast(this.bytes)) == @as(u64, @bitCast(that.bytes)); diff --git a/src/install/versioned_url.zig b/src/install/versioned_url.zig index 0a06856d7ca9e6..ac31f98ece1521 100644 --- a/src/install/versioned_url.zig +++ b/src/install/versioned_url.zig @@ -13,10 +13,6 @@ pub const VersionedURL = extern struct { return this.version.order(other.version, lhs_buf, rhs_buf); } - pub fn fmt(this: VersionedURL, buf: []const u8) Semver.Version.Formatter { - return this.version.fmt(buf); - } - pub fn count(this: VersionedURL, buf: []const u8, comptime Builder: type, builder: Builder) void { this.version.count(buf, comptime Builder, builder); builder.count(this.url.slice(buf)); diff --git a/src/js/builtins/BundlerPlugin.ts b/src/js/builtins/BundlerPlugin.ts index ed2c5e653b5b8d..a4ded7976c1367 100644 --- a/src/js/builtins/BundlerPlugin.ts +++ b/src/js/builtins/BundlerPlugin.ts @@ -1,11 +1,4 @@ -import type { - BuildConfig, - BunPlugin, - OnLoadCallback, - OnResolveCallback, - PluginBuilder, - PluginConstraints, -} from "bun"; +import type { BuildConfig, BunPlugin, OnLoadCallback, OnResolveCallback, PluginBuilder, PluginConstraints } from "bun"; type AnyFunction = (...args: any[]) => any; interface BundlerPlugin { @@ -73,8 +66,10 @@ export function runSetupFunction( if (map === onBeforeParsePlugins) { isOnBeforeParse = true; // TODO: how to check if it a napi module here? 
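+ // A napi module here is expected to be an object exposing `$napiDlopenHandle`; anything else is rejected below.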
- if (!callback) { - throw new TypeError("onBeforeParse `napiModule` must be a Napi module"); + if (!callback || !$isObject(callback) || !callback.$napiDlopenHandle) { + throw new TypeError( + "onBeforeParse `napiModule` must be a Napi module which exports the `BUN_PLUGIN_NAME` symbol.", + ); } if (typeof symbol !== "string") { @@ -134,7 +129,7 @@ export function runSetupFunction( const self = this; function onStart(callback) { - if(isBake) { + if (isBake) { throw new TypeError("onStart() is not supported in Bake yet"); } if (!$isCallable(callback)) { @@ -370,7 +365,14 @@ export function runOnResolvePlugins(this: BundlerPlugin, specifier, inputNamespa } } -export function runOnLoadPlugins(this: BundlerPlugin, internalID, path, namespace, defaultLoaderId, isServerSide: boolean) { +export function runOnLoadPlugins( + this: BundlerPlugin, + internalID, + path, + namespace, + defaultLoaderId, + isServerSide: boolean, +) { const LOADERS_MAP = $LoaderLabelToId; const loaderName = $LoaderIdToLabel[defaultLoaderId]; @@ -411,15 +413,15 @@ export function runOnLoadPlugins(this: BundlerPlugin, internalID, path, namespac } var { contents, loader = defaultLoader } = result as any; - if ((loader as any) === 'object') { - if (!('exports' in result)) { + if ((loader as any) === "object") { + if (!("exports" in result)) { throw new TypeError('onLoad plugin returning loader: "object" must have "exports" property'); } try { contents = JSON.stringify(result.exports); - loader = 'json'; + loader = "json"; } catch (e) { - throw new TypeError('When using Bun.build, onLoad plugin must return a JSON-serializable object: ' + e) ; + throw new TypeError("When using Bun.build, onLoad plugin must return a JSON-serializable object: " + e); } } diff --git a/src/js/internal/primordials.js b/src/js/internal/primordials.js index e68d6d6fe3f6c5..565a056a60de3f 100644 --- a/src/js/internal/primordials.js +++ b/src/js/internal/primordials.js @@ -124,6 +124,7 @@ export default { MathRound: Math.round, MathSqrt: Math.sqrt, MathTrunc: Math.trunc, + MathAbs: Math.abs, Number, NumberIsFinite: Number.isFinite, NumberIsNaN: Number.isNaN, @@ -195,6 +196,7 @@ export default { StringPrototypeSplit: uncurryThis(String.prototype.split), StringPrototypeStartsWith: uncurryThis(String.prototype.startsWith), StringPrototypeToLowerCase: uncurryThis(String.prototype.toLowerCase), + StringPrototypeToUpperCase: uncurryThis(String.prototype.toUpperCase), StringPrototypeTrim: uncurryThis(String.prototype.trim), StringPrototypeValueOf: uncurryThis(String.prototype.valueOf), SymbolPrototypeToString: uncurryThis(Symbol.prototype.toString), diff --git a/src/js/node/events.ts b/src/js/node/events.ts index 462de72833f68a..8e3d875d452a53 100644 --- a/src/js/node/events.ts +++ b/src/js/node/events.ts @@ -341,9 +341,18 @@ EventEmitterPrototype.rawListeners = function rawListeners(type) { return handlers.slice(); }; -EventEmitterPrototype.listenerCount = function listenerCount(type) { +EventEmitterPrototype.listenerCount = function listenerCount(type, method) { var { _events: events } = this; if (!events) return 0; + if (method != null) { + var length = 0; + for (const handler of events[type] ?? []) { + if (handler === method || handler.listener === method) { + length++; + } + } + return length; + } return events[type]?.length ?? 
0; }; Object.defineProperty(EventEmitterPrototype.listenerCount, "name", { value: "listenerCount" }); diff --git a/src/js/node/querystring.ts b/src/js/node/querystring.ts index 73a9a0ac15a72a..21d16fb44172b7 100644 --- a/src/js/node/querystring.ts +++ b/src/js/node/querystring.ts @@ -1,398 +1,546 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + var __commonJS = (cb, mod: typeof module | undefined = undefined) => () => (mod || cb((mod = { exports: {} }).exports, mod), mod.exports); -var Buffer = require("node:buffer").Buffer; - -// src/node-fallbacks/node_modules/querystring-es3/src/object-keys.js -var require_object_keys = __commonJS((exports, module) => { - var objectKeys = - Object.keys || - (function () { - var hasOwnProperty = Object.prototype.hasOwnProperty; - var hasDontEnumBug = !{ toString: null }.propertyIsEnumerable("toString"); - var dontEnums = [ - "toString", - "toLocaleString", - "valueOf", - "hasOwnProperty", - "isPrototypeOf", - "propertyIsEnumerable", - "constructor", - ]; - var dontEnumsLength = dontEnums.length; - return function (obj) { - if (typeof obj !== "function" && (typeof obj !== "object" || obj === null)) { - throw new TypeError("Object.keys called on non-object"); - } - var result = []; - var prop; - var i; - for (prop in obj) { - if (hasOwnProperty.$call(obj, prop)) { - result.push(prop); - } - } - if (hasDontEnumBug) { - for (i = 0; i < dontEnumsLength; i++) { - if (hasOwnProperty.$call(obj, dontEnums[i])) { - result.push(dontEnums[i]); - } - } - } - return result; - }; - })(); - module.exports = objectKeys; -}); - -// src/node-fallbacks/node_modules/querystring-es3/src/index.js var require_src = __commonJS((exports, module) => { - var ParsedQueryString = function () {}; - var unescapeBuffer = function (s, decodeSpaces) { - var out = Buffer.allocUnsafe(s.length); - var state = 0; - var n, m, hexchar, c; - for (var inIndex = 0, outIndex = 0; ; inIndex++) { - if (inIndex < s.length) { - c = s.charCodeAt(inIndex); - } else { - if (state > 0) { - out[outIndex++] = 37; - if (state === 2) out[outIndex++] = hexchar; + const { + Array, + ArrayIsArray, + Int8Array, + MathAbs, + NumberIsFinite, + ObjectKeys, + String, + StringPrototypeCharCodeAt, + StringPrototypeSlice, + decodeURIComponent, + StringPrototypeToUpperCase, + NumberPrototypeToString, + } = require("internal/primordials"); + + const { Buffer } = require("node:buffer"); + + /** + * @param {string} str + * @param 
{Int8Array} noEscapeTable + * @param {string[]} hexTable + * @returns {string} + */ + function encodeStr(str, noEscapeTable, hexTable) { + const len = str.length; + if (len === 0) return ""; + + let out = ""; + let lastPos = 0; + let i = 0; + + outer: for (; i < len; i++) { + let c = StringPrototypeCharCodeAt(str, i); + + // ASCII + while (c < 0x80) { + if (noEscapeTable[c] !== 1) { + if (lastPos < i) out += StringPrototypeSlice(str, lastPos, i); + lastPos = i + 1; + out += hexTable[c]; } - break; + + if (++i === len) break outer; + + c = StringPrototypeCharCodeAt(str, i); } - switch (state) { - case 0: - switch (c) { - case 37: - n = 0; - m = 0; - state = 1; - break; - case 43: - if (decodeSpaces) c = 32; - default: - out[outIndex++] = c; - break; - } - break; - case 1: - hexchar = c; - n = unhexTable[c]; - if (!(n >= 0)) { - out[outIndex++] = 37; - out[outIndex++] = c; - state = 0; - break; - } - state = 2; - break; - case 2: - state = 0; - m = unhexTable[c]; - if (!(m >= 0)) { - out[outIndex++] = 37; - out[outIndex++] = hexchar; - out[outIndex++] = c; - break; + + if (lastPos < i) out += StringPrototypeSlice(str, lastPos, i); + + // Multi-byte characters ... + if (c < 0x800) { + lastPos = i + 1; + out += hexTable[0xc0 | (c >> 6)] + hexTable[0x80 | (c & 0x3f)]; + continue; + } + if (c < 0xd800 || c >= 0xe000) { + lastPos = i + 1; + out += hexTable[0xe0 | (c >> 12)] + hexTable[0x80 | ((c >> 6) & 0x3f)] + hexTable[0x80 | (c & 0x3f)]; + continue; + } + // Surrogate pair + ++i; + + // This branch should never happen because all URLSearchParams entries + // should already be converted to USVString. But, included for + // completion's sake anyway. + if (i >= len) throw $ERR_INVALID_URI("URI malformed"); + + const c2 = StringPrototypeCharCodeAt(str, i) & 0x3ff; + + lastPos = i + 1; + c = 0x10000 + (((c & 0x3ff) << 10) | c2); + out += + hexTable[0xf0 | (c >> 18)] + + hexTable[0x80 | ((c >> 12) & 0x3f)] + + hexTable[0x80 | ((c >> 6) & 0x3f)] + + hexTable[0x80 | (c & 0x3f)]; + } + if (lastPos === 0) return str; + if (lastPos < len) return out + StringPrototypeSlice(str, lastPos); + return out; + } + + const hexTable = new Array(256); + for (let i = 0; i < 256; ++i) + hexTable[i] = "%" + StringPrototypeToUpperCase((i < 16 ? "0" : "") + NumberPrototypeToString(i, 16)); + // prettier-ignore + const isHexTable = new Int8Array([ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 0 - 15 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 16 - 31 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 32 - 47 + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, // 48 - 63 + 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 64 - 79 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 80 - 95 + 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 96 - 111 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 112 - 127 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 128 ... + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // ... 
256 + ]); + const QueryString = (module.exports = { + unescapeBuffer, + // `unescape()` is a JS global, so we need to use a different local name + unescape: qsUnescape, + + // `escape()` is a JS global, so we need to use a different local name + escape: qsEscape, + + stringify, + encode: stringify, + + parse, + decode: parse, + }); + + // prettier-ignore + const unhexTable = new Int8Array([ + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, // 0 - 15 + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, // 16 - 31 + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, // 32 - 47 + +0, +1, +2, +3, +4, +5, +6, +7, +8, +9, -1, -1, -1, -1, -1, -1, // 48 - 63 + -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1, // 64 - 79 + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, // 80 - 95 + -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1, // 96 - 111 + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, // 112 - 127 + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, // 128 ... + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, // ... 255 + ]); + /** + * A safe fast alternative to decodeURIComponent + * @param {string} s + * @param {boolean} decodeSpaces + * @returns {string} + */ + function unescapeBuffer(s, decodeSpaces) { + const out = Buffer.allocUnsafe(s.length); + let index = 0; + let outIndex = 0; + let currentChar; + let nextChar; + let hexHigh; + let hexLow; + const maxLength = s.length - 2; + // Flag to know if some hex chars have been decoded + let hasHex = false; + while (index < s.length) { + currentChar = StringPrototypeCharCodeAt(s, index); + if (currentChar === 43 /* '+' */ && decodeSpaces) { + out[outIndex++] = 32; // ' ' + index++; + continue; + } + if (currentChar === 37 /* '%' */ && index < maxLength) { + currentChar = StringPrototypeCharCodeAt(s, ++index); + hexHigh = unhexTable[currentChar]; + if (!(hexHigh >= 0)) { + out[outIndex++] = 37; // '%' + continue; + } else { + nextChar = StringPrototypeCharCodeAt(s, ++index); + hexLow = unhexTable[nextChar]; + if (!(hexLow >= 0)) { + out[outIndex++] = 37; // '%' + index--; + } else { + hasHex = true; + currentChar = hexHigh * 16 + hexLow; } - out[outIndex++] = 16 * n + m; - break; + } } + out[outIndex++] = currentChar; + index++; } - return out.slice(0, outIndex); - }; - var qsUnescape = function (s, decodeSpaces) { + return hasHex ? out.slice(0, outIndex) : out; + } + + /** + * @param {string} s + * @param {boolean} decodeSpaces + * @returns {string} + */ + function qsUnescape(s, decodeSpaces) { try { return decodeURIComponent(s); - } catch (e) { + } catch { return QueryString.unescapeBuffer(s, decodeSpaces).toString(); } - }; - var qsEscape = function (str) { + } + + // These characters do not need escaping when generating query strings: + // ! - . 
_ ~ + // ' ( ) * + // digits + // alpha (uppercase) + // alpha (lowercase) + // prettier-ignore + const noEscape = new Int8Array([ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 0 - 15 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 16 - 31 + 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 0, // 32 - 47 + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, // 48 - 63 + 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 64 - 79 + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, // 80 - 95 + 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 96 - 111 + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, // 112 - 127 +]); + + /** + * QueryString.escape() replaces encodeURIComponent() + * @see https://www.ecma-international.org/ecma-262/5.1/#sec-15.1.3.4 + * @param {any} str + * @returns {string} + */ + function qsEscape(str) { if (typeof str !== "string") { if (typeof str === "object") str = String(str); else str += ""; } - var out = ""; - var lastPos = 0; - for (var i2 = 0; i2 < str.length; ++i2) { - var c = str.charCodeAt(i2); - if (c < 128) { - if (noEscape[c] === 1) continue; - if (lastPos < i2) out += str.slice(lastPos, i2); - lastPos = i2 + 1; - out += hexTable[c]; - continue; - } - if (lastPos < i2) out += str.slice(lastPos, i2); - if (c < 2048) { - lastPos = i2 + 1; - out += hexTable[192 | (c >> 6)] + hexTable[128 | (c & 63)]; - continue; - } - if (c < 55296 || c >= 57344) { - lastPos = i2 + 1; - out += hexTable[224 | (c >> 12)] + hexTable[128 | ((c >> 6) & 63)] + hexTable[128 | (c & 63)]; - continue; - } - ++i2; - var c2; - if (i2 < str.length) c2 = str.charCodeAt(i2) & 1023; - else throw new URIError("URI malformed"); - lastPos = i2 + 1; - c = 65536 + (((c & 1023) << 10) | c2); - out += - hexTable[240 | (c >> 18)] + - hexTable[128 | ((c >> 12) & 63)] + - hexTable[128 | ((c >> 6) & 63)] + - hexTable[128 | (c & 63)]; - } - if (lastPos === 0) return str; - if (lastPos < str.length) return out + str.slice(lastPos); - return out; - }; - var stringifyPrimitive = function (v) { + + return encodeStr(str, noEscape, hexTable); + } + + /** + * @param {string | number | bigint | boolean | symbol | undefined | null} v + * @returns {string} + */ + function stringifyPrimitive(v) { if (typeof v === "string") return v; - if (typeof v === "number" && isFinite(v)) return "" + v; + if (typeof v === "number" && NumberIsFinite(v)) return "" + v; + if (typeof v === "bigint") return "" + v; + if (typeof v === "boolean") return v ? "true" : "false"; + return ""; + } + + /** + * @param {string | number | bigint | boolean} v + * @param {(v: string) => string} encode + * @returns {string} + */ + function encodeStringified(v, encode) { + if (typeof v === "string") return v.length ? encode(v) : ""; + if (typeof v === "number" && NumberIsFinite(v)) { + // Values >= 1e21 automatically switch to scientific notation which requires + // escaping due to the inclusion of a '+' in the output + return MathAbs(v) < 1e21 ? "" + v : encode("" + v); + } + if (typeof v === "bigint") return "" + v; if (typeof v === "boolean") return v ? 
"true" : "false"; return ""; - }; - var stringify = function (obj, sep, eq, options) { - sep = sep || "&"; - eq = eq || "="; - var encode = QueryString.escape; + } + + /** + * @param {string | number | boolean | null} v + * @param {(v: string) => string} encode + * @returns {string} + */ + function encodeStringifiedCustom(v, encode) { + return encode(stringifyPrimitive(v)); + } + + /** + * @param {Record | null>} obj + * @param {string} [sep] + * @param {string} [eq] + * @param {{ encodeURIComponent?: (v: string) => string }} [options] + * @returns {string} + */ + function stringify(obj, sep, eq, options) { + sep ||= "&"; + eq ||= "="; + + let encode = QueryString.escape; if (options && typeof options.encodeURIComponent === "function") { encode = options.encodeURIComponent; } + const convert = encode === qsEscape ? encodeStringified : encodeStringifiedCustom; + if (obj !== null && typeof obj === "object") { - var keys = objectKeys(obj); - var len = keys.length; - var flast = len - 1; - var fields = ""; - for (var i2 = 0; i2 < len; ++i2) { - var k = keys[i2]; - var v = obj[k]; - var ks = encode(stringifyPrimitive(k)) + eq; - if (isArray(v)) { - var vlen = v.length; - var vlast = vlen - 1; - for (var j = 0; j < vlen; ++j) { - fields += ks + encode(stringifyPrimitive(v[j])); - if (j < vlast) fields += sep; + const keys = ObjectKeys(obj); + const len = keys.length; + let fields = ""; + for (let i = 0; i < len; ++i) { + const k = keys[i]; + const v = obj[k]; + let ks = convert(k, encode); + ks += eq; + + if (ArrayIsArray(v)) { + const vlen = v.length; + if (vlen === 0) continue; + if (fields) fields += sep; + for (let j = 0; j < vlen; ++j) { + if (j) fields += sep; + fields += ks; + fields += convert(v[j], encode); } - if (vlen && i2 < flast) fields += sep; } else { - fields += ks + encode(stringifyPrimitive(v)); - if (i2 < flast) fields += sep; + if (fields) fields += sep; + fields += ks; + fields += convert(v, encode); } } return fields; } return ""; - }; - var charCodes = function (str) { + } + + /** + * @param {string} str + * @returns {number[]} + */ + function charCodes(str) { if (str.length === 0) return []; - if (str.length === 1) return [str.charCodeAt(0)]; - const ret = []; - for (var i2 = 0; i2 < str.length; ++i2) ret[ret.length] = str.charCodeAt(i2); + if (str.length === 1) return [StringPrototypeCharCodeAt(str, 0)]; + const ret = new Array(str.length); + for (let i = 0; i < str.length; ++i) ret[i] = StringPrototypeCharCodeAt(str, i); return ret; - }; - var parse = function (qs, sep, eq, options) { - const obj = new ParsedQueryString(); + } + const defSepCodes = [38]; // & + const defEqCodes = [61]; // = + + function addKeyVal(obj, key, value, keyEncoded, valEncoded, decode) { + if (key.length > 0 && keyEncoded) key = decodeStr(key, decode); + if (value.length > 0 && valEncoded) value = decodeStr(value, decode); + + if (obj[key] === undefined) { + obj[key] = value; + } else { + const curValue = obj[key]; + // A simple Array-specific property check is enough here to + // distinguish from a string value and is faster and still safe + // since we are generating all of the values being assigned. + if (curValue.pop) curValue[curValue.length] = value; + else obj[key] = [curValue, value]; + } + } + + /** + * Parse a key/val string. 
+ * @param {string} qs + * @param {string} sep + * @param {string} eq + * @param {{ + * maxKeys?: number; + * decodeURIComponent?(v: string): string; + * }} [options] + * @returns {Record<string, string | string[]>} + */ + function parse(qs, sep, eq, options) { + const obj = { __proto__: null }; + if (typeof qs !== "string" || qs.length === 0) { return obj; } - var sepCodes = !sep ? defSepCodes : charCodes(sep + ""); - var eqCodes = !eq ? defEqCodes : charCodes(eq + ""); + + const sepCodes = !sep ? defSepCodes : charCodes(String(sep)); + const eqCodes = !eq ? defEqCodes : charCodes(String(eq)); const sepLen = sepCodes.length; const eqLen = eqCodes.length; - var pairs = 1000; + + let pairs = 1000; if (options && typeof options.maxKeys === "number") { + // -1 is used in place of a value like Infinity for meaning + // "unlimited pairs" because of additional checks V8 (at least as of v5.4) + // has to do when using variables that contain values like Infinity. Since + // `pairs` is always decremented and checked explicitly for 0, -1 works + // effectively the same as Infinity, while providing a significant + // performance boost. pairs = options.maxKeys > 0 ? options.maxKeys : -1; } - var decode = QueryString.unescape; + + let decode = QueryString.unescape; if (options && typeof options.decodeURIComponent === "function") { decode = options.decodeURIComponent; } const customDecode = decode !== qsUnescape; - const keys = []; - var posIdx = 0; - var lastPos = 0; - var sepIdx = 0; - var eqIdx = 0; - var key = ""; - var value = ""; - var keyEncoded = customDecode; - var valEncoded = customDecode; - var encodeCheck = 0; - for (var i2 = 0; i2 < qs.length; ++i2) { - const code = qs.charCodeAt(i2); + + let lastPos = 0; + let sepIdx = 0; + let eqIdx = 0; + let key = ""; + let value = ""; + let keyEncoded = customDecode; + let valEncoded = customDecode; + const plusChar = customDecode ? "%20" : " "; + let encodeCheck = 0; + for (let i = 0; i < qs.length; ++i) { + const code = StringPrototypeCharCodeAt(qs, i); + + // Try matching key/value pair separator (e.g. '&') if (code === sepCodes[sepIdx]) { if (++sepIdx === sepLen) { - const end = i2 - sepIdx + 1; + // Key/value pair separator match! 
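+ // e.g. the "&" in "a=1&b=2": flush the key/value pair collected so far.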
+ const end = i - sepIdx + 1; if (eqIdx < eqLen) { - if (lastPos < end) key += qs.slice(lastPos, end); - } else if (lastPos < end) value += qs.slice(lastPos, end); - if (keyEncoded) key = decodeStr(key, decode); - if (valEncoded) value = decodeStr(value, decode); - if (key || value || lastPos - posIdx > sepLen || i2 === 0) { - if (indexOf(keys, key) === -1) { - obj[key] = value; - keys[keys.length] = key; - } else { - const curValue = obj[key] || ""; - if (curValue.pop) curValue[curValue.length] = value; - else if (curValue) obj[key] = [curValue, value]; + // We didn't find the (entire) key/value separator + if (lastPos < end) { + // Treat the substring as part of the key instead of the value + key += StringPrototypeSlice(qs, lastPos, end); + } else if (key.length === 0) { + // We saw an empty substring between separators + if (--pairs === 0) return obj; + lastPos = i + 1; + sepIdx = eqIdx = 0; + continue; } - } else if (i2 === 1) { - delete obj[key]; + } else if (lastPos < end) { + value += StringPrototypeSlice(qs, lastPos, end); } - if (--pairs === 0) break; + + addKeyVal(obj, key, value, keyEncoded, valEncoded, decode); + + if (--pairs === 0) return obj; keyEncoded = valEncoded = customDecode; - encodeCheck = 0; key = value = ""; - posIdx = lastPos; - lastPos = i2 + 1; + encodeCheck = 0; + lastPos = i + 1; sepIdx = eqIdx = 0; } - continue; } else { sepIdx = 0; - if (!valEncoded) { - if (code === 37) { - encodeCheck = 1; - } else if ( - encodeCheck > 0 && - ((code >= 48 && code <= 57) || (code >= 65 && code <= 70) || (code >= 97 && code <= 102)) - ) { - if (++encodeCheck === 3) valEncoded = true; + // Try matching key/value separator (e.g. '=') if we haven't already + if (eqIdx < eqLen) { + if (code === eqCodes[eqIdx]) { + if (++eqIdx === eqLen) { + // Key/value separator match! 
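+ // e.g. the "=" in "a=1": everything scanned before it becomes the key.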
+ const end = i - eqIdx + 1; + if (lastPos < end) key += StringPrototypeSlice(qs, lastPos, end); + encodeCheck = 0; + lastPos = i + 1; + } + continue; + } else { + eqIdx = 0; + if (!keyEncoded) { + // Try to match a (valid) encoded byte once to minimize unnecessary + // calls to string decoding functions + if (code === 37 /* % */) { + encodeCheck = 1; + continue; + } else if (encodeCheck > 0) { + if (isHexTable[code] === 1) { + if (++encodeCheck === 3) keyEncoded = true; + continue; + } else { + encodeCheck = 0; + } + } + } + } + if (code === 43 /* + */) { + if (lastPos < i) key += StringPrototypeSlice(qs, lastPos, i); + key += plusChar; + lastPos = i + 1; + continue; + } + } + if (code === 43 /* + */) { + if (lastPos < i) value += StringPrototypeSlice(qs, lastPos, i); + value += plusChar; + lastPos = i + 1; + } else if (!valEncoded) { + // Try to match a (valid) encoded byte (once) to minimize unnecessary + // calls to string decoding functions + if (code === 37 /* % */) { + encodeCheck = 1; + } else if (encodeCheck > 0) { + if (isHexTable[code] === 1) { + if (++encodeCheck === 3) valEncoded = true; } else { encodeCheck = 0; } } } } - if (code === 43) { - if (eqIdx < eqLen) { - if (lastPos < i2) key += qs.slice(lastPos, i2); - key += "%20"; - keyEncoded = true; - } else { - if (lastPos < i2) value += qs.slice(lastPos, i2); - value += "%20"; - valEncoded = true; - } - lastPos = i2 + 1; - } } - if (pairs !== 0 && (lastPos < qs.length || eqIdx > 0)) { - if (lastPos < qs.length) { - if (eqIdx < eqLen) key += qs.slice(lastPos); - else if (sepIdx < sepLen) value += qs.slice(lastPos); - } - if (keyEncoded) key = decodeStr(key, decode); - if (valEncoded) value = decodeStr(value, decode); - if (indexOf(keys, key) === -1) { - obj[key] = value; - keys[keys.length] = key; - } else { - const curValue = obj[key]; - if (curValue.pop) curValue[curValue.length] = value; - else obj[key] = [curValue, value]; - } + + // Deal with any leftover key or value data + if (lastPos < qs.length) { + if (eqIdx < eqLen) key += StringPrototypeSlice(qs, lastPos); + else if (sepIdx < sepLen) value += StringPrototypeSlice(qs, lastPos); + } else if (eqIdx === 0 && key.length === 0) { + // We ended on an empty substring + return obj; } + + addKeyVal(obj, key, value, keyEncoded, valEncoded, decode); + return obj; - }; - var decodeStr = function (s, decoder) { + } + + /** + * V8 does not optimize functions with try-catch blocks, so we isolate them here + * to minimize the damage (Note: no longer true as of V8 5.4 -- but still will + * not be inlined). 
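+ * Decoding failures fall back to the more forgiving QueryString.unescape.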
+ * @param {string} s + * @param {(v: string) => string} decoder + * @returns {string} + */ + function decodeStr(s, decoder) { try { return decoder(s); - } catch (e) { + } catch { return QueryString.unescape(s, true); } - }; - var QueryString = (module.exports = { - unescapeBuffer, - unescape: qsUnescape, - escape: qsEscape, - stringify, - encode: stringify, - parse, - decode: parse, - }); - var objectKeys = require_object_keys(); - var isArray = arg => Object.prototype.toString.$call(arg) === "[object Array]"; - var indexOf = (arr, searchElement, fromIndex) => { - var k; - if (arr == null) { - throw new TypeError('"arr" is null or not defined'); - } - var o = Object(arr); - var len = o.length >>> 0; - if (len === 0) { - return -1; - } - var n = fromIndex | 0; - if (n >= len) { - return -1; - } - k = Math.max(n >= 0 ? n : len - Math.abs(n), 0); - while (k < len) { - if (k in o && o[k] === searchElement) { - return k; - } - k++; - } - return -1; - }; - ParsedQueryString.prototype = Object.create ? Object.create(null) : {}; - var unhexTable = [ - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, -1, -1, - -1, -1, -1, -1, -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, - ]; - var hexTable = []; - for (i = 0; i < 256; ++i) hexTable[i] = "%" + ((i < 16 ? "0" : "") + i.toString(16)).toUpperCase(); - var i; - var noEscape = [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, - 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, - ]; - var defSepCodes = [38]; - var defEqCodes = [61]; + } }); export default require_src(); diff --git a/src/json_parser.zig b/src/json_parser.zig index 4d30b2d09018f2..22ef5f46329a79 100644 --- a/src/json_parser.zig +++ b/src/json_parser.zig @@ -34,7 +34,7 @@ const G = js_ast.G; const T = js_lexer.T; const E = js_ast.E; const Stmt = js_ast.Stmt; -const Expr = js_ast.Expr; +pub const Expr = js_ast.Expr; const Binding = js_ast.Binding; const Symbol = js_ast.Symbol; const Level = js_ast.Op.Level; @@ -347,14 +347,16 @@ fn JSONLikeParser_( }; } -// This is a special JSON parser that stops as soon as it finds +// This is a special JSON parser that stops as soon as it finds combinations of // { // "name": "NAME_IN_HERE", // "version": "VERSION_IN_HERE", +// "bin": ... or "directories": { "bin": ... } // } -// and then returns the name and version. 
-// More precisely, it stops as soon as it finds a top-level "name" and "version" property which are strings -// In most cases, it should perform zero heap allocations because it does not create arrays or objects (It just skips them) +// and then returns the name, version, and bin +// More precisely, it stops as soon as it finds a top-level "name" and "version" (and/or "bin"). +// In most cases, it should perform zero heap allocations because it does not create arrays or objects (It just skips them). +// If searching for "bin", objects are only created if the key is top level "bin". "bin" within "directories" can only be a string. pub const PackageJSONVersionChecker = struct { const Lexer = js_lexer.NewLexer(opts); @@ -369,9 +371,14 @@ pub const PackageJSONVersionChecker = struct { found_name: []const u8 = "", found_version: []const u8 = "", + found_bin: union(enum) { + bin: Expr, + dir: Expr, + } = .{ .bin = Expr.empty }, has_found_name: bool = false, has_found_version: bool = false, + has_found_bin: bool = false, name_loc: logger.Loc = logger.Loc.Empty, @@ -382,21 +389,24 @@ pub const PackageJSONVersionChecker = struct { .allow_comments = true, }; - pub fn init(allocator: std.mem.Allocator, source: *const logger.Source, log: *logger.Log) !Parser { + pub fn init(allocator: std.mem.Allocator, source: *const logger.Source, log: *logger.Log, checks: enum { check_for_bin, ignore_bin, only_bin }) !Parser { return Parser{ .lexer = try Lexer.init(log, source.*, allocator), .allocator = allocator, .log = log, .source = source, + .has_found_bin = checks == .ignore_bin, + .has_found_name = checks == .only_bin, + .has_found_version = checks == .only_bin, }; } const Parser = @This(); - pub fn parseExpr(p: *Parser) anyerror!Expr { + pub fn parseExpr(p: *Parser, collect_props: bool, parent_is_directories: bool) anyerror!Expr { const loc = p.lexer.loc(); - if (p.has_found_name and p.has_found_version) return newExpr(E.Missing{}, loc); + if (p.has_found_name and p.has_found_version and p.has_found_bin) return newExpr(E.Missing{}, loc); switch (p.lexer.token) { .t_false => { @@ -443,7 +453,7 @@ pub const PackageJSONVersionChecker = struct { } } - _ = try p.parseExpr(); + _ = try p.parseExpr(false, false); has_exprs = true; } @@ -455,6 +465,8 @@ pub const PackageJSONVersionChecker = struct { p.depth += 1; defer p.depth -= 1; + var properties = std.ArrayList(G.Property).init(p.allocator); + var has_properties = false; while (p.lexer.token != .t_close_brace) { if (has_properties) { @@ -471,39 +483,95 @@ pub const PackageJSONVersionChecker = struct { try p.lexer.expect(.t_colon); - const value = try p.parseExpr(); + var collect_prop_props = false; + var is_directories = false; + + if (!p.has_found_bin and + p.depth == 1 and + // next is going to be a top level property + // with an object value. check if it is "bin" + // or "directories" + p.lexer.token == .t_open_brace and + key.data == .e_string) + { + if (strings.eqlComptime(key.data.e_string.data, "bin")) { + collect_prop_props = true; + } else if (strings.eqlComptime(key.data.e_string.data, "directories")) { + is_directories = true; + } + + // if bin is in directories it can only be a string, so + // don't need to set collect_prop_props when depth == 2 + // and in parent_is_directories == true. + + } + + const value = try p.parseExpr(collect_prop_props, is_directories); if (p.depth == 1) { // if you have multiple "name" fields in the package.json.... 
// first one wins - if (key.data == .e_string and value.data == .e_string) { - if (!p.has_found_name and strings.eqlComptime(key.data.e_string.data, "name")) { - const len = @min( - value.data.e_string.data.len, - p.found_name_buf.len, - ); - - bun.copy(u8, &p.found_name_buf, value.data.e_string.data[0..len]); - p.found_name = p.found_name_buf[0..len]; - p.has_found_name = true; - p.name_loc = value.loc; - } else if (!p.has_found_version and strings.eqlComptime(key.data.e_string.data, "version")) { - const len = @min( - value.data.e_string.data.len, - p.found_version_buf.len, - ); - bun.copy(u8, &p.found_version_buf, value.data.e_string.data[0..len]); - p.found_version = p.found_version_buf[0..len]; - p.has_found_version = true; + if (key.data == .e_string) { + if (value.data == .e_string) { + if (!p.has_found_name and strings.eqlComptime(key.data.e_string.data, "name")) { + const len = @min( + value.data.e_string.data.len, + p.found_name_buf.len, + ); + + bun.copy(u8, &p.found_name_buf, value.data.e_string.data[0..len]); + p.found_name = p.found_name_buf[0..len]; + p.has_found_name = true; + p.name_loc = value.loc; + } else if (!p.has_found_version and strings.eqlComptime(key.data.e_string.data, "version")) { + const len = @min( + value.data.e_string.data.len, + p.found_version_buf.len, + ); + bun.copy(u8, &p.found_version_buf, value.data.e_string.data[0..len]); + p.found_version = p.found_version_buf[0..len]; + p.has_found_version = true; + } + } + + if (!p.has_found_bin and strings.eqlComptime(key.data.e_string.data, "bin")) { + p.found_bin = .{ + .bin = value, + }; + p.has_found_bin = true; + } + } + } else if (parent_is_directories) { + if (key.data == .e_string) { + if (!p.has_found_bin and strings.eqlComptime(key.data.e_string.data, "bin")) { + p.found_bin = .{ + .dir = value, + }; + p.has_found_bin = true; } } } - if (p.has_found_name and p.has_found_version) return newExpr(E.Missing{}, loc); + if (p.has_found_name and p.has_found_version and p.has_found_bin) return newExpr(E.Missing{}, loc); + has_properties = true; + if (collect_props) { + properties.append(.{ + .key = key, + .value = value, + .kind = .normal, + .initializer = null, + }) catch bun.outOfMemory(); + } } try p.lexer.expect(.t_close_brace); + + if (collect_props) { + return newExpr(E.Object{ + .properties = G.Property.List.fromList(properties), + }, loc); + } return newExpr(E.Missing{}, loc); }, else => { @@ -775,41 +843,6 @@ pub fn parsePackageJSONUTF8( return try parser.parseExpr(false, true); } -pub fn parsePackageJSONUTF8AlwaysDecode( - source: *const logger.Source, - log: *logger.Log, - allocator: std.mem.Allocator, -) !Expr { - const len = source.contents.len; - - switch (len) { - // This is to be consisntent with how disabled JS files are handled - 0 => { - return Expr{ .loc = logger.Loc{ .start = 0 }, .data = empty_object_data }; - }, - // This is a fast pass I guess - 2 => { - if (strings.eqlComptime(source.contents[0..1], "\"\"") or strings.eqlComptime(source.contents[0..1], "''")) { - return Expr{ .loc = logger.Loc{ .start = 0 }, .data = empty_string_data }; - } else if (strings.eqlComptime(source.contents[0..1], "{}")) { - return Expr{ .loc = logger.Loc{ .start = 0 }, .data = empty_object_data }; - } else if (strings.eqlComptime(source.contents[0..1], "[]")) { - return Expr{ .loc = logger.Loc{ .start = 0 }, .data = empty_array_data }; - } - }, - else => {}, - } - - var parser = try JSONLikeParser(.{ - .is_json = true, - .allow_comments = true, - .allow_trailing_commas = true, - }).init(allocator, 
source.*, log); - bun.assert(parser.source().contents.len > 0); - - return try parser.parseExpr(false, true); -} - const JsonResult = struct { root: Expr, indentation: Indentation = .{}, diff --git a/src/string_mutable.zig b/src/string_mutable.zig index 042184d5014200..0a78d7335f94a0 100644 --- a/src/string_mutable.zig +++ b/src/string_mutable.zig @@ -306,18 +306,18 @@ pub const MutableString = struct { const max = 2048; - pub const Writer = std.io.Writer(*BufferedWriter, anyerror, BufferedWriter.writeAll); + pub const Writer = std.io.Writer(*BufferedWriter, OOM, BufferedWriter.writeAll); inline fn remain(this: *BufferedWriter) []u8 { return this.buffer[this.pos..]; } - pub fn flush(this: *BufferedWriter) !void { + pub fn flush(this: *BufferedWriter) OOM!void { _ = try this.context.writeAll(this.buffer[0..this.pos]); this.pos = 0; } - pub fn writeAll(this: *BufferedWriter, bytes: []const u8) anyerror!usize { + pub fn writeAll(this: *BufferedWriter, bytes: []const u8) OOM!usize { const pending = bytes; if (pending.len >= max) { @@ -342,7 +342,7 @@ pub const MutableString = struct { /// Write a E.String to the buffer. /// This automatically encodes UTF-16 into UTF-8 using /// the same code path as TextEncoder - pub fn writeString(this: *BufferedWriter, bytes: *E.String) anyerror!usize { + pub fn writeString(this: *BufferedWriter, bytes: *E.String) OOM!usize { if (bytes.isUTF8()) { return try this.writeAll(bytes.slice(this.context.allocator)); } @@ -353,7 +353,7 @@ pub const MutableString = struct { /// Write a UTF-16 string to the (UTF-8) buffer /// This automatically encodes UTF-16 into UTF-8 using /// the same code path as TextEncoder - pub fn writeAll16(this: *BufferedWriter, bytes: []const u16) anyerror!usize { + pub fn writeAll16(this: *BufferedWriter, bytes: []const u16) OOM!usize { const pending = bytes; if (pending.len >= max) { @@ -385,7 +385,7 @@ pub const MutableString = struct { return pending.len; } - pub fn writeHTMLAttributeValueString(this: *BufferedWriter, str: *E.String) anyerror!void { + pub fn writeHTMLAttributeValueString(this: *BufferedWriter, str: *E.String) OOM!void { if (str.isUTF8()) { try this.writeHTMLAttributeValue(str.slice(this.context.allocator)); return; @@ -394,7 +394,7 @@ pub const MutableString = struct { try this.writeHTMLAttributeValue16(str.slice16()); } - pub fn writeHTMLAttributeValue(this: *BufferedWriter, bytes: []const u8) anyerror!void { + pub fn writeHTMLAttributeValue(this: *BufferedWriter, bytes: []const u8) OOM!void { var items = bytes; while (items.len > 0) { // TODO: SIMD @@ -416,7 +416,7 @@ pub const MutableString = struct { } } - pub fn writeHTMLAttributeValue16(this: *BufferedWriter, bytes: []const u16) anyerror!void { + pub fn writeHTMLAttributeValue16(this: *BufferedWriter, bytes: []const u16) OOM!void { var items = bytes; while (items.len > 0) { if (strings.indexOfAny16(items, "\"<>")) |j| { diff --git a/test/bundler/native-plugin.test.ts b/test/bundler/native-plugin.test.ts index 83ef6acaaf38a0..10c67d8ce56a3e 100644 --- a/test/bundler/native-plugin.test.ts +++ b/test/bundler/native-plugin.test.ts @@ -2,9 +2,12 @@ import { BunFile, Loader, plugin } from "bun"; import { afterEach, beforeAll, beforeEach, describe, expect, it } from "bun:test"; import path, { dirname, join, resolve } from "path"; import source from "./native_plugin.cc" with { type: "file" }; +import notAPlugin from "./not_native_plugin.cc" with { type: "file" }; import bundlerPluginHeader from "../../packages/bun-native-bundler-plugin-api/bundler_plugin.h" with { type: 
"file" }; -import { bunEnv, bunExe, tempDirWithFiles } from "harness"; +import { bunEnv, bunExe, makeTree, tempDirWithFiles } from "harness"; import { itBundled } from "bundler/expectBundled"; +import os from "os"; +import fs from "fs"; describe("native-plugins", async () => { const cwd = process.cwd(); @@ -15,6 +18,7 @@ describe("native-plugins", async () => { const files = { "bun-native-bundler-plugin-api/bundler_plugin.h": await Bun.file(bundlerPluginHeader).text(), "plugin.cc": await Bun.file(source).text(), + "not_a_plugin.cc": await Bun.file(notAPlugin).text(), "package.json": JSON.stringify({ "name": "fake-plugin", "module": "index.ts", @@ -48,12 +52,19 @@ values;`, "target_name": "xXx123_foo_counter_321xXx", "sources": [ "plugin.cc" ], "include_dirs": [ "." ] + }, + { + "target_name": "not_a_plugin", + "sources": [ "not_a_plugin.cc" ], + "include_dirs": [ "." ] } ] }`, }; tempdir = tempDirWithFiles("native-plugins", files); + + await makeTree(tempdir, files); outdir = path.join(tempdir, "dist"); console.log("tempdir", tempdir); @@ -491,6 +502,54 @@ const many_foo = ["foo","foo","foo","foo","foo","foo","foo"] expect(compilationCtxFreedCount).toBe(0); }); + it("should fail gracefully when passing something that is NOT a bunler plugin", async () => { + const not_plugins = [require(path.join(tempdir, "build/Release/not_a_plugin.node")), 420, "hi", {}]; + + for (const napiModule of not_plugins) { + try { + await Bun.build({ + outdir, + entrypoints: [path.join(tempdir, "index.ts")], + plugins: [ + { + name: "not_a_plugin", + setup(build) { + build.onBeforeParse({ filter: /\.ts/ }, { napiModule, symbol: "plugin_impl" }); + }, + }, + ], + }); + expect.unreachable(); + } catch (e) { + expect(e.toString()).toContain( + "onBeforeParse `napiModule` must be a Napi module which exports the `BUN_PLUGIN_NAME` symbol.", + ); + } + } + }); + + it("should fail gracefully when can't find the symbol", async () => { + const napiModule = require(path.join(tempdir, "build/Release/xXx123_foo_counter_321xXx.node")); + + try { + await Bun.build({ + outdir, + entrypoints: [path.join(tempdir, "index.ts")], + plugins: [ + { + name: "not_a_plugin", + setup(build) { + build.onBeforeParse({ filter: /\.ts/ }, { napiModule, symbol: "OOGA_BOOGA_420" }); + }, + }, + ], + }); + expect.unreachable(); + } catch (e) { + expect(e.toString()).toContain('TypeError: Could not find the symbol "OOGA_BOOGA_420" in the given napi module.'); + } + }); + it("should use result of the first plugin that runs and doesn't execute the others", async () => { const filter = /\.ts/; diff --git a/test/bundler/native_plugin.cc b/test/bundler/native_plugin.cc index b48eec7dacfb83..51b13fd07d94d7 100644 --- a/test/bundler/native_plugin.cc +++ b/test/bundler/native_plugin.cc @@ -19,7 +19,7 @@ #include #endif -BUN_PLUGIN_EXPORT const char *BUN_PLUGIN_NAME = "native_plugin_test"; +extern "C" BUN_PLUGIN_EXPORT const char *BUN_PLUGIN_NAME = "native_plugin_test"; struct External { std::atomic foo_count; diff --git a/test/bundler/not_native_plugin.cc b/test/bundler/not_native_plugin.cc new file mode 100644 index 00000000000000..1de24320d939ed --- /dev/null +++ b/test/bundler/not_native_plugin.cc @@ -0,0 +1,27 @@ +/* + */ +#include +#include +#include +#include + +#ifdef _WIN32 +#define BUN_PLUGIN_EXPORT __declspec(dllexport) +#else +#define BUN_PLUGIN_EXPORT +#endif + +napi_value HelloWorld(napi_env env, napi_callback_info info) { + napi_value result; + napi_create_string_utf8(env, "hello world", NAPI_AUTO_LENGTH, &result); + return result; +} + 
+napi_value Init(napi_env env, napi_value exports) {
+  napi_value fn;
+  napi_create_function(env, nullptr, 0, HelloWorld, nullptr, &fn);
+  napi_set_named_property(env, exports, "helloWorld", fn);
+  return exports;
+}
+
+NAPI_MODULE(NODE_GYP_MODULE_NAME, Init)
diff --git a/test/harness.ts b/test/harness.ts
index 0921b1dcc01b97..eb55b7b6820454 100644
--- a/test/harness.ts
+++ b/test/harness.ts
@@ -152,25 +152,29 @@ export type DirectoryTree = {
     | ((opts: { root: string }) => Awaitable);
 };
 
-export function tempDirWithFiles(basename: string, files: DirectoryTree): string {
-  async function makeTree(base: string, tree: DirectoryTree) {
-    for (const [name, raw_contents] of Object.entries(tree)) {
-      const contents = typeof raw_contents === "function" ? await raw_contents({ root: base }) : raw_contents;
-      const joined = join(base, name);
-      if (name.includes("/")) {
-        const dir = dirname(name);
-        if (dir !== name && dir !== ".") {
-          fs.mkdirSync(join(base, dir), { recursive: true });
-        }
+export async function makeTree(base: string, tree: DirectoryTree) {
+  const isDirectoryTree = (value: string | DirectoryTree | Buffer): value is DirectoryTree =>
+    typeof value === "object" && value !== null && typeof value?.byteLength === "undefined";
+
+  for (const [name, raw_contents] of Object.entries(tree)) {
+    const contents = typeof raw_contents === "function" ? await raw_contents({ root: base }) : raw_contents;
+    const joined = join(base, name);
+    if (name.includes("/")) {
+      const dir = dirname(name);
+      if (dir !== name && dir !== ".") {
+        fs.mkdirSync(join(base, dir), { recursive: true });
       }
-      if (typeof contents === "object" && contents && typeof contents?.byteLength === "undefined") {
-        fs.mkdirSync(joined);
-        makeTree(joined, contents);
-        continue;
-      }
-      fs.writeFileSync(joined, contents);
     }
+    if (isDirectoryTree(contents)) {
+      fs.mkdirSync(joined);
+      await makeTree(joined, contents);
+      continue;
+    }
+    fs.writeFileSync(joined, contents);
   }
+}
+
+export function tempDirWithFiles(basename: string, files: DirectoryTree): string {
   const base = fs.mkdtempSync(join(fs.realpathSync(os.tmpdir()), basename + "_"));
   makeTree(base, files);
   return base;
diff --git a/test/js/node/bunfig.toml b/test/js/node/bunfig.toml
new file mode 100644
index 00000000000000..cac7f387d5cc4a
--- /dev/null
+++ b/test/js/node/bunfig.toml
@@ -0,0 +1 @@
+preload = ["./harness.ts"]
diff --git a/test/js/node/harness.ts b/test/js/node/harness.ts
index f8f20089a1d2a5..f723a749ac1e9e 100644
--- a/test/js/node/harness.ts
+++ b/test/js/node/harness.ts
@@ -1,11 +1,14 @@
-import { AnyFunction } from "bun";
-import { hideFromStackTrace } from "harness";
+/**
+ * @note this file patches `node:test` via the require cache.
+ */
+import {AnyFunction} from "bun";
+import {hideFromStackTrace} from "harness";
 import assertNode from "node:assert";
 type DoneCb = (err?: Error) => any;
 function noop() {}
 export function createTest(path: string) {
-  const { expect, test, it, describe, beforeAll, afterAll, beforeEach, afterEach, mock } = Bun.jest(path);
+  const {expect, test, it, describe, beforeAll, afterAll, beforeEach, afterEach, mock} = Bun.jest(path);
 
   hideFromStackTrace(expect);
 
@@ -201,11 +204,11 @@ export function createTest(path: string) {
     let completed = 0;
     const globalTimer = globalTimeout
       ? 
(timers.push(
-          setTimeout(() => {
-            console.log("Global Timeout");
-            done(new Error("Timed out!"));
-          }, globalTimeout),
-        ),
+        setTimeout(() => {
+          console.log("Global Timeout");
+          done(new Error("Timed out!"));
+        }, globalTimeout),
+      ),
       timers[timers.length - 1])
       : undefined;
     function createDoneCb(timeout?: number) {
@@ -213,11 +216,11 @@ export function createTest(path: string) {
       const timer =
         timeout !== undefined
           ? (timers.push(
-              setTimeout(() => {
-                console.log("Timeout");
-                done(new Error("Timed out!"));
-              }, timeout),
-            ),
+            setTimeout(() => {
+              console.log("Timeout");
+              done(new Error("Timed out!"));
+            }, timeout),
+          ),
             timers[timers.length - 1])
           : timeout;
       return (result?: Error) => {
@@ -262,3 +265,113 @@ export function createTest(path: string) {
 declare namespace Bun {
   function jest(path: string): typeof import("bun:test");
 }
+
+if (Bun.main.includes("node/test/parallel")) {
+  function createMockNodeTestModule() {
+    interface TestError extends Error {
+      testStack: string[];
+    }
+    type Context = {
+      filename: string;
+      testStack: string[];
+      failures: Error[];
+      successes: number;
+      addFailure(err: unknown): TestError;
+      recordSuccess(): void;
+    };
+    const contexts: Record<string, Context> = {};
+
+    // @ts-ignore
+    let activeSuite: Context = undefined;
+
+    function createContext(key: string): Context {
+      return {
+        filename: key, // duplicate for ease-of-use
+        // entered each time describe, it, etc. is called
+        testStack: [],
+        failures: [],
+        successes: 0,
+        addFailure(err: unknown) {
+          const error: TestError = (err instanceof Error ? err : new Error(err as any)) as any;
+          error.testStack = this.testStack;
+          const testMessage = `Test failed: ${this.testStack.join(" > ")}`;
+          error.message = testMessage + "\n" + error.message;
+          this.failures.push(error);
+          console.error(error);
+          return error;
+        },
+        recordSuccess() {
+          const fullname = this.testStack.join(" > ");
+          console.log("✅ Test passed:", fullname);
+          this.successes++;
+        },
+      };
+    }
+
+    function getContext() {
+      const key: string = Bun.main; // module.parent?.filename ?? require.main?.filename ?? __filename;
+      return (activeSuite = contexts[key] ??= createContext(key));
+    }
+
+    async function test(label: string | Function, fn?: Function | undefined) {
+      if (typeof fn !== "function" && typeof label === "function") {
+        fn = label;
+        label = fn.name;
+      }
+      const ctx = getContext();
+      try {
+        ctx.testStack.push(label as string);
+        await fn();
+        ctx.recordSuccess();
+      } catch (err) {
+        const error = ctx.addFailure(err);
+        throw error;
+      } finally {
+        ctx.testStack.pop();
+      }
+    }
+
+    function describe(labelOrFn: string | Function, maybeFn?: Function) {
+      const [label, fn] = (typeof labelOrFn === "function" ? 
[labelOrFn.name, labelOrFn] : [labelOrFn, maybeFn]); + if (typeof fn !== "function") throw new TypeError("Second argument to describe() must be a function."); + + getContext().testStack.push(label); + try { + fn(); + } catch (e) { + getContext().addFailure(e); + throw e + } finally { + getContext().testStack.pop(); + } + + const failures = getContext().failures.length; + const successes = getContext().successes; + console.error(`describe("${label}") finished with ${successes} passed and ${failures} failed tests.`); + if (failures > 0) { + throw new Error(`${failures} tests failed.`); + } + + } + + return { + test, + describe, + } + + } + + require.cache["node:test"] ??= { + exports: createMockNodeTestModule(), + loaded: true, + isPreloading: false, + id: "node:test", + parent: require.main, + filename: "node:test", + children: [], + path: "node:test", + paths: [], + require, + }; +} diff --git a/test/js/node/test/parallel/test-events-customevent.js b/test/js/node/test/parallel/test-events-customevent.js new file mode 100644 index 00000000000000..0cf36aa91cc567 --- /dev/null +++ b/test/js/node/test/parallel/test-events-customevent.js @@ -0,0 +1,323 @@ +// Flags: --expose-internals + +'use strict'; + +const common = require('../common'); +const { ok, strictEqual, deepStrictEqual, throws } = require('node:assert'); +const { inspect } = require('node:util'); + +{ + ok(CustomEvent); + + // Default string + const tag = Object.prototype.toString.call(new CustomEvent('$')); + strictEqual(tag, '[object CustomEvent]'); +} + +{ + // No argument behavior - throw TypeError + throws(() => { + new CustomEvent(); + }, TypeError); + + throws(() => new CustomEvent(Symbol()), TypeError); + + // Too many arguments passed behavior - ignore additional arguments + const ev = new CustomEvent('foo', {}, {}); + strictEqual(ev.type, 'foo'); +} + +{ + const ev = new CustomEvent('$'); + strictEqual(ev.type, '$'); + strictEqual(ev.bubbles, false); + strictEqual(ev.cancelable, false); + strictEqual(ev.detail, null); +} + +{ + // Coercion to string works + strictEqual(new CustomEvent(1).type, '1'); + strictEqual(new CustomEvent(false).type, 'false'); + strictEqual(new CustomEvent({}).type, String({})); +} + +{ + const ev = new CustomEvent('$', { + detail: 56, + sweet: 'x', + cancelable: true, + }); + strictEqual(ev.type, '$'); + strictEqual(ev.bubbles, false); + strictEqual(ev.cancelable, true); + strictEqual(ev.sweet, undefined); + strictEqual(ev.detail, 56); +} + +{ + // Any types of value for `detail` are acceptable. + ['foo', 1, false, [], {}].forEach((i) => { + const ev = new CustomEvent('$', { detail: i }); + strictEqual(ev.detail, i); + }); +} + +{ + // Readonly `detail` behavior + const ev = new CustomEvent('$', { + detail: 56, + }); + strictEqual(ev.detail, 56); + try { + ev.detail = 96; + // eslint-disable-next-line no-unused-vars + } catch (error) { + common.mustCall()(); + } + strictEqual(ev.detail, 56); +} + +{ + const ev = new Event('$', { + detail: 96, + }); + strictEqual(ev.detail, undefined); +} + +// The following tests verify whether CustomEvent works the same as Event +// except carrying custom data. They're based on `parallel/test-eventtarget.js`. 
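Stepping back to the `test/js/node/harness.ts` change above: ported files under `node/test/parallel` call `require('node:test')`, and because the harness is preloaded via `test/js/node/bunfig.toml`, that require resolves to the mock seeded into `require.cache`. A minimal sketch of what such a ported file looks like under the shim (the file name and body are hypothetical, not part of this patch):

    // Hypothetical: test/js/node/test/parallel/test-shim-smoke.js
    // require("node:test") resolves to the mock above because the preloaded
    // harness patches require.cache before any test file runs.
    const { test, describe } = require("node:test");
    const assert = require("node:assert");

    describe("smoke", () => {
      // The shim joins the enclosing labels with " > " when reporting,
      // so a failure here would print as: Test failed: smoke > addition
      test("addition", () => {
        assert.strictEqual(1 + 1, 2);
      });
    });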
+ +{ + const ev = new CustomEvent('$'); + strictEqual(ev.type, '$'); + strictEqual(ev.bubbles, false); + strictEqual(ev.cancelable, false); + strictEqual(ev.detail, null); + + strictEqual(ev.defaultPrevented, false); + strictEqual(typeof ev.timeStamp, 'number'); + + // Compatibility properties with the DOM + deepStrictEqual(ev.composedPath(), []); + strictEqual(ev.returnValue, true); + strictEqual(ev.composed, false); + strictEqual(ev.isTrusted, false); + strictEqual(ev.eventPhase, 0); + strictEqual(ev.cancelBubble, false); + + // Not cancelable + ev.preventDefault(); + strictEqual(ev.defaultPrevented, false); +} + +{ + // Invalid options + ['foo', 1, false].forEach((i) => + throws(() => new CustomEvent('foo', i), { + code: 'ERR_INVALID_ARG_TYPE', + name: 'TypeError', + message: 'The "options" argument must be of type object.' + }), + ); +} + +{ + const ev = new CustomEvent('$'); + strictEqual(ev.constructor.name, 'CustomEvent'); + + // CustomEvent Statics + strictEqual(CustomEvent.NONE, 0); + strictEqual(CustomEvent.CAPTURING_PHASE, 1); + strictEqual(CustomEvent.AT_TARGET, 2); + strictEqual(CustomEvent.BUBBLING_PHASE, 3); + strictEqual(new CustomEvent('foo').eventPhase, CustomEvent.NONE); + + // CustomEvent is a function + strictEqual(CustomEvent.length, 1); +} + +{ + const ev = new CustomEvent('foo'); + strictEqual(ev.cancelBubble, false); + ev.cancelBubble = true; + strictEqual(ev.cancelBubble, true); +} +{ + const ev = new CustomEvent('foo'); + strictEqual(ev.cancelBubble, false); + ev.stopPropagation(); + strictEqual(ev.cancelBubble, true); +} +{ + const ev = new CustomEvent('foo'); + strictEqual(ev.cancelBubble, false); + ev.cancelBubble = 'some-truthy-value'; + strictEqual(ev.cancelBubble, true); +} +{ + const ev = new CustomEvent('foo'); + strictEqual(ev.cancelBubble, false); + ev.cancelBubble = true; + strictEqual(ev.cancelBubble, true); +} +{ + const ev = new CustomEvent('foo'); + strictEqual(ev.cancelBubble, false); + ev.stopPropagation(); + strictEqual(ev.cancelBubble, true); +} +{ + const ev = new CustomEvent('foo'); + strictEqual(ev.cancelBubble, false); + ev.cancelBubble = 'some-truthy-value'; + strictEqual(ev.cancelBubble, true); +} +{ + const ev = new CustomEvent('foo', { cancelable: true }); + strictEqual(ev.type, 'foo'); + strictEqual(ev.cancelable, true); + strictEqual(ev.defaultPrevented, false); + + ev.preventDefault(); + strictEqual(ev.defaultPrevented, true); +} +{ + const ev = new CustomEvent('foo'); + strictEqual(ev.isTrusted, false); +} + +// Works with EventTarget + +{ + const obj = { sweet: 'x', memory: { x: 56, y: 96 } }; + const et = new EventTarget(); + const ev = new CustomEvent('$', { detail: obj }); + const fn = common.mustCall((event) => { + strictEqual(event, ev); + deepStrictEqual(event.detail, obj); + }); + et.addEventListener('$', fn); + et.dispatchEvent(ev); +} + +{ + const eventTarget = new EventTarget(); + const event = new CustomEvent('$'); + eventTarget.dispatchEvent(event); + strictEqual(event.target, eventTarget); +} + +{ + const obj = { sweet: 'x' }; + const eventTarget = new EventTarget(); + + const ev1 = common.mustCall(function(event) { + strictEqual(event.type, 'foo'); + strictEqual(event.detail, obj); + strictEqual(this, eventTarget); + strictEqual(event.eventPhase, 2); + }, 2); + + const ev2 = { + handleEvent: common.mustCall(function(event) { + strictEqual(event.type, 'foo'); + strictEqual(event.detail, obj); + strictEqual(this, ev2); + }), + }; + + eventTarget.addEventListener('foo', ev1); + eventTarget.addEventListener('foo', ev2, { 
once: true }); + ok(eventTarget.dispatchEvent(new CustomEvent('foo', { detail: obj }))); + eventTarget.dispatchEvent(new CustomEvent('foo', { detail: obj })); + + eventTarget.removeEventListener('foo', ev1); + eventTarget.dispatchEvent(new CustomEvent('foo')); +} + +{ + // Same event dispatched multiple times. + const obj = { sweet: 'x' }; + const event = new CustomEvent('foo', { detail: obj }); + const eventTarget1 = new EventTarget(); + const eventTarget2 = new EventTarget(); + + eventTarget1.addEventListener( + 'foo', + common.mustCall((event) => { + strictEqual(event.eventPhase, CustomEvent.AT_TARGET); + strictEqual(event.target, eventTarget1); + strictEqual(event.detail, obj); + deepStrictEqual(event.composedPath(), [eventTarget1]); + }), + ); + + eventTarget2.addEventListener( + 'foo', + common.mustCall((event) => { + strictEqual(event.eventPhase, CustomEvent.AT_TARGET); + strictEqual(event.target, eventTarget2); + strictEqual(event.detail, obj); + deepStrictEqual(event.composedPath(), [eventTarget2]); + }), + ); + + eventTarget1.dispatchEvent(event); + strictEqual(event.eventPhase, CustomEvent.NONE); + strictEqual(event.target, eventTarget1); + deepStrictEqual(event.composedPath(), []); + + eventTarget2.dispatchEvent(event); + strictEqual(event.eventPhase, CustomEvent.NONE); + strictEqual(event.target, eventTarget2); + deepStrictEqual(event.composedPath(), []); +} + +{ + const obj = { sweet: 'x' }; + const target = new EventTarget(); + const event = new CustomEvent('foo', { detail: obj }); + + strictEqual(event.target, null); + + target.addEventListener( + 'foo', + common.mustCall((event) => { + strictEqual(event.target, target); + strictEqual(event.currentTarget, target); + strictEqual(event.srcElement, target); + strictEqual(event.detail, obj); + }), + ); + target.dispatchEvent(event); +} + +{ + // Event subclassing + const SubEvent = class extends CustomEvent {}; + const ev = new SubEvent('foo', { detail: 56 }); + const eventTarget = new EventTarget(); + const fn = common.mustCall((event) => { + strictEqual(event, ev); + strictEqual(event.detail, 56); + }); + eventTarget.addEventListener('foo', fn, { once: true }); + eventTarget.dispatchEvent(ev); +} + +// Works with inspect + +{ + const ev = new CustomEvent('test'); + // TODO: unskip + // const evConstructorName = inspect(ev, { + // depth: -1, + // }); + // strictEqual(evConstructorName, 'CustomEvent'); + + const inspectResult = inspect(ev, { + depth: 1, + }); + ok(inspectResult.includes('CustomEvent')); +} diff --git a/test/js/node/test/parallel/test-events-listener-count-with-listener.js b/test/js/node/test/parallel/test-events-listener-count-with-listener.js new file mode 100644 index 00000000000000..1696cb1c902cb9 --- /dev/null +++ b/test/js/node/test/parallel/test-events-listener-count-with-listener.js @@ -0,0 +1,65 @@ +'use strict'; + +const common = require('../common'); +const EventEmitter = require('events'); +const assert = require('assert'); + +const EE = new EventEmitter(); +const handler = common.mustCall(undefined, 3); +const anotherHandler = common.mustCall(); + +assert.strictEqual(EE.listenerCount('event'), 0); +assert.strictEqual(EE.listenerCount('event', handler), 0); +assert.strictEqual(EE.listenerCount('event', anotherHandler), 0); + +EE.once('event', handler); + +assert.strictEqual(EE.listenerCount('event'), 1); +assert.strictEqual(EE.listenerCount('event', handler), 1); +assert.strictEqual(EE.listenerCount('event', anotherHandler), 0); + +EE.removeAllListeners('event'); + 
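The assertions in this file lean on the optional second argument to `listenerCount`, which narrows the count to registrations of one specific function; roughly:

    import { EventEmitter } from "node:events";

    const ee = new EventEmitter();
    const fn = () => {};
    ee.on("ping", fn);
    ee.on("ping", () => {});

    // Without a listener argument, every registration is counted;
    // with one, only registrations of that exact function.
    console.log(ee.listenerCount("ping"));     // 2
    console.log(ee.listenerCount("ping", fn)); // 1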
+assert.strictEqual(EE.listenerCount('event'), 0); +assert.strictEqual(EE.listenerCount('event', handler), 0); +assert.strictEqual(EE.listenerCount('event', anotherHandler), 0); + +EE.on('event', handler); + +assert.strictEqual(EE.listenerCount('event'), 1); +assert.strictEqual(EE.listenerCount('event', handler), 1); +assert.strictEqual(EE.listenerCount('event', anotherHandler), 0); + +EE.once('event', anotherHandler); + +assert.strictEqual(EE.listenerCount('event'), 2); +assert.strictEqual(EE.listenerCount('event', handler), 1); +assert.strictEqual(EE.listenerCount('event', anotherHandler), 1); + +assert.strictEqual(EE.listenerCount('another-event'), 0); +assert.strictEqual(EE.listenerCount('another-event', handler), 0); +assert.strictEqual(EE.listenerCount('another-event', anotherHandler), 0); + +EE.once('event', handler); + +assert.strictEqual(EE.listenerCount('event'), 3); +assert.strictEqual(EE.listenerCount('event', handler), 2); +assert.strictEqual(EE.listenerCount('event', anotherHandler), 1); + +EE.emit('event'); + +assert.strictEqual(EE.listenerCount('event'), 1); +assert.strictEqual(EE.listenerCount('event', handler), 1); +assert.strictEqual(EE.listenerCount('event', anotherHandler), 0); + +EE.emit('event'); + +assert.strictEqual(EE.listenerCount('event'), 1); +assert.strictEqual(EE.listenerCount('event', handler), 1); +assert.strictEqual(EE.listenerCount('event', anotherHandler), 0); + +EE.off('event', handler); + +assert.strictEqual(EE.listenerCount('event'), 0); +assert.strictEqual(EE.listenerCount('event', handler), 0); +assert.strictEqual(EE.listenerCount('event', anotherHandler), 0); diff --git a/test/js/node/test/parallel/test-events-on-async-iterator.js b/test/js/node/test/parallel/test-events-on-async-iterator.js new file mode 100644 index 00000000000000..298b1597db6ee0 --- /dev/null +++ b/test/js/node/test/parallel/test-events-on-async-iterator.js @@ -0,0 +1,427 @@ +// Flags: --expose-internals --no-warnings +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const { on, EventEmitter, listenerCount } = require('events'); + +async function basic() { + const ee = new EventEmitter(); + process.nextTick(() => { + ee.emit('foo', 'bar'); + // 'bar' is a spurious event, we are testing + // that it does not show up in the iterable + ee.emit('bar', 24); + ee.emit('foo', 42); + }); + + const iterable = on(ee, 'foo'); + + const expected = [['bar'], [42]]; + + for await (const event of iterable) { + const current = expected.shift(); + + assert.deepStrictEqual(current, event); + + if (expected.length === 0) { + break; + } + } + assert.strictEqual(ee.listenerCount('foo'), 0); + assert.strictEqual(ee.listenerCount('error'), 0); +} + +async function invalidArgType() { + assert.throws(() => on({}, 'foo'), common.expectsError({ + code: 'ERR_INVALID_ARG_TYPE', + name: 'TypeError', + })); + + const ee = new EventEmitter(); + + [1, 'hi', null, false, () => {}, Symbol(), 1n].map((options) => { + return assert.throws(() => on(ee, 'foo', options), common.expectsError({ + code: 'ERR_INVALID_ARG_TYPE', + name: 'TypeError', + })); + }); +} + +async function error() { + const ee = new EventEmitter(); + const _err = new Error('kaboom'); + process.nextTick(() => { + ee.emit('error', _err); + }); + + const iterable = on(ee, 'foo'); + let looped = false; + let thrown = false; + + try { + // eslint-disable-next-line no-unused-vars + for await (const event of iterable) { + looped = true; + } + } catch (err) { + thrown = true; + assert.strictEqual(err, _err); + } + 
assert.strictEqual(thrown, true); + assert.strictEqual(looped, false); +} + +async function errorDelayed() { + const ee = new EventEmitter(); + const _err = new Error('kaboom'); + process.nextTick(() => { + ee.emit('foo', 42); + ee.emit('error', _err); + }); + + const iterable = on(ee, 'foo'); + const expected = [[42]]; + let thrown = false; + + try { + for await (const event of iterable) { + const current = expected.shift(); + assert.deepStrictEqual(current, event); + } + } catch (err) { + thrown = true; + assert.strictEqual(err, _err); + } + assert.strictEqual(thrown, true); + assert.strictEqual(ee.listenerCount('foo'), 0); + assert.strictEqual(ee.listenerCount('error'), 0); +} + +async function throwInLoop() { + const ee = new EventEmitter(); + const _err = new Error('kaboom'); + + process.nextTick(() => { + ee.emit('foo', 42); + }); + + try { + for await (const event of on(ee, 'foo')) { + assert.deepStrictEqual(event, [42]); + throw _err; + } + } catch (err) { + assert.strictEqual(err, _err); + } + + assert.strictEqual(ee.listenerCount('foo'), 0); + assert.strictEqual(ee.listenerCount('error'), 0); +} + +async function next() { + const ee = new EventEmitter(); + const iterable = on(ee, 'foo'); + + process.nextTick(function() { + ee.emit('foo', 'bar'); + ee.emit('foo', 42); + iterable.return(); + }); + + const results = await Promise.all([ + iterable.next(), + iterable.next(), + iterable.next(), + ]); + + assert.deepStrictEqual(results, [{ + value: ['bar'], + done: false, + }, { + value: [42], + done: false, + }, { + value: undefined, + done: true, + }]); + + assert.deepStrictEqual(await iterable.next(), { + value: undefined, + done: true, + }); +} + +async function nextError() { + const ee = new EventEmitter(); + const iterable = on(ee, 'foo'); + const _err = new Error('kaboom'); + process.nextTick(function() { + ee.emit('error', _err); + }); + const results = await Promise.allSettled([ + iterable.next(), + iterable.next(), + iterable.next(), + ]); + assert.deepStrictEqual(results, [{ + status: 'rejected', + reason: _err, + }, { + status: 'fulfilled', + value: { + value: undefined, + done: true, + }, + }, { + status: 'fulfilled', + value: { + value: undefined, + done: true, + }, + }]); + assert.strictEqual(ee.listeners('error').length, 0); +} + +async function iterableThrow() { + const ee = new EventEmitter(); + const iterable = on(ee, 'foo'); + + process.nextTick(() => { + ee.emit('foo', 'bar'); + ee.emit('foo', 42); // lost in the queue + iterable.throw(_err); + }); + + const _err = new Error('kaboom'); + let thrown = false; + + assert.throws(() => { + // No argument + iterable.throw(); + }, { + name: 'TypeError', + }); + + const expected = [['bar'], [42]]; + + try { + for await (const event of iterable) { + assert.deepStrictEqual(event, expected.shift()); + } + } catch (err) { + thrown = true; + assert.strictEqual(err, _err); + } + assert.strictEqual(thrown, true); + assert.strictEqual(expected.length, 0); + assert.strictEqual(ee.listenerCount('foo'), 0); + assert.strictEqual(ee.listenerCount('error'), 0); +} + +async function eventTarget() { + const et = new EventTarget(); + const tick = () => et.dispatchEvent(new Event('tick')); + const interval = setInterval(tick, 0); + let count = 0; + for await (const [ event ] of on(et, 'tick')) { + count++; + assert.strictEqual(event.type, 'tick'); + if (count >= 5) { + break; + } + } + assert.strictEqual(count, 5); + clearInterval(interval); +} + +async function errorListenerCount() { + const et = new EventEmitter(); + on(et, 'foo'); + 
assert.strictEqual(et.listenerCount('error'), 1); +} + +// async function nodeEventTarget() { +// const et = new NodeEventTarget(); +// const tick = () => et.dispatchEvent(new Event('tick')); +// const interval = setInterval(tick, 0); +// let count = 0; +// for await (const [ event] of on(et, 'tick')) { +// count++; +// assert.strictEqual(event.type, 'tick'); +// if (count >= 5) { +// break; +// } +// } +// assert.strictEqual(count, 5); +// clearInterval(interval); +// } + +async function abortableOnBefore() { + const ee = new EventEmitter(); + const abortedSignal = AbortSignal.abort(); + [1, {}, null, false, 'hi'].forEach((signal) => { + assert.throws(() => on(ee, 'foo', { signal }), { + code: 'ERR_INVALID_ARG_TYPE', + }); + }); + assert.throws(() => on(ee, 'foo', { signal: abortedSignal }), { + name: 'AbortError', + }); +} + +async function eventTargetAbortableOnBefore() { + const et = new EventTarget(); + const abortedSignal = AbortSignal.abort(); + [1, {}, null, false, 'hi'].forEach((signal) => { + assert.throws(() => on(et, 'foo', { signal }), { + code: 'ERR_INVALID_ARG_TYPE', + }); + }); + assert.throws(() => on(et, 'foo', { signal: abortedSignal }), { + name: 'AbortError', + }); +} + +async function abortableOnAfter() { + const ee = new EventEmitter(); + const ac = new AbortController(); + + const i = setInterval(() => ee.emit('foo', 'foo'), 10); + + async function foo() { + for await (const f of on(ee, 'foo', { signal: ac.signal })) { + assert.strictEqual(f, 'foo'); + } + } + + foo().catch(common.mustCall((error) => { + assert.strictEqual(error.name, 'AbortError'); + })).finally(() => { + clearInterval(i); + }); + + process.nextTick(() => ac.abort()); +} + +async function eventTargetAbortableOnAfter() { + const et = new EventTarget(); + const ac = new AbortController(); + + const i = setInterval(() => et.dispatchEvent(new Event('foo')), 10); + + async function foo() { + for await (const f of on(et, 'foo', { signal: ac.signal })) { + assert(f); + } + } + + foo().catch(common.mustCall((error) => { + assert.strictEqual(error.name, 'AbortError'); + })).finally(() => { + clearInterval(i); + }); + + process.nextTick(() => ac.abort()); +} + +async function eventTargetAbortableOnAfter2() { + const et = new EventTarget(); + const ac = new AbortController(); + + const i = setInterval(() => et.dispatchEvent(new Event('foo')), 10); + + async function foo() { + for await (const f of on(et, 'foo', { signal: ac.signal })) { + assert(f); + // Cancel after a single event has been triggered. 
+ ac.abort(); + } + } + + foo().catch(common.mustCall((error) => { + assert.strictEqual(error.name, 'AbortError'); + })).finally(() => { + clearInterval(i); + }); +} + +async function abortableOnAfterDone() { + const ee = new EventEmitter(); + const ac = new AbortController(); + + const i = setInterval(() => ee.emit('foo', 'foo'), 1); + let count = 0; + + async function foo() { + for await (const f of on(ee, 'foo', { signal: ac.signal })) { + assert.strictEqual(f[0], 'foo'); + if (++count === 5) + break; + } + ac.abort(); // No error will occur + } + + foo().finally(() => { + clearInterval(i); + }); +} + +async function abortListenerRemovedAfterComplete() { + const ee = new EventEmitter(); + const ac = new AbortController(); + + const i = setInterval(() => ee.emit('foo', 'foo'), 1); + try { + // Below: either the kEvents map is empty or the 'abort' listener list is empty + + // Return case + const endedIterator = on(ee, 'foo', { signal: ac.signal }); + assert.ok(listenerCount(ac.signal, 'abort') > 0); + endedIterator.return(); + assert.strictEqual(listenerCount(ac.signal, 'abort') ?? listenerCount(ac.signal), 0); + + // Throw case + const throwIterator = on(ee, 'foo', { signal: ac.signal }); + assert.ok(listenerCount(ac.signal, 'abort') > 0); + throwIterator.throw(new Error()); + assert.strictEqual(listenerCount(ac.signal, 'abort') ?? listenerCount(ac.signal), 0); + + // Abort case + on(ee, 'foo', { signal: ac.signal }); + assert.ok(listenerCount(ac.signal, 'abort') > 0); + ac.abort(new Error()); + assert.strictEqual(listenerCount(ac.signal, 'abort') ?? listenerCount(ac.signal), 0); + } finally { + clearInterval(i); + } +} + +async function run() { + const funcs = [ + basic, + invalidArgType, + error, + errorDelayed, + throwInLoop, + next, + nextError, + iterableThrow, + eventTarget, + errorListenerCount, + // nodeEventTarget, + abortableOnBefore, + abortableOnAfter, + eventTargetAbortableOnBefore, + eventTargetAbortableOnAfter, + eventTargetAbortableOnAfter2, + abortableOnAfterDone, + abortListenerRemovedAfterComplete, + ]; + + for (const fn of funcs) { + await fn(); + } +} + +run().then(common.mustCall()); diff --git a/test/js/node/test/parallel/test-events-once.js b/test/js/node/test/parallel/test-events-once.js new file mode 100644 index 00000000000000..25357fde67f8d0 --- /dev/null +++ b/test/js/node/test/parallel/test-events-once.js @@ -0,0 +1,287 @@ +'use strict'; +// Flags: --expose-internals --no-warnings + +const common = require('../common'); +const { once, EventEmitter, listenerCount } = require('events'); +const { + deepStrictEqual, + fail, + rejects, + strictEqual, + throws, +} = require('assert'); + +async function onceAnEvent() { + const ee = new EventEmitter(); + + process.nextTick(() => { + ee.emit('myevent', 42); + }); + + const [value] = await once(ee, 'myevent'); + strictEqual(value, 42); + strictEqual(ee.listenerCount('error'), 0); + strictEqual(ee.listenerCount('myevent'), 0); +} + +async function onceAnEventWithInvalidOptions() { + const ee = new EventEmitter(); + + await Promise.all([1, 'hi', null, false, () => {}, Symbol(), 1n].map((options) => { + return throws(() => once(ee, 'myevent', options), { + code: 'ERR_INVALID_ARG_TYPE', + }); + })); +} + +async function onceAnEventWithTwoArgs() { + const ee = new EventEmitter(); + + process.nextTick(() => { + ee.emit('myevent', 42, 24); + }); + + const value = await once(ee, 'myevent'); + deepStrictEqual(value, [42, 24]); +} + +async function catchesErrors() { + const ee = new EventEmitter(); + + const expected = new 
Error('kaboom'); + let err; + process.nextTick(() => { + ee.emit('error', expected); + }); + + try { + await once(ee, 'myevent'); + } catch (_e) { + err = _e; + } + strictEqual(err, expected); + strictEqual(ee.listenerCount('error'), 0); + strictEqual(ee.listenerCount('myevent'), 0); +} + +async function catchesErrorsWithAbortSignal() { + const ee = new EventEmitter(); + const ac = new AbortController(); + const signal = ac.signal; + + const expected = new Error('boom'); + let err; + process.nextTick(() => { + ee.emit('error', expected); + }); + + try { + const promise = once(ee, 'myevent', { signal }); + strictEqual(ee.listenerCount('error'), 1); + strictEqual(listenerCount(signal, "abort"), 1); + + await promise; + } catch (e) { + err = e; + } + strictEqual(err, expected); + strictEqual(ee.listenerCount('error'), 0); + strictEqual(ee.listenerCount('myevent'), 0); + strictEqual(listenerCount(signal, "abort"), 0); +} + +async function stopListeningAfterCatchingError() { + const ee = new EventEmitter(); + + const expected = new Error('kaboom'); + let err; + process.nextTick(() => { + ee.emit('error', expected); + ee.emit('myevent', 42, 24); + }); + + try { + await once(ee, 'myevent'); + } catch (_e) { + err = _e; + } + process.removeAllListeners('multipleResolves'); + strictEqual(err, expected); + strictEqual(ee.listenerCount('error'), 0); + strictEqual(ee.listenerCount('myevent'), 0); +} + +async function onceError() { + const ee = new EventEmitter(); + + const expected = new Error('kaboom'); + process.nextTick(() => { + ee.emit('error', expected); + }); + + const promise = once(ee, 'error'); + strictEqual(ee.listenerCount('error'), 1); + const [ err ] = await promise; + strictEqual(err, expected); + strictEqual(ee.listenerCount('error'), 0); + strictEqual(ee.listenerCount('myevent'), 0); +} + +async function onceWithEventTarget() { + const et = new EventTarget(); + const event = new Event('myevent'); + process.nextTick(() => { + et.dispatchEvent(event); + }); + const [ value ] = await once(et, 'myevent'); + strictEqual(value, event); +} + +async function onceWithEventTargetError() { + const et = new EventTarget(); + const error = new Event('error'); + process.nextTick(() => { + et.dispatchEvent(error); + }); + + const [ err ] = await once(et, 'error'); + strictEqual(err, error); +} + +async function onceWithInvalidEventEmmiter() { + const ac = new AbortController(); + return throws(() => once(ac, 'myevent'), { + code: 'ERR_INVALID_ARG_TYPE', + }); +} + +async function prioritizesEventEmitter() { + const ee = new EventEmitter(); + ee.addEventListener = fail; + ee.removeAllListeners = fail; + process.nextTick(() => ee.emit('foo')); + await once(ee, 'foo'); +} + +async function abortSignalBefore() { + const ee = new EventEmitter(); + ee.on('error', common.mustNotCall()); + const abortedSignal = AbortSignal.abort(); + + await Promise.all([1, {}, 'hi', null, false].map((signal) => { + return throws(() => once(ee, 'foo', { signal }), { + code: 'ERR_INVALID_ARG_TYPE', + }); + })); + + return throws(() => once(ee, 'foo', { signal: abortedSignal }), { + name: 'AbortError', + }); +} + +async function abortSignalAfter() { + const ee = new EventEmitter(); + const ac = new AbortController(); + ee.on('error', common.mustNotCall()); + const r = rejects(once(ee, 'foo', { signal: ac.signal }), { + name: 'AbortError', + }); + process.nextTick(() => ac.abort()); + return r; +} + +async function abortSignalAfterEvent() { + const ee = new EventEmitter(); + const ac = new AbortController(); + 
process.nextTick(() => { + ee.emit('foo'); + ac.abort(); + }); + const promise = once(ee, 'foo', { signal: ac.signal }); + strictEqual(listenerCount(ac.signal, "abort"), 1); + await promise; + strictEqual(listenerCount(ac.signal, "abort"), 0); +} + +async function abortSignalRemoveListener() { + const ee = new EventEmitter(); + const ac = new AbortController(); + + try { + process.nextTick(() => ac.abort()); + await once(ee, 'test', { signal: ac.signal }); + } catch { + strictEqual(ee.listeners('test').length, 0); + strictEqual(ee.listeners('error').length, 0); + } +} + +async function eventTargetAbortSignalBefore() { + const et = new EventTarget(); + const abortedSignal = AbortSignal.abort(); + + await Promise.all([1, {}, 'hi', null, false].map((signal) => { + return throws(() => once(et, 'foo', { signal }), { + code: 'ERR_INVALID_ARG_TYPE', + }); + })); + + return throws(() => once(et, 'foo', { signal: abortedSignal }), { + name: 'AbortError', + }); +} + +// TODO: unskip +// async function eventTargetAbortSignalBeforeEvenWhenSignalPropagationStopped() { +// const et = new EventTarget(); +// const ac = new AbortController(); +// const { signal } = ac; +// signal.addEventListener('abort', (e) => e.stopImmediatePropagation(), { once: true }); + +// process.nextTick(() => ac.abort()); +// return rejects(once(et, 'foo', { signal }), { +// name: 'AbortError', +// }); +// } + +async function eventTargetAbortSignalAfter() { + const et = new EventTarget(); + const ac = new AbortController(); + const r = rejects(once(et, 'foo', { signal: ac.signal }), { + name: 'AbortError', + }); + process.nextTick(() => ac.abort()); + return r; +} + +async function eventTargetAbortSignalAfterEvent() { + const et = new EventTarget(); + const ac = new AbortController(); + process.nextTick(() => { + et.dispatchEvent(new Event('foo')); + ac.abort(); + }); + await once(et, 'foo', { signal: ac.signal }); +} + +Promise.all([ + onceAnEvent(), + onceAnEventWithInvalidOptions(), + onceAnEventWithTwoArgs(), + catchesErrors(), + catchesErrorsWithAbortSignal(), + stopListeningAfterCatchingError(), + onceError(), + onceWithEventTarget(), + onceWithEventTargetError(), + onceWithInvalidEventEmmiter(), + prioritizesEventEmitter(), + abortSignalBefore(), + abortSignalAfter(), + abortSignalAfterEvent(), + abortSignalRemoveListener(), + eventTargetAbortSignalBefore(), + // eventTargetAbortSignalBeforeEvenWhenSignalPropagationStopped(), + eventTargetAbortSignalAfter(), + eventTargetAbortSignalAfterEvent(), +]).then(common.mustCall()); diff --git a/test/js/node/test/parallel/test-events-static-geteventlisteners.js b/test/js/node/test/parallel/test-events-static-geteventlisteners.js new file mode 100644 index 00000000000000..35b4e34325a8b5 --- /dev/null +++ b/test/js/node/test/parallel/test-events-static-geteventlisteners.js @@ -0,0 +1,51 @@ +'use strict'; +// Flags: --expose-internals --no-warnings +const common = require('../common'); + +const { + deepStrictEqual, + throws, +} = require('assert'); + +const { getEventListeners, EventEmitter } = require('events'); + +// Test getEventListeners on EventEmitter +{ + const fn1 = common.mustNotCall(); + const fn2 = common.mustNotCall(); + const emitter = new EventEmitter(); + emitter.on('foo', fn1); + emitter.on('foo', fn2); + emitter.on('baz', fn1); + emitter.on('baz', fn1); + deepStrictEqual(getEventListeners(emitter, 'foo'), [fn1, fn2]); + deepStrictEqual(getEventListeners(emitter, 'bar'), []); + deepStrictEqual(getEventListeners(emitter, 'baz'), [fn1, fn1]); +} +// Test 
getEventListeners on EventTarget +{ + const fn1 = common.mustNotCall(); + const fn2 = common.mustNotCall(); + const target = new EventTarget(); + target.addEventListener('foo', fn1); + target.addEventListener('foo', fn2); + target.addEventListener('baz', fn1); + target.addEventListener('baz', fn1); + deepStrictEqual(getEventListeners(target, 'foo'), [fn1, fn2]); + deepStrictEqual(getEventListeners(target, 'bar'), []); + deepStrictEqual(getEventListeners(target, 'baz'), [fn1]); +} + +{ + throws(() => { + getEventListeners('INVALID_EMITTER'); + }, /ERR_INVALID_ARG_TYPE/); +} +// { +// // Test weak listeners +// const target = new EventTarget(); +// const fn = common.mustNotCall(); +// target.addEventListener('foo', fn, { [kWeakHandler]: {} }); +// const listeners = getEventListeners(target, 'foo'); +// deepStrictEqual(listeners, [fn]); +// } diff --git a/test/js/node/test/parallel/test-fs-readfile-flags.js b/test/js/node/test/parallel/test-fs-readfile-flags.js deleted file mode 100644 index 72b910aeeb48d6..00000000000000 --- a/test/js/node/test/parallel/test-fs-readfile-flags.js +++ /dev/null @@ -1,50 +0,0 @@ -'use strict'; - -// Test of fs.readFile with different flags. -const common = require('../common'); -const fs = require('fs'); -const assert = require('assert'); -const tmpdir = require('../common/tmpdir'); - -tmpdir.refresh(); - -{ - const emptyFile = tmpdir.resolve('empty.txt'); - fs.closeSync(fs.openSync(emptyFile, 'w')); - - fs.readFile( - emptyFile, - // With `a+` the file is created if it does not exist - common.mustNotMutateObjectDeep({ encoding: 'utf8', flag: 'a+' }), - common.mustCall((err, data) => { assert.strictEqual(data, ''); }) - ); - - fs.readFile( - emptyFile, - // Like `a+` but fails if the path exists. - common.mustNotMutateObjectDeep({ encoding: 'utf8', flag: 'ax+' }), - common.mustCall((err, data) => { assert.strictEqual(err.code, 'EEXIST'); }) - ); -} - -{ - const willBeCreated = tmpdir.resolve('will-be-created'); - - fs.readFile( - willBeCreated, - // With `a+` the file is created if it does not exist - common.mustNotMutateObjectDeep({ encoding: 'utf8', flag: 'a+' }), - common.mustCall((err, data) => { assert.strictEqual(data, ''); }) - ); -} - -{ - const willNotBeCreated = tmpdir.resolve('will-not-be-created'); - - fs.readFile( - willNotBeCreated, - // Default flag is `r`. An exception occurs if the file does not exist. - common.mustNotMutateObjectDeep({ encoding: 'utf8' }), - common.mustCall((err, data) => { assert.strictEqual(err.code, 'ENOENT'); }) - ); -} diff --git a/test/js/node/test/parallel/test-fs-watch-file-enoent-after-deletion.js b/test/js/node/test/parallel/test-fs-watch-file-enoent-after-deletion.js deleted file mode 100644 index e4baf90fd17b94..00000000000000 --- a/test/js/node/test/parallel/test-fs-watch-file-enoent-after-deletion.js +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. 
-// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -'use strict'; -const common = require('../common'); - -// Make sure the deletion event gets reported in the following scenario: -// 1. Watch a file. -// 2. The initial stat() goes okay. -// 3. Something deletes the watched file. -// 4. The second stat() fails with ENOENT. - -// The second stat() translates into the first 'change' event but a logic error -// stopped it from getting emitted. -// https://github.com/nodejs/node-v0.x-archive/issues/4027 - -const fs = require('fs'); - -const tmpdir = require('../common/tmpdir'); -tmpdir.refresh(); - -const filename = tmpdir.resolve('watched'); -fs.writeFileSync(filename, 'quis custodiet ipsos custodes'); - -fs.watchFile(filename, { interval: 50 }, common.mustCall(function(curr, prev) { - fs.unwatchFile(filename); -})); - -fs.unlinkSync(filename); diff --git a/test/js/node/test/parallel/test-path-parse-format.js b/test/js/node/test/parallel/test-path-parse-format.js new file mode 100644 index 00000000000000..ca14120422b2ee --- /dev/null +++ b/test/js/node/test/parallel/test-path-parse-format.js @@ -0,0 +1,226 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +'use strict'; +const common = require('../common'); +const assert = require('assert'); +const path = require('path'); + +const winPaths = [ + // [path, root] + ['C:\\path\\dir\\index.html', 'C:\\'], + ['C:\\another_path\\DIR\\1\\2\\33\\\\index', 'C:\\'], + ['another_path\\DIR with spaces\\1\\2\\33\\index', ''], + ['\\', '\\'], + ['\\foo\\C:', '\\'], + ['file', ''], + ['file:stream', ''], + ['.\\file', ''], + ['C:', 'C:'], + ['C:.', 'C:'], + ['C:..', 'C:'], + ['C:abc', 'C:'], + ['C:\\', 'C:\\'], + ['C:\\abc', 'C:\\' ], + ['', ''], + + // unc + ['\\\\server\\share\\file_path', '\\\\server\\share\\'], + ['\\\\server two\\shared folder\\file path.zip', + '\\\\server two\\shared folder\\'], + ['\\\\teela\\admin$\\system32', '\\\\teela\\admin$\\'], + ['\\\\?\\UNC\\server\\share', '\\\\?\\UNC\\'], +]; + +const winSpecialCaseParseTests = [ + ['t', { base: 't', name: 't', root: '', dir: '', ext: '' }], + ['/foo/bar', { root: '/', dir: '/foo', base: 'bar', ext: '', name: 'bar' }], +]; + +const winSpecialCaseFormatTests = [ + [{ dir: 'some\\dir' }, 'some\\dir\\'], + [{ base: 'index.html' }, 'index.html'], + [{ root: 'C:\\' }, 'C:\\'], + [{ name: 'index', ext: '.html' }, 'index.html'], + [{ dir: 'some\\dir', name: 'index', ext: '.html' }, 'some\\dir\\index.html'], + [{ root: 'C:\\', name: 'index', ext: '.html' }, 'C:\\index.html'], + [{}, ''], +]; + +const unixPaths = [ + // [path, root] + ['/home/user/dir/file.txt', '/'], + ['/home/user/a dir/another File.zip', '/'], + ['/home/user/a dir//another&File.', '/'], + ['/home/user/a$$$dir//another File.zip', '/'], + ['user/dir/another File.zip', ''], + ['file', ''], + ['.\\file', ''], + ['./file', ''], + ['C:\\foo', ''], + ['/', '/'], + ['', ''], + ['.', ''], + ['..', ''], + ['/foo', '/'], + ['/foo.', '/'], + ['/foo.bar', '/'], + ['/.', '/'], + ['/.foo', '/'], + ['/.foo.bar', '/'], + ['/foo/bar.baz', '/'], +]; + +const unixSpecialCaseFormatTests = [ + [{ dir: 'some/dir' }, 'some/dir/'], + [{ base: 'index.html' }, 'index.html'], + [{ root: '/' }, '/'], + [{ name: 'index', ext: '.html' }, 'index.html'], + [{ dir: 'some/dir', name: 'index', ext: '.html' }, 'some/dir/index.html'], + [{ root: '/', name: 'index', ext: '.html' }, '/index.html'], + [{}, ''], +]; + +const errors = [ + { method: 'parse', input: [null] }, + { method: 'parse', input: [{}] }, + { method: 'parse', input: [true] }, + { method: 'parse', input: [1] }, + { method: 'parse', input: [] }, + { method: 'format', input: [null] }, + { method: 'format', input: [''] }, + { method: 'format', input: [true] }, + { method: 'format', input: [1] }, +]; + +checkParseFormat(path.win32, winPaths); +checkParseFormat(path.posix, unixPaths); +checkSpecialCaseParseFormat(path.win32, winSpecialCaseParseTests); +checkErrors(path.win32); +checkErrors(path.posix); +checkFormat(path.win32, winSpecialCaseFormatTests); +checkFormat(path.posix, unixSpecialCaseFormatTests); + +// Test removal of trailing path separators +const trailingTests = [ + [ path.win32.parse, + [['.\\', { root: '', dir: '', base: '.', ext: '', name: '.' }], + ['\\\\', { root: '\\', dir: '\\', base: '', ext: '', name: '' }], + ['\\\\', { root: '\\', dir: '\\', base: '', ext: '', name: '' }], + ['c:\\foo\\\\\\', + { root: 'c:\\', dir: 'c:\\', base: 'foo', ext: '', name: 'foo' }], + ['D:\\foo\\\\\\bar.baz', + { root: 'D:\\', + dir: 'D:\\foo\\\\', + base: 'bar.baz', + ext: '.baz', + name: 'bar' }, + ], + ], + ], + [ path.posix.parse, + [['./', { root: '', dir: '', base: '.', ext: '', name: '.' 
}], + ['//', { root: '/', dir: '/', base: '', ext: '', name: '' }], + ['///', { root: '/', dir: '/', base: '', ext: '', name: '' }], + ['/foo///', { root: '/', dir: '/', base: 'foo', ext: '', name: 'foo' }], + ['/foo///bar.baz', + { root: '/', dir: '/foo//', base: 'bar.baz', ext: '.baz', name: 'bar' }, + ], + ], + ], +]; +const failures = []; +for (const [parse, testList] of trailingTests) { + const os = parse === path.win32.parse ? 'win32' : 'posix'; + for (const [input, expected] of testList) { + const actual = parse(input); + const message = `path.${os}.parse(${JSON.stringify(input)})\n expect=${ + JSON.stringify(expected)}\n actual=${JSON.stringify(actual)}`; + const actualKeys = Object.keys(actual); + const expectedKeys = Object.keys(expected); + let failed = (actualKeys.length !== expectedKeys.length); + if (!failed) { + for (let i = 0; i < actualKeys.length; ++i) { + const key = actualKeys[i]; + if (!expectedKeys.includes(key) || actual[key] !== expected[key]) { + failed = true; + break; + } + } + } + if (failed) + failures.push(`\n${message}`); + } +} +assert.strictEqual(failures.length, 0, failures.join('')); + +function checkErrors(path) { + errors.forEach(({ method, input }) => { + assert.throws(() => { + path[method].apply(path, input); + }, { + code: 'ERR_INVALID_ARG_TYPE', + name: 'TypeError' + }); + }); +} + +function checkParseFormat(path, paths) { + paths.forEach(([element, root]) => { + const output = path.parse(element); + assert.strictEqual(typeof output.root, 'string'); + assert.strictEqual(typeof output.dir, 'string'); + assert.strictEqual(typeof output.base, 'string'); + assert.strictEqual(typeof output.ext, 'string'); + assert.strictEqual(typeof output.name, 'string'); + assert.strictEqual(path.format(output), element); + assert.strictEqual(output.root, root); + assert(output.dir.startsWith(output.root)); + assert.strictEqual(output.dir, output.dir ? 
path.dirname(element) : ''); + assert.strictEqual(output.base, path.basename(element)); + assert.strictEqual(output.ext, path.extname(element)); + }); +} + +function checkSpecialCaseParseFormat(path, testCases) { + testCases.forEach(([element, expect]) => { + assert.deepStrictEqual(path.parse(element), expect); + }); +} + +function checkFormat(path, testCases) { + testCases.forEach(([element, expect]) => { + assert.strictEqual(path.format(element), expect); + }); + + [null, undefined, 1, true, false, 'string'].forEach((pathObject) => { + assert.throws(() => { + path.format(pathObject); + }, { + code: 'ERR_INVALID_ARG_TYPE', + name: 'TypeError', + }); + }); +} + +// See https://github.com/nodejs/node/issues/44343 +assert.strictEqual(path.format({ name: 'x', ext: 'png' }), 'x.png'); +assert.strictEqual(path.format({ name: 'x', ext: '.png' }), 'x.png'); diff --git a/test/js/node/test/parallel/test-querystring-escape.js b/test/js/node/test/parallel/test-querystring-escape.js new file mode 100644 index 00000000000000..5f3ea3aedc4d05 --- /dev/null +++ b/test/js/node/test/parallel/test-querystring-escape.js @@ -0,0 +1,41 @@ +'use strict'; +require('../common'); +const assert = require('assert'); + +const qs = require('querystring'); + +assert.strictEqual(qs.escape(5), '5'); +assert.strictEqual(qs.escape('test'), 'test'); +assert.strictEqual(qs.escape({}), '%5Bobject%20Object%5D'); +assert.strictEqual(qs.escape([5, 10]), '5%2C10'); +assert.strictEqual(qs.escape('Ŋōđĕ'), '%C5%8A%C5%8D%C4%91%C4%95'); +assert.strictEqual(qs.escape('testŊōđĕ'), 'test%C5%8A%C5%8D%C4%91%C4%95'); +assert.strictEqual(qs.escape(`${String.fromCharCode(0xD800 + 1)}test`), + '%F0%90%91%B4est'); + +assert.throws( + () => qs.escape(String.fromCharCode(0xD800 + 1)), + { + code: 'ERR_INVALID_URI', + name: 'URIError', + message: 'URI malformed' + } +); + +// Using toString for objects +assert.strictEqual( + qs.escape({ test: 5, toString: () => 'test', valueOf: () => 10 }), + 'test' +); + +// `toString` is not callable, must throw an error. +// Error message will vary between different JavaScript engines, so only check +// that it is a `TypeError`. +assert.throws(() => qs.escape({ toString: 5 }), TypeError); + +// Should use valueOf instead of non-callable toString. +assert.strictEqual(qs.escape({ toString: 5, valueOf: () => 'test' }), 'test'); + +// Error message will vary between different JavaScript engines, so only check +// that it is a `TypeError`. +assert.throws(() => qs.escape(Symbol('test')), TypeError); diff --git a/test/js/node/test/parallel/test-querystring.js b/test/js/node/test/parallel/test-querystring.js new file mode 100644 index 00000000000000..b24ec5b569bd03 --- /dev/null +++ b/test/js/node/test/parallel/test-querystring.js @@ -0,0 +1,480 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +'use strict'; +require('../common'); +const assert = require('assert'); +const inspect = require('util').inspect; + +// test using assert +const qs = require('querystring'); + +function createWithNoPrototype(properties) { + const noProto = { __proto__: null }; + properties.forEach((property) => { + noProto[property.key] = property.value; + }); + return noProto; +} +// Folding block, commented to pass gjslint +// {{{ +// [ wonkyQS, canonicalQS, obj ] +const qsTestCases = [ + ['__proto__=1', + '__proto__=1', + createWithNoPrototype([{ key: '__proto__', value: '1' }])], + ['__defineGetter__=asdf', + '__defineGetter__=asdf', + JSON.parse('{"__defineGetter__":"asdf"}')], + ['foo=918854443121279438895193', + 'foo=918854443121279438895193', + { 'foo': '918854443121279438895193' }], + ['foo=bar', 'foo=bar', { 'foo': 'bar' }], + ['foo=bar&foo=quux', 'foo=bar&foo=quux', { 'foo': ['bar', 'quux'] }], + ['foo=1&bar=2', 'foo=1&bar=2', { 'foo': '1', 'bar': '2' }], + ['my+weird+field=q1%212%22%27w%245%267%2Fz8%29%3F', + 'my%20weird%20field=q1!2%22\'w%245%267%2Fz8)%3F', + { 'my weird field': 'q1!2"\'w$5&7/z8)?' }], + ['foo%3Dbaz=bar', 'foo%3Dbaz=bar', { 'foo=baz': 'bar' }], + ['foo=baz=bar', 'foo=baz%3Dbar', { 'foo': 'baz=bar' }], + ['str=foo&arr=1&arr=2&arr=3&somenull=&undef=', + 'str=foo&arr=1&arr=2&arr=3&somenull=&undef=', + { 'str': 'foo', + 'arr': ['1', '2', '3'], + 'somenull': '', + 'undef': '' }], + [' foo = bar ', '%20foo%20=%20bar%20', { ' foo ': ' bar ' }], + ['foo=%zx', 'foo=%25zx', { 'foo': '%zx' }], + ['foo=%EF%BF%BD', 'foo=%EF%BF%BD', { 'foo': '\ufffd' }], + // See: https://github.com/joyent/node/issues/1707 + ['hasOwnProperty=x&toString=foo&valueOf=bar&__defineGetter__=baz', + 'hasOwnProperty=x&toString=foo&valueOf=bar&__defineGetter__=baz', + { hasOwnProperty: 'x', + toString: 'foo', + valueOf: 'bar', + __defineGetter__: 'baz' }], + // See: https://github.com/joyent/node/issues/3058 + ['foo&bar=baz', 'foo=&bar=baz', { foo: '', bar: 'baz' }], + ['a=b&c&d=e', 'a=b&c=&d=e', { a: 'b', c: '', d: 'e' }], + ['a=b&c=&d=e', 'a=b&c=&d=e', { a: 'b', c: '', d: 'e' }], + ['a=b&=c&d=e', 'a=b&=c&d=e', { 'a': 'b', '': 'c', 'd': 'e' }], + ['a=b&=&c=d', 'a=b&=&c=d', { 'a': 'b', '': '', 'c': 'd' }], + ['&&foo=bar&&', 'foo=bar', { foo: 'bar' }], + ['&', '', {}], + ['&&&&', '', {}], + ['&=&', '=', { '': '' }], + ['&=&=', '=&=', { '': [ '', '' ] }], + ['=', '=', { '': '' }], + ['+', '%20=', { ' ': '' }], + ['+=', '%20=', { ' ': '' }], + ['+&', '%20=', { ' ': '' }], + ['=+', '=%20', { '': ' ' }], + ['+=&', '%20=', { ' ': '' }], + ['a&&b', 'a=&b=', { 'a': '', 'b': '' }], + ['a=a&&b=b', 'a=a&b=b', { 'a': 'a', 'b': 'b' }], + ['&a', 'a=', { 'a': '' }], + ['&=', '=', { '': '' }], + ['a&a&', 'a=&a=', { a: [ '', '' ] }], + ['a&a&a&', 'a=&a=&a=', { a: [ '', '', '' ] }], + ['a&a&a&a&', 'a=&a=&a=&a=', { a: [ '', '', '', '' ] }], + ['a=&a=value&a=', 'a=&a=value&a=', { a: [ '', 'value', '' ] }], + ['foo+bar=baz+quux', 'foo%20bar=baz%20quux', { 'foo bar': 'baz quux' }], + ['+foo=+bar', '%20foo=%20bar', { ' foo': ' bar' }], + ['a+', 
'a%20=', { 'a ': '' }], + ['=a+', '=a%20', { '': 'a ' }], + ['a+&', 'a%20=', { 'a ': '' }], + ['=a+&', '=a%20', { '': 'a ' }], + ['%20+', '%20%20=', { ' ': '' }], + ['=%20+', '=%20%20', { '': ' ' }], + ['%20+&', '%20%20=', { ' ': '' }], + ['=%20+&', '=%20%20', { '': ' ' }], + [null, '', {}], + [undefined, '', {}], +]; + +// [ wonkyQS, canonicalQS, obj ] +const qsColonTestCases = [ + ['foo:bar', 'foo:bar', { 'foo': 'bar' }], + ['foo:bar;foo:quux', 'foo:bar;foo:quux', { 'foo': ['bar', 'quux'] }], + ['foo:1&bar:2;baz:quux', + 'foo:1%26bar%3A2;baz:quux', + { 'foo': '1&bar:2', 'baz': 'quux' }], + ['foo%3Abaz:bar', 'foo%3Abaz:bar', { 'foo:baz': 'bar' }], + ['foo:baz:bar', 'foo:baz%3Abar', { 'foo': 'baz:bar' }], +]; + +// [wonkyObj, qs, canonicalObj] +function extendedFunction() {} +extendedFunction.prototype = { a: 'b' }; +const qsWeirdObjects = [ + // eslint-disable-next-line node-core/no-unescaped-regexp-dot + [{ regexp: /./g }, 'regexp=', { 'regexp': '' }], + // eslint-disable-next-line node-core/no-unescaped-regexp-dot + [{ regexp: new RegExp('.', 'g') }, 'regexp=', { 'regexp': '' }], + [{ fn: () => {} }, 'fn=', { 'fn': '' }], + [{ fn: new Function('') }, 'fn=', { 'fn': '' }], + [{ math: Math }, 'math=', { 'math': '' }], + [{ e: extendedFunction }, 'e=', { 'e': '' }], + [{ d: new Date() }, 'd=', { 'd': '' }], + [{ d: Date }, 'd=', { 'd': '' }], + [ + { f: new Boolean(false), t: new Boolean(true) }, + 'f=&t=', + { 'f': '', 't': '' }, + ], + [{ f: false, t: true }, 'f=false&t=true', { 'f': 'false', 't': 'true' }], + [{ n: null }, 'n=', { 'n': '' }], + [{ nan: NaN }, 'nan=', { 'nan': '' }], + [{ inf: Infinity }, 'inf=', { 'inf': '' }], + [{ a: [], b: [] }, '', {}], + [{ a: 1, b: [] }, 'a=1', { 'a': '1' }], +]; +// }}} + +const vm = require('vm'); +const foreignObject = vm.runInNewContext('({"foo": ["bar", "baz"]})'); + +const qsNoMungeTestCases = [ + ['', {}], + ['foo=bar&foo=baz', { 'foo': ['bar', 'baz'] }], + ['foo=bar&foo=baz', foreignObject], + ['blah=burp', { 'blah': 'burp' }], + ['a=!-._~\'()*', { 'a': '!-._~\'()*' }], + ['a=abcdefghijklmnopqrstuvwxyz', { 'a': 'abcdefghijklmnopqrstuvwxyz' }], + ['a=ABCDEFGHIJKLMNOPQRSTUVWXYZ', { 'a': 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' }], + ['a=0123456789', { 'a': '0123456789' }], + ['gragh=1&gragh=3&goo=2', { 'gragh': ['1', '3'], 'goo': '2' }], + ['frappucino=muffin&goat%5B%5D=scone&pond=moose', + { 'frappucino': 'muffin', 'goat[]': 'scone', 'pond': 'moose' }], + ['trololol=yes&lololo=no', { 'trololol': 'yes', 'lololo': 'no' }], +]; + +const qsUnescapeTestCases = [ + ['there is nothing to unescape here', + 'there is nothing to unescape here'], + ['there%20are%20several%20spaces%20that%20need%20to%20be%20unescaped', + 'there are several spaces that need to be unescaped'], + ['there%2Qare%0-fake%escaped values in%%%%this%9Hstring', + 'there%2Qare%0-fake%escaped values in%%%%this%9Hstring'], + ['%20%21%22%23%24%25%26%27%28%29%2A%2B%2C%2D%2E%2F%30%31%32%33%34%35%36%37', + ' !"#$%&\'()*+,-./01234567'], + ['%%2a', '%*'], + ['%2sf%2a', '%2sf*'], + ['%2%2af%2a', '%2*f*'], +]; + +assert.strictEqual(qs.parse('id=918854443121279438895193').id, + '918854443121279438895193'); + +function check(actual, expected, input) { + assert(!(actual instanceof Object)); + const actualKeys = Object.keys(actual).sort(); + const expectedKeys = Object.keys(expected).sort(); + let msg; + if (typeof input === 'string') { + msg = `Input: ${inspect(input)}\n` + + `Actual keys: ${inspect(actualKeys)}\n` + + `Expected keys: ${inspect(expectedKeys)}`; + } + assert.deepStrictEqual(actualKeys, 
expectedKeys, msg); + expectedKeys.forEach((key) => { + if (typeof input === 'string') { + msg = `Input: ${inspect(input)}\n` + + `Key: ${inspect(key)}\n` + + `Actual value: ${inspect(actual[key])}\n` + + `Expected value: ${inspect(expected[key])}`; + } else { + msg = undefined; + } + assert.deepStrictEqual(actual[key], expected[key], msg); + }); +} + +// Test that the canonical qs is parsed properly. +qsTestCases.forEach((testCase) => { + check(qs.parse(testCase[0]), testCase[2], testCase[0]); +}); + +// Test that the colon test cases can do the same +qsColonTestCases.forEach((testCase) => { + check(qs.parse(testCase[0], ';', ':'), testCase[2], testCase[0]); +}); + +// Test the weird objects, that they get parsed properly +qsWeirdObjects.forEach((testCase) => { + check(qs.parse(testCase[1]), testCase[2], testCase[1]); +}); + +qsNoMungeTestCases.forEach((testCase) => { + assert.deepStrictEqual(qs.stringify(testCase[1], '&', '='), testCase[0]); +}); + +// Test the nested qs-in-qs case +{ + const f = qs.parse('a=b&q=x%3Dy%26y%3Dz'); + check(f, createWithNoPrototype([ + { key: 'a', value: 'b' }, + { key: 'q', value: 'x=y&y=z' }, + ])); + + f.q = qs.parse(f.q); + const expectedInternal = createWithNoPrototype([ + { key: 'x', value: 'y' }, + { key: 'y', value: 'z' }, + ]); + check(f.q, expectedInternal); +} + +// nested in colon +{ + const f = qs.parse('a:b;q:x%3Ay%3By%3Az', ';', ':'); + check(f, createWithNoPrototype([ + { key: 'a', value: 'b' }, + { key: 'q', value: 'x:y;y:z' }, + ])); + f.q = qs.parse(f.q, ';', ':'); + const expectedInternal = createWithNoPrototype([ + { key: 'x', value: 'y' }, + { key: 'y', value: 'z' }, + ]); + check(f.q, expectedInternal); +} + +// Now test stringifying + +// basic +qsTestCases.forEach((testCase) => { + assert.strictEqual(qs.stringify(testCase[2]), testCase[1]); +}); + +qsColonTestCases.forEach((testCase) => { + assert.strictEqual(qs.stringify(testCase[2], ';', ':'), testCase[1]); +}); + +qsWeirdObjects.forEach((testCase) => { + assert.strictEqual(qs.stringify(testCase[0]), testCase[1]); +}); + +// BigInt values + +assert.strictEqual(qs.stringify({ foo: 2n ** 1023n }), + 'foo=' + 2n ** 1023n); +assert.strictEqual(qs.stringify([0n, 1n, 2n]), + '0=0&1=1&2=2'); + +assert.strictEqual(qs.stringify({ foo: 2n ** 1023n }, + null, + null, + { encodeURIComponent: (c) => c }), + 'foo=' + 2n ** 1023n); +assert.strictEqual(qs.stringify([0n, 1n, 2n], + null, + null, + { encodeURIComponent: (c) => c }), + '0=0&1=1&2=2'); + +// Invalid surrogate pair throws URIError +assert.throws( + () => qs.stringify({ foo: '\udc00' }), + { + code: 'ERR_INVALID_URI', + name: 'URIError', + message: 'URI malformed' + } +); + +// Coerce numbers to string +assert.strictEqual(qs.stringify({ foo: 0 }), 'foo=0'); +assert.strictEqual(qs.stringify({ foo: -0 }), 'foo=0'); +assert.strictEqual(qs.stringify({ foo: 3 }), 'foo=3'); +assert.strictEqual(qs.stringify({ foo: -72.42 }), 'foo=-72.42'); +assert.strictEqual(qs.stringify({ foo: NaN }), 'foo='); +assert.strictEqual(qs.stringify({ foo: 1e21 }), 'foo=1e%2B21'); +assert.strictEqual(qs.stringify({ foo: Infinity }), 'foo='); + +// nested +{ + const f = qs.stringify({ + a: 'b', + q: qs.stringify({ + x: 'y', + y: 'z' + }) + }); + assert.strictEqual(f, 'a=b&q=x%3Dy%26y%3Dz'); +} + +qs.parse(undefined); // Should not throw. 
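+
+// Editor's note (illustrative sketch, not part of the upstream Node.js test):
+// the nested case above round-trips because qs.stringify percent-encodes '='
+// and '&' inside values, so a query string stored as a value survives a parse:
+//
+//   const inner = qs.stringify({ x: 'y', y: 'z' });   // 'x=y&y=z'
+//   const outer = qs.stringify({ a: 'b', q: inner }); // 'a=b&q=x%3Dy%26y%3Dz'
+//   qs.parse(qs.parse(outer).q);                      // { x: 'y', y: 'z' }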
+
+// nested in colon
+{
+  const f = qs.stringify({
+    a: 'b',
+    q: qs.stringify({
+      x: 'y',
+      y: 'z'
+    }, ';', ':')
+  }, ';', ':');
+  assert.strictEqual(f, 'a:b;q:x%3Ay%3By%3Az');
+}
+
+// empty string
+assert.strictEqual(qs.stringify(), '');
+assert.strictEqual(qs.stringify(0), '');
+assert.strictEqual(qs.stringify([]), '');
+assert.strictEqual(qs.stringify(null), '');
+assert.strictEqual(qs.stringify(true), '');
+
+check(qs.parse(), {});
+
+// empty sep
+check(qs.parse('a', []), { a: '' });
+
+// empty eq
+check(qs.parse('a', null, []), { '': 'a' });
+
+// Test limiting
+assert.strictEqual(
+  Object.keys(qs.parse('a=1&b=1&c=1', null, null, { maxKeys: 1 })).length,
+  1);
+
+// Test limiting with a case that starts from `&`
+assert.strictEqual(
+  Object.keys(qs.parse('&a', null, null, { maxKeys: 1 })).length,
+  0);
+
+// Test removing limit
+{
+  function testUnlimitedKeys() {
+    const query = {};
+
+    for (let i = 0; i < 2000; i++) query[i] = i;
+
+    const url = qs.stringify(query);
+
+    assert.strictEqual(
+      Object.keys(qs.parse(url, null, null, { maxKeys: 0 })).length,
+      2000);
+  }
+
+  testUnlimitedKeys();
+}
+
+{
+  const b = qs.unescapeBuffer('%d3%f2Ug%1f6v%24%5e%98%cb' +
+                              '%0d%ac%a2%2f%9d%eb%d8%a2%e6');
+  // <Buffer d3 f2 55 67 1f 36 76 24 5e 98 cb 0d ac a2 2f 9d eb d8 a2 e6>
+  assert.strictEqual(b[0], 0xd3);
+  assert.strictEqual(b[1], 0xf2);
+  assert.strictEqual(b[2], 0x55);
+  assert.strictEqual(b[3], 0x67);
+  assert.strictEqual(b[4], 0x1f);
+  assert.strictEqual(b[5], 0x36);
+  assert.strictEqual(b[6], 0x76);
+  assert.strictEqual(b[7], 0x24);
+  assert.strictEqual(b[8], 0x5e);
+  assert.strictEqual(b[9], 0x98);
+  assert.strictEqual(b[10], 0xcb);
+  assert.strictEqual(b[11], 0x0d);
+  assert.strictEqual(b[12], 0xac);
+  assert.strictEqual(b[13], 0xa2);
+  assert.strictEqual(b[14], 0x2f);
+  assert.strictEqual(b[15], 0x9d);
+  assert.strictEqual(b[16], 0xeb);
+  assert.strictEqual(b[17], 0xd8);
+  assert.strictEqual(b[18], 0xa2);
+  assert.strictEqual(b[19], 0xe6);
+}
+
+assert.strictEqual(qs.unescapeBuffer('a+b', true).toString(), 'a b');
+assert.strictEqual(qs.unescapeBuffer('a+b').toString(), 'a+b');
+assert.strictEqual(qs.unescapeBuffer('a%').toString(), 'a%');
+assert.strictEqual(qs.unescapeBuffer('a%2').toString(), 'a%2');
+assert.strictEqual(qs.unescapeBuffer('a%20').toString(), 'a ');
+assert.strictEqual(qs.unescapeBuffer('a%2g').toString(), 'a%2g');
+assert.strictEqual(qs.unescapeBuffer('a%%').toString(), 'a%%');
+
+// Test invalid encoded string
+check(qs.parse('%\u0100=%\u0101'), { '%Ā': '%ā' });
+
+// Test custom decode
+{
+  function demoDecode(str) {
+    return str + str;
+  }
+
+  check(
+    qs.parse('a=a&b=b&c=c', null, null, { decodeURIComponent: demoDecode }),
+    { aa: 'aa', bb: 'bb', cc: 'cc' });
+  check(
+    qs.parse('a=a&b=b&c=c', null, '==', { decodeURIComponent: (str) => str }),
+    { 'a=a': '', 'b=b': '', 'c=c': '' });
+}
+
+// Test QueryString.unescape
+{
+  function errDecode(str) {
+    throw new Error('To jump to the catch scope');
+  }
+
+  check(qs.parse('a=a', null, null, { decodeURIComponent: errDecode }),
+        { a: 'a' });
+}
+
+// Test custom encode
+{
+  function demoEncode(str) {
+    return str[0];
+  }
+
+  const obj = { aa: 'aa', bb: 'bb', cc: 'cc' };
+  assert.strictEqual(
+    qs.stringify(obj, null, null, { encodeURIComponent: demoEncode }),
+    'a=a&b=b&c=c');
+}
+
+// Test custom encode for different types
+{
+  const obj = { number: 1, bigint: 2n, true: true, false: false, object: {} };
+  assert.strictEqual(
+    qs.stringify(obj, null, null, { encodeURIComponent: (v) => v }),
+    'number=1&bigint=2&true=true&false=false&object=');
+}
+
+// Test 
QueryString.unescapeBuffer +qsUnescapeTestCases.forEach((testCase) => { + assert.strictEqual(qs.unescape(testCase[0]), testCase[1]); + assert.strictEqual(qs.unescapeBuffer(testCase[0]).toString(), testCase[1]); +}); + +// Test overriding .unescape +{ + const prevUnescape = qs.unescape; + qs.unescape = (str) => { + return str.replace(/o/g, '_'); + }; + check( + qs.parse('foo=bor'), + createWithNoPrototype([{ key: 'f__', value: 'b_r' }])); + qs.unescape = prevUnescape; +} +// Test separator and "equals" parsing order +check(qs.parse('foo&bar', '&', '&'), { foo: '', bar: '' }); diff --git a/test/js/node/test/parallel/test-timers-immediate-queue.js b/test/js/node/test/parallel/test-timers-immediate-queue.js deleted file mode 100644 index 9bd8aa1bc7a79a..00000000000000 --- a/test/js/node/test/parallel/test-timers-immediate-queue.js +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -'use strict'; -const common = require('../common'); -if (common.isWindows) return; // TODO BUN -const assert = require('assert'); - -// setImmediate should run clear its queued cbs once per event loop turn -// but immediates queued while processing the current queue should happen -// on the next turn of the event loop. - -// hit should be the exact same size of QUEUE, if we're letting things -// recursively add to the immediate QUEUE hit will be > QUEUE - -let ticked = false; - -let hit = 0; -const QUEUE = 10; - -function run() { - if (hit === 0) { - setTimeout(() => { ticked = true; }, 1); - const now = Date.now(); - while (Date.now() - now < 2); - } - - if (ticked) return; - - hit += 1; - setImmediate(run); -} - -for (let i = 0; i < QUEUE; i++) - setImmediate(run); - -process.on('exit', function() { - assert.strictEqual(hit, QUEUE); -}); diff --git a/test/js/node/test/parallel/test-zlib-dictionary.js b/test/js/node/test/parallel/test-zlib-dictionary.js deleted file mode 100644 index 49a01d5a03ee4b..00000000000000 --- a/test/js/node/test/parallel/test-zlib-dictionary.js +++ /dev/null @@ -1,175 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. 
-// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -'use strict'; -// Test compression/decompression with dictionary - -const common = require('../common'); -const assert = require('assert'); -const zlib = require('zlib'); - -const spdyDict = Buffer.from([ - 'optionsgetheadpostputdeletetraceacceptaccept-charsetaccept-encodingaccept-', - 'languageauthorizationexpectfromhostif-modified-sinceif-matchif-none-matchi', - 'f-rangeif-unmodifiedsincemax-forwardsproxy-authorizationrangerefererteuser', - '-agent10010120020120220320420520630030130230330430530630740040140240340440', - '5406407408409410411412413414415416417500501502503504505accept-rangesageeta', - 'glocationproxy-authenticatepublicretry-afterservervarywarningwww-authentic', - 'ateallowcontent-basecontent-encodingcache-controlconnectiondatetrailertran', - 'sfer-encodingupgradeviawarningcontent-languagecontent-lengthcontent-locati', - 'oncontent-md5content-rangecontent-typeetagexpireslast-modifiedset-cookieMo', - 'ndayTuesdayWednesdayThursdayFridaySaturdaySundayJanFebMarAprMayJunJulAugSe', - 'pOctNovDecchunkedtext/htmlimage/pngimage/jpgimage/gifapplication/xmlapplic', - 'ation/xhtmltext/plainpublicmax-agecharset=iso-8859-1utf-8gzipdeflateHTTP/1', - '.1statusversionurl\0', -].join('')); - -const input = [ - 'HTTP/1.1 200 Ok', - 'Server: node.js', - 'Content-Length: 0', - '', -].join('\r\n'); - -function basicDictionaryTest(spdyDict) { - let output = ''; - const deflate = zlib.createDeflate({ dictionary: spdyDict }); - const inflate = zlib.createInflate({ dictionary: spdyDict }); - inflate.setEncoding('utf-8'); - - deflate.on('data', function(chunk) { - inflate.write(chunk); - }); - - inflate.on('data', function(chunk) { - output += chunk; - }); - - deflate.on('end', function() { - inflate.end(); - }); - - inflate.on('end', common.mustCall(function() { - assert.strictEqual(input, output); - })); - - deflate.write(input); - deflate.end(); -} - -function deflateResetDictionaryTest(spdyDict) { - let doneReset = false; - let output = ''; - const deflate = zlib.createDeflate({ dictionary: spdyDict }); - const inflate = zlib.createInflate({ dictionary: spdyDict }); - inflate.setEncoding('utf-8'); - - deflate.on('data', function(chunk) { - if (doneReset) - inflate.write(chunk); - }); - - inflate.on('data', function(chunk) { - output += chunk; - }); - - deflate.on('end', function() { - inflate.end(); - }); - - inflate.on('end', common.mustCall(function() { - assert.strictEqual(input, output); - })); - - 
deflate.write(input); - deflate.flush(function() { - deflate.reset(); - doneReset = true; - deflate.write(input); - deflate.end(); - }); -} - -function rawDictionaryTest(spdyDict) { - let output = ''; - const deflate = zlib.createDeflateRaw({ dictionary: spdyDict }); - const inflate = zlib.createInflateRaw({ dictionary: spdyDict }); - inflate.setEncoding('utf-8'); - - deflate.on('data', function(chunk) { - inflate.write(chunk); - }); - - inflate.on('data', function(chunk) { - output += chunk; - }); - - deflate.on('end', function() { - inflate.end(); - }); - - inflate.on('end', common.mustCall(function() { - assert.strictEqual(input, output); - })); - - deflate.write(input); - deflate.end(); -} - -function deflateRawResetDictionaryTest(spdyDict) { - let doneReset = false; - let output = ''; - const deflate = zlib.createDeflateRaw({ dictionary: spdyDict }); - const inflate = zlib.createInflateRaw({ dictionary: spdyDict }); - inflate.setEncoding('utf-8'); - - deflate.on('data', function(chunk) { - if (doneReset) - inflate.write(chunk); - }); - - inflate.on('data', function(chunk) { - output += chunk; - }); - - deflate.on('end', function() { - inflate.end(); - }); - - inflate.on('end', common.mustCall(function() { - assert.strictEqual(input, output); - })); - - deflate.write(input); - deflate.flush(function() { - deflate.reset(); - doneReset = true; - deflate.write(input); - deflate.end(); - }); -} - -for (const dict of [spdyDict, ...common.getBufferSources(spdyDict)]) { - basicDictionaryTest(dict); - deflateResetDictionaryTest(dict); - rawDictionaryTest(dict); - deflateRawResetDictionaryTest(dict); -}