diff --git a/src/bun.js/ConsoleObject.zig b/src/bun.js/ConsoleObject.zig
index 5f081030186fbb..12a2506a4132e0 100644
--- a/src/bun.js/ConsoleObject.zig
+++ b/src/bun.js/ConsoleObject.zig
@@ -1879,7 +1879,7 @@ pub const Formatter = struct {
                 writer.print(
                     comptime Output.prettyFmt("{s}: ", enable_ansi_colors),
-                    .{bun.fmt.formatJSONString(key.slice())},
+                    .{bun.fmt.formatJSONStringLatin1(key.slice())},
                 );
             }
         } else if (Environment.isDebug and is_private_symbol) {
diff --git a/src/bun.js/javascript.zig b/src/bun.js/javascript.zig
index 3d4a95e5a786c5..9fb2fefe7042b1 100644
--- a/src/bun.js/javascript.zig
+++ b/src/bun.js/javascript.zig
@@ -3000,7 +3000,7 @@ pub const VirtualMachine = struct {
                 "{s} resolving preload {}",
                 .{
                     @errorName(e),
-                    bun.fmt.formatJSONString(preload),
+                    bun.fmt.formatJSONStringLatin1(preload),
                 },
             ) catch unreachable;
             return e;
@@ -3012,7 +3012,7 @@ pub const VirtualMachine = struct {
             this.allocator,
             "preload not found {}",
             .{
-                bun.fmt.formatJSONString(preload),
+                bun.fmt.formatJSONStringLatin1(preload),
             },
         ) catch unreachable;
         return error.ModuleNotFound;
diff --git a/src/bun.js/test/pretty_format.zig b/src/bun.js/test/pretty_format.zig
index dd119384c7a8bf..7cfeb3e212ed03 100644
--- a/src/bun.js/test/pretty_format.zig
+++ b/src/bun.js/test/pretty_format.zig
@@ -861,7 +861,7 @@ pub const JestPrettyFormat = struct {
                 writer.print(
                     comptime Output.prettyFmt("{s}: ", enable_ansi_colors),
-                    .{bun.fmt.formatJSONString(key.slice())},
+                    .{bun.fmt.formatJSONStringLatin1(key.slice())},
                 );
             }
         } else {
diff --git a/src/cli/init_command.zig b/src/cli/init_command.zig
index 86f6efd224c112..0504a2c6ddc61a 100644
--- a/src/cli/init_command.zig
+++ b/src/cli/init_command.zig
@@ -435,7 +435,7 @@ pub const InitCommand = struct {
             " \"'",
             fields.entry_point,
         )) {
-            Output.prettyln(" bun run {any}", .{bun.fmt.formatJSONString(fields.entry_point)});
+            Output.prettyln(" bun run {any}", .{bun.fmt.formatJSONStringLatin1(fields.entry_point)});
         } else {
             Output.prettyln(" bun run {s}", .{fields.entry_point});
         }
diff --git a/src/fmt.zig b/src/fmt.zig
index 33d25bef1db440..986a90bb8d4dd4 100644
--- a/src/fmt.zig
+++ b/src/fmt.zig
@@ -250,7 +250,7 @@ const JSONFormatterUTF8 = struct {
 };
 
 /// Expects latin1
-pub fn formatJSONString(text: []const u8) JSONFormatter {
+pub fn formatJSONStringLatin1(text: []const u8) JSONFormatter {
     return .{ .input = text };
 }
 
diff --git a/src/install/bin.zig b/src/install/bin.zig
index 9837f80206cc29..107dda465c6b57 100644
--- a/src/install/bin.zig
+++ b/src/install/bin.zig
@@ -28,16 +28,11 @@ const Lockfile = Install.Lockfile;
 /// - map where keys are names of the binaries and values are file paths to the binaries
 pub const Bin = extern struct {
     tag: Tag = Tag.none,
-    unset: u8 = 0,
-    _padding_tag: [2]u8 = .{0} ** 2,
+    _padding_tag: [3]u8 = .{0} ** 3,
 
     // Largest member must be zero initialized
     value: Value = Value{ .map = ExternalStringList{} },
 
-    pub fn isUnset(this: *const Bin) bool {
-        return this.unset != 0;
-    }
-
     pub fn count(this: *const Bin, buf: []const u8, extern_strings: []const ExternalString, comptime StringBuilder: type, builder: StringBuilder) u32 {
         switch (this.tag) {
             .file => builder.count(this.value.file.slice(buf)),
@@ -64,21 +59,18 @@ pub const Bin = extern struct {
             .none => {
                 return Bin{
                     .tag = .none,
-                    .unset = this.unset,
                     .value = Value.init(.{ .none = {} }),
                 };
             },
             .file => {
                 return Bin{
                     .tag = .file,
-                    .unset = this.unset,
                     .value = Value.init(.{ .file = builder.append(String, this.value.file.slice(buf)) }),
                 };
             },
             .named_file => {
                 return Bin{
                     .tag = .named_file,
-                    .unset = this.unset,
                     .value = Value.init(
                         .{
                             .named_file = [2]String{
@@ -92,7 +84,6 @@ pub const Bin = extern struct {
             .dir => {
                 return Bin{
                     .tag = .dir,
-                    .unset = this.unset,
                     .value = Value.init(.{ .dir = builder.append(String, this.value.dir.slice(buf)) }),
                 };
             },
@@ -103,7 +94,6 @@ pub const Bin = extern struct {
 
                 return Bin{
                     .tag = .map,
-                    .unset = this.unset,
                     .value = Value.init(.{ .map = ExternalStringList.init(all_extern_strings, extern_strings_slice) }),
                 };
             },
@@ -118,7 +108,6 @@ pub const Bin = extern struct {
 
         const cloned: Bin = .{
             .tag = this.tag,
-            .unset = this.unset,
             .value = switch (this.tag) {
                 .none => Value.init(.{ .none = {} }),
diff --git a/src/install/bun.lock.zig b/src/install/bun.lock.zig
index 6c2d7327e3e5c9..3b799eefc5e9b9 100644
--- a/src/install/bun.lock.zig
+++ b/src/install/bun.lock.zig
@@ -36,6 +36,8 @@ const Negatable = Npm.Negatable;
 const DependencyID = Install.DependencyID;
 const invalid_dependency_id = Install.invalid_dependency_id;
 const DependencyIDSlice = BinaryLockfile.DependencyIDSlice;
+const Bin = Install.Bin;
+const ExternalString = Semver.ExternalString;
 
 /// A property key in the `packages` field of the lockfile
 pub const PkgPath = struct {
@@ -421,6 +423,7 @@ pub const Stringifier = struct {
         const pkg_names: []String = pkgs.items(.name);
         const pkg_name_hashes: []PackageNameHash = pkgs.items(.name_hash);
         const pkg_metas: []BinaryLockfile.Package.Meta = pkgs.items(.meta);
+        const pkg_bins = pkgs.items(.bin);
 
         var temp_buf: std.ArrayListUnmanaged(u8) = .{};
         defer temp_buf.deinit(allocator);
@@ -744,8 +747,9 @@ pub const Stringifier = struct {
                         dep_name,
                     });
 
-                    const pkg_name = pkg_names[pkg_id].slice(buf);
+                    const pkg_name = pkg_names[pkg_id];
                     const pkg_meta = pkg_metas[pkg_id];
+                    const pkg_bin = pkg_bins[pkg_id];
                     const pkg_deps_list = pkg_dep_lists[pkg_id];
 
                     pkg_deps_sort_buf.clearRetainingCapacity();
@@ -774,53 +778,53 @@ pub const Stringifier = struct {
                     switch (res.tag) {
                         .root => {
                             try writer.print("[\"{}@root:\"]", .{
-                                bun.fmt.formatJSONStringUTF8(pkg_name, .{ .quote = false }),
+                                pkg_name.fmtJson(buf, .{ .quote = false }),
                                 // we don't read the root package version into the binary lockfile
                             });
                         },
                         .folder => {
-                            try writer.print("[\"{s}@file:{}\", ", .{
-                                pkg_name,
-                                bun.fmt.formatJSONStringUTF8(res.value.folder.slice(buf), .{ .quote = false }),
+                            try writer.print("[\"{}@file:{}\", ", .{
+                                pkg_name.fmtJson(buf, .{ .quote = false }),
+                                res.value.folder.fmtJson(buf, .{ .quote = false }),
                             });
 
-                            try writePackageDepsAndMeta(writer, dep_id, deps_buf, pkg_deps_sort_buf.items, &pkg_meta, buf, &optional_peers_buf, &lockfile.buffers.extern_strings);
+                            try writePackageInfoObject(writer, deps_buf, pkg_deps_sort_buf.items, &pkg_meta, &pkg_bin, buf, &optional_peers_buf, lockfile.buffers.extern_strings.items);
 
                             try writer.writeByte(']');
                         },
                         .local_tarball => {
-                            try writer.print("[\"{s}@{}\", ", .{
-                                pkg_name,
-                                bun.fmt.formatJSONStringUTF8(res.value.local_tarball.slice(buf), .{ .quote = false }),
+                            try writer.print("[\"{}@{}\", ", .{
+                                pkg_name.fmtJson(buf, .{ .quote = false }),
+                                res.value.local_tarball.fmtJson(buf, .{ .quote = false }),
                             });
 
-                            try writePackageDepsAndMeta(writer, dep_id, deps_buf, pkg_deps_sort_buf.items, &pkg_meta, buf, &optional_peers_buf, &lockfile.buffers.extern_strings);
+                            try writePackageInfoObject(writer, deps_buf, pkg_deps_sort_buf.items, &pkg_meta, &pkg_bin, buf, &optional_peers_buf, lockfile.buffers.extern_strings.items);
 
                             try writer.writeByte(']');
                         },
                         .remote_tarball => {
-                            try writer.print("[\"{s}@{}\", ", .{
-                                pkg_name,
-                                bun.fmt.formatJSONStringUTF8(res.value.remote_tarball.slice(buf), .{ .quote = false }),
+                            try writer.print("[\"{}@{}\", ", .{
+                                pkg_name.fmtJson(buf, .{ .quote = false }),
+                                res.value.remote_tarball.fmtJson(buf, .{ .quote = false }),
                             });
 
-                            try writePackageDepsAndMeta(writer, dep_id, deps_buf, pkg_deps_sort_buf.items, &pkg_meta, buf, &optional_peers_buf, &lockfile.buffers.extern_strings);
+                            try writePackageInfoObject(writer, deps_buf, pkg_deps_sort_buf.items, &pkg_meta, &pkg_bin, buf, &optional_peers_buf, lockfile.buffers.extern_strings.items);
 
                             try writer.writeByte(']');
                         },
                         .symlink => {
-                            try writer.print("[\"{s}@link:{}\", ", .{
-                                pkg_name,
-                                bun.fmt.formatJSONStringUTF8(res.value.symlink.slice(buf), .{ .quote = false }),
+                            try writer.print("[\"{}@link:{}\", ", .{
+                                pkg_name.fmtJson(buf, .{ .quote = false }),
+                                res.value.symlink.fmtJson(buf, .{ .quote = false }),
                             });
 
-                            try writePackageDepsAndMeta(writer, dep_id, deps_buf, pkg_deps_sort_buf.items, &pkg_meta, buf, &optional_peers_buf, &lockfile.buffers.extern_strings);
+                            try writePackageInfoObject(writer, deps_buf, pkg_deps_sort_buf.items, &pkg_meta, &pkg_bin, buf, &optional_peers_buf, lockfile.buffers.extern_strings.items);
 
                             try writer.writeByte(']');
                         },
                         .npm => {
-                            try writer.print("[\"{s}@{}\", ", .{
-                                pkg_name,
+                            try writer.print("[\"{}@{}\", ", .{
+                                pkg_name.fmtJson(buf, .{ .quote = false }),
                                 res.value.npm.version.fmt(buf),
                             });
@@ -832,35 +836,33 @@ pub const Stringifier = struct {
                                 res.value.npm.url.slice(buf),
                             });
 
-                            try writePackageDepsAndMeta(writer, dep_id, deps_buf, pkg_deps_sort_buf.items, &pkg_meta, buf, &optional_peers_buf, &lockfile.buffers.extern_strings);
+                            try writePackageInfoObject(writer, deps_buf, pkg_deps_sort_buf.items, &pkg_meta, &pkg_bin, buf, &optional_peers_buf, lockfile.buffers.extern_strings.items);
 
                             try writer.print(", \"{}\"]", .{
                                 pkg_meta.integrity,
                             });
                         },
                         .workspace => {
-                            const workspace_path = res.value.workspace.slice(buf);
-
-                            try writer.print("[\"{s}@workspace:{}\", ", .{
-                                pkg_name,
-                                bun.fmt.formatJSONStringUTF8(workspace_path, .{ .quote = false }),
+                            try writer.print("[\"{}@workspace:{}\", ", .{
+                                pkg_name.fmtJson(buf, .{ .quote = false }),
+                                res.value.workspace.fmtJson(buf, .{ .quote = false }),
                             });
 
-                            try writePackageDepsAndMeta(writer, dep_id, deps_buf, pkg_deps_sort_buf.items, &pkg_meta, buf, &optional_peers_buf, &lockfile.buffers.extern_strings);
+                            try writePackageInfoObject(writer, deps_buf, pkg_deps_sort_buf.items, &pkg_meta, &pkg_bin, buf, &optional_peers_buf, lockfile.buffers.extern_strings.items);
 
                             try writer.writeByte(']');
                         },
                         inline .git, .github => |tag| {
                             const repo: Repository = @field(res.value, @tagName(tag));
-                            try writer.print("[\"{s}@{}\", ", .{
-                                pkg_name,
+                            try writer.print("[\"{}@{}\", ", .{
+                                pkg_name.fmtJson(buf, .{ .quote = false }),
                                 repo.fmt(if (comptime tag == .git) "git+" else "github:", buf),
                             });
 
-                            try writePackageDepsAndMeta(writer, dep_id, deps_buf, pkg_deps_sort_buf.items, &pkg_meta, buf, &optional_peers_buf, &lockfile.buffers.extern_strings);
+                            try writePackageInfoObject(writer, deps_buf, pkg_deps_sort_buf.items, &pkg_meta, &pkg_bin, buf, &optional_peers_buf, lockfile.buffers.extern_strings.items);
 
                             try writer.print(", {}]", .{
-                                bun.fmt.formatJSONStringUTF8(repo.resolved.slice(buf), .{ .quote = true }),
+                                repo.resolved.fmtJson(buf, .{}),
                             });
                         },
                         else => unreachable,
@@ -881,18 +883,17 @@ pub const Stringifier = struct {
         return writer_buf.list.items;
     }
 
-    /// Writes a single line object.
+    /// Writes a single line object. Contains dependencies, os, cpu, libc (soon), and bin
     /// { "devDependencies": { "one": "1.1.1", "two": "2.2.2" }, "os": "none" }
-    fn writePackageDepsAndMeta(
+    fn writePackageInfoObject(
         writer: anytype,
-        _: DependencyID,
        deps_buf: []const Dependency,
        pkg_dep_ids: []const DependencyID,
        meta: *const Meta,
+        bin: *const Install.Bin,
        buf: string,
        optional_peers_buf: *std.ArrayList(String),
-        extern_strings: *const Install.Lockfile.ExternalStringBuffer,
+        extern_strings: []const ExternalString,
    ) OOM!void {
        defer optional_peers_buf.clearRetainingCapacity();
@@ -997,9 +998,9 @@ pub const Stringifier = struct {
                    any = true;
                }
                try writer.print(
-                    \\ "bin": "{s}"
+                    \\ "bin": {}
                , .{
-                    bin.value.file.slice(buf),
+                    bin.value.file.fmtJson(buf, .{}),
                });
            },
            .named_file => {
@@ -1012,10 +1013,10 @@ pub const Stringifier = struct {
                    \\ "bin": {
                );
                try writer.print(
-                    \\ "{s}": "{s}"
+                    \\ {}: {}
                , .{
-                    bin.value.named_file[0].slice(buf),
-                    bin.value.named_file[1].slice(buf),
+                    bin.value.named_file[0].fmtJson(buf, .{}),
+                    bin.value.named_file[1].fmtJson(buf, .{}),
                });
                try writer.writeByte('}');
            },
@@ -1026,9 +1027,9 @@ pub const Stringifier = struct {
                    any = true;
                }
                try writer.print(
-                    \\ "bin": "{s}"
+                    \\ "binDir": {}
                , .{
-                    bin.value.dir.slice(buf),
+                    bin.value.dir.fmtJson(buf, .{}),
                });
            },
            .map => {
@@ -1049,10 +1050,10 @@ pub const Stringifier = struct {
                    }
                    first = false;
                    try writer.print(
-                        \\ "{s}": "{s}"
+                        \\ {}: {}
                    , .{
-                        list[i].slice(buf),
-                        list[i + 1].slice(buf),
+                        list[i].value.fmtJson(buf, .{}),
+                        list[i + 1].value.fmtJson(buf, .{}),
                    });
                }
                try writer.writeByte('}');
@@ -1591,33 +1592,35 @@ pub fn parseIntoBinaryLockfile(
                        return error.InvalidPackageInfo;
                    }
 
-                    const deps_os_cpu_libc_obj_bin = pkg_info.at(i);
+                    const deps_os_cpu_libc_bin_obj = pkg_info.at(i);
                    i += 1;
 
-                    if (!deps_os_cpu_libc_obj_bin.isObject()) {
-                        try log.addError(source, deps_os_cpu_libc_obj_bin.loc, "Expected an object");
+                    if (!deps_os_cpu_libc_bin_obj.isObject()) {
+                        try log.addError(source, deps_os_cpu_libc_bin_obj.loc, "Expected an object");
                        return error.InvalidPackageInfo;
                    }
 
-                    const off, const len = try parseAppendDependencies(lockfile, allocator, deps_os_cpu_libc_obj_bin, &string_buf, log, source, &optional_peers_buf);
+                    const off, const len = try parseAppendDependencies(lockfile, allocator, deps_os_cpu_libc_bin_obj, &string_buf, log, source, &optional_peers_buf);
 
                    pkg.dependencies = .{ .off = off, .len = len };
                    pkg.resolutions = .{ .off = off, .len = len };
 
+                    if (deps_os_cpu_libc_bin_obj.get("bin")) |bin| {
+                        pkg.bin = try Bin.parseAppend(allocator, bin, &string_buf, &lockfile.buffers.extern_strings);
+                    } else if (deps_os_cpu_libc_bin_obj.get("binDir")) |bin_dir| {
+                        pkg.bin = try Bin.parseAppendFromDirectories(allocator, bin_dir, &string_buf);
+                    }
+
                    if (res.tag != .workspace) {
-                        if (deps_os_cpu_libc_obj_bin.get("os")) |os| {
+                        if (deps_os_cpu_libc_bin_obj.get("os")) |os| {
                            pkg.meta.os = try Negatable(Npm.OperatingSystem).fromJson(allocator, os);
                        }
-                        if (deps_os_cpu_libc_obj_bin.get("cpu")) |arch| {
+                        if (deps_os_cpu_libc_bin_obj.get("cpu")) |arch| {
                            pkg.meta.arch = try Negatable(Npm.Architecture).fromJson(allocator, arch);
                        }
 
                        // TODO(dylan-conway)
                        // if (os_cpu_libc_obj.get("libc")) |libc| {
                        //     pkg.meta.libc = Negatable(Npm.Libc).fromJson(allocator, libc);
                        // }
-
-                        if (deps_os_cpu_libc_obj_bin.get("bin")) |bin| {
-                            pkg.bin = Install.Bin.parseAppend(allocator, bin, &string_buf, &lockfile.buffers.extern_strings) catch bun.outOfMemory();
-                        }
                    }
                },
                else => {},
@@ -1662,11 +1665,6 @@ pub fn parseIntoBinaryLockfile(
    pkg.name = name;
    pkg.name_hash = name_hash;
    pkg.resolution = res;
-
-    // set later
-    pkg.bin = .{
-        .unset = 1,
-    };
    pkg.scripts = .{};
 
    const pkg_id = try lockfile.appendPackageDedupe(&pkg, string_buf.bytes.items);
diff --git a/src/install/install.zig b/src/install/install.zig
index 59d8ec654128ce..7a7dce5d464c50 100644
--- a/src/install/install.zig
+++ b/src/install/install.zig
@@ -1095,7 +1095,7 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type {
        fn verifyPatchHash(
            this: *@This(),
            root_node_modules_dir: std.fs.Dir,
-        ) VerifyResult {
+        ) bool {
            bun.debugAssert(!this.patch.isNull());
 
            // hash from the .patch file, to be checked against bun tag
@@ -1108,22 +1108,21 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type {
                bunhashtag,
            }, .posix);
 
-            var destination_dir = this.node_modules.openDir(root_node_modules_dir) catch return .{};
+            var destination_dir = this.node_modules.openDir(root_node_modules_dir) catch return false;
            defer {
                if (std.fs.cwd().fd != destination_dir.fd) destination_dir.close();
            }
 
            if (comptime bun.Environment.isPosix) {
-                _ = bun.sys.fstatat(bun.toFD(destination_dir.fd), patch_tag_path).unwrap() catch return .{};
+                _ = bun.sys.fstatat(bun.toFD(destination_dir.fd), patch_tag_path).unwrap() catch return false;
            } else {
                switch (bun.sys.openat(bun.toFD(destination_dir.fd), patch_tag_path, bun.O.RDONLY, 0)) {
-                    .err => return .{},
+                    .err => return false,
                    .result => |fd| _ = bun.sys.close(fd),
                }
            }
-            return .{
-                .valid = true,
-            };
+
+            return true;
        }
 
        // 1. verify that .bun-tag exists (was it installed from bun?)
@@ -1132,7 +1131,7 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type {
            this: *@This(),
            repo: *const Repository,
            root_node_modules_dir: std.fs.Dir,
-        ) VerifyResult {
+        ) bool {
            bun.copy(u8, this.destination_dir_subpath_buf[this.destination_dir_subpath.len..], std.fs.path.sep_str ++ ".bun-tag");
            this.destination_dir_subpath_buf[this.destination_dir_subpath.len + std.fs.path.sep_str.len + ".bun-tag".len] = 0;
            const bun_tag_path: [:0]u8 = this.destination_dir_subpath_buf[0 .. this.destination_dir_subpath.len + std.fs.path.sep_str.len + ".bun-tag".len :0];
@@ -1140,7 +1139,7 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type {
            var git_tag_stack_fallback = std.heap.stackFallback(2048, bun.default_allocator);
            const allocator = git_tag_stack_fallback.get();
 
-            var destination_dir = this.node_modules.openDir(root_node_modules_dir) catch return .{};
+            var destination_dir = this.node_modules.openDir(root_node_modules_dir) catch return false;
            defer {
                if (std.fs.cwd().fd != destination_dir.fd) destination_dir.close();
            }
@@ -1149,52 +1148,43 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type {
                destination_dir,
                bun_tag_path,
                allocator,
-            ).unwrap() catch return .{};
+            ).unwrap() catch return false;
            defer allocator.free(bun_tag_file);
 
-            return .{
-                .valid = strings.eqlLong(repo.resolved.slice(this.lockfile.buffers.string_bytes.items), bun_tag_file, true),
-            };
+            return strings.eqlLong(repo.resolved.slice(this.lockfile.buffers.string_bytes.items), bun_tag_file, true);
        }
 
        pub fn verify(
            this: *@This(),
            resolution: *const Resolution,
            root_node_modules_dir: std.fs.Dir,
-            bin: *Bin,
-        ) VerifyResult {
+        ) bool {
            const verified = switch (resolution.tag) {
                .git => this.verifyGitResolution(&resolution.value.git, root_node_modules_dir),
                .github => this.verifyGitResolution(&resolution.value.github, root_node_modules_dir),
                .root => this.verifyTransitiveSymlinkedFolder(root_node_modules_dir),
                .folder => if (this.lockfile.isWorkspaceTreeId(this.node_modules.tree_id))
-                    this.verifyPackageJSONNameAndVersion(root_node_modules_dir, resolution.tag, bin)
+                    this.verifyPackageJSONNameAndVersion(root_node_modules_dir, resolution.tag)
                else
                    this.verifyTransitiveSymlinkedFolder(root_node_modules_dir),
-                else => this.verifyPackageJSONNameAndVersion(root_node_modules_dir, resolution.tag, bin),
+                else => this.verifyPackageJSONNameAndVersion(root_node_modules_dir, resolution.tag),
            };
 
            if (comptime kind == .patch) return verified;
 
            if (this.patch.isNull()) return verified;
-            if (!verified.valid) return verified;
+            if (!verified) return false;
 
            return this.verifyPatchHash(root_node_modules_dir);
        }
 
        // Only check for destination directory in node_modules. We can't use package.json because
        // it might not exist
-        fn verifyTransitiveSymlinkedFolder(this: *@This(), root_node_modules_dir: std.fs.Dir) VerifyResult {
-            var destination_dir = this.node_modules.openDir(root_node_modules_dir) catch return .{};
+        fn verifyTransitiveSymlinkedFolder(this: *@This(), root_node_modules_dir: std.fs.Dir) bool {
+            var destination_dir = this.node_modules.openDir(root_node_modules_dir) catch return false;
            defer destination_dir.close();
 
-            const exists = bun.sys.directoryExistsAt(destination_dir.fd, this.destination_dir_subpath).unwrap() catch return .{};
-            return if (exists) .{ .valid = true } else .{};
+            return bun.sys.directoryExistsAt(destination_dir.fd, this.destination_dir_subpath).unwrap() catch false;
        }
 
-        const VerifyResult = struct {
-            valid: bool = false,
-            update_lockfile_pointers: bool = false,
-        };
-
        fn getInstalledPackageJsonSource(
            this: *PackageInstall,
            root_node_modules_dir: std.fs.Dir,
@@ -1246,7 +1236,7 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type {
            return logger.Source.initPathString(bun.span(package_json_path), mutable.list.items[0..total]);
        }
 
-        fn verifyPackageJSONNameAndVersion(this: *PackageInstall, root_node_modules_dir: std.fs.Dir, resolution_tag: Resolution.Tag, bin: *Bin) VerifyResult {
+        fn verifyPackageJSONNameAndVersion(this: *PackageInstall, root_node_modules_dir: std.fs.Dir, resolution_tag: Resolution.Tag) bool {
            var body_pool = Npm.Registry.BodyPool.get(this.allocator);
            var mutable: MutableString = body_pool.data;
            defer {
@@ -1259,7 +1249,7 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type {
            // Don't keep it open while we're parsing the JSON.
            // The longer the file stays open, the more likely it causes issues for
            // other processes on Windows.
-            const source = this.getInstalledPackageJsonSource(root_node_modules_dir, &mutable, resolution_tag) orelse return .{};
+            const source = this.getInstalledPackageJsonSource(root_node_modules_dir, &mutable, resolution_tag) orelse return false;
 
            var log = logger.Log.init(this.allocator);
            defer log.deinit();
@@ -1270,12 +1260,11 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type {
                this.allocator,
                &source,
                &log,
-                if (bin.isUnset()) .check_for_bin else .ignore_bin,
-            ) catch return .{};
-            _ = package_json_checker.parseExpr(false, false) catch return .{};
-            if (log.errors > 0 or !package_json_checker.has_found_name) return .{};
+            ) catch return false;
+            _ = package_json_checker.parseExpr() catch return false;
+            if (log.errors > 0 or !package_json_checker.has_found_name) return false;
            // workspaces aren't required to have a version
-            if (!package_json_checker.has_found_version and resolution_tag != .workspace) return .{};
+            if (!package_json_checker.has_found_version and resolution_tag != .workspace) return false;
 
            const found_version = package_json_checker.found_version;
@@ -1308,43 +1297,14 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type {
                }
                // If we didn't find any of these characters, there's no point in checking the version again.
                // it will never match.
-                return .{};
+                return false;
            };
 
-            if (!strings.eql(found_version[offset..], this.package_version)) return .{};
+            if (!strings.eql(found_version[offset..], this.package_version)) return false;
            }
 
            // lastly, check the name.
-            if (strings.eql(package_json_checker.found_name, this.package_name.slice(this.lockfile.buffers.string_bytes.items))) {
-                // only want to set bins if up-to-date
-                if (bin.isUnset() and package_json_checker.has_found_bin) {
-                    var string_buf = this.lockfile.stringBuf();
-                    defer string_buf.apply(this.lockfile);
-
-                    switch (package_json_checker.found_bin) {
-                        .bin => |expr| {
-                            bin.* = Bin.parseAppend(this.lockfile.allocator, expr, &string_buf, &this.lockfile.buffers.extern_strings) catch bun.outOfMemory();
-                        },
-                        .dir => |expr| {
-                            bin.* = Bin.parseAppendFromDirectories(this.lockfile.allocator, expr, &string_buf) catch bun.outOfMemory();
-                        },
-                    }
-
-                    return .{
-                        .valid = true,
-                        .update_lockfile_pointers = true,
-                    };
-                } else if (bin.isUnset()) {
-                    // It's not unset. There's no bin.
-                    bin.unset = 0;
-                }
-
-                return .{
-                    .valid = true,
-                };
-            }
-
-            return .{};
+            return strings.eql(package_json_checker.found_name, this.package_name.slice(this.lockfile.buffers.string_bytes.items));
        }
 
        pub const Result = union(Tag) {
@@ -12223,7 +12183,7 @@ pub const PackageManager = struct {
        metas: []const Lockfile.Package.Meta,
        names: []const String,
        pkg_name_hashes: []const PackageNameHash,
-        bins: []Bin,
+        bins: []const Bin,
        resolutions: []Resolution,
        node: *Progress.Node,
        destination_dir_subpath_buf: bun.PathBuffer = undefined,
@@ -13002,19 +12962,10 @@ pub const PackageManager = struct {
                },
            }
 
-            const needs_install = this.force_install or this.skip_verify_installed_version_number or !needs_verify or remove_patch or verify: {
-                const verified = installer.verify(
-                    resolution,
-                    this.root_node_modules_folder,
-                    &this.bins[package_id],
-                );
-
-                if (verified.update_lockfile_pointers) {
-                    this.fixCachedLockfilePackageSlices();
-                }
-
-                break :verify !verified.valid;
-            };
+            const needs_install = this.force_install or this.skip_verify_installed_version_number or !needs_verify or remove_patch or !installer.verify(
+                resolution,
+                this.root_node_modules_folder,
+            );
 
            this.summary.skipped += @intFromBool(!needs_install);
 
            if (needs_install) {
@@ -13186,17 +13137,6 @@ pub const PackageManager = struct {
                    this.node.completeOne();
                }
 
-                if (this.bins[package_id].isUnset()) {
-                    this.bins[package_id] = this.getPackageBin(
-                        &installer,
-                        pkg_name.slice(this.lockfile.buffers.string_bytes.items),
-                        pkg_name_hash,
-                        resolution,
-                    ) catch |err| switch (err) {
-                        error.OutOfMemory => bun.outOfMemory(),
-                    };
-                }
-
                if (this.bins[package_id].tag != .none) {
                    this.trees[this.current_tree_id].binaries.add(dependency_id) catch bun.outOfMemory();
                }
@@ -13335,16 +13275,6 @@ pub const PackageManager = struct {
                    },
                }
            } else {
-                if (this.bins[package_id].isUnset()) {
-                    this.bins[package_id] = this.getPackageBin(
-                        &installer,
-                        pkg_name.slice(this.lockfile.buffers.string_bytes.items),
-                        pkg_name_hash,
-                        resolution,
-                    ) catch |err| switch (err) {
-                        error.OutOfMemory => bun.outOfMemory(),
-                    };
-                }
                if (this.bins[package_id].tag != .none) {
                    this.trees[this.current_tree_id].binaries.add(dependency_id) catch bun.outOfMemory();
                }
@@ -13400,69 +13330,69 @@ pub const PackageManager = struct {
            }
        }
 
-        fn getPackageBin(
-            this: *PackageInstaller,
-            installer: *PackageInstall,
-            pkg_name: string,
-            pkg_name_hash: PackageNameHash,
-            resolution: *const Resolution,
-        ) OOM!Bin {
-            defer this.fixCachedLockfilePackageSlices();
-
-            if (resolution.tag == .npm) {
-                var expired = false;
-                if (this.manager.manifests.byNameHashAllowExpired(
-                    this.manager,
-                    this.manager.scopeForPackageName(pkg_name),
-                    pkg_name_hash,
-                    &expired,
-                    // Do not fallback to disk. These files are much larger than the package.json
-                    .load_from_memory,
-                )) |manifest| {
-                    if (manifest.findByVersion(resolution.value.npm.version)) |find| {
-                        return find.package.bin.cloneAppend(manifest.string_buf, manifest.extern_strings_bin_entries, this.lockfile);
-                    }
-                }
-            }
-
-            // get it from package.json
-            var body_pool = Npm.Registry.BodyPool.get(this.lockfile.allocator);
-            var mutable = body_pool.data;
-            defer {
-                body_pool.data = mutable;
-                Npm.Registry.BodyPool.release(body_pool);
-            }
-
-            const source = installer.getInstalledPackageJsonSource(this.root_node_modules_folder, &mutable, resolution.tag) orelse return .{};
-
-            initializeStore();
-
-            var log = logger.Log.init(this.lockfile.allocator);
-            defer log.deinit();
-
-            var bin_finder = JSON.PackageJSONVersionChecker.init(
-                this.lockfile.allocator,
-                &source,
-                &log,
-                .only_bin,
-            ) catch return .{};
-            _ = bin_finder.parseExpr(false, false) catch return .{};
-
-            if (bin_finder.has_found_bin) {
-                var string_buf = this.lockfile.stringBuf();
-                defer {
-                    string_buf.apply(this.lockfile);
-                    this.fixCachedLockfilePackageSlices();
-                }
-
-                return switch (bin_finder.found_bin) {
-                    .bin => |bin| try Bin.parseAppend(this.lockfile.allocator, bin, &string_buf, &this.lockfile.buffers.extern_strings),
-                    .dir => |dir| try Bin.parseAppendFromDirectories(this.lockfile.allocator, dir, &string_buf),
-                };
-            }
-
-            return .{};
-        }
+        // fn getPackageBin(
+        //     this: *PackageInstaller,
+        //     installer: *PackageInstall,
+        //     pkg_name: string,
+        //     pkg_name_hash: PackageNameHash,
+        //     resolution: *const Resolution,
+        // ) OOM!Bin {
+        //     defer this.fixCachedLockfilePackageSlices();
+
+        //     if (resolution.tag == .npm) {
+        //         var expired = false;
+        //         if (this.manager.manifests.byNameHashAllowExpired(
+        //             this.manager,
+        //             this.manager.scopeForPackageName(pkg_name),
+        //             pkg_name_hash,
+        //             &expired,
+        //             // Do not fallback to disk. These files are much larger than the package.json
+        //             .load_from_memory,
+        //         )) |manifest| {
+        //             if (manifest.findByVersion(resolution.value.npm.version)) |find| {
+        //                 return find.package.bin.cloneAppend(manifest.string_buf, manifest.extern_strings_bin_entries, this.lockfile);
+        //             }
+        //         }
+        //     }
+
+        //     // get it from package.json
+        //     var body_pool = Npm.Registry.BodyPool.get(this.lockfile.allocator);
+        //     var mutable = body_pool.data;
+        //     defer {
+        //         body_pool.data = mutable;
+        //         Npm.Registry.BodyPool.release(body_pool);
+        //     }
+
+        //     const source = installer.getInstalledPackageJsonSource(this.root_node_modules_folder, &mutable, resolution.tag) orelse return .{};
+
+        //     initializeStore();
+
+        //     var log = logger.Log.init(this.lockfile.allocator);
+        //     defer log.deinit();
+
+        //     var bin_finder = JSON.PackageJSONVersionChecker.init(
+        //         this.lockfile.allocator,
+        //         &source,
+        //         &log,
+        //         .only_bin,
+        //     ) catch return .{};
+        //     _ = bin_finder.parseExpr(false, false) catch return .{};
+
+        //     if (bin_finder.has_found_bin) {
+        //         var string_buf = this.lockfile.stringBuf();
+        //         defer {
+        //             string_buf.apply(this.lockfile);
+        //             this.fixCachedLockfilePackageSlices();
+        //         }
+
+        //         return switch (bin_finder.found_bin) {
+        //             .bin => |bin| try Bin.parseAppend(this.lockfile.allocator, bin, &string_buf, &this.lockfile.buffers.extern_strings),
+        //             .dir => |dir| try Bin.parseAppendFromDirectories(this.lockfile.allocator, dir, &string_buf),
+        //         };
+        //     }
+
+        //     return .{};
+        // }
 
        // returns true if scripts are enqueued
        fn enqueueLifecycleScripts(
diff --git a/src/install/semver.zig b/src/install/semver.zig
index 8cd6cb0bc473b0..2dbc1a8a6ccee5 100644
--- a/src/install/semver.zig
+++ b/src/install/semver.zig
@@ -160,6 +160,29 @@ pub const String = extern struct {
        }
    };
 
+    /// Escapes for json. Expects string to be prequoted
+    pub inline fn fmtJson(self: *const String, buf: []const u8, opts: JsonFormatter.Options) JsonFormatter {
+        return .{
+            .buf = buf,
+            .str = self,
+            .opts = opts,
+        };
+    }
+
+    pub const JsonFormatter = struct {
+        str: *const String,
+        buf: string,
+        opts: Options,
+
+        pub const Options = struct {
+            quote: bool = true,
+        };
+
+        pub fn format(formatter: JsonFormatter, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void {
+            try writer.print("{}", .{bun.fmt.formatJSONStringUTF8(formatter.str.slice(formatter.buf), .{ .quote = formatter.opts.quote })});
+        }
+    };
+
    pub fn Sorter(comptime direction: enum { asc, desc }) type {
        return struct {
            lhs_buf: []const u8,
diff --git a/src/json_parser.zig b/src/json_parser.zig
index 22ef5f46329a79..c3fbf960e33c53 100644
--- a/src/json_parser.zig
+++ b/src/json_parser.zig
@@ -347,16 +347,14 @@ fn JSONLikeParser_(
    };
}
 
-// This is a special JSON parser that stops as soon as it finds combinations of
+// This is a special JSON parser that stops as soon as it finds
 // {
 //   "name": "NAME_IN_HERE",
 //   "version": "VERSION_IN_HERE",
-//   "bin": ... or "directories": { "bin": ... }
 // }
-// and then returns the name, version, and bin
-// More precisely, it stops as soon as it finds a top-level "name" and "version" (and/or "bin").
-// In most cases, it should perform zero heap allocations because it does not create arrays or objects (It just skips them).
-// If searching for "bin", objects are only created if the key is top level "bin". "bin" within "directories" can only be a string.
+// and then returns the name and version.
+// More precisely, it stops as soon as it finds a top-level "name" and "version" property which are strings
+// In most cases, it should perform zero heap allocations because it does not create arrays or objects (It just skips them)
 pub const PackageJSONVersionChecker = struct {
    const Lexer = js_lexer.NewLexer(opts);
@@ -371,14 +369,9 @@ pub const PackageJSONVersionChecker = struct {
 
    found_name: []const u8 = "",
    found_version: []const u8 = "",
-    found_bin: union(enum) {
-        bin: Expr,
-        dir: Expr,
-    } = .{ .bin = Expr.empty },
 
    has_found_name: bool = false,
    has_found_version: bool = false,
-    has_found_bin: bool = false,
 
    name_loc: logger.Loc = logger.Loc.Empty,
@@ -389,24 +382,21 @@ pub const PackageJSONVersionChecker = struct {
        .allow_comments = true,
    };
 
-    pub fn init(allocator: std.mem.Allocator, source: *const logger.Source, log: *logger.Log, checks: enum { check_for_bin, ignore_bin, only_bin }) !Parser {
+    pub fn init(allocator: std.mem.Allocator, source: *const logger.Source, log: *logger.Log) !Parser {
        return Parser{
            .lexer = try Lexer.init(log, source.*, allocator),
            .allocator = allocator,
            .log = log,
            .source = source,
-            .has_found_bin = checks == .ignore_bin,
-            .has_found_name = checks == .only_bin,
-            .has_found_version = checks == .only_bin,
        };
    }
 
    const Parser = @This();
 
-    pub fn parseExpr(p: *Parser, collect_props: bool, parent_is_directories: bool) anyerror!Expr {
+    pub fn parseExpr(p: *Parser) anyerror!Expr {
        const loc = p.lexer.loc();
 
-        if (p.has_found_name and p.has_found_version and p.has_found_bin) return newExpr(E.Missing{}, loc);
+        if (p.has_found_name and p.has_found_version) return newExpr(E.Missing{}, loc);
 
        switch (p.lexer.token) {
            .t_false => {
@@ -453,7 +443,7 @@ pub const PackageJSONVersionChecker = struct {
                    }
                }
 
-                _ = try p.parseExpr(false, false);
+                _ = try p.parseExpr();
                has_exprs = true;
            }
@@ -465,8 +455,6 @@ pub const PackageJSONVersionChecker = struct {
                p.depth += 1;
                defer p.depth -= 1;
 
-                var properties = std.ArrayList(G.Property).init(p.allocator);
-
                var has_properties = false;
                while (p.lexer.token != .t_close_brace) {
                    if (has_properties) {
@@ -483,95 +471,40 @@ pub const PackageJSONVersionChecker = struct {
                    try p.lexer.expect(.t_colon);
 
-                    var collect_prop_props = false;
-                    var is_directories = false;
-
-                    if (!p.has_found_bin and
-                        p.depth == 1 and
-                        // next is going to be a top level property
-                        // with an object value. check if it is "bin"
-                        // or "directories"
-                        p.lexer.token == .t_open_brace and
-                        key.data == .e_string)
-                    {
-                        if (strings.eqlComptime(key.data.e_string.data, "bin")) {
-                            collect_prop_props = true;
-                        } else if (strings.eqlComptime(key.data.e_string.data, "directories")) {
-                            is_directories = true;
-                        }
-
-                        // if bin is in directories it can only be a string, so
-                        // don't need to set collect_prop_props when depth == 2
-                        // and in parent_is_directories == true.
-                    }
-
-                    const value = try p.parseExpr(collect_prop_props, is_directories);
+                    const value = try p.parseExpr();
 
                    if (p.depth == 1) {
                        // if you have multiple "name" fields in the package.json....
                        // first one wins
-                        if (key.data == .e_string) {
-                            if (value.data == .e_string) {
-                                if (!p.has_found_name and strings.eqlComptime(key.data.e_string.data, "name")) {
-                                    const len = @min(
-                                        value.data.e_string.data.len,
-                                        p.found_name_buf.len,
-                                    );
-
-                                    bun.copy(u8, &p.found_name_buf, value.data.e_string.data[0..len]);
-                                    p.found_name = p.found_name_buf[0..len];
-                                    p.has_found_name = true;
-                                    p.name_loc = value.loc;
-                                } else if (!p.has_found_version and strings.eqlComptime(key.data.e_string.data, "version")) {
-                                    const len = @min(
-                                        value.data.e_string.data.len,
-                                        p.found_version_buf.len,
-                                    );
-                                    bun.copy(u8, &p.found_version_buf, value.data.e_string.data[0..len]);
-                                    p.found_version = p.found_version_buf[0..len];
-                                    p.has_found_version = true;
-                                }
-                            }
-
-                            if (!p.has_found_bin and strings.eqlComptime(key.data.e_string.data, "bin")) {
-                                p.found_bin = .{
-                                    .bin = value,
-                                };
-                                p.has_found_bin = true;
-                            }
-                        }
-                    } else if (parent_is_directories) {
-                        if (key.data == .e_string) {
-                            if (!p.has_found_bin and strings.eqlComptime(key.data.e_string.data, "bin")) {
-                                p.found_bin = .{
-                                    .dir = value,
-                                };
-                                p.has_found_bin = true;
+                        if (key.data == .e_string and value.data == .e_string) {
+                            if (!p.has_found_name and strings.eqlComptime(key.data.e_string.data, "name")) {
+                                const len = @min(
+                                    value.data.e_string.data.len,
+                                    p.found_name_buf.len,
+                                );
+
+                                bun.copy(u8, &p.found_name_buf, value.data.e_string.data[0..len]);
+                                p.found_name = p.found_name_buf[0..len];
+                                p.has_found_name = true;
+                                p.name_loc = value.loc;
+                            } else if (!p.has_found_version and strings.eqlComptime(key.data.e_string.data, "version")) {
+                                const len = @min(
+                                    value.data.e_string.data.len,
+                                    p.found_version_buf.len,
+                                );
+                                bun.copy(u8, &p.found_version_buf, value.data.e_string.data[0..len]);
+                                p.found_version = p.found_version_buf[0..len];
+                                p.has_found_version = true;
                            }
                        }
                    }
 
-                    if (p.has_found_name and p.has_found_version and p.has_found_bin) return newExpr(E.Missing{}, loc);
+                    if (p.has_found_name and p.has_found_version) return newExpr(E.Missing{}, loc);
 
                    has_properties = true;
-
-                    if (collect_props) {
-                        properties.append(.{
-                            .key = key,
-                            .value = value,
-                            .kind = .normal,
-                            .initializer = null,
-                        }) catch bun.outOfMemory();
-                    }
                }
 
                try p.lexer.expect(.t_close_brace);
-
-                if (collect_props) {
-                    return newExpr(E.Object{
-                        .properties = G.Property.List.fromList(properties),
-                    }, loc);
-                }
                return newExpr(E.Missing{}, loc);
            },
            else => {