From 455de2a449151f99def91a307c33f0c551826390 Mon Sep 17 00:00:00 2001 From: Meghan Denny Date: Tue, 10 Dec 2024 20:33:36 -0800 Subject: [PATCH 1/7] deps: update boringssl (#15677) --- cmake/targets/BuildBoringSSL.cmake | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmake/targets/BuildBoringSSL.cmake b/cmake/targets/BuildBoringSSL.cmake index 28575eb35f7b6d..8b709b3de28e6b 100644 --- a/cmake/targets/BuildBoringSSL.cmake +++ b/cmake/targets/BuildBoringSSL.cmake @@ -4,7 +4,7 @@ register_repository( REPOSITORY oven-sh/boringssl COMMIT - 29a2cd359458c9384694b75456026e4b57e3e567 + 914b005ef3ece44159dca0ffad74eb42a9f6679f ) register_cmake_command( From 0bc57eebcb87390c490391a9bbd682b334e2bfaf Mon Sep 17 00:00:00 2001 From: Don Isaac Date: Tue, 10 Dec 2024 22:05:46 -0800 Subject: [PATCH 2/7] =?UTF-8?q?test(deno):=20use=20`expect.toBeGreaterThan?= =?UTF-8?q?orEqual`=20on=20failing=20deno=20perf=20=E2=80=A6=20(#15700)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Don Isaac --- test/js/deno/harness.ts | 20 ++++++++++++++++++++ test/js/deno/performance/performance.test.ts | 5 +++-- 2 files changed, 23 insertions(+), 2 deletions(-) diff --git a/test/js/deno/harness.ts b/test/js/deno/harness.ts index d3a0439e2a8c4c..64b6f2b7d70d11 100644 --- a/test/js/deno/harness.ts +++ b/test/js/deno/harness.ts @@ -130,6 +130,22 @@ export function createDenoTest(path: string, defaultTimeout = 5000) { } }; + const assertGreaterThan = (actual: number, expected: number, message?: string) => { + expect(actual).toBeGreaterThan(expected); + } + + const assertGreaterThanOrEqual = (actual: number, expected: number, message?: string) => { + expect(actual).toBeGreaterThanOrEqual(expected); + } + + const assertLessThan = (actual: number, expected: number, message?: string) => { + expect(actual).toBeLessThan(expected); + } + + const assertLessThanOrEqual = (actual: number, expected: number, message?: string) => { + expect(actual).toBeLessThanOrEqual(expected); + } + const assertInstanceOf = (actual: unknown, expected: unknown, message?: string) => { expect(actual).toBeInstanceOf(expected); }; @@ -328,6 +344,10 @@ export function createDenoTest(path: string, defaultTimeout = 5000) { assertStrictEquals, assertNotStrictEquals, assertAlmostEquals, + assertGreaterThan, + assertGreaterThanOrEqual, + assertLessThan, + assertLessThanOrEqual, assertInstanceOf, assertNotInstanceOf, assertStringIncludes, diff --git a/test/js/deno/performance/performance.test.ts b/test/js/deno/performance/performance.test.ts index 8753b774f86553..5dba6df82a4b7b 100644 --- a/test/js/deno/performance/performance.test.ts +++ b/test/js/deno/performance/performance.test.ts @@ -1,6 +1,6 @@ // Copyright 2018-2024 the Deno authors. All rights reserved. MIT license. 
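// For context: the assertGreaterThanOrEqual wrapper added to the harness in this
// patch delegates to bun:test's matcher, so a failure reports both operands
// instead of the bare "expected true" produced by a boolean assert(). A minimal
// standalone sketch of the difference (the `measure` object is a hypothetical
// stand-in for a PerformanceMeasure):
import { expect } from "bun:test";

const measure = { duration: 123 };
// assert(measure.duration >= 100, "...") fails with only "expected true";
// the matcher's failure message includes the actual and expected values:
expect(measure.duration).toBeGreaterThanOrEqual(100);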
import { createDenoTest } from "deno:harness"; -const { test, assert, assertEquals, assertThrows } = createDenoTest(import.meta.path); +const { test, assert, assertEquals, assertGreaterThanOrEqual, assertThrows } = createDenoTest(import.meta.path); test({ permissions: { hrtime: false } }, async function performanceNow() { const { promise, resolve } = Promise.withResolvers(); @@ -90,7 +90,8 @@ test(function performanceMeasure() { assertEquals(measure2.startTime, 0); assertEquals(mark1.startTime, measure1.startTime); assertEquals(mark1.startTime, measure2.duration); - assert(measure1.duration >= 100, `duration below 100ms: ${measure1.duration}`); + // assert(measure1.duration >= 100, `duration below 100ms: ${measure1.duration}`); + assertGreaterThanOrEqual(measure1.duration, 100, `duration below 100ms: ${measure1.duration}`); assert( measure1.duration < (later - now) * 1.5, `duration exceeds 150% of wallclock time: ${measure1.duration}ms vs ${later - now}ms`, From 5cfa4cc0afd52c601240850130c671d1f2ecc030 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Wed, 11 Dec 2024 00:34:19 -0800 Subject: [PATCH 3/7] =?UTF-8?q?=E2=9C=82=EF=B8=8F?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../test/parallel/test-fs-readfile-flags.js | 50 ----- ...est-fs-watch-file-enoent-after-deletion.js | 47 ----- .../parallel/test-timers-immediate-queue.js | 57 ------ .../test/parallel/test-zlib-dictionary.js | 175 ------------------ 4 files changed, 329 deletions(-) delete mode 100644 test/js/node/test/parallel/test-fs-readfile-flags.js delete mode 100644 test/js/node/test/parallel/test-fs-watch-file-enoent-after-deletion.js delete mode 100644 test/js/node/test/parallel/test-timers-immediate-queue.js delete mode 100644 test/js/node/test/parallel/test-zlib-dictionary.js diff --git a/test/js/node/test/parallel/test-fs-readfile-flags.js b/test/js/node/test/parallel/test-fs-readfile-flags.js deleted file mode 100644 index 72b910aeeb48d6..00000000000000 --- a/test/js/node/test/parallel/test-fs-readfile-flags.js +++ /dev/null @@ -1,50 +0,0 @@ -'use strict'; - -// Test of fs.readFile with different flags. -const common = require('../common'); -const fs = require('fs'); -const assert = require('assert'); -const tmpdir = require('../common/tmpdir'); - -tmpdir.refresh(); - -{ - const emptyFile = tmpdir.resolve('empty.txt'); - fs.closeSync(fs.openSync(emptyFile, 'w')); - - fs.readFile( - emptyFile, - // With `a+` the file is created if it does not exist - common.mustNotMutateObjectDeep({ encoding: 'utf8', flag: 'a+' }), - common.mustCall((err, data) => { assert.strictEqual(data, ''); }) - ); - - fs.readFile( - emptyFile, - // Like `a+` but fails if the path exists. - common.mustNotMutateObjectDeep({ encoding: 'utf8', flag: 'ax+' }), - common.mustCall((err, data) => { assert.strictEqual(err.code, 'EEXIST'); }) - ); -} - -{ - const willBeCreated = tmpdir.resolve('will-be-created'); - - fs.readFile( - willBeCreated, - // With `a+` the file is created if it does not exist - common.mustNotMutateObjectDeep({ encoding: 'utf8', flag: 'a+' }), - common.mustCall((err, data) => { assert.strictEqual(data, ''); }) - ); -} - -{ - const willNotBeCreated = tmpdir.resolve('will-not-be-created'); - - fs.readFile( - willNotBeCreated, - // Default flag is `r`. An exception occurs if the file does not exist. 
- common.mustNotMutateObjectDeep({ encoding: 'utf8' }), - common.mustCall((err, data) => { assert.strictEqual(err.code, 'ENOENT'); }) - ); -} diff --git a/test/js/node/test/parallel/test-fs-watch-file-enoent-after-deletion.js b/test/js/node/test/parallel/test-fs-watch-file-enoent-after-deletion.js deleted file mode 100644 index e4baf90fd17b94..00000000000000 --- a/test/js/node/test/parallel/test-fs-watch-file-enoent-after-deletion.js +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -'use strict'; -const common = require('../common'); - -// Make sure the deletion event gets reported in the following scenario: -// 1. Watch a file. -// 2. The initial stat() goes okay. -// 3. Something deletes the watched file. -// 4. The second stat() fails with ENOENT. - -// The second stat() translates into the first 'change' event but a logic error -// stopped it from getting emitted. -// https://github.com/nodejs/node-v0.x-archive/issues/4027 - -const fs = require('fs'); - -const tmpdir = require('../common/tmpdir'); -tmpdir.refresh(); - -const filename = tmpdir.resolve('watched'); -fs.writeFileSync(filename, 'quis custodiet ipsos custodes'); - -fs.watchFile(filename, { interval: 50 }, common.mustCall(function(curr, prev) { - fs.unwatchFile(filename); -})); - -fs.unlinkSync(filename); diff --git a/test/js/node/test/parallel/test-timers-immediate-queue.js b/test/js/node/test/parallel/test-timers-immediate-queue.js deleted file mode 100644 index 9bd8aa1bc7a79a..00000000000000 --- a/test/js/node/test/parallel/test-timers-immediate-queue.js +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. 
-// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -'use strict'; -const common = require('../common'); -if (common.isWindows) return; // TODO BUN -const assert = require('assert'); - -// setImmediate should run clear its queued cbs once per event loop turn -// but immediates queued while processing the current queue should happen -// on the next turn of the event loop. - -// hit should be the exact same size of QUEUE, if we're letting things -// recursively add to the immediate QUEUE hit will be > QUEUE - -let ticked = false; - -let hit = 0; -const QUEUE = 10; - -function run() { - if (hit === 0) { - setTimeout(() => { ticked = true; }, 1); - const now = Date.now(); - while (Date.now() - now < 2); - } - - if (ticked) return; - - hit += 1; - setImmediate(run); -} - -for (let i = 0; i < QUEUE; i++) - setImmediate(run); - -process.on('exit', function() { - assert.strictEqual(hit, QUEUE); -}); diff --git a/test/js/node/test/parallel/test-zlib-dictionary.js b/test/js/node/test/parallel/test-zlib-dictionary.js deleted file mode 100644 index 49a01d5a03ee4b..00000000000000 --- a/test/js/node/test/parallel/test-zlib-dictionary.js +++ /dev/null @@ -1,175 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. 
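// The removed test below round-trips data through deflate/inflate streams that
// share a preset dictionary. The essential contract is small: both ends must be
// created with the same dictionary. A minimal one-shot sketch (arbitrary sample
// data standing in for the SPDY dictionary used by the test):
import * as zlib from "node:zlib";

const dictionary = Buffer.from("Server: node.js\r\nContent-Length: 0\r\n");
const input = "HTTP/1.1 200 Ok\r\nServer: node.js\r\nContent-Length: 0\r\n\r\n";

zlib.deflate(input, { dictionary }, (err, compressed) => {
  if (err) throw err;
  // Inflating without the same dictionary would fail with a data error.
  zlib.inflate(compressed, { dictionary }, (err2, out) => {
    if (err2) throw err2;
    console.log(out.toString() === input); // true
  });
});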
- -'use strict'; -// Test compression/decompression with dictionary - -const common = require('../common'); -const assert = require('assert'); -const zlib = require('zlib'); - -const spdyDict = Buffer.from([ - 'optionsgetheadpostputdeletetraceacceptaccept-charsetaccept-encodingaccept-', - 'languageauthorizationexpectfromhostif-modified-sinceif-matchif-none-matchi', - 'f-rangeif-unmodifiedsincemax-forwardsproxy-authorizationrangerefererteuser', - '-agent10010120020120220320420520630030130230330430530630740040140240340440', - '5406407408409410411412413414415416417500501502503504505accept-rangesageeta', - 'glocationproxy-authenticatepublicretry-afterservervarywarningwww-authentic', - 'ateallowcontent-basecontent-encodingcache-controlconnectiondatetrailertran', - 'sfer-encodingupgradeviawarningcontent-languagecontent-lengthcontent-locati', - 'oncontent-md5content-rangecontent-typeetagexpireslast-modifiedset-cookieMo', - 'ndayTuesdayWednesdayThursdayFridaySaturdaySundayJanFebMarAprMayJunJulAugSe', - 'pOctNovDecchunkedtext/htmlimage/pngimage/jpgimage/gifapplication/xmlapplic', - 'ation/xhtmltext/plainpublicmax-agecharset=iso-8859-1utf-8gzipdeflateHTTP/1', - '.1statusversionurl\0', -].join('')); - -const input = [ - 'HTTP/1.1 200 Ok', - 'Server: node.js', - 'Content-Length: 0', - '', -].join('\r\n'); - -function basicDictionaryTest(spdyDict) { - let output = ''; - const deflate = zlib.createDeflate({ dictionary: spdyDict }); - const inflate = zlib.createInflate({ dictionary: spdyDict }); - inflate.setEncoding('utf-8'); - - deflate.on('data', function(chunk) { - inflate.write(chunk); - }); - - inflate.on('data', function(chunk) { - output += chunk; - }); - - deflate.on('end', function() { - inflate.end(); - }); - - inflate.on('end', common.mustCall(function() { - assert.strictEqual(input, output); - })); - - deflate.write(input); - deflate.end(); -} - -function deflateResetDictionaryTest(spdyDict) { - let doneReset = false; - let output = ''; - const deflate = zlib.createDeflate({ dictionary: spdyDict }); - const inflate = zlib.createInflate({ dictionary: spdyDict }); - inflate.setEncoding('utf-8'); - - deflate.on('data', function(chunk) { - if (doneReset) - inflate.write(chunk); - }); - - inflate.on('data', function(chunk) { - output += chunk; - }); - - deflate.on('end', function() { - inflate.end(); - }); - - inflate.on('end', common.mustCall(function() { - assert.strictEqual(input, output); - })); - - deflate.write(input); - deflate.flush(function() { - deflate.reset(); - doneReset = true; - deflate.write(input); - deflate.end(); - }); -} - -function rawDictionaryTest(spdyDict) { - let output = ''; - const deflate = zlib.createDeflateRaw({ dictionary: spdyDict }); - const inflate = zlib.createInflateRaw({ dictionary: spdyDict }); - inflate.setEncoding('utf-8'); - - deflate.on('data', function(chunk) { - inflate.write(chunk); - }); - - inflate.on('data', function(chunk) { - output += chunk; - }); - - deflate.on('end', function() { - inflate.end(); - }); - - inflate.on('end', common.mustCall(function() { - assert.strictEqual(input, output); - })); - - deflate.write(input); - deflate.end(); -} - -function deflateRawResetDictionaryTest(spdyDict) { - let doneReset = false; - let output = ''; - const deflate = zlib.createDeflateRaw({ dictionary: spdyDict }); - const inflate = zlib.createInflateRaw({ dictionary: spdyDict }); - inflate.setEncoding('utf-8'); - - deflate.on('data', function(chunk) { - if (doneReset) - inflate.write(chunk); - }); - - inflate.on('data', function(chunk) { - output += chunk; 
- }); - - deflate.on('end', function() { - inflate.end(); - }); - - inflate.on('end', common.mustCall(function() { - assert.strictEqual(input, output); - })); - - deflate.write(input); - deflate.flush(function() { - deflate.reset(); - doneReset = true; - deflate.write(input); - deflate.end(); - }); -} - -for (const dict of [spdyDict, ...common.getBufferSources(spdyDict)]) { - basicDictionaryTest(dict); - deflateResetDictionaryTest(dict); - rawDictionaryTest(dict); - deflateRawResetDictionaryTest(dict); -} From 24d73e948a0b156291e37c3fbc26f723fba0cc04 Mon Sep 17 00:00:00 2001 From: Don Isaac Date: Wed, 11 Dec 2024 00:34:58 -0800 Subject: [PATCH 4/7] test(node): add passing path parse format test (#15703) Co-authored-by: Don Isaac --- .../test/parallel/test-path-parse-format.js | 226 ++++++++++++++++++ 1 file changed, 226 insertions(+) create mode 100644 test/js/node/test/parallel/test-path-parse-format.js diff --git a/test/js/node/test/parallel/test-path-parse-format.js b/test/js/node/test/parallel/test-path-parse-format.js new file mode 100644 index 00000000000000..ca14120422b2ee --- /dev/null +++ b/test/js/node/test/parallel/test-path-parse-format.js @@ -0,0 +1,226 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
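// The test below checks, among other things, that parse() and format() round-trip
// and that the parsed fields agree with dirname/basename/extname. A quick
// standalone illustration of those invariants:
import * as path from "node:path";

const p = "/home/user/dir/file.txt";
const parsed = path.posix.parse(p);
// { root: "/", dir: "/home/user/dir", base: "file.txt", ext: ".txt", name: "file" }

console.log(path.posix.format(parsed) === p); // true
console.log(parsed.dir === path.posix.dirname(p)); // true
console.log(parsed.base === path.posix.basename(p)); // true
console.log(parsed.ext === path.posix.extname(p)); // true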
+ +'use strict'; +const common = require('../common'); +const assert = require('assert'); +const path = require('path'); + +const winPaths = [ + // [path, root] + ['C:\\path\\dir\\index.html', 'C:\\'], + ['C:\\another_path\\DIR\\1\\2\\33\\\\index', 'C:\\'], + ['another_path\\DIR with spaces\\1\\2\\33\\index', ''], + ['\\', '\\'], + ['\\foo\\C:', '\\'], + ['file', ''], + ['file:stream', ''], + ['.\\file', ''], + ['C:', 'C:'], + ['C:.', 'C:'], + ['C:..', 'C:'], + ['C:abc', 'C:'], + ['C:\\', 'C:\\'], + ['C:\\abc', 'C:\\' ], + ['', ''], + + // unc + ['\\\\server\\share\\file_path', '\\\\server\\share\\'], + ['\\\\server two\\shared folder\\file path.zip', + '\\\\server two\\shared folder\\'], + ['\\\\teela\\admin$\\system32', '\\\\teela\\admin$\\'], + ['\\\\?\\UNC\\server\\share', '\\\\?\\UNC\\'], +]; + +const winSpecialCaseParseTests = [ + ['t', { base: 't', name: 't', root: '', dir: '', ext: '' }], + ['/foo/bar', { root: '/', dir: '/foo', base: 'bar', ext: '', name: 'bar' }], +]; + +const winSpecialCaseFormatTests = [ + [{ dir: 'some\\dir' }, 'some\\dir\\'], + [{ base: 'index.html' }, 'index.html'], + [{ root: 'C:\\' }, 'C:\\'], + [{ name: 'index', ext: '.html' }, 'index.html'], + [{ dir: 'some\\dir', name: 'index', ext: '.html' }, 'some\\dir\\index.html'], + [{ root: 'C:\\', name: 'index', ext: '.html' }, 'C:\\index.html'], + [{}, ''], +]; + +const unixPaths = [ + // [path, root] + ['/home/user/dir/file.txt', '/'], + ['/home/user/a dir/another File.zip', '/'], + ['/home/user/a dir//another&File.', '/'], + ['/home/user/a$$$dir//another File.zip', '/'], + ['user/dir/another File.zip', ''], + ['file', ''], + ['.\\file', ''], + ['./file', ''], + ['C:\\foo', ''], + ['/', '/'], + ['', ''], + ['.', ''], + ['..', ''], + ['/foo', '/'], + ['/foo.', '/'], + ['/foo.bar', '/'], + ['/.', '/'], + ['/.foo', '/'], + ['/.foo.bar', '/'], + ['/foo/bar.baz', '/'], +]; + +const unixSpecialCaseFormatTests = [ + [{ dir: 'some/dir' }, 'some/dir/'], + [{ base: 'index.html' }, 'index.html'], + [{ root: '/' }, '/'], + [{ name: 'index', ext: '.html' }, 'index.html'], + [{ dir: 'some/dir', name: 'index', ext: '.html' }, 'some/dir/index.html'], + [{ root: '/', name: 'index', ext: '.html' }, '/index.html'], + [{}, ''], +]; + +const errors = [ + { method: 'parse', input: [null] }, + { method: 'parse', input: [{}] }, + { method: 'parse', input: [true] }, + { method: 'parse', input: [1] }, + { method: 'parse', input: [] }, + { method: 'format', input: [null] }, + { method: 'format', input: [''] }, + { method: 'format', input: [true] }, + { method: 'format', input: [1] }, +]; + +checkParseFormat(path.win32, winPaths); +checkParseFormat(path.posix, unixPaths); +checkSpecialCaseParseFormat(path.win32, winSpecialCaseParseTests); +checkErrors(path.win32); +checkErrors(path.posix); +checkFormat(path.win32, winSpecialCaseFormatTests); +checkFormat(path.posix, unixSpecialCaseFormatTests); + +// Test removal of trailing path separators +const trailingTests = [ + [ path.win32.parse, + [['.\\', { root: '', dir: '', base: '.', ext: '', name: '.' }], + ['\\\\', { root: '\\', dir: '\\', base: '', ext: '', name: '' }], + ['\\\\', { root: '\\', dir: '\\', base: '', ext: '', name: '' }], + ['c:\\foo\\\\\\', + { root: 'c:\\', dir: 'c:\\', base: 'foo', ext: '', name: 'foo' }], + ['D:\\foo\\\\\\bar.baz', + { root: 'D:\\', + dir: 'D:\\foo\\\\', + base: 'bar.baz', + ext: '.baz', + name: 'bar' }, + ], + ], + ], + [ path.posix.parse, + [['./', { root: '', dir: '', base: '.', ext: '', name: '.' 
}], + ['//', { root: '/', dir: '/', base: '', ext: '', name: '' }], + ['///', { root: '/', dir: '/', base: '', ext: '', name: '' }], + ['/foo///', { root: '/', dir: '/', base: 'foo', ext: '', name: 'foo' }], + ['/foo///bar.baz', + { root: '/', dir: '/foo//', base: 'bar.baz', ext: '.baz', name: 'bar' }, + ], + ], + ], +]; +const failures = []; +for (const [parse, testList] of trailingTests) { + const os = parse === path.win32.parse ? 'win32' : 'posix'; + for (const [input, expected] of testList) { + const actual = parse(input); + const message = `path.${os}.parse(${JSON.stringify(input)})\n expect=${ + JSON.stringify(expected)}\n actual=${JSON.stringify(actual)}`; + const actualKeys = Object.keys(actual); + const expectedKeys = Object.keys(expected); + let failed = (actualKeys.length !== expectedKeys.length); + if (!failed) { + for (let i = 0; i < actualKeys.length; ++i) { + const key = actualKeys[i]; + if (!expectedKeys.includes(key) || actual[key] !== expected[key]) { + failed = true; + break; + } + } + } + if (failed) + failures.push(`\n${message}`); + } +} +assert.strictEqual(failures.length, 0, failures.join('')); + +function checkErrors(path) { + errors.forEach(({ method, input }) => { + assert.throws(() => { + path[method].apply(path, input); + }, { + code: 'ERR_INVALID_ARG_TYPE', + name: 'TypeError' + }); + }); +} + +function checkParseFormat(path, paths) { + paths.forEach(([element, root]) => { + const output = path.parse(element); + assert.strictEqual(typeof output.root, 'string'); + assert.strictEqual(typeof output.dir, 'string'); + assert.strictEqual(typeof output.base, 'string'); + assert.strictEqual(typeof output.ext, 'string'); + assert.strictEqual(typeof output.name, 'string'); + assert.strictEqual(path.format(output), element); + assert.strictEqual(output.root, root); + assert(output.dir.startsWith(output.root)); + assert.strictEqual(output.dir, output.dir ? 
path.dirname(element) : ''); + assert.strictEqual(output.base, path.basename(element)); + assert.strictEqual(output.ext, path.extname(element)); + }); +} + +function checkSpecialCaseParseFormat(path, testCases) { + testCases.forEach(([element, expect]) => { + assert.deepStrictEqual(path.parse(element), expect); + }); +} + +function checkFormat(path, testCases) { + testCases.forEach(([element, expect]) => { + assert.strictEqual(path.format(element), expect); + }); + + [null, undefined, 1, true, false, 'string'].forEach((pathObject) => { + assert.throws(() => { + path.format(pathObject); + }, { + code: 'ERR_INVALID_ARG_TYPE', + name: 'TypeError', + }); + }); +} + +// See https://github.com/nodejs/node/issues/44343 +assert.strictEqual(path.format({ name: 'x', ext: 'png' }), 'x.png'); +assert.strictEqual(path.format({ name: 'x', ext: '.png' }), 'x.png'); From 78445c543ef21a3ad15a33d921da079485ddd9c9 Mon Sep 17 00:00:00 2001 From: Don Isaac Date: Wed, 11 Dec 2024 01:07:57 -0800 Subject: [PATCH 5/7] refactor: set default for name in `ErrorCode.ts` (#15699) Co-authored-by: Don Isaac --- src/bun.js/bindings/ErrorCode.ts | 129 ++++++++++++++-------------- src/codegen/generate-node-errors.ts | 3 +- 2 files changed, 67 insertions(+), 65 deletions(-) diff --git a/src/bun.js/bindings/ErrorCode.ts b/src/bun.js/bindings/ErrorCode.ts index a2184f7215f807..427f71fe198290 100644 --- a/src/bun.js/bindings/ErrorCode.ts +++ b/src/bun.js/bindings/ErrorCode.ts @@ -1,84 +1,85 @@ +// used by generate-node-errors.ts type ErrorCodeMapping = Array< [ /** error.code */ string, /** Constructor **/ typeof TypeError | typeof RangeError | typeof Error | typeof SyntaxError, - /** error.name */ + /** error.name. Defaults to `Constructor.name` (that is, mapping[1].name */ string, ] >; export default [ ["ABORT_ERR", Error, "AbortError"], - ["ERR_CRYPTO_INVALID_DIGEST", TypeError, "TypeError"], - ["ERR_ENCODING_INVALID_ENCODED_DATA", TypeError, "TypeError"], - ["ERR_INVALID_ARG_TYPE", TypeError, "TypeError"], - ["ERR_INVALID_ARG_VALUE", TypeError, "TypeError"], - ["ERR_INVALID_PROTOCOL", TypeError, "TypeError"], - ["ERR_INVALID_THIS", TypeError, "TypeError"], - ["ERR_IPC_CHANNEL_CLOSED", Error, "Error"], - ["ERR_IPC_DISCONNECTED", Error, "Error"], - ["ERR_MISSING_ARGS", TypeError, "TypeError"], - ["ERR_OUT_OF_RANGE", RangeError, "RangeError"], - ["ERR_PARSE_ARGS_INVALID_OPTION_VALUE", TypeError, "TypeError"], - ["ERR_PARSE_ARGS_UNEXPECTED_POSITIONAL", TypeError, "TypeError"], - ["ERR_PARSE_ARGS_UNKNOWN_OPTION", TypeError, "TypeError"], - ["ERR_SERVER_NOT_RUNNING", Error, "Error"], - ["ERR_SOCKET_BAD_TYPE", TypeError, "TypeError"], - ["ERR_STREAM_ALREADY_FINISHED", TypeError, "TypeError"], - ["ERR_STREAM_CANNOT_PIPE", TypeError, "TypeError"], - ["ERR_STREAM_DESTROYED", TypeError, "TypeError"], - ["ERR_STREAM_NULL_VALUES", TypeError, "TypeError"], - ["ERR_STREAM_WRITE_AFTER_END", TypeError, "TypeError"], - ["ERR_ZLIB_INITIALIZATION_FAILED", Error, "Error"], - ["ERR_STRING_TOO_LONG", Error, "Error"], - ["ERR_CRYPTO_SCRYPT_INVALID_PARAMETER", Error, "Error"], - ["ERR_CRYPTO_INVALID_SCRYPT_PARAMS", RangeError, "RangeError"], - ["MODULE_NOT_FOUND", Error, "Error"], - ["ERR_ILLEGAL_CONSTRUCTOR", TypeError, "TypeError"], - ["ERR_INVALID_URL", TypeError, "TypeError"], - ["ERR_BUFFER_TOO_LARGE", RangeError, "RangeError"], - ["ERR_BROTLI_INVALID_PARAM", RangeError, "RangeError"], - ["ERR_UNKNOWN_ENCODING", TypeError, "TypeError"], - ["ERR_INVALID_STATE", Error, "Error"], - ["ERR_BUFFER_OUT_OF_BOUNDS", RangeError, "RangeError"], - 
["ERR_UNKNOWN_SIGNAL", TypeError, "TypeError"], - ["ERR_SOCKET_BAD_PORT", RangeError, "RangeError"], + ["ERR_CRYPTO_INVALID_DIGEST", TypeError], + ["ERR_ENCODING_INVALID_ENCODED_DATA", TypeError], + ["ERR_INVALID_ARG_TYPE", TypeError], + ["ERR_INVALID_ARG_VALUE", TypeError], + ["ERR_INVALID_PROTOCOL", TypeError], + ["ERR_INVALID_THIS", TypeError], + ["ERR_IPC_CHANNEL_CLOSED", Error], + ["ERR_IPC_DISCONNECTED", Error], + ["ERR_MISSING_ARGS", TypeError], + ["ERR_OUT_OF_RANGE", RangeError], + ["ERR_PARSE_ARGS_INVALID_OPTION_VALUE", TypeError], + ["ERR_PARSE_ARGS_UNEXPECTED_POSITIONAL", TypeError], + ["ERR_PARSE_ARGS_UNKNOWN_OPTION", TypeError], + ["ERR_SERVER_NOT_RUNNING", Error], + ["ERR_SOCKET_BAD_TYPE", TypeError], + ["ERR_STREAM_ALREADY_FINISHED", TypeError], + ["ERR_STREAM_CANNOT_PIPE", TypeError], + ["ERR_STREAM_DESTROYED", TypeError], + ["ERR_STREAM_NULL_VALUES", TypeError], + ["ERR_STREAM_WRITE_AFTER_END", TypeError], + ["ERR_ZLIB_INITIALIZATION_FAILED", Error], + ["ERR_STRING_TOO_LONG", Error], + ["ERR_CRYPTO_SCRYPT_INVALID_PARAMETER", Error], + ["ERR_CRYPTO_INVALID_SCRYPT_PARAMS", RangeError], + ["MODULE_NOT_FOUND", Error], + ["ERR_ILLEGAL_CONSTRUCTOR", TypeError], + ["ERR_INVALID_URL", TypeError], + ["ERR_BUFFER_TOO_LARGE", RangeError], + ["ERR_BROTLI_INVALID_PARAM", RangeError], + ["ERR_UNKNOWN_ENCODING", TypeError], + ["ERR_INVALID_STATE", Error], + ["ERR_BUFFER_OUT_OF_BOUNDS", RangeError], + ["ERR_UNKNOWN_SIGNAL", TypeError], + ["ERR_SOCKET_BAD_PORT", RangeError], ["ERR_STREAM_RELEASE_LOCK", Error, "AbortError"], // Bun-specific - ["ERR_FORMDATA_PARSE_ERROR", TypeError, "TypeError"], - ["ERR_BODY_ALREADY_USED", Error, "Error"], - ["ERR_STREAM_WRAP", Error, "Error"], - ["ERR_BORINGSSL", Error, "Error"], + ["ERR_FORMDATA_PARSE_ERROR", TypeError], + ["ERR_BODY_ALREADY_USED", Error], + ["ERR_STREAM_WRAP", Error], + ["ERR_BORINGSSL", Error], //NET - ["ERR_SOCKET_CLOSED_BEFORE_CONNECTION", Error, "Error"], - ["ERR_SOCKET_CLOSED", Error, "Error"], + ["ERR_SOCKET_CLOSED_BEFORE_CONNECTION", Error], + ["ERR_SOCKET_CLOSED", Error], //HTTP2 - ["ERR_INVALID_HTTP_TOKEN", TypeError, "TypeError"], - ["ERR_HTTP2_PSEUDOHEADER_NOT_ALLOWED", TypeError, "TypeError"], - ["ERR_HTTP2_SEND_FILE", Error, "Error"], - ["ERR_HTTP2_SEND_FILE_NOSEEK", Error, "Error"], + ["ERR_INVALID_HTTP_TOKEN", TypeError], + ["ERR_HTTP2_PSEUDOHEADER_NOT_ALLOWED", TypeError], + ["ERR_HTTP2_SEND_FILE", Error], + ["ERR_HTTP2_SEND_FILE_NOSEEK", Error], ["ERR_HTTP2_HEADERS_SENT", Error, "ERR_HTTP2_HEADERS_SENT"], - ["ERR_HTTP2_INFO_STATUS_NOT_ALLOWED", RangeError, "RangeError"], - ["ERR_HTTP2_STATUS_INVALID", RangeError, "RangeError"], - ["ERR_HTTP2_INVALID_PSEUDOHEADER", TypeError, "TypeError"], - ["ERR_HTTP2_INVALID_HEADER_VALUE", TypeError, "TypeError"], - ["ERR_HTTP2_PING_CANCEL", Error, "Error"], - ["ERR_HTTP2_STREAM_ERROR", Error, "Error"], - ["ERR_HTTP2_INVALID_SINGLE_VALUE_HEADER", TypeError, "TypeError"], - ["ERR_HTTP2_SESSION_ERROR", Error, "Error"], - ["ERR_HTTP2_INVALID_SESSION", Error, "Error"], - ["ERR_HTTP2_INVALID_HEADERS", Error, "Error"], - ["ERR_HTTP2_PING_LENGTH", RangeError, "RangeError"], - ["ERR_HTTP2_INVALID_STREAM", Error, "Error"], - ["ERR_HTTP2_TRAILERS_ALREADY_SENT", Error, "Error"], - ["ERR_HTTP2_TRAILERS_NOT_READY", Error, "Error"], - ["ERR_HTTP2_PAYLOAD_FORBIDDEN", Error, "Error"], - ["ERR_HTTP2_NO_SOCKET_MANIPULATION", Error, "Error"], - ["ERR_HTTP2_SOCKET_UNBOUND", Error, "Error"], - ["ERR_HTTP2_ERROR", Error, "Error"], - ["ERR_HTTP2_OUT_OF_STREAMS", Error, "Error"], + 
["ERR_HTTP2_INFO_STATUS_NOT_ALLOWED", RangeError], + ["ERR_HTTP2_STATUS_INVALID", RangeError], + ["ERR_HTTP2_INVALID_PSEUDOHEADER", TypeError], + ["ERR_HTTP2_INVALID_HEADER_VALUE", TypeError], + ["ERR_HTTP2_PING_CANCEL", Error], + ["ERR_HTTP2_STREAM_ERROR", Error], + ["ERR_HTTP2_INVALID_SINGLE_VALUE_HEADER", TypeError], + ["ERR_HTTP2_SESSION_ERROR", Error], + ["ERR_HTTP2_INVALID_SESSION", Error], + ["ERR_HTTP2_INVALID_HEADERS", Error], + ["ERR_HTTP2_PING_LENGTH", RangeError], + ["ERR_HTTP2_INVALID_STREAM", Error], + ["ERR_HTTP2_TRAILERS_ALREADY_SENT", Error], + ["ERR_HTTP2_TRAILERS_NOT_READY", Error], + ["ERR_HTTP2_PAYLOAD_FORBIDDEN", Error], + ["ERR_HTTP2_NO_SOCKET_MANIPULATION", Error], + ["ERR_HTTP2_SOCKET_UNBOUND", Error], + ["ERR_HTTP2_ERROR", Error], + ["ERR_HTTP2_OUT_OF_STREAMS", Error], ] as ErrorCodeMapping; diff --git a/src/codegen/generate-node-errors.ts b/src/codegen/generate-node-errors.ts index 6dfcedb4e1f096..debbb07fc50da9 100644 --- a/src/codegen/generate-node-errors.ts +++ b/src/codegen/generate-node-errors.ts @@ -67,7 +67,8 @@ pub const Error = enum(u8) { let i = 0; let listForUsingNamespace = ""; -for (const [code, constructor, name] of NodeErrors) { +for (let [code, constructor, name] of NodeErrors) { + if (name == null) name = constructor.name; enumHeader += ` ${code} = ${i},\n`; listHeader += ` { JSC::ErrorType::${constructor.name}, "${name}"_s, "${code}"_s },\n`; zig += ` ${code} = ${i},\n`; From b55ca429c728e4c90553f3866484e0bf10bf1fff Mon Sep 17 00:00:00 2001 From: Dylan Conway <35280289+dylan-conway@users.noreply.github.com> Date: Wed, 11 Dec 2024 05:05:49 -0800 Subject: [PATCH 6/7] Implement text-based lockfile (#15705) --- src/allocators.zig | 13 +- src/bun.js/module_loader.zig | 6 +- src/cli.zig | 11 +- src/cli/outdated_command.zig | 3 +- src/cli/pack_command.zig | 3 +- src/cli/package_manager_command.zig | 62 +- src/cli/pm_trusted_command.zig | 35 +- src/cli/publish_command.zig | 3 +- src/fmt.zig | 15 +- src/install/bin.zig | 139 +- src/install/bun.lock.zig | 1596 +++++++++++++++++++++++ src/install/dependency.zig | 14 +- src/install/extract_tarball.zig | 34 +- src/install/install.zig | 610 ++++++--- src/install/integrity.zig | 2 +- src/install/lifecycle_script_runner.zig | 4 +- src/install/lockfile.zig | 549 +++++--- src/install/migration.zig | 45 +- src/install/npm.zig | 137 +- src/install/patch_install.zig | 6 +- src/install/repository.zig | 54 + src/install/resolution.zig | 88 +- src/install/semver.zig | 157 +++ src/install/versioned_url.zig | 4 - src/json_parser.zig | 163 ++- src/string_mutable.zig | 16 +- 26 files changed, 3126 insertions(+), 643 deletions(-) create mode 100644 src/install/bun.lock.zig diff --git a/src/allocators.zig b/src/allocators.zig index e43c11ed074ee0..32b3e5fd8a21aa 100644 --- a/src/allocators.zig +++ b/src/allocators.zig @@ -4,6 +4,7 @@ const FeatureFlags = @import("./feature_flags.zig"); const Environment = @import("./env.zig"); const FixedBufferAllocator = std.heap.FixedBufferAllocator; const bun = @import("root").bun; +const OOM = bun.OOM; pub fn isSliceInBufferT(comptime T: type, slice: []const T, buffer: []const T) bool { return (@intFromPtr(buffer.ptr) <= @intFromPtr(slice.ptr) and @@ -328,7 +329,7 @@ pub fn BSSStringList(comptime _count: usize, comptime _item_length: usize) type return @constCast(slice); } - pub fn appendMutable(self: *Self, comptime AppendType: type, _value: AppendType) ![]u8 { + pub fn appendMutable(self: *Self, comptime AppendType: type, _value: AppendType) OOM![]u8 { const appended = try 
@call(bun.callmod_inline, append, .{ self, AppendType, _value }); return @constCast(appended); } @@ -337,17 +338,17 @@ pub fn BSSStringList(comptime _count: usize, comptime _item_length: usize) type return try self.appendMutable(EmptyType, EmptyType{ .len = len }); } - pub fn printWithType(self: *Self, comptime fmt: []const u8, comptime Args: type, args: Args) ![]const u8 { + pub fn printWithType(self: *Self, comptime fmt: []const u8, comptime Args: type, args: Args) OOM![]const u8 { var buf = try self.appendMutable(EmptyType, EmptyType{ .len = std.fmt.count(fmt, args) + 1 }); buf[buf.len - 1] = 0; return std.fmt.bufPrint(buf.ptr[0 .. buf.len - 1], fmt, args) catch unreachable; } - pub fn print(self: *Self, comptime fmt: []const u8, args: anytype) ![]const u8 { + pub fn print(self: *Self, comptime fmt: []const u8, args: anytype) OOM![]const u8 { return try printWithType(self, fmt, @TypeOf(args), args); } - pub fn append(self: *Self, comptime AppendType: type, _value: AppendType) ![]const u8 { + pub fn append(self: *Self, comptime AppendType: type, _value: AppendType) OOM![]const u8 { self.mutex.lock(); defer self.mutex.unlock(); @@ -355,7 +356,7 @@ pub fn BSSStringList(comptime _count: usize, comptime _item_length: usize) type } threadlocal var lowercase_append_buf: bun.PathBuffer = undefined; - pub fn appendLowerCase(self: *Self, comptime AppendType: type, _value: AppendType) ![]const u8 { + pub fn appendLowerCase(self: *Self, comptime AppendType: type, _value: AppendType) OOM![]const u8 { self.mutex.lock(); defer self.mutex.unlock(); @@ -374,7 +375,7 @@ pub fn BSSStringList(comptime _count: usize, comptime _item_length: usize) type self: *Self, comptime AppendType: type, _value: AppendType, - ) ![]const u8 { + ) OOM![]const u8 { const value_len: usize = brk: { switch (comptime AppendType) { EmptyType, []const u8, []u8, [:0]const u8, [:0]u8 => { diff --git a/src/bun.js/module_loader.zig b/src/bun.js/module_loader.zig index c8c6c19393647c..a7d419ee91ac04 100644 --- a/src/bun.js/module_loader.zig +++ b/src/bun.js/module_loader.zig @@ -184,9 +184,9 @@ fn dumpSourceStringFailiable(vm: *VirtualMachine, specifier: string, written: [] \\ "mappings": "{}" \\}} , .{ - bun.fmt.formatJSONStringUTF8(std.fs.path.basename(specifier)), - bun.fmt.formatJSONStringUTF8(specifier), - bun.fmt.formatJSONStringUTF8(source_file), + bun.fmt.formatJSONStringUTF8(std.fs.path.basename(specifier), .{}), + bun.fmt.formatJSONStringUTF8(specifier, .{}), + bun.fmt.formatJSONStringUTF8(source_file, .{}), mappings.formatVLQs(), }); try bufw.flush(); diff --git a/src/cli.zig b/src/cli.zig index 17adb70c0480b2..49a9124216e25f 100644 --- a/src/cli.zig +++ b/src/cli.zig @@ -21,6 +21,7 @@ const js_ast = bun.JSAst; const linker = @import("linker.zig"); const RegularExpression = bun.RegularExpression; const builtin = @import("builtin"); +const File = bun.sys.File; const debug = Output.scoped(.CLI, true); @@ -2133,7 +2134,15 @@ pub const Command = struct { if (strings.eqlComptime(extension, ".lockb")) { for (bun.argv) |arg| { if (strings.eqlComptime(arg, "--hash")) { - try PackageManagerCommand.printHash(ctx, ctx.args.entry_points[0]); + var path_buf: bun.PathBuffer = undefined; + @memcpy(path_buf[0..ctx.args.entry_points[0].len], ctx.args.entry_points[0]); + path_buf[ctx.args.entry_points[0].len] = 0; + const lockfile_path = path_buf[0..ctx.args.entry_points[0].len :0]; + const file = File.open(lockfile_path, bun.O.RDONLY, 0).unwrap() catch |err| { + Output.err(err, "failed to open lockfile", .{}); + Global.crash(); + }; + try 
PackageManagerCommand.printHash(ctx, file); return; } } diff --git a/src/cli/outdated_command.zig b/src/cli/outdated_command.zig index 0a951002ba0e3a..f24d333182bcb3 100644 --- a/src/cli/outdated_command.zig +++ b/src/cli/outdated_command.zig @@ -44,11 +44,10 @@ pub const OutdatedCommand = struct { } fn outdated(ctx: Command.Context, original_cwd: string, manager: *PackageManager, comptime log_level: PackageManager.Options.LogLevel) !void { - const load_lockfile_result = manager.lockfile.loadFromDisk( + const load_lockfile_result = manager.lockfile.loadFromCwd( manager, manager.allocator, manager.log, - manager.options.lockfile_path, true, ); diff --git a/src/cli/pack_command.zig b/src/cli/pack_command.zig index 1ad073b2654ae1..195a9c3edfad8c 100644 --- a/src/cli/pack_command.zig +++ b/src/cli/pack_command.zig @@ -102,11 +102,10 @@ pub const PackCommand = struct { Output.flush(); var lockfile: Lockfile = undefined; - const load_from_disk_result = lockfile.loadFromDisk( + const load_from_disk_result = lockfile.loadFromCwd( manager, manager.allocator, manager.log, - manager.options.lockfile_path, false, ); diff --git a/src/cli/package_manager_command.zig b/src/cli/package_manager_command.zig index 03080a509424f5..9151b94d6f6fd2 100644 --- a/src/cli/package_manager_command.zig +++ b/src/cli/package_manager_command.zig @@ -15,7 +15,7 @@ const PackageID = Install.PackageID; const DependencyID = Install.DependencyID; const PackageManager = Install.PackageManager; const Lockfile = @import("../install/lockfile.zig"); -const NodeModulesFolder = Lockfile.Tree.NodeModulesFolder; +const NodeModulesFolder = Lockfile.Tree.Iterator(.node_modules).Next; const Path = @import("../resolver/resolve_path.zig"); const String = @import("../install/semver.zig").String; const ArrayIdentityContext = bun.ArrayIdentityContext; @@ -26,6 +26,7 @@ const DefaultTrustedCommand = @import("./pm_trusted_command.zig").DefaultTrusted const Environment = bun.Environment; pub const PackCommand = @import("./pack_command.zig").PackCommand; const Npm = Install.Npm; +const File = bun.sys.File; const ByName = struct { dependencies: []const Dependency, @@ -41,7 +42,7 @@ const ByName = struct { }; pub const PackageManagerCommand = struct { - pub fn handleLoadLockfileErrors(load_lockfile: Lockfile.LoadFromDiskResult, pm: *PackageManager) void { + pub fn handleLoadLockfileErrors(load_lockfile: Lockfile.LoadResult, pm: *PackageManager) void { if (load_lockfile == .not_found) { if (pm.options.log_level != .silent) { Output.errGeneric("Lockfile not found", .{}); @@ -57,17 +58,20 @@ pub const PackageManagerCommand = struct { } } - pub fn printHash(ctx: Command.Context, lockfile_: []const u8) !void { + pub fn printHash(ctx: Command.Context, file: File) !void { @setCold(true); - var lockfile_buffer: bun.PathBuffer = undefined; - @memcpy(lockfile_buffer[0..lockfile_.len], lockfile_); - lockfile_buffer[lockfile_.len] = 0; - const lockfile = lockfile_buffer[0..lockfile_.len :0]; + const cli = try PackageManager.CommandLineArguments.parse(ctx.allocator, .pm); var pm, const cwd = try PackageManager.init(ctx, cli, PackageManager.Subcommand.pm); defer ctx.allocator.free(cwd); - const load_lockfile = pm.lockfile.loadFromDisk(pm, ctx.allocator, ctx.log, lockfile, true); + const bytes = file.readToEnd(ctx.allocator).unwrap() catch |err| { + Output.err(err, "failed to read lockfile", .{}); + Global.crash(); + }; + + const load_lockfile = pm.lockfile.loadFromBytes(pm, bytes, ctx.allocator, ctx.log); + handleLoadLockfileErrors(load_lockfile, pm); 
Output.flush(); @@ -198,7 +202,7 @@ pub const PackageManagerCommand = struct { Output.flush(); return; } else if (strings.eqlComptime(subcommand, "hash")) { - const load_lockfile = pm.lockfile.loadFromDisk(pm, ctx.allocator, ctx.log, "bun.lockb", true); + const load_lockfile = pm.lockfile.loadFromCwd(pm, ctx.allocator, ctx.log, true); handleLoadLockfileErrors(load_lockfile, pm); _ = try pm.lockfile.hasMetaHashChanged(false, pm.lockfile.packages.len); @@ -209,7 +213,7 @@ pub const PackageManagerCommand = struct { Output.enableBuffering(); Global.exit(0); } else if (strings.eqlComptime(subcommand, "hash-print")) { - const load_lockfile = pm.lockfile.loadFromDisk(pm, ctx.allocator, ctx.log, "bun.lockb", true); + const load_lockfile = pm.lockfile.loadFromCwd(pm, ctx.allocator, ctx.log, true); handleLoadLockfileErrors(load_lockfile, pm); Output.flush(); @@ -218,7 +222,7 @@ pub const PackageManagerCommand = struct { Output.enableBuffering(); Global.exit(0); } else if (strings.eqlComptime(subcommand, "hash-string")) { - const load_lockfile = pm.lockfile.loadFromDisk(pm, ctx.allocator, ctx.log, "bun.lockb", true); + const load_lockfile = pm.lockfile.loadFromCwd(pm, ctx.allocator, ctx.log, true); handleLoadLockfileErrors(load_lockfile, pm); _ = try pm.lockfile.hasMetaHashChanged(true, pm.lockfile.packages.len); @@ -291,19 +295,19 @@ pub const PackageManagerCommand = struct { try TrustCommand.exec(ctx, pm, args); Global.exit(0); } else if (strings.eqlComptime(subcommand, "ls")) { - const load_lockfile = pm.lockfile.loadFromDisk(pm, ctx.allocator, ctx.log, "bun.lockb", true); + const load_lockfile = pm.lockfile.loadFromCwd(pm, ctx.allocator, ctx.log, true); handleLoadLockfileErrors(load_lockfile, pm); Output.flush(); Output.disableBuffering(); const lockfile = load_lockfile.ok.lockfile; - var iterator = Lockfile.Tree.Iterator.init(lockfile); + var iterator = Lockfile.Tree.Iterator(.node_modules).init(lockfile); var max_depth: usize = 0; var directories = std.ArrayList(NodeModulesFolder).init(ctx.allocator); defer directories.deinit(); - while (iterator.nextNodeModulesFolder(null)) |node_modules| { + while (iterator.next(null)) |node_modules| { const path_len = node_modules.relative_path.len; const path = try ctx.allocator.alloc(u8, path_len + 1); bun.copy(u8, path, node_modules.relative_path); @@ -341,7 +345,7 @@ pub const PackageManagerCommand = struct { const resolutions = slice.items(.resolution); const root_deps = slice.items(.dependencies)[0]; - Output.println("{s} node_modules ({d})", .{ path, dependencies.len }); + Output.println("{s} node_modules ({d})", .{ path, lockfile.buffers.hoisted_dependencies.items.len }); const string_bytes = lockfile.buffers.string_bytes.items; const sorted_dependencies = try ctx.allocator.alloc(DependencyID, root_deps.len); defer ctx.allocator.free(sorted_dependencies); @@ -369,21 +373,29 @@ pub const PackageManagerCommand = struct { Global.exit(0); } else if (strings.eqlComptime(subcommand, "migrate")) { - if (!pm.options.enable.force_save_lockfile) try_load_bun: { - std.fs.cwd().accessZ("bun.lockb", .{ .mode = .read_only }) catch break :try_load_bun; + if (!pm.options.enable.force_save_lockfile) { + if (bun.sys.existsZ("bun.lock")) { + Output.prettyErrorln( + \\error: bun.lock already exists + \\run with --force to overwrite + , .{}); + Global.exit(1); + } - Output.prettyErrorln( - \\error: bun.lockb already exists - \\run with --force to overwrite - , .{}); - Global.exit(1); + if (bun.sys.existsZ("bun.lockb")) { + Output.prettyErrorln( + \\error: bun.lockb 
already exists + \\run with --force to overwrite + , .{}); + Global.exit(1); + } } const load_lockfile = @import("../install/migration.zig").detectAndLoadOtherLockfile( pm.lockfile, + bun.FD.cwd(), pm, ctx.allocator, pm.log, - pm.options.lockfile_path, ); if (load_lockfile == .not_found) { Output.prettyErrorln( @@ -393,7 +405,9 @@ pub const PackageManagerCommand = struct { } handleLoadLockfileErrors(load_lockfile, pm); const lockfile = load_lockfile.ok.lockfile; - lockfile.saveToDisk(pm.options.lockfile_path, pm.options.log_level.isVerbose()); + + const save_format: Lockfile.LoadResult.LockfileFormat = if (pm.options.save_text_lockfile) .text else .binary; + lockfile.saveToDisk(save_format, pm.options.log_level.isVerbose()); Global.exit(0); } diff --git a/src/cli/pm_trusted_command.zig b/src/cli/pm_trusted_command.zig index 4528ce8bfa4815..a26f46a4f08b1b 100644 --- a/src/cli/pm_trusted_command.zig +++ b/src/cli/pm_trusted_command.zig @@ -37,12 +37,11 @@ pub const UntrustedCommand = struct { Output.prettyError("bun pm untrusted v" ++ Global.package_json_version_with_sha ++ "\n\n", .{}); Output.flush(); - const load_lockfile = pm.lockfile.loadFromDisk(pm, ctx.allocator, ctx.log, "bun.lockb", true); + const load_lockfile = pm.lockfile.loadFromCwd(pm, ctx.allocator, ctx.log, true); PackageManagerCommand.handleLoadLockfileErrors(load_lockfile, pm); try pm.updateLockfileIfNeeded(load_lockfile); const packages = pm.lockfile.packages.slice(); - const metas: []Lockfile.Package.Meta = packages.items(.meta); const scripts: []Lockfile.Package.Scripts = packages.items(.scripts); const resolutions: []Install.Resolution = packages.items(.resolution); const buf = pm.lockfile.buffers.string_bytes.items; @@ -59,10 +58,8 @@ pub const UntrustedCommand = struct { // called alias because a dependency name is not always the package name const alias = dep.name.slice(buf); - if (metas[package_id].hasInstallScript()) { - if (!pm.lockfile.hasTrustedDependency(alias)) { - try untrusted_dep_ids.put(ctx.allocator, dep_id, {}); - } + if (!pm.lockfile.hasTrustedDependency(alias)) { + try untrusted_dep_ids.put(ctx.allocator, dep_id, {}); } } @@ -74,7 +71,7 @@ pub const UntrustedCommand = struct { var untrusted_deps: std.AutoArrayHashMapUnmanaged(DependencyID, Lockfile.Package.Scripts.List) = .{}; defer untrusted_deps.deinit(ctx.allocator); - var tree_iterator = Lockfile.Tree.Iterator.init(pm.lockfile); + var tree_iterator = Lockfile.Tree.Iterator(.node_modules).init(pm.lockfile); const top_level_without_trailing_slash = strings.withoutTrailingSlash(Fs.FileSystem.instance.top_level_dir); var abs_node_modules_path: std.ArrayListUnmanaged(u8) = .{}; @@ -82,7 +79,7 @@ pub const UntrustedCommand = struct { try abs_node_modules_path.appendSlice(ctx.allocator, top_level_without_trailing_slash); try abs_node_modules_path.append(ctx.allocator, std.fs.path.sep); - while (tree_iterator.nextNodeModulesFolder(null)) |node_modules| { + while (tree_iterator.next(null)) |node_modules| { // + 1 because we want to keep the path separator abs_node_modules_path.items.len = top_level_without_trailing_slash.len + 1; try abs_node_modules_path.appendSlice(ctx.allocator, node_modules.relative_path); @@ -187,7 +184,7 @@ pub const TrustCommand = struct { if (args.len == 2) errorExpectedArgs(); - const load_lockfile = pm.lockfile.loadFromDisk(pm, ctx.allocator, ctx.log, "bun.lockb", true); + const load_lockfile = pm.lockfile.loadFromCwd(pm, ctx.allocator, ctx.log, true); PackageManagerCommand.handleLoadLockfileErrors(load_lockfile, pm); try 
pm.updateLockfileIfNeeded(load_lockfile); @@ -203,7 +200,6 @@ pub const TrustCommand = struct { const buf = pm.lockfile.buffers.string_bytes.items; const packages = pm.lockfile.packages.slice(); - const metas: []Lockfile.Package.Meta = packages.items(.meta); const resolutions: []Install.Resolution = packages.items(.resolution); const scripts: []Lockfile.Package.Scripts = packages.items(.scripts); @@ -216,10 +212,8 @@ pub const TrustCommand = struct { const alias = dep.name.slice(buf); - if (metas[package_id].hasInstallScript()) { - if (!pm.lockfile.hasTrustedDependency(alias)) { - try untrusted_dep_ids.put(ctx.allocator, dep_id, {}); - } + if (!pm.lockfile.hasTrustedDependency(alias)) { + try untrusted_dep_ids.put(ctx.allocator, dep_id, {}); } } @@ -231,7 +225,7 @@ pub const TrustCommand = struct { // Instead of running them right away, we group scripts by depth in the node_modules // file structure, then run them starting at max depth. This ensures lifecycle scripts are run // in the correct order as they would during a normal install - var tree_iter = Lockfile.Tree.Iterator.init(pm.lockfile); + var tree_iter = Lockfile.Tree.Iterator(.node_modules).init(pm.lockfile); const top_level_without_trailing_slash = strings.withoutTrailingSlash(Fs.FileSystem.instance.top_level_dir); var abs_node_modules_path: std.ArrayListUnmanaged(u8) = .{}; @@ -248,7 +242,7 @@ pub const TrustCommand = struct { var scripts_count: usize = 0; - while (tree_iter.nextNodeModulesFolder(null)) |node_modules| { + while (tree_iter.next(null)) |node_modules| { abs_node_modules_path.items.len = top_level_without_trailing_slash.len + 1; try abs_node_modules_path.appendSlice(ctx.allocator, node_modules.relative_path); @@ -423,7 +417,14 @@ pub const TrustCommand = struct { try pm.lockfile.trusted_dependencies.?.put(ctx.allocator, @truncate(String.Builder.stringHash(name)), {}); } - pm.lockfile.saveToDisk(pm.options.lockfile_path, pm.options.log_level.isVerbose()); + const save_format: Lockfile.LoadResult.LockfileFormat = if (pm.options.save_text_lockfile) + .text + else switch (load_lockfile) { + .not_found => .binary, + .err => |err| err.format, + .ok => |ok| ok.format, + }; + pm.lockfile.saveToDisk(save_format, pm.options.log_level.isVerbose()); var buffer_writer = try bun.js_printer.BufferWriter.init(ctx.allocator); try buffer_writer.buffer.list.ensureTotalCapacity(ctx.allocator, package_json_contents.len + 1); diff --git a/src/cli/publish_command.zig b/src/cli/publish_command.zig index 466f7ef488cd06..b62ae823d1ee0c 100644 --- a/src/cli/publish_command.zig +++ b/src/cli/publish_command.zig @@ -281,11 +281,10 @@ pub const PublishCommand = struct { manager: *PackageManager, ) FromWorkspaceError!Context(directory_publish) { var lockfile: Lockfile = undefined; - const load_from_disk_result = lockfile.loadFromDisk( + const load_from_disk_result = lockfile.loadFromCwd( manager, manager.allocator, manager.log, - manager.options.lockfile_path, false, ); diff --git a/src/fmt.zig b/src/fmt.zig index 73277701cc7f76..33d25bef1db440 100644 --- a/src/fmt.zig +++ b/src/fmt.zig @@ -234,9 +234,18 @@ const JSONFormatter = struct { const JSONFormatterUTF8 = struct { input: []const u8, + opts: Options, + + pub const Options = struct { + quote: bool = true, + }; pub fn format(self: JSONFormatterUTF8, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { - try bun.js_printer.writeJSONString(self.input, @TypeOf(writer), writer, .utf8); + if (self.opts.quote) { + try bun.js_printer.writeJSONString(self.input, 
@TypeOf(writer), writer, .utf8); + } else { + try bun.js_printer.writePreQuotedString(self.input, @TypeOf(writer), writer, '"', false, true, .utf8); + } } }; @@ -245,8 +254,8 @@ pub fn formatJSONString(text: []const u8) JSONFormatter { return .{ .input = text }; } -pub fn formatJSONStringUTF8(text: []const u8) JSONFormatterUTF8 { - return .{ .input = text }; +pub fn formatJSONStringUTF8(text: []const u8, opts: JSONFormatterUTF8.Options) JSONFormatterUTF8 { + return .{ .input = text, .opts = opts }; } const SharedTempBuffer = [32 * 1024]u8; diff --git a/src/install/bin.zig b/src/install/bin.zig index 1667455d2605ed..9837f80206cc29 100644 --- a/src/install/bin.zig +++ b/src/install/bin.zig @@ -17,6 +17,9 @@ const string = bun.string; const Install = @import("./install.zig"); const PackageInstall = Install.PackageInstall; const Dependency = @import("./dependency.zig"); +const OOM = bun.OOM; +const JSON = bun.JSON; +const Lockfile = Install.Lockfile; /// Normalized `bin` field in [package.json](https://docs.npmjs.com/cli/v8/configuring-npm/package-json#bin) /// Can be a: @@ -25,11 +28,16 @@ const Dependency = @import("./dependency.zig"); /// - map where keys are names of the binaries and values are file paths to the binaries pub const Bin = extern struct { tag: Tag = Tag.none, - _padding_tag: [3]u8 = .{0} ** 3, + unset: u8 = 0, + _padding_tag: [2]u8 = .{0} ** 2, // Largest member must be zero initialized value: Value = Value{ .map = ExternalStringList{} }, + pub fn isUnset(this: *const Bin) bool { + return this.unset != 0; + } + pub fn count(this: *const Bin, buf: []const u8, extern_strings: []const ExternalString, comptime StringBuilder: type, builder: StringBuilder) u32 { switch (this.tag) { .file => builder.count(this.value.file.slice(buf)), @@ -56,18 +64,21 @@ pub const Bin = extern struct { .none => { return Bin{ .tag = .none, + .unset = this.unset, .value = Value.init(.{ .none = {} }), }; }, .file => { return Bin{ .tag = .file, + .unset = this.unset, .value = Value.init(.{ .file = builder.append(String, this.value.file.slice(buf)) }), }; }, .named_file => { return Bin{ .tag = .named_file, + .unset = this.unset, .value = Value.init( .{ .named_file = [2]String{ @@ -81,6 +92,7 @@ pub const Bin = extern struct { .dir => { return Bin{ .tag = .dir, + .unset = this.unset, .value = Value.init(.{ .dir = builder.append(String, this.value.dir.slice(buf)) }), }; }, @@ -91,6 +103,7 @@ pub const Bin = extern struct { return Bin{ .tag = .map, + .unset = this.unset, .value = Value.init(.{ .map = ExternalStringList.init(all_extern_strings, extern_strings_slice) }), }; }, @@ -99,6 +112,130 @@ pub const Bin = extern struct { unreachable; } + pub fn cloneAppend(this: *const Bin, this_buf: string, this_extern_strings: []const ExternalString, lockfile: *Lockfile) OOM!Bin { + var string_buf = lockfile.stringBuf(); + defer string_buf.apply(lockfile); + + const cloned: Bin = .{ + .tag = this.tag, + .unset = this.unset, + + .value = switch (this.tag) { + .none => Value.init(.{ .none = {} }), + .file => Value.init(.{ + .file = try string_buf.append(this.value.file.slice(this_buf)), + }), + .named_file => Value.init(.{ .named_file = .{ + try string_buf.append(this.value.named_file[0].slice(this_buf)), + try string_buf.append(this.value.named_file[1].slice(this_buf)), + } }), + .dir => Value.init(.{ + .dir = try string_buf.append(this.value.dir.slice(this_buf)), + }), + .map => map: { + const off = lockfile.buffers.extern_strings.items.len; + for (this.value.map.get(this_extern_strings)) |extern_string| { + try 
lockfile.buffers.extern_strings.append( + lockfile.allocator, + try string_buf.appendExternal(extern_string.slice(this_buf)), + ); + } + const new = lockfile.buffers.extern_strings.items[off..]; + break :map Value.init(.{ + .map = ExternalStringList.init(lockfile.buffers.extern_strings.items, new), + }); + }, + }, + }; + + return cloned; + } + + /// Used for packages read from text lockfile. + pub fn parseAppend( + allocator: std.mem.Allocator, + bin_expr: JSON.Expr, + buf: *String.Buf, + extern_strings: *std.ArrayListUnmanaged(ExternalString), + ) OOM!Bin { + switch (bin_expr.data) { + .e_object => |obj| { + switch (obj.properties.len) { + 0 => {}, + 1 => { + const bin_name = obj.properties.ptr[0].key.?.asString(allocator) orelse return .{}; + const value = obj.properties.ptr[0].value.?.asString(allocator) orelse return .{}; + + return .{ + .tag = .named_file, + .value = .{ + .named_file = .{ + try buf.append(bin_name), + try buf.append(value), + }, + }, + }; + }, + else => { + const current_len = extern_strings.items.len; + const num_props: usize = obj.properties.len * 2; + try extern_strings.ensureTotalCapacityPrecise( + allocator, + current_len + num_props, + ); + var new = extern_strings.items.ptr[current_len .. current_len + num_props]; + extern_strings.items.len += num_props; + + var i: usize = 0; + for (obj.properties.slice()) |bin_prop| { + const key = bin_prop.key.?; + const value = bin_prop.value.?; + const key_str = key.asString(allocator) orelse return .{}; + const value_str = value.asString(allocator) orelse return .{}; + new[i] = try buf.appendExternal(key_str); + i += 1; + new[i] = try buf.appendExternal(value_str); + i += 1; + } + if (comptime Environment.allow_assert) { + bun.assert(i == new.len); + } + return .{ + .tag = .map, + .value = .{ + .map = ExternalStringList.init(extern_strings.items, new), + }, + }; + }, + } + }, + .e_string => |str| { + if (str.data.len > 0) { + return .{ + .tag = .file, + .value = .{ + .file = try buf.append(str.data), + }, + }; + } + }, + else => {}, + } + return .{}; + } + + pub fn parseAppendFromDirectories(allocator: std.mem.Allocator, bin_expr: JSON.Expr, buf: *String.Buf) OOM!Bin { + if (bin_expr.asString(allocator)) |bin_str| { + return .{ + .tag = .dir, + .value = .{ + .dir = try buf.append(bin_str), + }, + }; + } + return .{}; + } + pub fn init() Bin { return bun.serializable(.{ .tag = .none, .value = Value.init(.{ .none = {} }) }); } diff --git a/src/install/bun.lock.zig b/src/install/bun.lock.zig new file mode 100644 index 00000000000000..9e09dcc99a8717 --- /dev/null +++ b/src/install/bun.lock.zig @@ -0,0 +1,1596 @@ +const std = @import("std"); +const bun = @import("root").bun; +const string = bun.string; +const stringZ = bun.stringZ; +const strings = bun.strings; +const URL = bun.URL; +const PackageManager = bun.install.PackageManager; +const OOM = bun.OOM; +const logger = bun.logger; +const BinaryLockfile = bun.install.Lockfile; +const JSON = bun.JSON; +const Output = bun.Output; +const Expr = bun.js_parser.Expr; +const MutableString = bun.MutableString; +const DependencySlice = BinaryLockfile.DependencySlice; +const Install = bun.install; +const Dependency = Install.Dependency; +const PackageID = Install.PackageID; +const Semver = bun.Semver; +const String = Semver.String; +const Resolution = Install.Resolution; +const PackageNameHash = Install.PackageNameHash; +const NameHashMap = BinaryLockfile.NameHashMap; +const Repository = Install.Repository; +const Progress = bun.Progress; +const Environment = bun.Environment; +const 
Global = bun.Global;
+const LoadResult = BinaryLockfile.LoadResult;
+const TruncatedPackageNameHash = Install.TruncatedPackageNameHash;
+const invalid_package_id = Install.invalid_package_id;
+const Npm = Install.Npm;
+const ExtractTarball = @import("./extract_tarball.zig");
+const Integrity = @import("./integrity.zig").Integrity;
+const Meta = BinaryLockfile.Package.Meta;
+const Negatable = Npm.Negatable;
+const DependencyID = Install.DependencyID;
+const invalid_dependency_id = Install.invalid_dependency_id;
+
+/// A property key in the `packages` field of the lockfile
+pub const PkgPath = struct {
+    raw: string,
+    depth: u8,
+
+    /// raw must be valid
+    /// fills buf with the path to the dependency in node_modules.
+    /// e.g. loose-envify/js-tokens@4.0.0 -> node_modules/loose-envify/node_modules/js-tokens
+    pub fn path(this: PkgPath, path_buf: []u8, comptime sep: u8) stringZ {
+        var buf = path_buf;
+        var remain = this.raw;
+
+        const end = loop: while (true) {
+            @memcpy(buf[0.."node_modules/".len], "node_modules" ++ [1]u8{sep});
+            buf = buf["node_modules/".len..];
+
+            var at = strings.indexOfChar(remain, '@') orelse unreachable;
+            var slash = strings.indexOfChar(remain, '/') orelse break :loop at;
+
+            if (at == 0) {
+                // scoped package, find next '@' and '/'
+                at += 1 + (strings.indexOfChar(remain[1..], '@') orelse unreachable);
+                slash += 1 + (strings.indexOfChar(remain[slash + 1 ..], '/') orelse {
+                    break :loop at;
+                });
+            }
+
+            if (at < slash) {
+                // slash is in the version
+                break :loop at;
+            }
+
+            @memcpy(buf[0..slash], remain[0..slash]);
+            buf[slash] = sep;
+            buf = buf[slash + 1 ..];
+            remain = remain[slash + 1 ..];
+        };
+
+        @memcpy(buf[0..end], remain[0..end]);
+        buf = buf[end..];
+        buf[0] = 0;
+        return path_buf[0 .. @intFromPtr(buf.ptr) - @intFromPtr(path_buf.ptr) :0];
+    }
+
+    pub fn reverseIterator(input: string) ReverseIterator {
+        return .{
+            .input = input,
+            .i = @intCast(input.len),
+        };
+    }
+
+    pub const ReverseIterator = struct {
+        input: string,
+        i: u32,
+
+        pub fn next(this: *ReverseIterator) error{InvalidPackageKey}!?string {
+            if (this.i == 0) return null;
+
+            const remain = this.input[0..this.i];
+            if (remain.len == 0) return error.InvalidPackageKey;
+
+            const slash = strings.indexOfCharNeg(remain, '/') orelse {
+                // the end
+                const name = remain;
+                this.i = 0;
+                return name;
+            };
+
+            // if this is the second component of a scoped package, an '@'
+            // will begin the next component
+            const at = strings.indexOfCharNeg(remain, '@') orelse {
+                const name = this.input[slash + 1 .. this.i];
+                this.i = slash;
+                return name;
+            };
+
+            if (at < slash) {
+                return error.InvalidPackageKey;
+            }
+
+            const next_slash = strings.indexOfCharNeg(remain[0..slash], '/') orelse {
+                // if `@` exists there must be another slash unless the first package
+                // is a scoped package
+                if (at != 0) {
+                    return error.InvalidPackageKey;
+                }
+
+                const name = remain;
+                this.i = 0;
+                return name;
+            };
+
+            if (next_slash + 1 != at) {
+                return error.InvalidPackageKey;
+            }
+
+            const name = this.input[next_slash + 1 ..
this.i]; + this.i = next_slash; + return name; + } + + pub fn first(this: *ReverseIterator) error{InvalidPackageKey}!string { + bun.debugAssert(this.i == this.input.len); + + return this.next() orelse return error.InvalidPackageKey; + } + }; + + pub fn iterator(input: string) Iterator { + return .{ + .input = input, + .i = 0, + }; + } + + pub const Iterator = struct { + input: string, + i: u32, + version_offset: ?u32 = null, + + pub fn next(this: *Iterator) error{InvalidPackageKey}!?string { + if (this.i == this.input.len) return null; + + var remain = this.input[this.i..]; + + var maybe_at = strings.indexOfChar(remain, '@'); + var slash = strings.indexOfChar(remain, '/') orelse { + // no slashes left, it's the last dependency name. + // '@' will only exist if '/' exists (scoped package) + if (maybe_at != null) return error.InvalidPackageKey; + this.i = @intCast(this.input.len); + return remain; + }; + + if (maybe_at == null) { + if (slash + 1 == this.input.len) return error.InvalidPackageKey; + this.i += slash + 1; + return remain[0..slash]; + } + + if (maybe_at.? == 0) { + // scoped package, find next '/' and '@' if it exists + maybe_at = strings.indexOfChar(remain[1..], '@'); + slash += 1 + (strings.indexOfChar(remain[slash + 1 ..], '/') orelse { + if (maybe_at != null) return error.InvalidPackageKey; + this.i = @intCast(this.input.len); + return remain; + }); + } + + if (maybe_at) |at| { + if (at + 1 < slash) { + // both '@' and '/' exist and it's not a scoped package, so + // '@' must be greater than '/' + return error.InvalidPackageKey; + } + } + + this.i += slash + 1; + return remain[0..slash]; + } + + /// There will always be at least one component to this path. Return + /// an error if none is found (empty string) + pub fn first(this: *Iterator) error{InvalidPackageKey}!string { + bun.assertWithLocation(this.i == 0, @src()); + return try this.next() orelse error.InvalidPackageKey; + } + }; + + pub fn fromLockfile(input: string) PkgPath { + return .{ + .raw = input, + .depth = 0, + }; + } + + pub const Map = struct { + root: Node, + + const Nodes = bun.StringArrayHashMapUnmanaged(Node); + + pub const Node = struct { + pkg_id: PackageID, + dep_id: DependencyID, + parent: ?*Node, + nodes: Nodes, + + pub fn deinit(this: *Node, allocator: std.mem.Allocator) void { + for (this.nodes.values()) |*node| { + node.deinit(allocator); + } + + this.nodes.deinit(allocator); + } + }; + + pub fn init() Map { + return .{ + .root = .{ + .pkg_id = 0, + .dep_id = invalid_dependency_id, + .parent = null, + .nodes = .{}, + }, + }; + } + + pub fn deinit(this: *Map, allocator: std.mem.Allocator) void { + for (this.root.nodes.values()) |*node| { + node.deinit(allocator); + } + } + + const InsertError = OOM || error{ + InvalidPackageKey, + DuplicatePackagePath, + }; + + pub fn insert(this: *Map, allocator: std.mem.Allocator, pkg_path: string, id: PackageID) InsertError!void { + var iter = PkgPath.iterator(pkg_path); + + var parent: ?*Node = null; + var curr: *Node = &this.root; + while (try iter.next()) |name| { + const entry = try curr.nodes.getOrPut(allocator, name); + if (!entry.found_existing) { + // probably should use String.Buf for small strings and + // deduplication. 
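// ---- editorial aside (illustration, not part of this patch) ----
// The `packages` keys inserted into this trie map one '/'-separated component
// per node_modules level, with an optional "@version" suffix on the final
// component (see the PkgPath.path doc comment above, e.g.
// loose-envify/js-tokens@4.0.0 -> node_modules/loose-envify/node_modules/js-tokens).
// Below is a minimal standalone sketch of that expansion, assuming unscoped
// names and a version containing no '/'. `pkgKeyToNodeModulesPath` is a
// hypothetical helper, not Bun's PkgPath.path, which avoids allocation and
// additionally handles "@scope/name" packages.
const std = @import("std");

fn pkgKeyToNodeModulesPath(allocator: std.mem.Allocator, key: []const u8) ![]u8 {
    var out = std.ArrayList(u8).init(allocator);
    errdefer out.deinit();

    var it = std.mem.splitScalar(u8, key, '/');
    var first = true;
    while (it.next()) |segment| {
        // only the final component carries "@version" in practice; strip it
        const name = if (std.mem.lastIndexOfScalar(u8, segment, '@')) |at|
            (if (at > 0) segment[0..at] else segment)
        else
            segment;
        if (!first) try out.append('/');
        try out.appendSlice("node_modules/");
        try out.appendSlice(name);
        first = false;
    }
    return out.toOwnedSlice();
}

test "expand a packages key into its node_modules path" {
    const path = try pkgKeyToNodeModulesPath(std.testing.allocator, "loose-envify/js-tokens@4.0.0");
    defer std.testing.allocator.free(path);
    try std.testing.expectEqualStrings("node_modules/loose-envify/node_modules/js-tokens", path);
}
// ---- end editorial aside ----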
+ entry.key_ptr.* = try allocator.dupe(u8, name); + entry.value_ptr.* = .{ + .pkg_id = invalid_package_id, + .dep_id = invalid_dependency_id, + .parent = parent, + .nodes = .{}, + }; + } + + parent = curr; + curr = entry.value_ptr; + } + + if (parent == null) { + return error.InvalidPackageKey; + } + + if (curr.pkg_id != invalid_package_id) { + return error.DuplicatePackagePath; + } + + curr.pkg_id = id; + } + + pub fn get(this: *Map, pkg_path: string) error{InvalidPackageKey}!?*Node { + var iter = iterator(pkg_path); + var curr: *Node = &this.root; + while (try iter.next()) |name| { + curr = curr.nodes.getPtr(name) orelse return null; + } + + return curr; + } + + pub fn iterate(this: *const Map, allocator: std.mem.Allocator) OOM!Map.Iterator { + var tree_buf: std.ArrayListUnmanaged(Map.Iterator.TreeInfo) = .{}; + try tree_buf.append(allocator, .{ + .nodes = this.root.nodes, + .pkg_id = 0, + .dep_id = BinaryLockfile.Tree.root_dep_id, + .id = 0, + .parent_id = BinaryLockfile.Tree.invalid_id, + }); + return .{ + .tree_buf = tree_buf, + .deps_buf = .{}, + }; + } + + /// Breadth-first iterator + pub const Iterator = struct { + tree_buf: std.ArrayListUnmanaged(TreeInfo), + + deps_buf: std.ArrayListUnmanaged(DependencyID), + + pub const TreeInfo = struct { + // name: String, + nodes: Nodes, + pkg_id: PackageID, + dep_id: DependencyID, + id: BinaryLockfile.Tree.Id, + parent_id: BinaryLockfile.Tree.Id, + }; + + pub const Next = struct { + id: BinaryLockfile.Tree.Id, + parent_id: BinaryLockfile.Tree.Id, + tree_dep_id: DependencyID, + dep_ids: []const DependencyID, + }; + + pub fn deinit(this: *Map.Iterator, allocator: std.mem.Allocator) void { + this.tree_buf.deinit(allocator); + this.deps_buf.deinit(allocator); + } + + pub fn next(this: *Map.Iterator, allocator: std.mem.Allocator) OOM!?Next { + if (this.tree_buf.items.len == 0) { + return null; + } + + this.deps_buf.clearRetainingCapacity(); + + var next_id = this.tree_buf.getLast().id + 1; + + // TODO(dylan-conway): try doubly linked list + const tree = this.tree_buf.orderedRemove(0); + + for (tree.nodes.values()) |node| { + if (node.nodes.count() > 0) { + try this.tree_buf.append(allocator, .{ + .nodes = node.nodes, + .id = next_id, + .parent_id = tree.id, + .pkg_id = node.pkg_id, + .dep_id = node.dep_id, + }); + next_id += 1; + } + + try this.deps_buf.append(allocator, node.dep_id); + } + + return .{ + .id = tree.id, + .parent_id = tree.parent_id, + .tree_dep_id = tree.dep_id, + .dep_ids = this.deps_buf.items, + }; + + // return tree; + // .dep_id = tree.dep_id, + // .pkg_id = tree.pkg_id, + // .id = tree.tree_id, + // .parent_id = tree.parent_id, + // .nodes = tree.nodes, + // }; + } + }; + }; +}; + +pub const Version = enum(u32) { + v0 = 0, + + // probably bump when we support nested resolutions + // v1, + + pub const current: Version = .v0; +}; + +pub const Stringifier = struct { + const indent_scalar = 2; + + // pub fn save(this: *const Lockfile) void { + // _ = this; + // } + + pub fn saveFromBinary(allocator: std.mem.Allocator, lockfile: *const BinaryLockfile) OOM!string { + var writer_buf = MutableString.initEmpty(allocator); + var buffered_writer = writer_buf.bufferedWriter(); + var writer = buffered_writer.writer(); + + const buf = lockfile.buffers.string_bytes.items; + const deps_buf = lockfile.buffers.dependencies.items; + const resolution_buf = lockfile.buffers.resolutions.items; + const pkgs = lockfile.packages.slice(); + const pkg_dep_lists: []DependencySlice = pkgs.items(.dependencies); + const pkg_resolution: []Resolution = 
pkgs.items(.resolution); + const pkg_names: []String = pkgs.items(.name); + const pkg_name_hashes: []PackageNameHash = pkgs.items(.name_hash); + const pkg_metas: []BinaryLockfile.Package.Meta = pkgs.items(.meta); + + var temp_buf: std.ArrayListUnmanaged(u8) = .{}; + defer temp_buf.deinit(allocator); + const temp_writer = temp_buf.writer(allocator); + + var found_trusted_dependencies: std.AutoHashMapUnmanaged(u64, String) = .{}; + defer found_trusted_dependencies.deinit(allocator); + if (lockfile.trusted_dependencies) |trusted_dependencies| { + try found_trusted_dependencies.ensureTotalCapacity(allocator, @truncate(trusted_dependencies.count())); + } + + var found_patched_dependencies: std.AutoHashMapUnmanaged(u64, struct { string, String }) = .{}; + defer found_patched_dependencies.deinit(allocator); + try found_patched_dependencies.ensureTotalCapacity(allocator, @truncate(lockfile.patched_dependencies.count())); + + var found_overrides: std.AutoHashMapUnmanaged(u64, struct { String, Dependency.Version }) = .{}; + defer found_overrides.deinit(allocator); + try found_overrides.ensureTotalCapacity(allocator, @truncate(lockfile.overrides.map.count())); + + var _indent: u32 = 0; + const indent = &_indent; + try writer.writeAll("{\n"); + try incIndent(writer, indent); + { + try writer.print("\"lockfileVersion\": {d},\n", .{@intFromEnum(Version.current)}); + try writeIndent(writer, indent); + + try writer.writeAll("\"workspaces\": {\n"); + try incIndent(writer, indent); + { + try writeWorkspaceDeps( + writer, + indent, + 0, + .{}, + pkg_names, + pkg_name_hashes, + pkg_dep_lists, + buf, + deps_buf, + lockfile.workspace_versions, + ); + for (0..pkgs.len) |pkg_id| { + const res = pkg_resolution[pkg_id]; + if (res.tag != .workspace) continue; + try writer.writeAll(",\n"); + try writeIndent(writer, indent); + try writeWorkspaceDeps( + writer, + indent, + @intCast(pkg_id), + res.value.workspace, + pkg_names, + pkg_name_hashes, + pkg_dep_lists, + buf, + deps_buf, + lockfile.workspace_versions, + ); + } + } + try writer.writeByte('\n'); + try decIndent(writer, indent); + try writer.writeAll("},\n"); + + var pkgs_iter = BinaryLockfile.Tree.Iterator(.pkg_path).init(lockfile); + + // find trusted and patched dependencies. 
also overrides + while (pkgs_iter.next({})) |node| { + for (node.dependencies) |dep_id| { + const pkg_id = resolution_buf[dep_id]; + if (pkg_id == invalid_package_id) continue; + + const pkg_name = pkg_names[pkg_id]; + const pkg_name_hash = pkg_name_hashes[pkg_id]; + const res = pkg_resolution[pkg_id]; + const dep = deps_buf[dep_id]; + + if (lockfile.patched_dependencies.count() > 0) { + try temp_writer.print("{s}@", .{pkg_name.slice(buf)}); + switch (res.tag) { + .workspace => { + if (lockfile.workspace_versions.get(pkg_name_hash)) |workspace_version| { + try temp_writer.print("{}", .{workspace_version.fmt(buf)}); + } + }, + else => { + try temp_writer.print("{}", .{res.fmt(buf, .posix)}); + }, + } + defer temp_buf.clearRetainingCapacity(); + + const name_and_version = temp_buf.items; + const name_and_version_hash = String.Builder.stringHash(name_and_version); + + if (lockfile.patched_dependencies.get(name_and_version_hash)) |patch| { + try found_patched_dependencies.put(allocator, name_and_version_hash, .{ + try allocator.dupe(u8, name_and_version), + patch.path, + }); + } + } + + // intentionally not checking default trusted dependencies + if (lockfile.trusted_dependencies) |trusted_dependencies| { + if (trusted_dependencies.contains(@truncate(dep.name_hash))) { + try found_trusted_dependencies.put(allocator, dep.name_hash, dep.name); + } + } + + if (lockfile.overrides.map.count() > 0) { + if (lockfile.overrides.get(dep.name_hash)) |version| { + try found_overrides.put(allocator, dep.name_hash, .{ dep.name, version }); + } + } + } + } + + pkgs_iter.reset(); + + if (found_trusted_dependencies.count() > 0) { + try writeIndent(writer, indent); + try writer.writeAll( + \\"trustedDependencies": [ + \\ + ); + indent.* += 1; + var values_iter = found_trusted_dependencies.valueIterator(); + while (values_iter.next()) |dep_name| { + try writeIndent(writer, indent); + try writer.print( + \\"{s}", + \\ + , .{dep_name.slice(buf)}); + } + + try decIndent(writer, indent); + try writer.writeAll( + \\], + \\ + ); + } + + if (found_patched_dependencies.count() > 0) { + try writeIndent(writer, indent); + try writer.writeAll( + \\"patchedDependencies": { + \\ + ); + indent.* += 1; + var values_iter = found_patched_dependencies.valueIterator(); + while (values_iter.next()) |value| { + const name_and_version, const patch_path = value.*; + try writeIndent(writer, indent); + try writer.print( + \\"{s}": "{s}", + \\ + , .{ name_and_version, patch_path.slice(buf) }); + } + + try decIndent(writer, indent); + try writer.writeAll( + \\}, + \\ + ); + } + + if (found_overrides.count() > 0) { + try writeIndent(writer, indent); + try writer.writeAll( + \\"overrides": { + \\ + ); + indent.* += 1; + var values_iter = found_overrides.valueIterator(); + while (values_iter.next()) |value| { + const name, const version = value.*; + try writeIndent(writer, indent); + try writer.print( + \\"{s}": "{s}", + \\ + , .{ name.slice(buf), version.literal.slice(buf) }); + } + + try decIndent(writer, indent); + try writer.writeAll( + \\}, + \\ + ); + } + + try writeIndent(writer, indent); + try writer.writeAll("\"packages\": {"); + var first = true; + while (pkgs_iter.next({})) |node| { + for (node.dependencies) |dep_id| { + const pkg_id = resolution_buf[dep_id]; + if (pkg_id == invalid_package_id) continue; + + const res = pkg_resolution[pkg_id]; + switch (res.tag) { + .root, .npm, .folder, .local_tarball, .github, .git, .symlink, .workspace, .remote_tarball => {}, + .uninitialized => continue, + // should not be possible, just being 
safe + .single_file_module => continue, + else => continue, + } + + if (first) { + first = false; + try writer.writeByte('\n'); + try incIndent(writer, indent); + } else { + try writer.writeAll(",\n"); + try writeIndent(writer, indent); + } + + try writer.writeByte('"'); + // relative_path is empty string for root resolutions + try writer.writeAll(node.relative_path); + + if (node.depth != 0) { + try writer.writeByte('/'); + } + + const dep = deps_buf[dep_id]; + const dep_name = dep.name.slice(buf); + + try writer.print("{s}\": ", .{ + dep_name, + }); + + const pkg_name = pkg_names[pkg_id].slice(buf); + const pkg_meta = pkg_metas[pkg_id]; + const pkg_deps = pkg_dep_lists[pkg_id].get(deps_buf); + + // first index is resolution for all dependency types + // npm -> [ "name@version", registry or "" (default), deps..., integrity, ... ] + // symlink -> [ "name@link:path", deps..., ... ] + // folder -> [ "name@path", deps..., ... ] + // workspace -> [ "name@workspace:path", version or "", deps..., ... ] + // tarball -> [ "name@tarball", deps..., ... ] + // root -> [ "name@root:" ] + // git -> [ "name@git+repo", deps..., ... ] + // github -> [ "name@github:user/repo", deps..., ... ] + + var optional_peers_buf = std.ArrayList(String).init(allocator); + defer optional_peers_buf.deinit(); + + switch (res.tag) { + .root => { + try writer.print("[\"{}@root:\"]", .{ + bun.fmt.formatJSONStringUTF8(pkg_name, .{ .quote = false }), + // we don't read the root package version into the binary lockfile + }); + }, + .folder => { + try writer.print("[\"{s}@file:{}\", ", .{ + pkg_name, + bun.fmt.formatJSONStringUTF8(res.value.folder.slice(buf), .{ .quote = false }), + }); + + try writePackageDepsAndMeta(writer, pkg_deps, &pkg_meta, buf, &optional_peers_buf); + + try writer.print(", \"{}\"]", .{pkg_meta.integrity}); + }, + .local_tarball => { + try writer.print("[\"{s}@{}\", ", .{ + pkg_name, + bun.fmt.formatJSONStringUTF8(res.value.local_tarball.slice(buf), .{ .quote = false }), + }); + + try writePackageDepsAndMeta(writer, pkg_deps, &pkg_meta, buf, &optional_peers_buf); + + try writer.print(", \"{}\"]", .{pkg_meta.integrity}); + }, + .remote_tarball => { + try writer.print("[\"{s}@{}\", ", .{ + pkg_name, + bun.fmt.formatJSONStringUTF8(res.value.remote_tarball.slice(buf), .{ .quote = false }), + }); + + try writePackageDepsAndMeta(writer, pkg_deps, &pkg_meta, buf, &optional_peers_buf); + + try writer.print(", \"{}\"]", .{pkg_meta.integrity}); + }, + .symlink => { + try writer.print("[\"{s}@link:{}\", ", .{ + pkg_name, + bun.fmt.formatJSONStringUTF8(res.value.symlink.slice(buf), .{ .quote = false }), + }); + + try writePackageDepsAndMeta(writer, pkg_deps, &pkg_meta, buf, &optional_peers_buf); + + try writer.print(", \"{}\"]", .{pkg_meta.integrity}); + }, + .npm => { + try writer.print("[\"{s}@{}\", ", .{ + pkg_name, + res.value.npm.version.fmt(buf), + }); + + // only write the registry if it's not the default. 
empty string means default registry + try writer.print("\"{s}\", ", .{ + if (strings.hasPrefixComptime(res.value.npm.url.slice(buf), strings.withoutTrailingSlash(Npm.Registry.default_url))) + "" + else + res.value.npm.url.slice(buf), + }); + + try writePackageDepsAndMeta(writer, pkg_deps, &pkg_meta, buf, &optional_peers_buf); + + // TODO(dylan-conway): delete placeholder + try writer.print(", \"{}\"]", .{ + pkg_meta.integrity, + }); + }, + .workspace => { + const workspace_path = res.value.workspace.slice(buf); + + try writer.print("[\"{s}@workspace:{}\", ", .{ + pkg_name, + bun.fmt.formatJSONStringUTF8(workspace_path, .{ .quote = false }), + }); + + try writePackageDepsAndMeta(writer, pkg_deps, &pkg_meta, buf, &optional_peers_buf); + + try writer.writeByte(']'); + }, + inline .git, .github => |tag| { + const repo: Repository = @field(res.value, @tagName(tag)); + try writer.print("[\"{s}@{}\", ", .{ + pkg_name, + repo.fmt(if (comptime tag == .git) "git+" else "github:", buf), + }); + + try writePackageDepsAndMeta(writer, pkg_deps, &pkg_meta, buf, &optional_peers_buf); + + try writer.print(", \"{}\"]", .{pkg_meta.integrity}); + }, + else => unreachable, + } + } + } + + if (!first) { + try writer.writeByte('\n'); + try decIndent(writer, indent); + } + try writer.writeAll("}\n"); + } + try decIndent(writer, indent); + try writer.writeAll("}\n"); + + try buffered_writer.flush(); + return writer_buf.list.items; + } + + /// Writes a single line object. + /// { "devDependencies": { "one": "1.1.1", "two": "2.2.2" }, "os": "none" } + fn writePackageDepsAndMeta( + writer: anytype, + deps: []const Dependency, + meta: *const Meta, + buf: string, + optional_peers_buf: *std.ArrayList(String), + ) OOM!void { + defer optional_peers_buf.clearRetainingCapacity(); + + try writer.writeByte('{'); + + var any = false; + inline for (workspace_dependency_groups) |group| { + const group_name, const group_behavior = group; + + var first = true; + for (deps) |dep| { + if (!dep.behavior.includes(group_behavior)) continue; + + if (dep.behavior.isOptionalPeer()) { + // only write to "peerDependencies" + if (group_behavior.isOptional()) continue; + + try optional_peers_buf.append(dep.name); + } + + if (first) { + if (any) { + try writer.writeByte(','); + } + try writer.writeAll(" \"" ++ group_name ++ "\": { "); + first = false; + any = true; + } else { + try writer.writeAll(", "); + } + + try writer.print("\"{s}\": \"{s}\"", .{ + dep.name.slice(buf), + dep.version.literal.slice(buf), + }); + } + + if (!first) { + try writer.writeAll(" }"); + } + } + + if (optional_peers_buf.items.len > 0) { + bun.debugAssert(any); + try writer.writeAll( + \\, "optionalPeers": [ + ); + + for (optional_peers_buf.items, 0..) 
|optional_peer, i| { + try writer.print( + \\"{s}{s}{s}", + , .{ + if (i != 0) "," else "", + optional_peer.slice(buf), + if (i != optional_peers_buf.items.len) " " else "", + }); + } + + try writer.writeByte(']'); + } + + // TODO(dylan-conway) + // if (meta.libc != .all) { + // try writer.writeAll( + // \\"libc": [ + // ); + // try Negatable(Npm.Libc).toJson(meta.libc, writer); + // try writer.writeAll("], "); + // } + + if (meta.os != .all) { + if (any) { + try writer.writeByte(','); + } else { + any = true; + } + try writer.writeAll( + \\ "os": + ); + try Negatable(Npm.OperatingSystem).toJson(meta.os, writer); + } + + if (meta.arch != .all) { + if (any) { + try writer.writeByte(','); + } else { + any = true; + } + try writer.writeAll( + \\ "cpu": + ); + try Negatable(Npm.Architecture).toJson(meta.arch, writer); + } + + if (any) { + try writer.writeAll(" }"); + } else { + try writer.writeByte('}'); + } + } + + fn writeWorkspaceDeps( + writer: anytype, + indent: *u32, + pkg_id: PackageID, + res: String, + pkg_names: []const String, + pkg_name_hashes: []const PackageNameHash, + pkg_deps: []const DependencySlice, + buf: string, + deps_buf: []const Dependency, + workspace_versions: BinaryLockfile.VersionHashMap, + ) OOM!void { + // any - have any properties been written + var any = false; + + // always print the workspace key even if it doesn't have dependencies because we + // need a way to detect new/deleted workspaces + if (pkg_id == 0) { + try writer.writeAll("\"\": {"); + } else { + try writer.print("{}: {{", .{ + bun.fmt.formatJSONStringUTF8(res.slice(buf), .{}), + }); + try writer.writeByte('\n'); + try incIndent(writer, indent); + try writer.print("\"name\": \"{s}\"", .{ + pkg_names[pkg_id].slice(buf), + }); + + if (workspace_versions.get(pkg_name_hashes[pkg_id])) |version| { + try writer.writeAll(",\n"); + try writeIndent(writer, indent); + try writer.print("\"version\": \"{}\"", .{ + version.fmt(buf), + }); + } + + any = true; + } + + inline for (workspace_dependency_groups) |group| { + const group_name, const group_behavior = group; + + var first = true; + for (pkg_deps[pkg_id].get(deps_buf)) |dep| { + if (!dep.behavior.includes(group_behavior)) continue; + + if (first) { + if (any) { + try writer.writeByte(','); + } + try writer.writeByte('\n'); + if (any) { + try writeIndent(writer, indent); + } else { + try incIndent(writer, indent); + } + try writer.writeAll("\"" ++ group_name ++ "\": {\n"); + try incIndent(writer, indent); + any = true; + first = false; + } else { + try writer.writeAll(",\n"); + try writeIndent(writer, indent); + } + + const name = dep.name.slice(buf); + const version = dep.version.literal.slice(buf); + + try writer.print("\"{s}\": \"{s}\"", .{ name, version }); + } + + if (!first) { + try writer.writeByte('\n'); + try decIndent(writer, indent); + try writer.writeAll("}"); + } + } + if (any) { + try writer.writeByte('\n'); + try decIndent(writer, indent); + } + try writer.writeAll("}"); + } + + fn writeIndent(writer: anytype, indent: *const u32) OOM!void { + for (0..indent.*) |_| { + try writer.writeAll(" " ** indent_scalar); + } + } + + fn incIndent(writer: anytype, indent: *u32) OOM!void { + indent.* += 1; + for (0..indent.*) |_| { + try writer.writeAll(" " ** indent_scalar); + } + } + + fn decIndent(writer: anytype, indent: *u32) OOM!void { + indent.* -= 1; + for (0..indent.*) |_| { + try writer.writeAll(" " ** indent_scalar); + } + } +}; + +const dependency_groups = [3]struct { []const u8, Dependency.Behavior }{ + .{ "dependencies", 
Dependency.Behavior.normal }, + .{ "peerDependencies", Dependency.Behavior.normal }, + .{ "optionalDependencies", Dependency.Behavior.normal }, +}; + +const workspace_dependency_groups = [4]struct { []const u8, Dependency.Behavior }{ + .{ "dependencies", Dependency.Behavior.normal }, + .{ "devDependencies", Dependency.Behavior.dev }, + .{ "peerDependencies", Dependency.Behavior.peer }, + .{ "optionalDependencies", Dependency.Behavior.optional }, +}; + +const ParseError = OOM || error{ + InvalidLockfileVersion, + InvalidOptionalValue, + InvalidPeerValue, + InvalidDefaultRegistry, + InvalidPatchedDependencies, + InvalidPatchedDependency, + InvalidWorkspaceObject, + InvalidPackagesObject, + InvalidPackagesProp, + InvalidPackageKey, + InvalidPackageInfo, + InvalidPackageSpecifier, + InvalidSemver, + InvalidPackagesTree, + InvalidTrustedDependenciesSet, + InvalidOverridesObject, + InvalidDependencyName, + InvalidDependencyVersion, + InvalidPackageResolution, + UnexpectedResolution, +}; + +pub fn parseIntoBinaryLockfile( + lockfile: *BinaryLockfile, + allocator: std.mem.Allocator, + root: JSON.Expr, + source: *const logger.Source, + log: *logger.Log, + manager: ?*PackageManager, +) ParseError!void { + var temp_buf: std.ArrayListUnmanaged(u8) = .{}; + defer temp_buf.deinit(allocator); + + lockfile.initEmpty(allocator); + + const lockfile_version_expr = root.get("lockfileVersion") orelse { + try log.addError(source, root.loc, "Missing lockfile version"); + return error.InvalidLockfileVersion; + }; + + const lockfile_version: u32 = switch (lockfile_version_expr.data) { + .e_number => |num| @intFromFloat(std.math.divExact(f64, num.value, 1) catch return error.InvalidLockfileVersion), + else => return error.InvalidLockfileVersion, + }; + + lockfile.text_lockfile_version = std.meta.intToEnum(Version, lockfile_version) catch { + try log.addError(source, lockfile_version_expr.loc, "Unknown lockfile version"); + return error.InvalidLockfileVersion; + }; + + var string_buf = String.Buf.init(allocator); + + if (root.get("trustedDependencies")) |trusted_dependencies_expr| { + var trusted_dependencies: BinaryLockfile.TrustedDependenciesSet = .{}; + if (!trusted_dependencies_expr.isArray()) { + try log.addError(source, trusted_dependencies_expr.loc, "Expected an array"); + return error.InvalidTrustedDependenciesSet; + } + + for (trusted_dependencies_expr.data.e_array.items.slice()) |dep| { + if (!dep.isString()) { + try log.addError(source, dep.loc, "Expected a string"); + return error.InvalidTrustedDependenciesSet; + } + const name_hash: TruncatedPackageNameHash = @truncate((try dep.asStringHash(allocator, String.Builder.stringHash)).?); + try trusted_dependencies.put(allocator, name_hash, {}); + } + + lockfile.trusted_dependencies = trusted_dependencies; + } + + if (root.get("patchedDependencies")) |patched_dependencies_expr| { + if (!patched_dependencies_expr.isObject()) { + try log.addError(source, patched_dependencies_expr.loc, "Expected an object"); + return error.InvalidPatchedDependencies; + } + + for (patched_dependencies_expr.data.e_object.properties.slice()) |prop| { + const key = prop.key.?; + const value = prop.value.?; + if (!key.isString()) { + try log.addError(source, key.loc, "Expected a string"); + return error.InvalidPatchedDependencies; + } + + if (!value.isString()) { + try log.addError(source, value.loc, "Expected a string"); + return error.InvalidPatchedDependencies; + } + + const key_hash = (try key.asStringHash(allocator, String.Builder.stringHash)).?; + try 
lockfile.patched_dependencies.put( + allocator, + key_hash, + .{ .path = try string_buf.append(value.asString(allocator).?) }, + ); + } + } + + if (root.get("overrides")) |overrides_expr| { + if (!overrides_expr.isObject()) { + try log.addError(source, overrides_expr.loc, "Expected an object"); + return error.InvalidOverridesObject; + } + + for (overrides_expr.data.e_object.properties.slice()) |prop| { + const key = prop.key.?; + const value = prop.value.?; + + if (!key.isString() or key.data.e_string.len() == 0) { + try log.addError(source, key.loc, "Expected a non-empty string"); + return error.InvalidOverridesObject; + } + + const name_str = key.asString(allocator).?; + const name_hash = String.Builder.stringHash(name_str); + const name = try string_buf.appendWithHash(name_str, name_hash); + + // TODO(dylan-conway) also accept object when supported + if (!value.isString()) { + try log.addError(source, value.loc, "Expected a string"); + return error.InvalidOverridesObject; + } + + const version_str = value.asString(allocator).?; + const version_hash = String.Builder.stringHash(version_str); + const version = try string_buf.appendWithHash(version_str, version_hash); + const version_sliced = version.sliced(string_buf.bytes.items); + + const dep: Dependency = .{ + .name = name, + .name_hash = name_hash, + .version = Dependency.parse( + allocator, + name, + name_hash, + version_sliced.slice, + &version_sliced, + log, + manager, + ) orelse { + try log.addError(source, value.loc, "Invalid override version"); + return error.InvalidOverridesObject; + }, + }; + + try lockfile.overrides.map.put(allocator, name_hash, dep); + } + } + + const workspaces = root.getObject("workspaces") orelse { + try log.addError(source, root.loc, "Missing a workspaces object property"); + return error.InvalidWorkspaceObject; + }; + + var maybe_root_pkg: ?Expr = null; + + for (workspaces.data.e_object.properties.slice()) |prop| { + const key = prop.key.?; + const value: Expr = prop.value.?; + if (!key.isString()) { + try log.addError(source, key.loc, "Expected a string"); + return error.InvalidWorkspaceObject; + } + if (!value.isObject()) { + try log.addError(source, value.loc, "Expected an object"); + return error.InvalidWorkspaceObject; + } + + const path = key.asString(allocator).?; + + if (path.len == 0) { + if (maybe_root_pkg != null) { + try log.addError(source, key.loc, "Duplicate root package"); + return error.InvalidWorkspaceObject; + } + + maybe_root_pkg = value; + continue; + } + + const name_expr: Expr = value.get("name") orelse { + try log.addError(source, value.loc, "Expected a string name property"); + return error.InvalidWorkspaceObject; + }; + + const name_hash = try name_expr.asStringHash(allocator, String.Builder.stringHash) orelse { + try log.addError(source, name_expr.loc, "Expected a string name property"); + return error.InvalidWorkspaceObject; + }; + + try lockfile.workspace_paths.put(allocator, name_hash, try string_buf.append(path)); + + // versions are optional + if (value.get("version")) |version_expr| { + if (!version_expr.isString()) { + try log.addError(source, version_expr.loc, "Expected a string version property"); + return error.InvalidWorkspaceObject; + } + + const version_str = try string_buf.append(version_expr.asString(allocator).?); + + const parsed = Semver.Version.parse(version_str.sliced(string_buf.bytes.items)); + if (!parsed.valid) { + try log.addError(source, version_expr.loc, "Invalid semver version"); + return error.InvalidSemver; + } + + try 
lockfile.workspace_versions.put(allocator, name_hash, parsed.version.min()); + } + } + + var optional_peers_buf: std.AutoHashMapUnmanaged(u64, void) = .{}; + defer optional_peers_buf.deinit(allocator); + + if (maybe_root_pkg) |root_pkg| { + // TODO(dylan-conway): maybe sort this. behavior is already sorted, but names are not + const maybe_name = if (root_pkg.get("name")) |name| name.asString(allocator) orelse { + try log.addError(source, name.loc, "Expected a string"); + return error.InvalidWorkspaceObject; + } else null; + + const off, const len = try parseAppendDependencies(lockfile, allocator, &root_pkg, &string_buf, log, source, &optional_peers_buf); + + var pkg: BinaryLockfile.Package = .{}; + pkg.meta.id = 0; + + if (maybe_name) |name| { + const name_hash = String.Builder.stringHash(name); + pkg.name = try string_buf.appendWithHash(name, name_hash); + pkg.name_hash = name_hash; + } + + pkg.dependencies = .{ .off = off, .len = len }; + pkg.resolutions = .{ .off = off, .len = len }; + + try lockfile.packages.append(allocator, pkg); + } else { + try log.addError(source, workspaces.loc, "Expected root package"); + return error.InvalidWorkspaceObject; + } + + var pkg_map = PkgPath.Map.init(); + defer pkg_map.deinit(allocator); + + if (root.get("packages")) |pkgs_expr| { + if (!pkgs_expr.isObject()) { + try log.addError(source, pkgs_expr.loc, "Expected an object"); + return error.InvalidPackagesObject; + } + + for (pkgs_expr.data.e_object.properties.slice()) |prop| { + const key = prop.key.?; + const value = prop.value.?; + + const pkg_path = key.asString(allocator) orelse { + try log.addError(source, key.loc, "Expected a string"); + return error.InvalidPackageKey; + }; + + if (!value.isArray()) { + try log.addError(source, value.loc, "Expected an array"); + return error.InvalidPackageInfo; + } + + var i: usize = 0; + const pkg_info = value.data.e_array.items; + + if (pkg_info.len == 0) { + try log.addError(source, value.loc, "Missing package info"); + return error.InvalidPackageInfo; + } + + const res_info = pkg_info.at(i); + i += 1; + + const res_info_str = res_info.asString(allocator) orelse { + try log.addError(source, res_info.loc, "Expected a string"); + return error.InvalidPackageResolution; + }; + + const name_str, const res_str = Dependency.splitNameAndVersion(res_info_str) catch { + try log.addError(source, res_info.loc, "Invalid package resolution"); + return error.InvalidPackageResolution; + }; + + const name_hash = String.Builder.stringHash(name_str); + const name = try string_buf.append(name_str); + + var res = Resolution.fromTextLockfile(res_str, &string_buf) catch |err| switch (err) { + error.OutOfMemory => return err, + error.UnexpectedResolution => { + try log.addErrorFmt(source, res_info.loc, allocator, "Unexpected resolution: {s}", .{res_str}); + return err; + }, + error.InvalidSemver => { + try log.addErrorFmt(source, res_info.loc, allocator, "Invalid package version: {s}", .{res_str}); + return err; + }, + }; + + if (res.tag == .npm) { + if (pkg_info.len < 2) { + try log.addError(source, value.loc, "Missing npm registry"); + return error.InvalidPackageInfo; + } + + const registry_expr = pkg_info.at(i); + i += 1; + + const registry_str = registry_expr.asString(allocator) orelse { + try log.addError(source, registry_expr.loc, "Expected a string"); + return error.InvalidPackageInfo; + }; + + if (registry_str.len == 0) { + const url = try ExtractTarball.buildURL( + Npm.Registry.default_url, + strings.StringOrTinyString.init(name.slice(string_buf.bytes.items)), + 
res.value.npm.version, + string_buf.bytes.items, + ); + + res.value.npm.url = try string_buf.append(url); + } else { + res.value.npm.url = try string_buf.append(registry_str); + } + } + + var pkg: BinaryLockfile.Package = .{}; + + // dependencies, os, cpu, libc + switch (res.tag) { + .npm, .folder, .git, .github, .local_tarball, .remote_tarball, .symlink, .workspace => { + const deps_os_cpu_libc_obj = pkg_info.at(i); + i += 1; + if (!deps_os_cpu_libc_obj.isObject()) { + try log.addError(source, deps_os_cpu_libc_obj.loc, "Expected an object"); + return error.InvalidPackageInfo; + } + + // TODO(dylan-conway): maybe sort this. behavior is already sorted, but names are not + const off, const len = try parseAppendDependencies(lockfile, allocator, deps_os_cpu_libc_obj, &string_buf, log, source, &optional_peers_buf); + + pkg.dependencies = .{ .off = off, .len = len }; + pkg.resolutions = .{ .off = off, .len = len }; + + if (res.tag != .workspace) { + if (deps_os_cpu_libc_obj.get("os")) |os| { + pkg.meta.os = try Negatable(Npm.OperatingSystem).fromJson(allocator, os); + } + if (deps_os_cpu_libc_obj.get("cpu")) |arch| { + pkg.meta.arch = try Negatable(Npm.Architecture).fromJson(allocator, arch); + } + // TODO(dylan-conway) + // if (os_cpu_libc_obj.get("libc")) |libc| { + // pkg.meta.libc = Negatable(Npm.Libc).fromJson(allocator, libc); + // } + } + }, + else => {}, + } + + // integrity + switch (res.tag) { + .npm, .git, .github => { + const integrity_expr = pkg_info.at(i); + i += 1; + const integrity_str = integrity_expr.asString(allocator) orelse { + try log.addError(source, integrity_expr.loc, "Expected a string"); + return error.InvalidPackageInfo; + }; + + pkg.meta.integrity = Integrity.parse(integrity_str); + }, + else => {}, + } + + pkg.name = name; + pkg.name_hash = name_hash; + pkg.resolution = res; + + // set later + pkg.bin = .{ + .unset = 1, + }; + pkg.scripts = .{}; + + const pkg_id = try lockfile.appendPackageDedupe(&pkg, string_buf.bytes.items); + + pkg_map.insert(allocator, pkg_path, pkg_id) catch |err| { + switch (err) { + error.OutOfMemory => |oom| return oom, + error.DuplicatePackagePath => { + try log.addError(source, key.loc, "Duplicate package path"); + }, + error.InvalidPackageKey => { + try log.addError(source, key.loc, "Invalid package path"); + }, + } + return error.InvalidPackageKey; + }; + } + + try lockfile.buffers.resolutions.ensureTotalCapacityPrecise(allocator, lockfile.buffers.dependencies.items.len); + lockfile.buffers.resolutions.expandToCapacity(); + @memset(lockfile.buffers.resolutions.items, invalid_package_id); + + const pkgs = lockfile.packages.slice(); + const pkg_names = pkgs.items(.name); + _ = pkg_names; + const pkg_name_hashes = pkgs.items(.name_hash); + _ = pkg_name_hashes; + const pkg_deps = pkgs.items(.dependencies); + var pkg_metas = pkgs.items(.meta); + var pkg_resolutions = pkgs.items(.resolution); + const pkg_resolution_lists = pkgs.items(.resolutions); + _ = pkg_resolution_lists; + + { + // root pkg + pkg_resolutions[0] = Resolution.init(.{ .root = {} }); + pkg_metas[0].origin = .local; + + for (pkg_deps[0].begin()..pkg_deps[0].end()) |_dep_id| { + const dep_id: DependencyID = @intCast(_dep_id); + const dep = lockfile.buffers.dependencies.items[dep_id]; + + if (pkg_map.root.nodes.getPtr(dep.name.slice(string_buf.bytes.items))) |dep_node| { + dep_node.dep_id = dep_id; + lockfile.buffers.resolutions.items[dep_id] = dep_node.pkg_id; + } + } + } + + for (pkgs_expr.data.e_object.properties.slice()) |prop| { + const key = prop.key.?; + const value = 
prop.value.?; + + const pkg_path = key.asString(allocator).?; + const i: usize = 0; + _ = i; + const pkg_info = value.data.e_array.items; + _ = pkg_info; + + const pkg_map_entry = try pkg_map.get(pkg_path) orelse { + return error.InvalidPackagesObject; + }; + + const pkg_id = pkg_map_entry.pkg_id; + + // find resolutions. iterate up to root through the pkg path. + deps: for (pkg_deps[pkg_id].begin()..pkg_deps[pkg_id].end()) |_dep_id| { + const dep_id: DependencyID = @intCast(_dep_id); + const dep = lockfile.buffers.dependencies.items[dep_id]; + + var curr: ?*PkgPath.Map.Node = pkg_map_entry; + while (curr) |node| { + if (node.nodes.getPtr(dep.name.slice(string_buf.bytes.items))) |dep_node| { + + // it doesn't matter which dependency is assigned to this node. the dependency + // id will only be used for getting the dependency name + dep_node.dep_id = dep_id; + lockfile.buffers.resolutions.items[dep_id] = dep_node.pkg_id; + + continue :deps; + } + curr = node.parent orelse if (curr != &pkg_map.root) &pkg_map.root else null; + } + } + } + + { + // ids are assigned, now flatten into `lockfile.buffers.trees` and `lockfile.buffers.hoisted_dependencies` + var tree_iter = try pkg_map.iterate(allocator); + defer tree_iter.deinit(allocator); + var tree_id: BinaryLockfile.Tree.Id = 0; + while (try tree_iter.next(allocator)) |tree| { + bun.debugAssert(tree_id == tree.id); + const deps_off: u32 = @intCast(lockfile.buffers.hoisted_dependencies.items.len); + const deps_len: u32 = @intCast(tree.dep_ids.len); + try lockfile.buffers.hoisted_dependencies.appendSlice(allocator, tree.dep_ids); + try lockfile.buffers.trees.append( + allocator, + .{ + .dependency_id = tree.tree_dep_id, + .id = tree_id, + .parent = tree.parent_id, + .dependencies = .{ + .off = deps_off, + .len = deps_len, + }, + }, + ); + + tree_id += 1; + } + } + } + + lockfile.buffers.string_bytes = string_buf.bytes.moveToUnmanaged(); + lockfile.string_pool = string_buf.pool; +} + +fn parseAppendDependencies( + lockfile: *BinaryLockfile, + allocator: std.mem.Allocator, + obj: *const Expr, + buf: *String.Buf, + log: *logger.Log, + source: *const logger.Source, + optional_peers_buf: *std.AutoHashMapUnmanaged(u64, void), +) ParseError!struct { u32, u32 } { + defer optional_peers_buf.clearRetainingCapacity(); + + if (obj.get("optionalPeers")) |optional_peers| { + if (!optional_peers.isArray()) { + try log.addError(source, optional_peers.loc, "Expected an array"); + return error.InvalidPackageInfo; + } + + for (optional_peers.data.e_array.items.slice()) |item| { + const name_hash = try item.asStringHash(allocator, String.Builder.stringHash) orelse { + try log.addError(source, item.loc, "Expected a string"); + return error.InvalidPackageInfo; + }; + + try optional_peers_buf.put(allocator, name_hash, {}); + } + } + + const off = lockfile.buffers.dependencies.items.len; + inline for (workspace_dependency_groups) |dependency_group| { + const group_name, const group_behavior = dependency_group; + if (obj.get(group_name)) |deps| { + if (!deps.isObject()) { + try log.addError(source, deps.loc, "Expected an object"); + return error.InvalidPackagesTree; + } + + for (deps.data.e_object.properties.slice()) |prop| { + const key = prop.key.?; + const value = prop.value.?; + + const name_str = key.asString(allocator) orelse { + try log.addError(source, key.loc, "Expected a string"); + return error.InvalidDependencyName; + }; + + const name_hash = String.Builder.stringHash(name_str); + const name = try buf.appendExternalWithHash(name_str, name_hash); + + const 
version_str = value.asString(allocator) orelse { + try log.addError(source, value.loc, "Expected a string"); + return error.InvalidDependencyVersion; + }; + + const version = try buf.append(version_str); + const version_sliced = version.sliced(buf.bytes.items); + + var dep: Dependency = .{ + .name = name.value, + .name_hash = name.hash, + .behavior = group_behavior, + .version = Dependency.parse( + allocator, + name.value, + name.hash, + version_sliced.slice, + &version_sliced, + log, + null, + ) orelse { + try log.addError(source, value.loc, "Invalid dependency version"); + return error.InvalidDependencyVersion; + }, + }; + + if (dep.behavior.isPeer() and optional_peers_buf.contains(name.hash)) { + dep.behavior.optional = true; + } + + try lockfile.buffers.dependencies.append(allocator, dep); + } + } + } + const end = lockfile.buffers.dependencies.items.len; + + return .{ @intCast(off), @intCast(end - off) }; +} diff --git a/src/install/dependency.zig b/src/install/dependency.zig index 6f3667988e4f7a..faeb476d2d128d 100644 --- a/src/install/dependency.zig +++ b/src/install/dependency.zig @@ -265,7 +265,7 @@ pub inline fn isRemoteTarball(dependency: string) bool { } /// Turns `foo@1.1.1` into `foo`, `1.1.1`, or `@foo/bar@1.1.1` into `@foo/bar`, `1.1.1`, or `foo` into `foo`, `null`. -pub fn splitNameAndVersion(str: string) struct { string, ?string } { +pub fn splitNameAndMaybeVersion(str: string) struct { string, ?string } { if (strings.indexOfChar(str, '@')) |at_index| { if (at_index != 0) { return .{ str[0..at_index], if (at_index + 1 < str.len) str[at_index + 1 ..] else null }; @@ -279,6 +279,14 @@ pub fn splitNameAndVersion(str: string) struct { string, ?string } { return .{ str, null }; } +pub fn splitNameAndVersion(str: string) error{MissingVersion}!struct { string, string } { + const name, const version = splitNameAndMaybeVersion(str); + return .{ + name, + version orelse return error.MissingVersion, + }; +} + pub fn unscopedPackageName(name: []const u8) []const u8 { if (name[0] != '@') return name; var name_ = name; @@ -1375,6 +1383,10 @@ pub const Behavior = packed struct(u8) { return @as(u8, @bitCast(lhs)) == @as(u8, @bitCast(rhs)); } + pub inline fn includes(lhs: Behavior, rhs: Behavior) bool { + return @as(u8, @bitCast(lhs)) & @as(u8, @bitCast(rhs)) != 0; + } + pub inline fn cmp(lhs: Behavior, rhs: Behavior) std.math.Order { if (eq(lhs, rhs)) { return .eq; diff --git a/src/install/extract_tarball.zig b/src/install/extract_tarball.zig index 8ca72a1fc85472..8959383769515d 100644 --- a/src/install/extract_tarball.zig +++ b/src/install/extract_tarball.zig @@ -18,6 +18,7 @@ const strings = @import("../string_immutable.zig"); const Path = @import("../resolver/resolve_path.zig"); const Environment = bun.Environment; const w = std.os.windows; +const OOM = bun.OOM; const ExtractTarball = @This(); @@ -60,43 +61,12 @@ pub fn buildURL( string_buf, @TypeOf(FileSystem.instance.dirname_store), string, - anyerror, + OOM, FileSystem.instance.dirname_store, FileSystem.DirnameStore.print, ); } -pub fn buildURLWithWriter( - comptime Writer: type, - writer: Writer, - registry_: string, - full_name_: strings.StringOrTinyString, - version: Semver.Version, - string_buf: []const u8, -) !void { - const Printer = struct { - writer: Writer, - - pub fn print(this: @This(), comptime fmt: string, args: anytype) Writer.Error!void { - return try std.fmt.format(this.writer, fmt, args); - } - }; - - return try buildURLWithPrinter( - registry_, - full_name_, - version, - string_buf, - Printer, - void, - 
Writer.Error, - Printer{ - .writer = writer, - }, - Printer.print, - ); -} - pub fn buildURLWithPrinter( registry_: string, full_name_: strings.StringOrTinyString, diff --git a/src/install/install.zig b/src/install/install.zig index d1b14b2ae6fe31..2ff59a506036b2 100644 --- a/src/install/install.zig +++ b/src/install/install.zig @@ -24,6 +24,7 @@ const DirInfo = @import("../resolver/dir_info.zig"); const File = bun.sys.File; const JSLexer = bun.js_lexer; const logger = bun.logger; +const OOM = bun.OOM; const js_parser = bun.js_parser; const JSON = bun.JSON; @@ -147,7 +148,7 @@ const ExternalString = Semver.ExternalString; const String = Semver.String; const GlobalStringBuilder = @import("../string_builder.zig"); const SlicedString = Semver.SlicedString; -const Repository = @import("./repository.zig").Repository; +pub const Repository = @import("./repository.zig").Repository; pub const Bin = @import("./bin.zig").Bin; pub const Dependency = @import("./dependency.zig"); const Behavior = @import("./dependency.zig").Behavior; @@ -211,6 +212,7 @@ pub fn ExternalSliceAligned(comptime Type: type, comptime alignment_: ?u29) type pub const PackageID = u32; pub const DependencyID = u32; pub const invalid_package_id = std.math.maxInt(PackageID); +pub const invalid_dependency_id = std.math.maxInt(DependencyID); pub const ExternalStringList = ExternalSlice(ExternalString); pub const VersionSlice = ExternalSlice(Semver.Version); @@ -988,12 +990,12 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type { progress: ProgressT, - package_name: string, + package_name: String, package_version: string, patch: Patch = .{}, file_count: u32 = 0, node_modules: *const PackageManager.NodeModulesFolder, - lockfile: *const Lockfile, + lockfile: *Lockfile, const ThisPackageInstall = @This(); @@ -1093,7 +1095,7 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type { fn verifyPatchHash( this: *@This(), root_node_modules_dir: std.fs.Dir, - ) bool { + ) VerifyResult { bun.debugAssert(!this.patch.isNull()); // hash from the .patch file, to be checked against bun tag @@ -1106,20 +1108,22 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type { bunhashtag, }, .posix); - var destination_dir = this.node_modules.openDir(root_node_modules_dir) catch return false; + var destination_dir = this.node_modules.openDir(root_node_modules_dir) catch return .{}; defer { if (std.fs.cwd().fd != destination_dir.fd) destination_dir.close(); } if (comptime bun.Environment.isPosix) { - _ = bun.sys.fstatat(bun.toFD(destination_dir.fd), patch_tag_path).unwrap() catch return false; + _ = bun.sys.fstatat(bun.toFD(destination_dir.fd), patch_tag_path).unwrap() catch return .{}; } else { switch (bun.sys.openat(bun.toFD(destination_dir.fd), patch_tag_path, bun.O.RDONLY, 0)) { - .err => return false, + .err => return .{}, .result => |fd| _ = bun.sys.close(fd), } } - return true; + return .{ + .valid = true, + }; } // 1. verify that .bun-tag exists (was it installed from bun?) @@ -1127,9 +1131,8 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type { fn verifyGitResolution( this: *@This(), repo: *const Repository, - buf: []const u8, root_node_modules_dir: std.fs.Dir, - ) bool { + ) VerifyResult { bun.copy(u8, this.destination_dir_subpath_buf[this.destination_dir_subpath.len..], std.fs.path.sep_str ++ ".bun-tag"); this.destination_dir_subpath_buf[this.destination_dir_subpath.len + std.fs.path.sep_str.len + ".bun-tag".len] = 0; const bun_tag_path: [:0]u8 = this.destination_dir_subpath_buf[0 .. 
this.destination_dir_subpath.len + std.fs.path.sep_str.len + ".bun-tag".len :0]; @@ -1137,7 +1140,7 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type { var git_tag_stack_fallback = std.heap.stackFallback(2048, bun.default_allocator); const allocator = git_tag_stack_fallback.get(); - var destination_dir = this.node_modules.openDir(root_node_modules_dir) catch return false; + var destination_dir = this.node_modules.openDir(root_node_modules_dir) catch return .{}; defer { if (std.fs.cwd().fd != destination_dir.fd) destination_dir.close(); } @@ -1146,120 +1149,138 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type { destination_dir, bun_tag_path, allocator, - ).unwrap() catch return false; + ).unwrap() catch return .{}; defer allocator.free(bun_tag_file); - return strings.eqlLong(repo.resolved.slice(buf), bun_tag_file, true); + return .{ + .valid = strings.eqlLong(repo.resolved.slice(this.lockfile.buffers.string_bytes.items), bun_tag_file, true), + }; } pub fn verify( this: *@This(), resolution: *const Resolution, - buf: []const u8, root_node_modules_dir: std.fs.Dir, - ) bool { + bin: *Bin, + ) VerifyResult { const verified = switch (resolution.tag) { - .git => this.verifyGitResolution(&resolution.value.git, buf, root_node_modules_dir), - .github => this.verifyGitResolution(&resolution.value.github, buf, root_node_modules_dir), + .git => this.verifyGitResolution(&resolution.value.git, root_node_modules_dir), + .github => this.verifyGitResolution(&resolution.value.github, root_node_modules_dir), .root => this.verifyTransitiveSymlinkedFolder(root_node_modules_dir), .folder => if (this.lockfile.isWorkspaceTreeId(this.node_modules.tree_id)) - this.verifyPackageJSONNameAndVersion(root_node_modules_dir, resolution.tag) + this.verifyPackageJSONNameAndVersion(root_node_modules_dir, resolution.tag, bin) else this.verifyTransitiveSymlinkedFolder(root_node_modules_dir), - else => this.verifyPackageJSONNameAndVersion(root_node_modules_dir, resolution.tag), + else => this.verifyPackageJSONNameAndVersion(root_node_modules_dir, resolution.tag, bin), }; if (comptime kind == .patch) return verified; if (this.patch.isNull()) return verified; - if (!verified) return false; + if (!verified.valid) return verified; return this.verifyPatchHash(root_node_modules_dir); } // Only check for destination directory in node_modules. 
We can't use package.json because // it might not exist - fn verifyTransitiveSymlinkedFolder(this: *@This(), root_node_modules_dir: std.fs.Dir) bool { - var destination_dir = this.node_modules.openDir(root_node_modules_dir) catch return false; + fn verifyTransitiveSymlinkedFolder(this: *@This(), root_node_modules_dir: std.fs.Dir) VerifyResult { + var destination_dir = this.node_modules.openDir(root_node_modules_dir) catch return .{}; defer destination_dir.close(); - return bun.sys.directoryExistsAt(destination_dir.fd, this.destination_dir_subpath).unwrap() catch false; + const exists = bun.sys.directoryExistsAt(destination_dir.fd, this.destination_dir_subpath).unwrap() catch return .{}; + return if (exists) .{ .valid = true } else .{}; } - fn verifyPackageJSONNameAndVersion(this: *PackageInstall, root_node_modules_dir: std.fs.Dir, resolution_tag: Resolution.Tag) bool { - const allocator = this.allocator; + const VerifyResult = struct { + valid: bool = false, + update_lockfile_pointers: bool = false, + }; + + fn getInstalledPackageJsonSource( + this: *PackageInstall, + root_node_modules_dir: std.fs.Dir, + mutable: *MutableString, + resolution_tag: Resolution.Tag, + ) ?logger.Source { var total: usize = 0; var read: usize = 0; + mutable.reset(); + mutable.list.expandToCapacity(); + bun.copy(u8, this.destination_dir_subpath_buf[this.destination_dir_subpath.len..], std.fs.path.sep_str ++ "package.json"); + this.destination_dir_subpath_buf[this.destination_dir_subpath.len + std.fs.path.sep_str.len + "package.json".len] = 0; + const package_json_path: [:0]u8 = this.destination_dir_subpath_buf[0 .. this.destination_dir_subpath.len + std.fs.path.sep_str.len + "package.json".len :0]; + defer this.destination_dir_subpath_buf[this.destination_dir_subpath.len] = 0; - var body_pool = Npm.Registry.BodyPool.get(allocator); - var mutable: MutableString = body_pool.data; + var destination_dir = this.node_modules.openDir(root_node_modules_dir) catch return null; defer { - body_pool.data = mutable; - Npm.Registry.BodyPool.release(body_pool); + if (std.fs.cwd().fd != destination_dir.fd) destination_dir.close(); } - // Read the file - // Return false on any error. - // Don't keep it open while we're parsing the JSON. - // The longer the file stays open, the more likely it causes issues for - // other processes on Windows. - const source = brk: { - mutable.reset(); + var package_json_file = File.openat(destination_dir, package_json_path, bun.O.RDONLY, 0).unwrap() catch return null; + defer package_json_file.close(); + + // Heuristic: most package.jsons will be less than 2048 bytes. + read = package_json_file.read(mutable.list.items[total..]).unwrap() catch return null; + var remain = mutable.list.items[@min(total, read)..]; + if (read > 0 and remain.len < 1024) { + mutable.growBy(4096) catch return null; mutable.list.expandToCapacity(); - bun.copy(u8, this.destination_dir_subpath_buf[this.destination_dir_subpath.len..], std.fs.path.sep_str ++ "package.json"); - this.destination_dir_subpath_buf[this.destination_dir_subpath.len + std.fs.path.sep_str.len + "package.json".len] = 0; - const package_json_path: [:0]u8 = this.destination_dir_subpath_buf[0 .. 
this.destination_dir_subpath.len + std.fs.path.sep_str.len + "package.json".len :0]; - defer this.destination_dir_subpath_buf[this.destination_dir_subpath.len] = 0; + } - var destination_dir = this.node_modules.openDir(root_node_modules_dir) catch return false; - defer { - if (std.fs.cwd().fd != destination_dir.fd) destination_dir.close(); - } + while (read > 0) : (read = package_json_file.read(remain).unwrap() catch return null) { + total += read; - var package_json_file = File.openat(destination_dir, package_json_path, bun.O.RDONLY, 0).unwrap() catch return false; - defer package_json_file.close(); + mutable.list.expandToCapacity(); + remain = mutable.list.items[total..]; - // Heuristic: most package.jsons will be less than 2048 bytes. - read = package_json_file.read(mutable.list.items[total..]).unwrap() catch return false; - var remain = mutable.list.items[@min(total, read)..]; - if (read > 0 and remain.len < 1024) { - mutable.growBy(4096) catch return false; - mutable.list.expandToCapacity(); + if (remain.len < 1024) { + mutable.growBy(4096) catch return null; } + mutable.list.expandToCapacity(); + remain = mutable.list.items[total..]; + } - while (read > 0) : (read = package_json_file.read(remain).unwrap() catch return false) { - total += read; - - mutable.list.expandToCapacity(); - remain = mutable.list.items[total..]; + // If it's not long enough to have {"name": "foo", "version": "1.2.0"}, there's no way it's valid + const minimum = if (resolution_tag == .workspace and this.package_version.len == 0) + // workspaces aren't required to have a version + "{\"name\":\"\"}".len + this.package_name.len() + else + "{\"name\":\"\",\"version\":\"\"}".len + this.package_name.len() + this.package_version.len; - if (remain.len < 1024) { - mutable.growBy(4096) catch return false; - } - mutable.list.expandToCapacity(); - remain = mutable.list.items[total..]; - } + if (total < minimum) return null; - // If it's not long enough to have {"name": "foo", "version": "1.2.0"}, there's no way it's valid - const minimum = if (resolution_tag == .workspace and this.package_version.len == 0) - // workspaces aren't required to have a version - "{\"name\":\"\"}".len + this.package_name.len - else - "{\"name\":\"\",\"version\":\"\"}".len + this.package_name.len + this.package_version.len; + return logger.Source.initPathString(bun.span(package_json_path), mutable.list.items[0..total]); + } - if (total < minimum) return false; + fn verifyPackageJSONNameAndVersion(this: *PackageInstall, root_node_modules_dir: std.fs.Dir, resolution_tag: Resolution.Tag, bin: *Bin) VerifyResult { + var body_pool = Npm.Registry.BodyPool.get(this.allocator); + var mutable: MutableString = body_pool.data; + defer { + body_pool.data = mutable; + Npm.Registry.BodyPool.release(body_pool); + } - break :brk logger.Source.initPathString(bun.span(package_json_path), mutable.list.items[0..total]); - }; + // Read the file + // Return false on any error. + // Don't keep it open while we're parsing the JSON. + // The longer the file stays open, the more likely it causes issues for + // other processes on Windows. 
+ const source = this.getInstalledPackageJsonSource(root_node_modules_dir, &mutable, resolution_tag) orelse return .{}; - var log = logger.Log.init(allocator); + var log = logger.Log.init(this.allocator); defer log.deinit(); initializeStore(); - var package_json_checker = JSON.PackageJSONVersionChecker.init(allocator, &source, &log) catch return false; - _ = package_json_checker.parseExpr() catch return false; - if (log.errors > 0 or !package_json_checker.has_found_name) return false; + var package_json_checker = JSON.PackageJSONVersionChecker.init( + this.allocator, + &source, + &log, + if (bin.isUnset()) .check_for_bin else .ignore_bin, + ) catch return .{}; + _ = package_json_checker.parseExpr(false, false) catch return .{}; + if (log.errors > 0 or !package_json_checker.has_found_name) return .{}; // workspaces aren't required to have a version - if (!package_json_checker.has_found_version and resolution_tag != .workspace) return false; + if (!package_json_checker.has_found_version and resolution_tag != .workspace) return .{}; const found_version = package_json_checker.found_version; @@ -1292,14 +1313,40 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type { } // If we didn't find any of these characters, there's no point in checking the version again. // it will never match. - return false; + return .{}; }; - if (!strings.eql(found_version[offset..], this.package_version)) return false; + if (!strings.eql(found_version[offset..], this.package_version)) return .{}; } // lastly, check the name. - return strings.eql(package_json_checker.found_name, this.package_name); + if (strings.eql(package_json_checker.found_name, this.package_name.slice(this.lockfile.buffers.string_bytes.items))) { + // only want to set bins if up-to-date + if (bin.isUnset() and package_json_checker.has_found_bin) { + var string_buf = this.lockfile.stringBuf(); + defer string_buf.apply(this.lockfile); + + switch (package_json_checker.found_bin) { + .bin => |expr| { + bin.* = Bin.parseAppend(this.lockfile.allocator, expr, &string_buf, &this.lockfile.buffers.extern_strings) catch bun.outOfMemory(); + }, + .dir => |expr| { + bin.* = Bin.parseAppendFromDirectories(this.lockfile.allocator, expr, &string_buf) catch bun.outOfMemory(); + }, + } + + return .{ + .valid = true, + .update_lockfile_pointers = true, + }; + } + + return .{ + .valid = true, + }; + } + + return .{}; } pub const Result = union(Tag) { @@ -2952,7 +2999,7 @@ pub const PackageManager = struct { pub const LifecycleScriptTimeLog = struct { const Entry = struct { - package_name: []const u8, + package_name: string, script_id: u8, // nanosecond duration @@ -4500,7 +4547,7 @@ pub const PackageManager = struct { if (this.lockfile.package_index.get(name_hash)) |index| { const resolutions: []Resolution = this.lockfile.packages.items(.resolution); switch (index) { - .PackageID => |existing_id| { + .id => |existing_id| { if (existing_id < resolutions.len) { const existing_resolution = resolutions[existing_id]; if (this.resolutionSatisfiesDependency(existing_resolution, version)) { @@ -4533,7 +4580,7 @@ pub const PackageManager = struct { } } }, - .PackageIDMultiple => |list| { + .ids => |list| { for (list.items) |existing_id| { if (existing_id < resolutions.len) { const existing_resolution = resolutions[existing_id]; @@ -4869,8 +4916,8 @@ pub const PackageManager = struct { .apply_patch_task = if (patch_name_and_version_hash) |h| brk: { const dep = dependency; const pkg_id = switch (this.lockfile.package_index.get(dep.name_hash) orelse @panic("Package not 
found")) { - .PackageID => |p| p, - .PackageIDMultiple => |ps| ps.items[0], // TODO is this correct + .id => |p| p, + .ids => |ps| ps.items[0], // TODO is this correct }; const patch_hash = this.lockfile.patched_dependencies.get(h).?.patchfileHash().?; const pt = PatchTask.newApplyPatchHash(this, pkg_id, patch_hash, h); @@ -4924,8 +4971,8 @@ pub const PackageManager = struct { .apply_patch_task = if (patch_name_and_version_hash) |h| brk: { const dep = this.lockfile.buffers.dependencies.items[dependency_id]; const pkg_id = switch (this.lockfile.package_index.get(dep.name_hash) orelse @panic("Package not found")) { - .PackageID => |p| p, - .PackageIDMultiple => |ps| ps.items[0], // TODO is this correct + .id => |p| p, + .ids => |ps| ps.items[0], // TODO is this correct }; const patch_hash = this.lockfile.patched_dependencies.get(h).?.patchfileHash().?; const pt = PatchTask.newApplyPatchHash(this, pkg_id, patch_hash, h); @@ -4980,9 +5027,9 @@ pub const PackageManager = struct { pub fn updateLockfileIfNeeded( manager: *PackageManager, - load_lockfile_result: Lockfile.LoadFromDiskResult, + load_result: Lockfile.LoadResult, ) !void { - if (load_lockfile_result == .ok and load_lockfile_result.ok.serializer_result.packages_need_update) { + if (load_result == .ok and load_result.ok.serializer_result.packages_need_update) { const slice = manager.lockfile.packages.slice(); for (slice.items(.meta)) |*meta| { // these are possibly updated later, but need to make sure non are zero @@ -6970,7 +7017,6 @@ pub const PackageManager = struct { // must be a variable due to global installs and bunx bin_path: stringZ = bun.pathLiteral("node_modules/.bin"), - lockfile_path: stringZ = Lockfile.default_filename, did_override_default_scope: bool = false, scope: Npm.Registry.Scope = undefined, @@ -7011,6 +7057,8 @@ pub const PackageManager = struct { ca: []const string = &.{}, ca_file_name: string = &.{}, + save_text_lockfile: bool = false, + pub const PublishConfig = struct { access: ?Access = null, tag: string = "", @@ -7397,6 +7445,8 @@ pub const PackageManager = struct { this.do.trust_dependencies_from_args = true; } + this.save_text_lockfile = cli.save_text_lockfile; + this.local_package_features.optional_dependencies = !cli.omit.optional; const disable_progress_bar = default_disable_progress_bar or cli.no_progress; @@ -8959,24 +9009,10 @@ pub const PackageManager = struct { ) -| std.time.s_per_day; if (root_dir.entries.hasComptimeQuery("bun.lockb")) { - var buf: bun.PathBuffer = undefined; - var parts = [_]string{ - "./bun.lockb", - }; - const lockfile_path = Path.joinAbsStringBuf( - Fs.FileSystem.instance.top_level_dir, - &buf, - &parts, - .auto, - ); - buf[lockfile_path.len] = 0; - const lockfile_path_z = buf[0..lockfile_path.len :0]; - - switch (manager.lockfile.loadFromDisk( + switch (manager.lockfile.loadFromCwd( manager, allocator, log, - lockfile_path_z, true, )) { .ok => |load| manager.lockfile = load.lockfile, @@ -9383,6 +9419,7 @@ pub const PackageManager = struct { clap.parseParam("--registry Use a specific registry by default, overriding .npmrc, bunfig.toml and environment variables") catch unreachable, clap.parseParam("--concurrent-scripts Maximum number of concurrent jobs for lifecycle scripts (default 5)") catch unreachable, clap.parseParam("--network-concurrency Maximum number of concurrent network requests (default 48)") catch unreachable, + clap.parseParam("--save-text-lockfile Save a text-based lockfile") catch unreachable, clap.parseParam("-h, --help Print this help menu") catch unreachable, 
}; @@ -9511,6 +9548,8 @@ pub const PackageManager = struct { ca: []const string = &.{}, ca_file_name: string = "", + save_text_lockfile: bool = false, + const PatchOpts = union(enum) { nothing: struct {}, patch: struct {}, @@ -9863,6 +9902,10 @@ pub const PackageManager = struct { }; } + if (args.flag("--save-text-lockfile")) { + cli.save_text_lockfile = true; + } + // commands that support --filter if (comptime subcommand.supportsWorkspaceFiltering()) { cli.filters = args.options("--filter"); @@ -10874,8 +10917,8 @@ pub const PackageManager = struct { } } - fn nodeModulesFolderForDependencyIDs(iterator: *Lockfile.Tree.Iterator, ids: []const IdPair) !?Lockfile.Tree.NodeModulesFolder { - while (iterator.nextNodeModulesFolder(null)) |node_modules| { + fn nodeModulesFolderForDependencyIDs(iterator: *Lockfile.Tree.Iterator(.node_modules), ids: []const IdPair) !?Lockfile.Tree.Iterator(.node_modules).Next { + while (iterator.next(null)) |node_modules| { for (ids) |id| { _ = std.mem.indexOfScalar(DependencyID, node_modules.dependencies, id[0]) orelse continue; return node_modules; @@ -10884,8 +10927,8 @@ pub const PackageManager = struct { return null; } - fn nodeModulesFolderForDependencyID(iterator: *Lockfile.Tree.Iterator, dependency_id: DependencyID) !?Lockfile.Tree.NodeModulesFolder { - while (iterator.nextNodeModulesFolder(null)) |node_modules| { + fn nodeModulesFolderForDependencyID(iterator: *Lockfile.Tree.Iterator(.node_modules), dependency_id: DependencyID) !?Lockfile.Tree.Iterator(.node_modules).Next { + while (iterator.next(null)) |node_modules| { _ = std.mem.indexOfScalar(DependencyID, node_modules.dependencies, dependency_id) orelse continue; return node_modules; } @@ -10897,11 +10940,11 @@ pub const PackageManager = struct { fn pkgInfoForNameAndVersion( lockfile: *Lockfile, - iterator: *Lockfile.Tree.Iterator, + iterator: *Lockfile.Tree.Iterator(.node_modules), pkg_maybe_version_to_patch: []const u8, name: []const u8, version: ?[]const u8, - ) struct { PackageID, Lockfile.Tree.NodeModulesFolder } { + ) struct { PackageID, Lockfile.Tree.Iterator(.node_modules).Next } { var sfb = std.heap.stackFallback(@sizeOf(IdPair) * 4, lockfile.allocator); var pairs = std.ArrayList(IdPair).initCapacity(sfb.get(), 8) catch bun.outOfMemory(); defer pairs.deinit(); @@ -11075,7 +11118,7 @@ pub const PackageManager = struct { const arg_kind: PatchArgKind = PatchArgKind.fromArg(argument); var folder_path_buf: bun.PathBuffer = undefined; - var iterator = Lockfile.Tree.Iterator.init(manager.lockfile); + var iterator = Lockfile.Tree.Iterator(.node_modules).init(manager.lockfile); var resolution_buf: [1024]u8 = undefined; var win_normalizer: if (bun.Environment.isWindows) bun.PathBuffer else struct {} = undefined; @@ -11115,7 +11158,7 @@ pub const PackageManager = struct { defer manager.allocator.free(package_json_source.contents); initializeStore(); - const json = JSON.parsePackageJSONUTF8AlwaysDecode(&package_json_source, manager.log, manager.allocator) catch |err| { + const json = JSON.parsePackageJSONUTF8(&package_json_source, manager.log, manager.allocator) catch |err| { manager.log.print(Output.errorWriter()) catch {}; Output.prettyErrorln("{s} parsing package.json in \"{s}\"", .{ @errorName(err), package_json_source.path.prettyDir() }); Global.crash(); @@ -11143,8 +11186,8 @@ pub const PackageManager = struct { ); Global.crash(); }) { - .PackageID => |id| lockfile.packages.get(id), - .PackageIDMultiple => |ids| id: { + .id => |id| lockfile.packages.get(id), + .ids => |ids| id: { for (ids.items) |id| 
{ const pkg = lockfile.packages.get(id); const resolution_label = std.fmt.bufPrint(&resolution_buf, "{}", .{pkg.resolution.fmt(lockfile.buffers.string_bytes.items, .posix)}) catch unreachable; @@ -11191,7 +11234,7 @@ pub const PackageManager = struct { }, .name_and_version => brk: { const pkg_maybe_version_to_patch = argument; - const name, const version = Dependency.splitNameAndVersion(pkg_maybe_version_to_patch); + const name, const version = Dependency.splitNameAndMaybeVersion(pkg_maybe_version_to_patch); const pkg_id, const folder = pkgInfoForNameAndVersion(manager.lockfile, &iterator, pkg_maybe_version_to_patch, name, version); const pkg = manager.lockfile.packages.get(pkg_id); @@ -11442,7 +11485,7 @@ pub const PackageManager = struct { var folder_path_buf: bun.PathBuffer = undefined; var lockfile: *Lockfile = try manager.allocator.create(Lockfile); defer lockfile.deinit(); - switch (lockfile.loadFromDisk(manager, manager.allocator, manager.log, manager.options.lockfile_path, true)) { + switch (lockfile.loadFromCwd(manager, manager.allocator, manager.log, true)) { .not_found => { Output.errGeneric("Cannot find lockfile. Install packages with `bun install` before patching them.", .{}); Global.crash(); @@ -11507,7 +11550,7 @@ pub const PackageManager = struct { }; defer root_node_modules.close(); - var iterator = Lockfile.Tree.Iterator.init(lockfile); + var iterator = Lockfile.Tree.Iterator(.node_modules).init(lockfile); var resolution_buf: [1024]u8 = undefined; const _cache_dir: std.fs.Dir, const _cache_dir_subpath: stringZ, const _changes_dir: []const u8, const _pkg: Package = switch (arg_kind) { .path => result: { @@ -11528,7 +11571,7 @@ pub const PackageManager = struct { defer manager.allocator.free(package_json_source.contents); initializeStore(); - const json = JSON.parsePackageJSONUTF8AlwaysDecode(&package_json_source, manager.log, manager.allocator) catch |err| { + const json = JSON.parsePackageJSONUTF8(&package_json_source, manager.log, manager.allocator) catch |err| { manager.log.print(Output.errorWriter()) catch {}; Output.prettyErrorln("{s} parsing package.json in \"{s}\"", .{ @errorName(err), package_json_source.path.prettyDir() }); Global.crash(); @@ -11556,8 +11599,8 @@ pub const PackageManager = struct { ); Global.crash(); }) { - .PackageID => |id| lockfile.packages.get(id), - .PackageIDMultiple => |ids| brk: { + .id => |id| lockfile.packages.get(id), + .ids => |ids| brk: { for (ids.items) |id| { const pkg = lockfile.packages.get(id); const resolution_label = std.fmt.bufPrint(&resolution_buf, "{}", .{pkg.resolution.fmt(lockfile.buffers.string_bytes.items, .posix)}) catch unreachable; @@ -11586,7 +11629,7 @@ pub const PackageManager = struct { break :result .{ cache_dir, cache_dir_subpath, changes_dir, actual_package }; }, .name_and_version => brk: { - const name, const version = Dependency.splitNameAndVersion(argument); + const name, const version = Dependency.splitNameAndMaybeVersion(argument); const pkg_id, const node_modules = pkgInfoForNameAndVersion(lockfile, &iterator, argument, name, version); const changes_dir = bun.path.joinZBuf(pathbuf[0..], &[_][]const u8{ @@ -12105,7 +12148,7 @@ pub const PackageManager = struct { lockfile: *Lockfile, progress: *Progress, - // relative paths from `nextNodeModulesFolder` will be copied into this list. + // relative paths from `next` will be copied into this list. 
node_modules: NodeModulesFolder, skip_verify_installed_version_number: bool, @@ -12116,13 +12159,14 @@ pub const PackageManager = struct { options: *const PackageManager.Options, metas: []const Lockfile.Package.Meta, names: []const String, - bins: []const Bin, + pkg_name_hashes: []const PackageNameHash, + bins: []Bin, resolutions: []Resolution, node: *Progress.Node, destination_dir_subpath_buf: bun.PathBuffer = undefined, folder_path_buf: bun.PathBuffer = undefined, successfully_installed: Bitset, - tree_iterator: *Lockfile.Tree.Iterator, + tree_iterator: *Lockfile.Tree.Iterator(.node_modules), command_ctx: Command.Context, current_tree_id: Lockfile.Tree.Id = Lockfile.Tree.invalid_id, @@ -12273,7 +12317,7 @@ pub const PackageManager = struct { } pub fn linkRemainingBins(this: *PackageInstaller, comptime log_level: Options.LogLevel) void { - var depth_buf: Lockfile.Tree.Iterator.DepthBuf = undefined; + var depth_buf: Lockfile.Tree.DepthBuf = undefined; var node_modules_rel_path_buf: bun.PathBuffer = undefined; @memcpy(node_modules_rel_path_buf[0.."node_modules".len], "node_modules"); @@ -12291,6 +12335,7 @@ pub const PackageManager = struct { @intCast(tree_id), &node_modules_rel_path_buf, &depth_buf, + .node_modules, ); this.node_modules.path.appendSlice(rel_path) catch bun.outOfMemory(); @@ -12374,7 +12419,7 @@ pub const PackageManager = struct { // packages upon completing the current tree for (tree.pending_installs.items) |context| { const package_id = resolutions[context.dependency_id]; - const name = lockfile.str(&this.names[package_id]); + const name = this.names[package_id]; const resolution = &this.resolutions[package_id]; this.node_modules.tree_id = context.tree_id; this.node_modules.path = context.path; @@ -12492,6 +12537,7 @@ pub const PackageManager = struct { var packages = this.lockfile.packages.slice(); this.metas = packages.items(.meta); this.names = packages.items(.name); + this.pkg_name_hashes = packages.items(.name_hash); this.bins = packages.items(.bin); this.resolutions = packages.items(.resolution); @@ -12514,21 +12560,21 @@ pub const PackageManager = struct { comptime log_level: Options.LogLevel, ) void { const package_id = this.lockfile.buffers.resolutions.items[dependency_id]; - const name = this.lockfile.str(&this.names[package_id]); + const name = this.names[package_id]; const resolution = &this.resolutions[package_id]; const task_id = switch (resolution.tag) { .git => Task.Id.forGitCheckout(data.url, data.resolved), .github => Task.Id.forTarball(data.url), .local_tarball => Task.Id.forTarball(this.lockfile.str(&resolution.value.local_tarball)), .remote_tarball => Task.Id.forTarball(this.lockfile.str(&resolution.value.remote_tarball)), - .npm => Task.Id.forNPMPackage(name, resolution.value.npm.version), + .npm => Task.Id.forNPMPackage(name.slice(this.lockfile.buffers.string_bytes.items), resolution.value.npm.version), else => unreachable, }; if (!this.installEnqueuedPackagesImpl(name, task_id, log_level)) { if (comptime Environment.allow_assert) { Output.panic("Ran callback to install enqueued packages, but there was no task associated with it. 
{}:{} (dependency_id: {d})", .{ - bun.fmt.quote(name), + bun.fmt.quote(name.slice(this.lockfile.buffers.string_bytes.items)), bun.fmt.quote(data.url), dependency_id, }); @@ -12538,7 +12584,7 @@ pub const PackageManager = struct { pub fn installEnqueuedPackagesImpl( this: *PackageInstaller, - name: []const u8, + name: String, task_id: Task.Id.Type, comptime log_level: Options.LogLevel, ) bool { @@ -12635,7 +12681,7 @@ pub const PackageManager = struct { } switch (resolution_tag) { - .git, .github, .gitlab, .root => { + .git, .github, .root => { inline for (Lockfile.Scripts.names) |script_name| { count += @intFromBool(!@field(scripts, script_name).isEmpty()); } @@ -12676,7 +12722,7 @@ pub const PackageManager = struct { dependency_id: DependencyID, package_id: PackageID, comptime log_level: Options.LogLevel, - name: string, + pkg_name: String, resolution: *const Resolution, // false when coming from download. if the package was downloaded @@ -12687,30 +12733,34 @@ pub const PackageManager = struct { // pending packages if we're already draining them. comptime is_pending_package_install: bool, ) void { - const buf = this.lockfile.buffers.string_bytes.items; - - const alias = this.lockfile.buffers.dependencies.items[dependency_id].name.slice(buf); + const alias = this.lockfile.buffers.dependencies.items[dependency_id].name; const destination_dir_subpath: [:0]u8 = brk: { - bun.copy(u8, &this.destination_dir_subpath_buf, alias); - this.destination_dir_subpath_buf[alias.len] = 0; - break :brk this.destination_dir_subpath_buf[0..alias.len :0]; + const alias_slice = alias.slice(this.lockfile.buffers.string_bytes.items); + bun.copy(u8, &this.destination_dir_subpath_buf, alias_slice); + this.destination_dir_subpath_buf[alias_slice.len] = 0; + break :brk this.destination_dir_subpath_buf[0..alias_slice.len :0]; }; + const pkg_name_hash = this.pkg_name_hashes[package_id]; + var resolution_buf: [512]u8 = undefined; const package_version = if (resolution.tag == .workspace) brk: { - if (this.manager.lockfile.workspace_versions.get(String.Builder.stringHash(name))) |workspace_version| { - break :brk std.fmt.bufPrint(&resolution_buf, "{}", .{workspace_version.fmt(buf)}) catch unreachable; + if (this.manager.lockfile.workspace_versions.get(pkg_name_hash)) |workspace_version| { + break :brk std.fmt.bufPrint(&resolution_buf, "{}", .{workspace_version.fmt(this.lockfile.buffers.string_bytes.items)}) catch unreachable; } // no version break :brk ""; - } else std.fmt.bufPrint(&resolution_buf, "{}", .{resolution.fmt(buf, .posix)}) catch unreachable; + } else std.fmt.bufPrint(&resolution_buf, "{}", .{resolution.fmt(this.lockfile.buffers.string_bytes.items, .posix)}) catch unreachable; const patch_patch, const patch_contents_hash, const patch_name_and_version_hash, const remove_patch = brk: { if (this.manager.lockfile.patched_dependencies.entries.len == 0 and this.manager.patched_dependencies_to_remove.entries.len == 0) break :brk .{ null, null, null, false }; var sfa = std.heap.stackFallback(1024, this.lockfile.allocator); const alloc = sfa.get(); - const name_and_version = std.fmt.allocPrint(alloc, "{s}@{s}", .{ name, package_version }) catch unreachable; + const name_and_version = std.fmt.allocPrint(alloc, "{s}@{s}", .{ + pkg_name.slice(this.lockfile.buffers.string_bytes.items), + package_version, + }) catch unreachable; defer alloc.free(name_and_version); const name_and_version_hash = String.Builder.stringHash(name_and_version); @@ -12745,7 +12795,7 @@ pub const PackageManager = struct { .destination_dir_subpath = 
destination_dir_subpath, .destination_dir_subpath_buf = &this.destination_dir_subpath_buf, .allocator = this.lockfile.allocator, - .package_name = name, + .package_name = pkg_name, .patch = if (patch_patch) |p| PackageInstall.Patch{ .patch_contents_hash = patch_contents_hash.?, .patch_path = p, @@ -12755,12 +12805,19 @@ pub const PackageManager = struct { .node_modules = &this.node_modules, .lockfile = this.lockfile, }; - debug("Installing {s}@{s}", .{ name, resolution.fmt(buf, .posix) }); + debug("Installing {s}@{s}", .{ + pkg_name.slice(this.lockfile.buffers.string_bytes.items), + resolution.fmt(this.lockfile.buffers.string_bytes.items, .posix), + }); const pkg_has_patch = !installer.patch.isNull(); switch (resolution.tag) { .npm => { - installer.cache_dir_subpath = this.manager.cachedNPMPackageFolderName(name, resolution.value.npm.version, patch_contents_hash); + installer.cache_dir_subpath = this.manager.cachedNPMPackageFolderName( + pkg_name.slice(this.lockfile.buffers.string_bytes.items), + resolution.value.npm.version, + patch_contents_hash, + ); installer.cache_dir = this.manager.getCacheDirectory(); }, .git => { @@ -12772,7 +12829,7 @@ pub const PackageManager = struct { installer.cache_dir = this.manager.getCacheDirectory(); }, .folder => { - const folder = resolution.value.folder.slice(buf); + const folder = resolution.value.folder.slice(this.lockfile.buffers.string_bytes.items); if (this.lockfile.isWorkspaceTreeId(this.current_tree_id)) { // Handle when a package depends on itself via file: @@ -12805,7 +12862,7 @@ pub const PackageManager = struct { installer.cache_dir = this.manager.getCacheDirectory(); }, .workspace => { - const folder = resolution.value.workspace.slice(buf); + const folder = resolution.value.workspace.slice(this.lockfile.buffers.string_bytes.items); // Handle when a package depends on itself if (folder.len == 0 or (folder.len == 1 and folder[0] == '.')) { installer.cache_dir_subpath = "."; @@ -12824,7 +12881,7 @@ pub const PackageManager = struct { const directory = this.manager.globalLinkDir() catch |err| { if (comptime log_level != .silent) { const fmt = "\nerror: unable to access global directory while installing {s}: {s}\n"; - const args = .{ name, @errorName(err) }; + const args = .{ pkg_name.slice(this.lockfile.buffers.string_bytes.items), @errorName(err) }; if (comptime log_level.showProgress()) { switch (Output.enable_ansi_colors) { @@ -12848,7 +12905,7 @@ pub const PackageManager = struct { return; }; - const folder = resolution.value.symlink.slice(buf); + const folder = resolution.value.symlink.slice(this.lockfile.buffers.string_bytes.items); if (folder.len == 0 or (folder.len == 1 and folder[0] == '.')) { installer.cache_dir_subpath = "."; @@ -12880,11 +12937,19 @@ pub const PackageManager = struct { }, } - const needs_install = this.force_install or this.skip_verify_installed_version_number or !needs_verify or remove_patch or !installer.verify( - resolution, - buf, - this.root_node_modules_folder, - ); + const needs_install = this.force_install or this.skip_verify_installed_version_number or !needs_verify or remove_patch or verify: { + const verified = installer.verify( + resolution, + this.root_node_modules_folder, + &this.bins[package_id], + ); + + if (verified.update_lockfile_pointers) { + this.fixCachedLockfilePackageSlices(); + } + + break :verify !verified.valid; + }; this.summary.skipped += @intFromBool(!needs_install); if (needs_install) { @@ -12904,7 +12969,7 @@ pub const PackageManager = struct { .git => { 
this.manager.enqueueGitForCheckout( dependency_id, - alias, + alias.slice(this.lockfile.buffers.string_bytes.items), resolution, context, patch_name_and_version_hash, @@ -12924,7 +12989,7 @@ pub const PackageManager = struct { .local_tarball => { this.manager.enqueueTarballForReading( dependency_id, - alias, + alias.slice(this.lockfile.buffers.string_bytes.items), resolution, context, ); @@ -12933,7 +12998,7 @@ pub const PackageManager = struct { this.manager.enqueueTarballForDownload( dependency_id, package_id, - resolution.value.remote_tarball.slice(buf), + resolution.value.remote_tarball.slice(this.lockfile.buffers.string_bytes.items), context, patch_name_and_version_hash, ); @@ -12943,16 +13008,19 @@ pub const PackageManager = struct { // Very old versions of Bun didn't store the tarball url when it didn't seem necessary // This caused bugs. We can't assert on it because they could come from old lockfiles if (resolution.value.npm.url.isEmpty()) { - Output.debugWarn("package {s}@{} missing tarball_url", .{ name, resolution.fmt(buf, .posix) }); + Output.debugWarn("package {s}@{} missing tarball_url", .{ + pkg_name.slice(this.lockfile.buffers.string_bytes.items), + resolution.fmt(this.lockfile.buffers.string_bytes.items, .posix), + }); } } this.manager.enqueuePackageForDownload( - name, + pkg_name.slice(this.lockfile.buffers.string_bytes.items), dependency_id, package_id, resolution.value.npm.version, - resolution.value.npm.url.slice(buf), + resolution.value.npm.url.slice(this.lockfile.buffers.string_bytes.items), context, patch_name_and_version_hash, ); @@ -13002,7 +13070,7 @@ pub const PackageManager = struct { var destination_dir = this.node_modules.makeAndOpenDir(this.root_node_modules_folder) catch |err| { if (log_level != .silent) { Output.err(err, "Failed to open node_modules folder for {s} in {s}", .{ - name, + pkg_name.slice(this.lockfile.buffers.string_bytes.items), bun.fmt.fmtPath(u8, this.node_modules.path.items, .{}), }); } @@ -13051,21 +13119,32 @@ pub const PackageManager = struct { this.node.completeOne(); } + if (this.bins[package_id].isUnset()) { + this.bins[package_id] = this.getPackageBin( + &installer, + pkg_name.slice(this.lockfile.buffers.string_bytes.items), + pkg_name_hash, + resolution, + ) catch |err| switch (err) { + error.OutOfMemory => bun.outOfMemory(), + }; + } + if (this.bins[package_id].tag != .none) { this.trees[this.current_tree_id].binaries.add(dependency_id) catch bun.outOfMemory(); } const dep = this.lockfile.buffers.dependencies.items[dependency_id]; - const name_hash: TruncatedPackageNameHash = @truncate(dep.name_hash); + const truncated_dep_name_hash: TruncatedPackageNameHash = @truncate(dep.name_hash); const is_trusted, const is_trusted_through_update_request = brk: { - if (this.trusted_dependencies_from_update_requests.contains(name_hash)) break :brk .{ true, true }; - if (this.lockfile.hasTrustedDependency(alias)) break :brk .{ true, false }; + if (this.trusted_dependencies_from_update_requests.contains(truncated_dep_name_hash)) break :brk .{ true, true }; + if (this.lockfile.hasTrustedDependency(alias.slice(this.lockfile.buffers.string_bytes.items))) break :brk .{ true, false }; break :brk .{ false, false }; }; if (resolution.tag != .root and (resolution.tag == .workspace or is_trusted)) { if (this.enqueueLifecycleScripts( - alias, + alias.slice(this.lockfile.buffers.string_bytes.items), log_level, destination_dir, package_id, @@ -13075,11 +13154,11 @@ pub const PackageManager = struct { if (is_trusted_through_update_request) { 
this.manager.trusted_deps_to_add_to_package_json.append( this.manager.allocator, - this.manager.allocator.dupe(u8, alias) catch bun.outOfMemory(), + this.manager.allocator.dupe(u8, alias.slice(this.lockfile.buffers.string_bytes.items)) catch bun.outOfMemory(), ) catch bun.outOfMemory(); if (this.lockfile.trusted_dependencies == null) this.lockfile.trusted_dependencies = .{}; - this.lockfile.trusted_dependencies.?.put(this.manager.allocator, name_hash, {}) catch bun.outOfMemory(); + this.lockfile.trusted_dependencies.?.put(this.manager.allocator, truncated_dep_name_hash, {}) catch bun.outOfMemory(); } } } @@ -13091,16 +13170,22 @@ pub const PackageManager = struct { else => if (!is_trusted and this.metas[package_id].hasInstallScript()) { // Check if the package actually has scripts. `hasInstallScript` can be false positive if a package is published with // an auto binding.gyp rebuild script but binding.gyp is excluded from the published files. - const count = this.getInstalledPackageScriptsCount(alias, package_id, resolution.tag, destination_dir, log_level); + const count = this.getInstalledPackageScriptsCount( + alias.slice(this.lockfile.buffers.string_bytes.items), + package_id, + resolution.tag, + destination_dir, + log_level, + ); if (count > 0) { if (comptime log_level.isVerbose()) { Output.prettyError("Blocked {d} scripts for: {s}@{}\n", .{ count, - alias, + alias.slice(this.lockfile.buffers.string_bytes.items), resolution.fmt(this.lockfile.buffers.string_bytes.items, .posix), }); } - const entry = this.summary.packages_with_blocked_scripts.getOrPut(this.manager.allocator, name_hash) catch bun.outOfMemory(); + const entry = this.summary.packages_with_blocked_scripts.getOrPut(this.manager.allocator, truncated_dep_name_hash) catch bun.outOfMemory(); if (!entry.found_existing) entry.value_ptr.* = 0; entry.value_ptr.* += count; } @@ -13121,7 +13206,7 @@ pub const PackageManager = struct { if (cause.err == error.DanglingSymlink) { Output.prettyErrorln( "error: {s} \"link:{s}\" not found (try running 'bun link' in the intended package's folder)", - .{ @errorName(cause.err), this.names[package_id].slice(buf) }, + .{ @errorName(cause.err), this.names[package_id].slice(this.lockfile.buffers.string_bytes.items) }, ); this.summary.fail += 1; } else if (cause.err == error.AccessDenied) { @@ -13137,7 +13222,7 @@ pub const PackageManager = struct { if (!Environment.isWindows) { const stat = bun.sys.fstat(bun.toFD(destination_dir)).unwrap() catch |err| { Output.err("EACCES", "Permission denied while installing {s}", .{ - this.names[package_id].slice(buf), + this.names[package_id].slice(this.lockfile.buffers.string_bytes.items), }); if (Environment.isDebug) { Output.err(err, "Failed to stat node_modules", .{}); @@ -13161,20 +13246,30 @@ pub const PackageManager = struct { } Output.err("EACCES", "Permission denied while installing {s}", .{ - this.names[package_id].slice(buf), + this.names[package_id].slice(this.lockfile.buffers.string_bytes.items), }); this.summary.fail += 1; } else { Output.prettyErrorln( "error: {s} installing {s} ({s})", - .{ @errorName(cause.err), this.names[package_id].slice(buf), install_result.fail.step.name() }, + .{ @errorName(cause.err), this.names[package_id].slice(this.lockfile.buffers.string_bytes.items), install_result.fail.step.name() }, ); this.summary.fail += 1; } }, } } else { + if (this.bins[package_id].isUnset()) { + this.bins[package_id] = this.getPackageBin( + &installer, + pkg_name.slice(this.lockfile.buffers.string_bytes.items), + pkg_name_hash, + resolution, + 
) catch |err| switch (err) { + error.OutOfMemory => bun.outOfMemory(), + }; + } if (this.bins[package_id].tag != .none) { this.trees[this.current_tree_id].binaries.add(dependency_id) catch bun.outOfMemory(); } @@ -13182,7 +13277,7 @@ pub const PackageManager = struct { var destination_dir = this.node_modules.makeAndOpenDir(this.root_node_modules_folder) catch |err| { if (log_level != .silent) { Output.err(err, "Failed to open node_modules folder for {s} in {s}", .{ - name, + pkg_name.slice(this.lockfile.buffers.string_bytes.items), bun.fmt.fmtPath(u8, this.node_modules.path.items, .{}), }); } @@ -13198,12 +13293,12 @@ pub const PackageManager = struct { defer if (!pkg_has_patch) this.incrementTreeInstallCount(this.current_tree_id, destination_dir, !is_pending_package_install, log_level); const dep = this.lockfile.buffers.dependencies.items[dependency_id]; - const name_hash: TruncatedPackageNameHash = @truncate(dep.name_hash); + const truncated_dep_name_hash: TruncatedPackageNameHash = @truncate(dep.name_hash); const is_trusted, const is_trusted_through_update_request, const add_to_lockfile = brk: { // trusted through a --trust dependency. need to enqueue scripts, write to package.json, and add to lockfile - if (this.trusted_dependencies_from_update_requests.contains(name_hash)) break :brk .{ true, true, true }; + if (this.trusted_dependencies_from_update_requests.contains(truncated_dep_name_hash)) break :brk .{ true, true, true }; - if (this.manager.summary.added_trusted_dependencies.get(name_hash)) |should_add_to_lockfile| { + if (this.manager.summary.added_trusted_dependencies.get(truncated_dep_name_hash)) |should_add_to_lockfile| { // is a new trusted dependency. need to enqueue scripts and maybe add to lockfile break :brk .{ true, false, should_add_to_lockfile }; } @@ -13212,7 +13307,7 @@ pub const PackageManager = struct { if (resolution.tag != .root and is_trusted) { if (this.enqueueLifecycleScripts( - alias, + alias.slice(this.lockfile.buffers.string_bytes.items), log_level, destination_dir, package_id, @@ -13222,19 +13317,82 @@ pub const PackageManager = struct { if (is_trusted_through_update_request) { this.manager.trusted_deps_to_add_to_package_json.append( this.manager.allocator, - this.manager.allocator.dupe(u8, alias) catch bun.outOfMemory(), + this.manager.allocator.dupe(u8, alias.slice(this.lockfile.buffers.string_bytes.items)) catch bun.outOfMemory(), ) catch bun.outOfMemory(); } if (add_to_lockfile) { if (this.lockfile.trusted_dependencies == null) this.lockfile.trusted_dependencies = .{}; - this.lockfile.trusted_dependencies.?.put(this.manager.allocator, name_hash, {}) catch bun.outOfMemory(); + this.lockfile.trusted_dependencies.?.put(this.manager.allocator, truncated_dep_name_hash, {}) catch bun.outOfMemory(); } } } } } + fn getPackageBin( + this: *PackageInstaller, + installer: *PackageInstall, + pkg_name: string, + pkg_name_hash: PackageNameHash, + resolution: *const Resolution, + ) OOM!Bin { + defer this.fixCachedLockfilePackageSlices(); + + if (resolution.tag == .npm) { + var expired = false; + if (this.manager.manifests.byNameHashAllowExpired( + this.manager, + this.manager.scopeForPackageName(pkg_name), + pkg_name_hash, + &expired, + .load_from_memory_fallback_to_disk, + )) |manifest| { + if (manifest.findByVersion(resolution.value.npm.version)) |find| { + return find.package.bin.cloneAppend(manifest.string_buf, manifest.extern_strings_bin_entries, this.lockfile); + } + } + } + + // get it from package.json + var body_pool = 
Npm.Registry.BodyPool.get(this.lockfile.allocator); + var mutable = body_pool.data; + defer { + body_pool.data = mutable; + Npm.Registry.BodyPool.release(body_pool); + } + + const source = installer.getInstalledPackageJsonSource(this.root_node_modules_folder, &mutable, resolution.tag) orelse return .{}; + + initializeStore(); + + var log = logger.Log.init(this.lockfile.allocator); + defer log.deinit(); + + var bin_finder = JSON.PackageJSONVersionChecker.init( + this.lockfile.allocator, + &source, + &log, + .only_bin, + ) catch return .{}; + _ = bin_finder.parseExpr(false, false) catch return .{}; + + if (bin_finder.has_found_bin) { + var string_buf = this.lockfile.stringBuf(); + defer { + string_buf.apply(this.lockfile); + this.fixCachedLockfilePackageSlices(); + } + + return switch (bin_finder.found_bin) { + .bin => |bin| try Bin.parseAppend(this.lockfile.allocator, bin, &string_buf, &this.lockfile.buffers.extern_strings), + .dir => |dir| try Bin.parseAppendFromDirectories(this.lockfile.allocator, dir, &string_buf), + }; + } + + return .{}; + } + // returns true if scripts are enqueued fn enqueueLifecycleScripts( this: *PackageInstaller, @@ -13340,7 +13498,7 @@ pub const PackageManager = struct { return; } - const name = this.lockfile.str(&this.names[package_id]); + const name = this.names[package_id]; const resolution = &this.resolutions[package_id]; const needs_verify = true; @@ -13613,7 +13771,7 @@ pub const PackageManager = struct { }; { - var iterator = Lockfile.Tree.Iterator.init(this.lockfile); + var iterator = Lockfile.Tree.Iterator(.node_modules).init(this.lockfile); if (comptime Environment.isPosix) { Bin.Linker.ensureUmask(); } @@ -13706,6 +13864,7 @@ pub const PackageManager = struct { .bins = parts.items(.bin), .root_node_modules_folder = node_modules_folder, .names = parts.items(.name), + .pkg_name_hashes = parts.items(.name_hash), .resolutions = parts.items(.resolution), .lockfile = this.lockfile, .node = &install_node, @@ -13753,7 +13912,7 @@ pub const PackageManager = struct { defer installer.deinit(); - while (iterator.nextNodeModulesFolder(&installer.completed_trees)) |node_modules| { + while (iterator.next(&installer.completed_trees)) |node_modules| { installer.node_modules.path.items.len = strings.withoutTrailingSlash(FileSystem.instance.top_level_dir).len + 1; try installer.node_modules.path.appendSlice(node_modules.relative_path); installer.node_modules.tree_id = node_modules.tree_id; @@ -13967,7 +14126,7 @@ pub const PackageManager = struct { const buf = this.lockfile.buffers.string_bytes.items; // need to clone because this is a copy before Lockfile.cleanWithLogger - const name = this.allocator.dupe(u8, root_package.name.slice(buf)) catch bun.outOfMemory(); + const name = root_package.name.slice(buf); const top_level_dir_without_trailing_slash = strings.withoutTrailingSlash(FileSystem.instance.top_level_dir); if (root_package.scripts.hasAny()) { @@ -14012,24 +14171,24 @@ pub const PackageManager = struct { bun.dns.internal.prefetch(manager.event_loop.loop(), hostname); } - var load_lockfile_result: Lockfile.LoadFromDiskResult = if (manager.options.do.load_lockfile) - manager.lockfile.loadFromDisk( + var load_result: Lockfile.LoadResult = if (manager.options.do.load_lockfile) + manager.lockfile.loadFromCwd( manager, manager.allocator, manager.log, - manager.options.lockfile_path, true, ) else .{ .not_found = {} }; - try manager.updateLockfileIfNeeded(load_lockfile_result); + try manager.updateLockfileIfNeeded(load_result); var root = Lockfile.Package{}; - var 
needs_new_lockfile = load_lockfile_result != .ok or - (load_lockfile_result.ok.lockfile.buffers.dependencies.items.len == 0 and manager.update_requests.len > 0); + var needs_new_lockfile = load_result != .ok or + (load_result.ok.lockfile.buffers.dependencies.items.len == 0 and manager.update_requests.len > 0); - manager.options.enable.force_save_lockfile = manager.options.enable.force_save_lockfile or (load_lockfile_result == .ok and load_lockfile_result.ok.was_migrated); + manager.options.enable.force_save_lockfile = manager.options.enable.force_save_lockfile or + (load_result == .ok and (load_result.ok.was_migrated or (load_result.ok.format == .binary and manager.options.save_text_lockfile))); // this defaults to false // but we force allowing updates to the lockfile when you do bun add @@ -14039,32 +14198,32 @@ pub const PackageManager = struct { // Step 2. Parse the package.json file const root_package_json_source = logger.Source.initPathString(package_json_cwd, root_package_json_contents); - switch (load_lockfile_result) { + switch (load_result) { .err => |cause| { if (log_level != .silent) { switch (cause.step) { - .open_file => Output.prettyError("error opening lockfile: {s}\n", .{ - @errorName(cause.value), + .open_file => Output.err(cause.value, "failed to open lockfile: '{s}'", .{ + cause.lockfile_path, }), - .parse_file => Output.prettyError("error parsing lockfile: {s}\n", .{ - @errorName(cause.value), + .parse_file => Output.err(cause.value, "failed to parse lockfile: '{s}'", .{ + cause.lockfile_path, }), - .read_file => Output.prettyError("error reading lockfile: {s}\n", .{ - @errorName(cause.value), + .read_file => Output.err(cause.value, "failed to read lockfile: '{s}'", .{ + cause.lockfile_path, }), - .migrating => Output.prettyError("error migrating lockfile: {s}\n", .{ - @errorName(cause.value), + .migrating => Output.err(cause.value, "failed to migrate lockfile: '{s}'", .{ + cause.lockfile_path, }), } - if (manager.options.enable.fail_early) { - Output.prettyError("failed to load lockfile\n", .{}); - } else { - Output.prettyError("ignoring lockfile\n", .{}); + if (!manager.options.enable.fail_early) { + Output.printErrorln("", .{}); + Output.warn("Ignoring lockfile", .{}); } if (ctx.log.errors > 0) { try manager.log.print(Output.errorWriter()); + manager.log.reset(); } Output.flush(); } @@ -14111,7 +14270,7 @@ pub const PackageManager = struct { } } differ: { - root = load_lockfile_result.ok.lockfile.rootPackage() orelse { + root = load_result.ok.lockfile.rootPackage() orelse { needs_new_lockfile = true; break :differ; }; @@ -14352,7 +14511,7 @@ pub const PackageManager = struct { root = .{}; manager.lockfile.initEmpty(manager.allocator); - if (manager.options.enable.frozen_lockfile and load_lockfile_result != .not_found) { + if (manager.options.enable.frozen_lockfile and load_result != .not_found) { if (comptime log_level != .silent) { Output.prettyErrorln("error: lockfile had changes, but lockfile is frozen", .{}); } @@ -14592,7 +14751,7 @@ pub const PackageManager = struct { const packages_len_before_install = manager.lockfile.packages.len; - if (manager.options.enable.frozen_lockfile and load_lockfile_result != .not_found) { + if (manager.options.enable.frozen_lockfile and load_result != .not_found) { if (manager.lockfile.hasMetaHashChanged(PackageManager.verbose_install or manager.options.do.print_meta_hash_string, packages_len_before_install) catch false) { if (comptime log_level != .silent) { Output.prettyErrorln("error: lockfile had changes, but lockfile is 
frozen", .{}); @@ -14631,15 +14790,21 @@ pub const PackageManager = struct { // this will handle new trusted dependencies added through --trust manager.update_requests.len > 0 or - (load_lockfile_result == .ok and load_lockfile_result.ok.serializer_result.packages_need_update); + (load_result == .ok and load_result.ok.serializer_result.packages_need_update); // It's unnecessary work to re-save the lockfile if there are no changes if (manager.options.do.save_lockfile and (should_save_lockfile or manager.lockfile.isEmpty() or manager.options.enable.force_save_lockfile)) save: { if (manager.lockfile.isEmpty()) { - if (!manager.options.dry_run) { - std.fs.cwd().deleteFileZ(manager.options.lockfile_path) catch |err| brk: { + if (!manager.options.dry_run) delete: { + const delete_format = switch (load_result) { + .not_found => break :delete, + .err => |err| err.format, + .ok => |ok| ok.format, + }; + + std.fs.cwd().deleteFileZ(if (delete_format == .text) "bun.lock" else "bun.lockb") catch |err| brk: { // we don't care if (err == error.FileNotFound) { if (had_any_diffs) break :save; @@ -14672,7 +14837,15 @@ pub const PackageManager = struct { manager.progress.refresh(); } - manager.lockfile.saveToDisk(manager.options.lockfile_path, manager.options.log_level.isVerbose()); + const save_format: Lockfile.LoadResult.LockfileFormat = if (manager.options.save_text_lockfile) + .text + else switch (load_result) { + .not_found => .binary, + .err => |err| err.format, + .ok => |ok| ok.format, + }; + + manager.lockfile.saveToDisk(save_format, manager.options.log_level.isVerbose()); if (comptime Environment.allow_assert) { if (manager.lockfile.hasMetaHashChanged(false, packages_len_before_install) catch false) { @@ -15001,6 +15174,11 @@ pub const bun_install_js_bindings = struct { const cwd = try args[0].toSliceOrNull(globalObject); defer cwd.deinit(); + var dir = bun.openDirAbsolute(cwd.slice()) catch |err| { + return globalObject.throw("failed to open: {s}, '{s}'", .{ @errorName(err), cwd.slice() }); + }; + defer dir.close(); + const lockfile_path = Path.joinAbsStringZ(cwd.slice(), &[_]string{"bun.lockb"}, .auto); var lockfile: Lockfile = undefined; @@ -15012,14 +15190,14 @@ pub const bun_install_js_bindings = struct { // as long as we aren't migration from `package-lock.json`, leaving this undefined is okay const manager = globalObject.bunVM().bundler.resolver.getPackageManager(); - const load_result: Lockfile.LoadFromDiskResult = lockfile.loadFromDisk(manager, allocator, &log, lockfile_path, true); + const load_result: Lockfile.LoadResult = lockfile.loadFromDir(bun.toFD(dir), manager, allocator, &log, true); switch (load_result) { .err => |err| { - return globalObject.throw("failed to load lockfile: {s}, \"{s}\"", .{ @errorName(err.value), lockfile_path }); + return globalObject.throw("failed to load lockfile: {s}, '{s}'", .{ @errorName(err.value), lockfile_path }); }, .not_found => { - return globalObject.throw("lockfile not found: \"{s}\"", .{lockfile_path}); + return globalObject.throw("lockfile not found: '{s}'", .{lockfile_path}); }, .ok => {}, } diff --git a/src/install/integrity.zig b/src/install/integrity.zig index 726f824ee4a537..4e8f47d90e543b 100644 --- a/src/install/integrity.zig +++ b/src/install/integrity.zig @@ -66,7 +66,7 @@ pub const Integrity = extern struct { return integrity; } - pub fn parse(buf: []const u8) !Integrity { + pub fn parse(buf: []const u8) Integrity { if (buf.len < "sha256-".len) { return Integrity{ .tag = Tag.unknown, diff --git a/src/install/lifecycle_script_runner.zig 
b/src/install/lifecycle_script_runner.zig index 3900e790e8d35c..c2b2a089e5c1d7 100644 --- a/src/install/lifecycle_script_runner.zig +++ b/src/install/lifecycle_script_runner.zig @@ -10,11 +10,13 @@ const Global = bun.Global; const JSC = bun.JSC; const WaiterThread = bun.spawn.WaiterThread; const Timer = std.time.Timer; +const String = bun.Semver.String; +const string = bun.string; const Process = bun.spawn.Process; const log = Output.scoped(.Script, false); pub const LifecycleScriptSubprocess = struct { - package_name: []const u8, + package_name: string, scripts: Lockfile.Package.Scripts.List, current_script_index: u8 = 0, diff --git a/src/install/lockfile.zig b/src/install/lockfile.zig index bf93c609326211..eeb196159f6660 100644 --- a/src/install/lockfile.zig +++ b/src/install/lockfile.zig @@ -12,6 +12,8 @@ const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; const C = bun.C; const JSAst = bun.JSAst; +const TextLockfile = @import("./bun.lock.zig"); +const OOM = bun.OOM; const JSLexer = bun.js_lexer; const logger = bun.logger; @@ -75,6 +77,7 @@ const ExternalStringMap = Install.ExternalStringMap; const Features = Install.Features; const initializeStore = Install.initializeStore; const invalid_package_id = Install.invalid_package_id; +const invalid_dependency_id = Install.invalid_dependency_id; const Origin = Install.Origin; const PackageID = Install.PackageID; const PackageInstall = Install.PackageInstall; @@ -129,6 +132,8 @@ const GlobWalker = bun.glob.GlobWalker_(ignoredWorkspacePaths, bun.glob.SyscallA /// The version of the lockfile format, intended to prevent data corruption for format changes. format: FormatVersion = FormatVersion.current, +text_lockfile_version: TextLockfile.Version = .v0, + meta_hash: MetaHash = zero_hash, packages: Lockfile.Package.List = .{}, @@ -209,63 +214,180 @@ pub fn isEmpty(this: *const Lockfile) bool { return this.packages.len == 0 or (this.packages.len == 1 and this.packages.get(0).resolutions.len == 0); } -pub const LoadFromDiskResult = union(enum) { +pub const LoadResult = union(enum) { not_found: void, err: struct { step: Step, value: anyerror, + lockfile_path: stringZ, + format: LockfileFormat, }, ok: struct { lockfile: *Lockfile, + loaded_from_binary_lockfile: bool, was_migrated: bool = false, serializer_result: Serializer.SerializerLoadResult, + format: LockfileFormat, }, + pub const LockfileFormat = enum { + text, + binary, + + pub fn filename(this: LockfileFormat) stringZ { + return switch (this) { + .text => "bun.lock", + .binary => "bun.lockb", + }; + } + }; + pub const Step = enum { open_file, read_file, parse_file, migrating }; }; -pub fn loadFromDisk( +pub fn loadFromCwd( this: *Lockfile, manager: ?*PackageManager, allocator: Allocator, log: *logger.Log, - filename: stringZ, comptime attempt_loading_from_other_lockfile: bool, -) LoadFromDiskResult { +) LoadResult { + return loadFromDir(this, bun.FD.cwd(), manager, allocator, log, attempt_loading_from_other_lockfile); +} + +pub fn loadFromDir( + this: *Lockfile, + dir: bun.FD, + manager: ?*PackageManager, + allocator: Allocator, + log: *logger.Log, + comptime attempt_loading_from_other_lockfile: bool, +) LoadResult { if (comptime Environment.allow_assert) assert(FileSystem.instance_loaded); - const buf = (if (filename.len > 0) - File.readFrom(std.fs.cwd(), filename, allocator).unwrap() - else - File.from(std.io.getStdIn()).readToEnd(allocator).unwrap()) catch |err| { - return switch (err) { - error.EACCESS, error.EPERM, error.ENOENT => { - if (comptime 
attempt_loading_from_other_lockfile) { - if (manager) |pm| { - // Attempt to load from "package-lock.json", "yarn.lock", etc. - return migration.detectAndLoadOtherLockfile( - this, - pm, - allocator, - log, - filename, - ); + var lockfile_format: LoadResult.LockfileFormat = .text; + const file = File.openat(dir, "bun.lock", bun.O.RDONLY, 0).unwrap() catch |text_open_err| file: { + if (text_open_err != error.ENOENT) { + return .{ .err = .{ + .step = .open_file, + .value = text_open_err, + .lockfile_path = "bun.lock", + .format = .text, + } }; + } + + lockfile_format = .binary; + + break :file File.openat(dir, "bun.lockb", bun.O.RDONLY, 0).unwrap() catch |binary_open_err| { + if (binary_open_err != error.ENOENT) { + return .{ .err = .{ + .step = .open_file, + .value = binary_open_err, + .lockfile_path = "bun.lockb", + .format = .binary, + } }; + } + + if (comptime attempt_loading_from_other_lockfile) { + if (manager) |pm| { + const migrate_result = migration.detectAndLoadOtherLockfile( + this, + dir, + pm, + allocator, + log, + ); + + if (migrate_result == .ok) { + lockfile_format = .text; } + + return migrate_result; } + } - return LoadFromDiskResult{ - .err = .{ .step = .open_file, .value = err }, - }; - }, - error.EINVAL, error.ENOTDIR, error.EISDIR => LoadFromDiskResult{ .not_found = {} }, - else => LoadFromDiskResult{ .err = .{ .step = .open_file, .value = err } }, + return .not_found; }; }; - return this.loadFromBytes(manager, buf, allocator, log); + const buf = file.readToEnd(allocator).unwrap() catch |err| { + return .{ .err = .{ + .step = .read_file, + .value = err, + .lockfile_path = if (lockfile_format == .text) "bun.lock" else "bun.lockb", + .format = lockfile_format, + } }; + }; + + if (lockfile_format == .text) { + const source = logger.Source.initPathString("bun.lock", buf); + const json = JSON.parsePackageJSONUTF8(&source, log, allocator) catch |err| { + return .{ + .err = .{ + .step = .parse_file, + .value = err, + .lockfile_path = "bun.lock", + .format = lockfile_format, + }, + }; + }; + + TextLockfile.parseIntoBinaryLockfile(this, allocator, json, &source, log, manager) catch |err| { + switch (err) { + error.OutOfMemory => bun.outOfMemory(), + else => { + return .{ + .err = .{ + .step = .parse_file, + .value = err, + .lockfile_path = "bun.lock", + .format = lockfile_format, + }, + }; + }, + } + }; + + return .{ + .ok = .{ + .lockfile = this, + .serializer_result = .{}, + .loaded_from_binary_lockfile = false, + .format = lockfile_format, + }, + }; + } + + const result = this.loadFromBytes(manager, buf, allocator, log); + + switch (result) { + .ok => { + if (bun.getenvZ("BUN_DEBUG_TEST_TEXT_LOCKFILE") != null) { + + // Convert the loaded binary lockfile into a text lockfile in memory, then + // parse it back into a binary lockfile. 
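The `BUN_DEBUG_TEST_TEXT_LOCKFILE` hook below enforces a round-trip invariant: anything loaded from a binary lockfile must survive stringify-then-parse through the text format without losing information. The same property is easy to check in isolation; here is a self-contained sketch with `std.json` standing in for the lockfile stringifier and parser (the `Pkg` struct is made up for illustration):

    const std = @import("std");

    const Pkg = struct { name: []const u8, version: []const u8 };

    test "text format round-trips" {
        const allocator = std.testing.allocator;
        const original: Pkg = .{ .name = "jquery", .version = "4.0.0" };

        // Serialize to the text form...
        const text = try std.json.stringifyAlloc(allocator, original, .{});
        defer allocator.free(text);

        // ...parse it back...
        const parsed = try std.json.parseFromSlice(Pkg, allocator, text, .{});
        defer parsed.deinit();

        // ...and expect an equivalent structure. Any divergence means the
        // text format dropped information.
        try std.testing.expectEqualStrings(original.name, parsed.value.name);
        try std.testing.expectEqualStrings(original.version, parsed.value.version);
    }
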
+ const text_lockfile_bytes = TextLockfile.Stringifier.saveFromBinary(allocator, result.ok.lockfile) catch |err| { + Output.panic("failed to convert binary lockfile to text lockfile: {s}", .{@errorName(err)}); + }; + + const source = logger.Source.initPathString("bun.lock", text_lockfile_bytes); + const json = JSON.parsePackageJSONUTF8(&source, log, allocator) catch |err| { + Output.panic("failed to print valid json from binary lockfile: {s}", .{@errorName(err)}); + }; + + TextLockfile.parseIntoBinaryLockfile(this, allocator, json, &source, log, manager) catch |err| { + Output.panic("failed to parse text lockfile converted from binary lockfile: {s}", .{@errorName(err)}); + }; + } + }, + else => {}, + } + + return result; }

-pub fn loadFromBytes(this: *Lockfile, pm: ?*PackageManager, buf: []u8, allocator: Allocator, log: *logger.Log) LoadFromDiskResult { +pub fn loadFromBytes(this: *Lockfile, pm: ?*PackageManager, buf: []u8, allocator: Allocator, log: *logger.Log) LoadResult { var stream = Stream{ .buffer = buf, .pos = 0 };

 this.format = FormatVersion.current; @@ -277,17 +399,19 @@ this.patched_dependencies = .{};

 const load_result = Lockfile.Serializer.load(this, &stream, allocator, log, pm) catch |err| { - return LoadFromDiskResult{ .err = .{ .step = .parse_file, .value = err } }; + return LoadResult{ .err = .{ .step = .parse_file, .value = err, .lockfile_path = "bun.lockb", .format = .binary } }; };

 if (Environment.allow_assert) { this.verifyData() catch @panic("lockfile data is corrupt"); }

 - return LoadFromDiskResult{ + return LoadResult{ .ok = .{ .lockfile = this, .serializer_result = load_result, + .loaded_from_binary_lockfile = true, + .format = .binary, }, }; } @@ -299,7 +423,13 @@ pub const InstallResult = struct { pub const Tree = struct { id: Id = invalid_id, - dependency_id: DependencyID = invalid_package_id, + + // Should not be used for anything other than the folder name, + // through `folderName()`. There is no guarantee the dependency + // id chosen for a tree node has the same behavior or the same + // version literal as the other packages hoisted into it. + dependency_id: DependencyID = invalid_dependency_id, + parent: Id = invalid_id, dependencies: Lockfile.DependencyIDSlice = .{}, @@ -309,6 +439,12 @@ pub const Tree = struct { pub const List = std.ArrayListUnmanaged(Tree); pub const Id = u32; + pub fn folderName(this: *const Tree, deps: []const Dependency, buf: string) string { + const dep_id = this.dependency_id; + if (dep_id == invalid_dependency_id) return ""; + return deps[dep_id].name.slice(buf); + } + pub fn toExternal(this: Tree) External { var out = External{}; out[0..4].* = @as(Id, @bitCast(this.id)); @@ -340,89 +476,105 @@ pub const Tree = struct { const SubtreeError = error{ OutOfMemory, DependencyLoop }; - pub const NodeModulesFolder = struct { - relative_path: stringZ, - dependencies: []const DependencyID, - tree_id: Tree.Id, + // max number of node_modules folders + pub const max_depth = (bun.MAX_PATH_BYTES / "node_modules".len) + 1; - /// depth of the node_modules folder in the tree - /// - /// 0 (./node_modules) - /// / \ - /// 1 1 - /// / - /// 2 - depth: usize, + pub const DepthBuf = [max_depth]Id; + + const IteratorPathStyle = enum { + /// `relative_path` will have the form `node_modules/jquery/node_modules/zod`. + /// Path separators are platform-specific. + node_modules, + /// `relative_path` will have the form `jquery/zod`. Path separators are always + /// posix separators. 
+ pkg_path, }; - // max number of node_modules folders - pub const max_depth = (bun.MAX_PATH_BYTES / "node_modules".len) + 1; + pub fn Iterator(comptime path_style: IteratorPathStyle) type { + return struct { + tree_id: Id, + path_buf: bun.PathBuffer = undefined, - pub const Iterator = struct { - tree_id: Id, - path_buf: bun.PathBuffer = undefined, - last_parent: Id = invalid_id, + lockfile: *const Lockfile, - lockfile: *const Lockfile, + depth_stack: DepthBuf = undefined, - depth_stack: DepthBuf = undefined, + pub fn init(lockfile: *const Lockfile) @This() { + var iter: @This() = .{ + .tree_id = 0, + .lockfile = lockfile, + }; + if (comptime path_style == .node_modules) { + @memcpy(iter.path_buf[0.."node_modules".len], "node_modules"); + } + return iter; + } - pub const DepthBuf = [max_depth]Id; + pub fn reset(this: *@This()) void { + this.tree_id = 0; + } - pub fn init(lockfile: *const Lockfile) Iterator { - var iter = Iterator{ - .tree_id = 0, - .lockfile = lockfile, - }; - @memcpy(iter.path_buf[0.."node_modules".len], "node_modules"); - return iter; - } + pub const Next = struct { + relative_path: stringZ, + dependencies: []const DependencyID, + tree_id: Tree.Id, - pub fn reset(this: *Iterator) void { - this.tree_id = 0; - } + /// depth of the node_modules folder in the tree + /// + /// 0 (./node_modules) + /// / \ + /// 1 1 + /// / + /// 2 + depth: usize, + }; - pub fn nextNodeModulesFolder(this: *Iterator, completed_trees: ?*Bitset) ?NodeModulesFolder { - const trees = this.lockfile.buffers.trees.items; + pub fn next(this: *@This(), completed_trees: if (path_style == .node_modules) ?*Bitset else void) ?Next { + const trees = this.lockfile.buffers.trees.items; - if (this.tree_id >= trees.len) return null; + if (this.tree_id >= trees.len) return null; - while (trees[this.tree_id].dependencies.len == 0) { - if (completed_trees) |_completed_trees| { - _completed_trees.set(this.tree_id); + while (trees[this.tree_id].dependencies.len == 0) { + if (comptime path_style == .node_modules) { + if (completed_trees) |_completed_trees| { + _completed_trees.set(this.tree_id); + } + } + this.tree_id += 1; + if (this.tree_id >= trees.len) return null; } - this.tree_id += 1; - if (this.tree_id >= trees.len) return null; - } - const current_tree_id = this.tree_id; - const tree = trees[current_tree_id]; - const tree_dependencies = tree.dependencies.get(this.lockfile.buffers.hoisted_dependencies.items); + const current_tree_id = this.tree_id; + const tree = trees[current_tree_id]; + const tree_dependencies = tree.dependencies.get(this.lockfile.buffers.hoisted_dependencies.items); - const relative_path, const depth = relativePathAndDepth( - this.lockfile, - current_tree_id, - &this.path_buf, - &this.depth_stack, - ); + const relative_path, const depth = relativePathAndDepth( + this.lockfile, + current_tree_id, + &this.path_buf, + &this.depth_stack, + path_style, + ); - this.tree_id += 1; + this.tree_id += 1; - return .{ - .relative_path = relative_path, - .dependencies = tree_dependencies, - .tree_id = current_tree_id, - .depth = depth, - }; - } - }; + return .{ + .relative_path = relative_path, + .dependencies = tree_dependencies, + .tree_id = current_tree_id, + .depth = depth, + }; + } + }; + } /// Returns relative path and the depth of the tree pub fn relativePathAndDepth( lockfile: *const Lockfile, tree_id: Id, path_buf: *bun.PathBuffer, - depth_buf: *Iterator.DepthBuf, + depth_buf: *DepthBuf, + comptime path_style: IteratorPathStyle, ) struct { stringZ, usize } { const trees = 
lockfile.buffers.trees.items; var depth: usize = 0; @@ -430,7 +582,10 @@ pub const Tree = struct { const tree = trees[tree_id]; var parent_id = tree.id; - var path_written: usize = "node_modules".len; + var path_written: usize = switch (comptime path_style) { + .node_modules => "node_modules".len, + .pkg_path => 0, + }; depth_buf[0] = 0; @@ -449,16 +604,25 @@ pub const Tree = struct { depth = depth_buf_len; while (depth_buf_len > 0) : (depth_buf_len -= 1) { - path_buf[path_written] = std.fs.path.sep; - path_written += 1; + if (comptime path_style == .pkg_path) { + if (depth_buf_len != depth) { + path_buf[path_written] = '/'; + path_written += 1; + } + } else { + path_buf[path_written] = std.fs.path.sep; + path_written += 1; + } const id = depth_buf[depth_buf_len]; - const name = dependencies[trees[id].dependency_id].name.slice(buf); + const name = trees[id].folderName(dependencies, buf); @memcpy(path_buf[path_written..][0..name.len], name); path_written += name.len; - @memcpy(path_buf[path_written..][0.."/node_modules".len], std.fs.path.sep_str ++ "node_modules"); - path_written += "/node_modules".len; + if (comptime path_style == .node_modules) { + @memcpy(path_buf[path_written..][0.."/node_modules".len], std.fs.path.sep_str ++ "node_modules"); + path_written += "/node_modules".len; + } } } path_buf[path_written] = 0; @@ -577,7 +741,6 @@ pub const Tree = struct { try next.hoistDependency( true, pid, - dep_id, &dependency, dependency_lists, trees, @@ -613,7 +776,6 @@ pub const Tree = struct { this: *Tree, comptime as_defined: bool, package_id: PackageID, - dependency_id: DependencyID, dependency: *const Dependency, dependency_lists: []Lockfile.DependencyIDList, trees: []Tree, @@ -681,7 +843,6 @@ pub const Tree = struct { const id = trees[this.parent].hoistDependency( false, package_id, - dependency_id, dependency, dependency_lists, trees, @@ -695,7 +856,7 @@ pub const Tree = struct { } }; -/// This conditonally clones the lockfile with root packages marked as non-resolved +/// This conditionally clones the lockfile with root packages marked as non-resolved /// that do not satisfy `Features`. The package may still end up installed even /// if it was e.g. in "devDependencies" and its a production install. In that case, /// it would be installed because another dependency or transient dependency needed it. 
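The `IteratorPathStyle` split above is easiest to see as concrete output. A minimal
standalone sketch (not Bun's code; `jquery` and `zod` are made-up package names) of
the two strings the iterator produces for the same hoisted tree:

    const std = @import("std");

    pub fn main() !void {
        const sep = std.fs.path.sep_str;
        var buf: [256]u8 = undefined;

        // .node_modules style: every hop goes through a node_modules
        // folder, joined with the platform's path separator.
        const nm_style = try std.fmt.bufPrint(
            &buf,
            "node_modules{s}jquery{s}node_modules{s}zod",
            .{ sep, sep, sep },
        );
        std.debug.print("{s}\n", .{nm_style});

        // .pkg_path style: package names only, always posix separators
        // (presumably the form used as keys in the text lockfile).
        std.debug.print("jquery/zod\n", .{});
    }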
@@ -758,8 +919,8 @@ fn preprocessUpdateRequests(old: *Lockfile, manager: *PackageManager, updates: [ if (old_resolution > old.packages.len) continue; const res = resolutions_of_yore[old_resolution]; const len = switch (exact_versions) { - false => std.fmt.count("^{}", .{res.value.npm.fmt(old.buffers.string_bytes.items)}), - true => std.fmt.count("{}", .{res.value.npm.fmt(old.buffers.string_bytes.items)}), + false => std.fmt.count("^{}", .{res.value.npm.version.fmt(old.buffers.string_bytes.items)}), + true => std.fmt.count("{}", .{res.value.npm.version.fmt(old.buffers.string_bytes.items)}), }; if (len >= String.max_inline_len) { string_builder.cap += len; @@ -789,8 +950,8 @@ fn preprocessUpdateRequests(old: *Lockfile, manager: *PackageManager, updates: [ if (old_resolution > old.packages.len) continue; const res = resolutions_of_yore[old_resolution]; const buf = switch (exact_versions) { - false => std.fmt.bufPrint(&temp_buf, "^{}", .{res.value.npm.fmt(old.buffers.string_bytes.items)}) catch break, - true => std.fmt.bufPrint(&temp_buf, "{}", .{res.value.npm.fmt(old.buffers.string_bytes.items)}) catch break, + false => std.fmt.bufPrint(&temp_buf, "^{}", .{res.value.npm.version.fmt(old.buffers.string_bytes.items)}) catch break, + true => std.fmt.bufPrint(&temp_buf, "{}", .{res.value.npm.version.fmt(old.buffers.string_bytes.items)}) catch break, }; const external_version = string_builder.append(ExternalString, buf); const sliced = external_version.value.sliced(old.buffers.string_bytes.items); @@ -860,6 +1021,7 @@ pub fn getWorkspacePkgIfWorkspaceDep(this: *const Lockfile, id: DependencyID) Pa } /// Does this tree id belong to a workspace (including workspace root)? +/// TODO(dylan-conway) fix! pub fn isWorkspaceTreeId(this: *const Lockfile, id: Tree.Id) bool { return id == 0 or this.buffers.dependencies.items[this.buffers.trees.items[id].dependency_id].behavior.isWorkspaceOnly(); } @@ -1235,7 +1397,7 @@ pub const Printer = struct { var lockfile = try allocator.create(Lockfile); - const load_from_disk = lockfile.loadFromDisk(null, allocator, log, lockfile_path, false); + const load_from_disk = lockfile.loadFromCwd(null, allocator, log, false); switch (load_from_disk) { .err => |cause| { switch (cause.step) { @@ -1992,7 +2154,7 @@ pub fn verifyData(this: *const Lockfile) !void { } } -pub fn saveToDisk(this: *Lockfile, filename: stringZ, verbose_log: bool) void { +pub fn saveToDisk(this: *Lockfile, save_format: LoadResult.LockfileFormat, verbose_log: bool) void { if (comptime Environment.allow_assert) { this.verifyData() catch |err| { Output.prettyErrorln("error: failed to verify lockfile: {s}", .{@errorName(err)}); @@ -2001,10 +2163,16 @@ pub fn saveToDisk(this: *Lockfile, filename: stringZ, verbose_log: bool) void { assert(FileSystem.instance_loaded); } - var bytes = std.ArrayList(u8).init(bun.default_allocator); - defer bytes.deinit(); + const timer = std.time.Timer.start() catch unreachable; + const bytes = if (save_format == .text) + TextLockfile.Stringifier.saveFromBinary(bun.default_allocator, this) catch |err| { + switch (err) { + error.OutOfMemory => bun.outOfMemory(), + } + } + else bytes: { + var bytes = std.ArrayList(u8).init(bun.default_allocator); - { var total_size: usize = 0; var end_pos: usize = 0; Lockfile.Serializer.save(this, verbose_log, &bytes, &total_size, &end_pos) catch |err| { @@ -2013,12 +2181,19 @@ pub fn saveToDisk(this: *Lockfile, filename: stringZ, verbose_log: bool) void { }; if (bytes.items.len >= end_pos) bytes.items[end_pos..][0..@sizeOf(usize)].* = 
@bitCast(total_size); - } + break :bytes bytes.items; + }; + defer bun.default_allocator.free(bytes); + _ = timer; + // std.debug.print("time to write {s}: {}\n", .{ @tagName(save_format), bun.fmt.fmtDuration(timer.read()) }); var tmpname_buf: [512]u8 = undefined; var base64_bytes: [8]u8 = undefined; bun.rand(&base64_bytes); - const tmpname = std.fmt.bufPrintZ(&tmpname_buf, ".lockb-{s}.tmp", .{bun.fmt.fmtSliceHexLower(&base64_bytes)}) catch unreachable; + const tmpname = if (save_format == .text) + std.fmt.bufPrintZ(&tmpname_buf, ".lock-{s}.tmp", .{bun.fmt.fmtSliceHexLower(&base64_bytes)}) catch unreachable + else + std.fmt.bufPrintZ(&tmpname_buf, ".lockb-{s}.tmp", .{bun.fmt.fmtSliceHexLower(&base64_bytes)}) catch unreachable; const file = switch (File.openat(std.fs.cwd(), tmpname, bun.O.CREAT | bun.O.WRONLY, 0o777)) { .err => |err| { @@ -2028,7 +2203,7 @@ pub fn saveToDisk(this: *Lockfile, filename: stringZ, verbose_log: bool) void { .result => |f| f, }; - switch (file.writeAll(bytes.items)) { + switch (file.writeAll(bytes)) { .err => |e| { file.close(); _ = bun.sys.unlink(tmpname); @@ -2051,7 +2226,7 @@ pub fn saveToDisk(this: *Lockfile, filename: stringZ, verbose_log: bool) void { } } - file.closeAndMoveTo(tmpname, filename) catch |err| { + file.closeAndMoveTo(tmpname, save_format.filename()) catch |err| { bun.handleErrorReturnTrace(err, @errorReturnTrace()); // note: file is already closed here. @@ -2121,7 +2296,7 @@ pub fn getPackageID( const buf = this.buffers.string_bytes.items; switch (entry) { - .PackageID => |id| { + .id => |id| { if (comptime Environment.allow_assert) assert(id < resolutions.len); if (resolutions[id].eql(resolution, buf, buf)) { @@ -2132,7 +2307,7 @@ pub fn getPackageID( if (npm_version.?.satisfies(resolutions[id].value.npm.version, buf, buf)) return id; } }, - .PackageIDMultiple => |ids| { + .ids => |ids| { for (ids.items) |id| { if (comptime Environment.allow_assert) assert(id < resolutions.len); @@ -2150,14 +2325,79 @@ pub fn getPackageID( return null; } -pub fn getOrPutID(this: *Lockfile, id: PackageID, name_hash: PackageNameHash) !void { +/// Appends `pkg` to `this.packages` if a duplicate isn't found +pub fn appendPackageDedupe(this: *Lockfile, pkg: *Package, buf: string) OOM!PackageID { + const entry = try this.package_index.getOrPut(pkg.name_hash); + + if (!entry.found_existing) { + const new_id: PackageID = @intCast(this.packages.len); + pkg.meta.id = new_id; + try this.packages.append(this.allocator, pkg.*); + entry.value_ptr.* = .{ .id = new_id }; + return new_id; + } + + const resolutions = this.packages.items(.resolution); + + return switch (entry.value_ptr.*) { + .id => |existing_id| { + if (pkg.resolution.eql(&resolutions[existing_id], buf, buf)) { + pkg.meta.id = existing_id; + return existing_id; + } + + const new_id: PackageID = @intCast(this.packages.len); + pkg.meta.id = new_id; + try this.packages.append(this.allocator, pkg.*); + + var ids = try PackageIDList.initCapacity(this.allocator, 8); + ids.items.len = 2; + + ids.items[0..2].* = if (pkg.resolution.order(&resolutions[existing_id], buf, buf) == .gt) + .{ new_id, existing_id } + else + .{ existing_id, new_id }; + + entry.value_ptr.* = .{ + .ids = ids, + }; + + return new_id; + }, + .ids => |*existing_ids| { + for (existing_ids.items) |existing_id| { + if (pkg.resolution.eql(&resolutions[existing_id], buf, buf)) { + pkg.meta.id = existing_id; + return existing_id; + } + } + + const new_id: PackageID = @intCast(this.packages.len); + pkg.meta.id = new_id; + try 
this.packages.append(this.allocator, pkg.*); + + for (existing_ids.items, 0..) |existing_id, i| { + if (pkg.resolution.order(&resolutions[existing_id], buf, buf) == .gt) { + try existing_ids.insert(this.allocator, i, new_id); + return new_id; + } + } + + try existing_ids.append(this.allocator, new_id); + + return new_id; + }, + }; +} + +pub fn getOrPutID(this: *Lockfile, id: PackageID, name_hash: PackageNameHash) OOM!void { const gpe = try this.package_index.getOrPut(name_hash); if (gpe.found_existing) { const index: *PackageIndex.Entry = gpe.value_ptr; switch (index.*) { - .PackageID => |existing_id| { + .id => |existing_id| { var ids = try PackageIDList.initCapacity(this.allocator, 8); ids.items.len = 2; @@ -2170,10 +2410,10 @@ pub fn getOrPutID(this: *Lockfile, id: PackageID, name_hash: PackageNameHash) !v .{ existing_id, id }; index.* = .{ - .PackageIDMultiple = ids, + .ids = ids, }; }, - .PackageIDMultiple => |*existing_ids| { + .ids => |*existing_ids| { const resolutions = this.packages.items(.resolution); const buf = this.buffers.string_bytes.items; @@ -2189,16 +2429,16 @@ pub fn getOrPutID(this: *Lockfile, id: PackageID, name_hash: PackageNameHash) !v }, } } else { - gpe.value_ptr.* = .{ .PackageID = id }; + gpe.value_ptr.* = .{ .id = id }; } } -pub fn appendPackage(this: *Lockfile, package_: Lockfile.Package) !Lockfile.Package { +pub fn appendPackage(this: *Lockfile, package_: Lockfile.Package) OOM!Lockfile.Package { const id: PackageID = @truncate(this.packages.len); return try appendPackageWithID(this, package_, id); } -fn appendPackageWithID(this: *Lockfile, package_: Lockfile.Package, id: PackageID) !Lockfile.Package { +fn appendPackageWithID(this: *Lockfile, package_: Lockfile.Package, id: PackageID) OOM!Lockfile.Package { defer { if (comptime Environment.allow_assert) { assert(this.getPackageID(package_.name_hash, null, &package_.resolution) != null); @@ -2220,6 +2460,13 @@ pub inline fn stringBuilder(this: *Lockfile) Lockfile.StringBuilder { }; } +pub fn stringBuf(this: *Lockfile) String.Buf { + return .{ + .bytes = this.buffers.string_bytes.toManaged(this.allocator), + .pool = this.string_pool, + }; +} + pub const Scratch = struct { pub const DuplicateCheckerMap = std.HashMap(PackageNameHash, logger.Loc, IdentityContext(PackageNameHash), 80); pub const DependencyQueue = std.fifo.LinearFifo(DependencySlice, .Dynamic); @@ -2369,12 +2616,12 @@ pub const StringBuilder = struct { pub const PackageIndex = struct { pub const Map = std.HashMap(PackageNameHash, PackageIndex.Entry, IdentityContext(PackageNameHash), 80); pub const Entry = union(Tag) { - PackageID: PackageID, - PackageIDMultiple: PackageIDList, + id: PackageID, + ids: PackageIDList, pub const Tag = enum(u8) { - PackageID = 0, - PackageIDMultiple = 1, + id = 0, + ids = 1, }; }; }; @@ -2389,7 +2636,7 @@ pub const OverrideMap = struct { map: std.ArrayHashMapUnmanaged(PackageNameHash, Dependency, ArrayIdentityContext.U64, false) = .{}, /// In the future, this `get` function should handle multi-level resolutions. This is difficult right - /// now because given a Dependency ID, there is no fast way to trace it to it's package. + /// now because given a Dependency ID, there is no fast way to trace it to its package. 
/// /// A potential approach is to add another buffer to the lockfile that maps Dependency ID to Package ID, /// and from there `OverrideMap.map` can have a union as the value, where the union is between "override all" @@ -2918,7 +3165,7 @@ pub const Package = extern struct { } switch (resolution_tag) { - .git, .github, .gitlab, .root => { + .git, .github, .root => { const prepare_scripts = .{ "preprepare", "prepare", @@ -2988,7 +3235,7 @@ pub const Package = extern struct { .first_index = @intCast(first_index), .total = total, .cwd = allocator.dupeZ(u8, cwd) catch bun.outOfMemory(), - .package_name = package_name, + .package_name = lockfile.allocator.dupe(u8, package_name) catch bun.outOfMemory(), }; } @@ -3954,7 +4201,7 @@ pub const Package = extern struct { comptime features: Features, ) !void { initializeStore(); - const json = JSON.parsePackageJSONUTF8AlwaysDecode(&source, log, allocator) catch |err| { + const json = JSON.parsePackageJSONUTF8(&source, log, allocator) catch |err| { log.print(Output.errorWriter()) catch {}; Output.prettyErrorln("{s} parsing package.json in \"{s}\"", .{ @errorName(err), source.path.prettyDir() }); Global.crash(); @@ -6324,7 +6571,7 @@ pub const Serializer = struct { lockfile.scratch = Lockfile.Scratch.init(allocator); lockfile.package_index = PackageIndex.Map.initContext(allocator, .{}); - lockfile.string_pool = StringPool.initContext(allocator, .{}); + lockfile.string_pool = StringPool.init(allocator); try lockfile.package_index.ensureTotalCapacity(@as(u32, @truncate(lockfile.packages.len))); if (!has_workspace_name_hashes) { @@ -6471,7 +6718,7 @@ pub fn resolve(this: *Lockfile, package_name: []const u8, version: Dependency.Ve switch (version.tag) { .npm => switch (entry) { - .PackageID => |id| { + .id => |id| { const resolutions = this.packages.items(.resolution); if (comptime Environment.allow_assert) assert(id < resolutions.len); @@ -6479,7 +6726,7 @@ pub fn resolve(this: *Lockfile, package_name: []const u8, version: Dependency.Ve return id; } }, - .PackageIDMultiple => |ids| { + .ids => |ids| { const resolutions = this.packages.items(.resolution); for (ids.items) |id| { @@ -6569,7 +6816,6 @@ pub fn hasTrustedDependency(this: *Lockfile, name: []const u8) bool { pub fn jsonStringifyDependency(this: *const Lockfile, w: anytype, dep_id: DependencyID, dep: Dependency, res: PackageID) !void { const sb = this.buffers.string_bytes.items; - var buf: [2048]u8 = undefined; try w.beginObject(); defer w.endObject() catch {}; @@ -6598,7 +6844,7 @@ pub fn jsonStringifyDependency(this: *const Lockfile, w: anytype, dep_id: Depend try w.write(info.name.slice(sb)); try w.objectField("version"); - try w.write(try std.fmt.bufPrint(&buf, "{}", .{info.version.fmt(sb)})); + try w.print("\"{}\"", .{info.version.fmt(sb)}); }, .dist_tag => { try w.beginObject(); @@ -6692,7 +6938,6 @@ pub fn jsonStringifyDependency(this: *const Lockfile, w: anytype, dep_id: Depend } pub fn jsonStringify(this: *const Lockfile, w: anytype) !void { - var buf: [2048]u8 = undefined; const sb = this.buffers.string_bytes.items; try w.beginObject(); defer w.endObject() catch {}; @@ -6711,14 +6956,14 @@ pub fn jsonStringify(this: *const Lockfile, w: anytype) !void { while (iter.next()) |it| { const entry: PackageIndex.Entry = it.value_ptr.*; const first_id = switch (entry) { - .PackageID => |id| id, - .PackageIDMultiple => |ids| ids.items[0], + .id => |id| id, + .ids => |ids| ids.items[0], }; const name = this.packages.items(.name)[first_id].slice(sb); try w.objectField(name); switch (entry) { - 
.PackageID => |id| try w.write(id), - .PackageIDMultiple => |ids| { + .id => |id| try w.write(id), + .ids => |ids| { try w.beginArray(); for (ids.items) |id| { try w.write(id); @@ -6736,7 +6981,7 @@ pub fn jsonStringify(this: *const Lockfile, w: anytype) !void { const dependencies = this.buffers.dependencies.items; const hoisted_deps = this.buffers.hoisted_dependencies.items; const resolutions = this.buffers.resolutions.items; - var depth_buf: Tree.Iterator.DepthBuf = undefined; + var depth_buf: Tree.DepthBuf = undefined; var path_buf: bun.PathBuffer = undefined; @memcpy(path_buf[0.."node_modules".len], "node_modules"); @@ -6754,11 +6999,11 @@ pub fn jsonStringify(this: *const Lockfile, w: anytype) !void { @intCast(tree_id), &path_buf, &depth_buf, + .node_modules, ); try w.objectField("path"); - const formatted = try std.fmt.bufPrint(&buf, "{}", .{bun.fmt.fmtPath(u8, relative_path, .{ .path_sep = .posix })}); - try w.write(formatted); + try w.print("\"{}\"", .{bun.fmt.fmtPath(u8, relative_path, .{ .path_sep = .posix })}); try w.objectField("depth"); try w.write(depth); @@ -6832,12 +7077,10 @@ pub fn jsonStringify(this: *const Lockfile, w: anytype) !void { try w.write(@tagName(res.tag)); try w.objectField("value"); - const formatted = try std.fmt.bufPrint(&buf, "{s}", .{res.fmt(sb, .posix)}); - try w.write(formatted); + try w.print("\"{s}\"", .{res.fmt(sb, .posix)}); try w.objectField("resolved"); - const formatted_url = try std.fmt.bufPrint(&buf, "{}", .{res.fmtURL(sb)}); - try w.write(formatted_url); + try w.print("\"{}\"", .{res.fmtURL(sb)}); } try w.objectField("dependencies"); @@ -6876,7 +7119,7 @@ pub fn jsonStringify(this: *const Lockfile, w: anytype) !void { try w.objectField("integrity"); if (pkg.meta.integrity.tag != .unknown) { - try w.write(try std.fmt.bufPrint(&buf, "{}", .{pkg.meta.integrity})); + try w.print("\"{}\"", .{pkg.meta.integrity}); } else { try w.write(null); } @@ -6940,13 +7183,15 @@ pub fn jsonStringify(this: *const Lockfile, w: anytype) !void { } } + var buf: [100]u8 = undefined; + try w.objectField("workspace_paths"); { try w.beginObject(); defer w.endObject() catch {}; for (this.workspace_paths.keys(), this.workspace_paths.values()) |k, v| { - try w.objectField(try std.fmt.bufPrint(&buf, "{d}", .{k})); + try w.objectField(std.fmt.bufPrintIntToSlice(&buf, k, 10, .lower, .{})); try w.write(v.slice(sb)); } } @@ -6956,8 +7201,8 @@ pub fn jsonStringify(this: *const Lockfile, w: anytype) !void { defer w.endObject() catch {}; for (this.workspace_versions.keys(), this.workspace_versions.values()) |k, v| { - try w.objectField(try std.fmt.bufPrint(&buf, "{d}", .{k})); - try w.write(try std.fmt.bufPrint(&buf, "{}", .{v.fmt(sb)})); + try w.objectField(std.fmt.bufPrintIntToSlice(&buf, k, 10, .lower, .{})); + try w.print("\"{}\"", .{v.fmt(sb)}); } } } diff --git a/src/install/migration.zig b/src/install/migration.zig index 87dfc373cab4cc..ca04d48732f4b1 100644 --- a/src/install/migration.zig +++ b/src/install/migration.zig @@ -10,6 +10,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const logger = bun.logger; +const File = bun.sys.File; const Install = @import("./install.zig"); const Resolution = @import("./resolution.zig").Resolution; @@ -25,7 +26,7 @@ const ExternalString = Semver.ExternalString; const stringHash = String.Builder.stringHash; const Lockfile = @import("./lockfile.zig"); -const LoadFromDiskResult = Lockfile.LoadFromDiskResult; +const LoadResult = Lockfile.LoadResult; const JSAst = bun.JSAst; const Expr = 
JSAst.Expr;
 
@@ -38,32 +39,21 @@ const debug = Output.scoped(.migrate, false);
 
 pub fn detectAndLoadOtherLockfile(
     this: *Lockfile,
+    dir: bun.FD,
     manager: *Install.PackageManager,
     allocator: Allocator,
     log: *logger.Log,
-    bun_lockfile_path: stringZ,
-) LoadFromDiskResult {
-    const dirname = bun_lockfile_path[0 .. strings.lastIndexOfChar(bun_lockfile_path, '/') orelse 0];
+) LoadResult {
     // check for package-lock.json, yarn.lock, etc...
     // if it exists, do an in-memory migration
-    var buf: bun.PathBuffer = undefined;
-    @memcpy(buf[0..dirname.len], dirname);
 
     npm: {
-        const npm_lockfile_name = "package-lock.json";
-        @memcpy(buf[dirname.len .. dirname.len + npm_lockfile_name.len], npm_lockfile_name);
-        buf[dirname.len + npm_lockfile_name.len] = 0;
         var timer = std.time.Timer.start() catch unreachable;
-        const lockfile = bun.sys.openat(
-            bun.FD.cwd(),
-            buf[0 .. dirname.len + npm_lockfile_name.len :0],
-            bun.O.RDONLY,
-            0,
-        ).unwrap() catch break :npm;
-        defer _ = bun.sys.close(lockfile);
+        const lockfile = File.openat(dir, "package-lock.json", bun.O.RDONLY, 0).unwrap() catch break :npm;
+        defer lockfile.close();
 
         var lockfile_path_buf: bun.PathBuffer = undefined;
-        const lockfile_path = bun.getFdPathZ(lockfile, &lockfile_path_buf) catch break :npm;
-        const data = bun.sys.File.from(lockfile).readToEnd(allocator).unwrap() catch break :npm;
+        const lockfile_path = bun.getFdPathZ(lockfile.handle, &lockfile_path_buf) catch break :npm;
+        const data = lockfile.readToEnd(allocator).unwrap() catch break :npm;
         const migrate_result = migrateNPMLockfile(this, manager, allocator, log, data, lockfile_path) catch |err| {
             if (err == error.NPMLockfileVersionMismatch) {
                 Output.prettyErrorln(
@@ -81,7 +71,12 @@ pub fn detectAndLoadOtherLockfile(
                 Output.prettyErrorln("Invalid NPM package-lock.json\nIn a release build, this would ignore and do a fresh install.\nAborting", .{});
                 Global.exit(1);
             }
-            return LoadFromDiskResult{ .err = .{ .step = .migrating, .value = err } };
+            return LoadResult{ .err = .{
+                .step = .migrating,
+                .value = err,
+                .lockfile_path = "package-lock.json",
+                .format = .binary,
+            } };
         };
 
         if (migrate_result == .ok) {
@@ -94,7 +89,7 @@ pub fn detectAndLoadOtherLockfile(
         return migrate_result;
     }
 
-    return LoadFromDiskResult{ .not_found = {} };
+    return LoadResult{ .not_found = {} };
 }
 
 const ResolvedURLsMap = bun.StringHashMapUnmanaged(string);
@@ -130,7 +125,7 @@ pub fn migrateNPMLockfile(
     log: *logger.Log,
     data: string,
     abs_path: string,
-) !LoadFromDiskResult {
+) !LoadResult {
     debug("begin lockfile migration", .{});
 
     this.initEmpty(allocator);
@@ -553,7 +548,7 @@ pub fn migrateNPMLockfile(
                 } else .false,
 
                 .integrity = if (pkg.get("integrity")) |integrity|
-                    try Integrity.parse(
+                    Integrity.parse(
                         integrity.asString(this.allocator) orelse
                             return error.InvalidNPMLockfile,
                     )
@@ -1068,7 +1063,7 @@ pub fn migrateNPMLockfile(
     // }
 
    // This is definitely a memory leak, but it's fine because there is no install api, so this can only be leaked once per process.
-    // This operation is neccecary because callers of `loadFromDisk` assume the data is written into the passed `this`.
+    // This operation is necessary because callers of `loadFromCwd` assume the data is written into the passed `this`.
    // You'll find that not cleaning the lockfile will cause `bun install` to not actually install anything since it doesnt have any hoisted trees.
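    // Rough shape of that contract, a sketch reusing the `loadFromCwd`
    // call form seen earlier in this patch (not an actual call site):
    //
    //     var lockfile: Lockfile = undefined;
    //     switch (lockfile.loadFromCwd(null, allocator, log, false)) {
    //         // on `.ok`, `lockfile` itself already holds the migrated,
    //         // cleaned data, including the hoisted trees.
    //         .ok => |ok| assert(ok.lockfile == &lockfile),
    //         .err, .not_found => {},
    //     }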
this.* = (try this.cleanWithLogger(manager, &.{}, log, false, .silent)).*; @@ -1084,11 +1079,13 @@ pub fn migrateNPMLockfile( this.meta_hash = try this.generateMetaHash(false, this.packages.len); - return LoadFromDiskResult{ + return LoadResult{ .ok = .{ .lockfile = this, .was_migrated = true, + .loaded_from_binary_lockfile = false, .serializer_result = .{}, + .format = .text, }, }; } diff --git a/src/install/npm.zig b/src/install/npm.zig index 4c5210f86d9440..3eeb230c780ee9 100644 --- a/src/install/npm.zig +++ b/src/install/npm.zig @@ -530,7 +530,7 @@ const ExternVersionMap = extern struct { } }; -fn Negatable(comptime T: type) type { +pub fn Negatable(comptime T: type) type { return struct { added: T = T.none, removed: T = T.none, @@ -578,6 +578,11 @@ fn Negatable(comptime T: type) type { return; } + if (strings.eqlComptime(str, "none")) { + this.had_unrecognized_values = true; + return; + } + const is_not = str[0] == '!'; const offset: usize = @intFromBool(is_not); @@ -593,6 +598,74 @@ fn Negatable(comptime T: type) type { this.* = .{ .added = @enumFromInt(@intFromEnum(this.added) | field), .removed = this.removed }; } } + + pub fn fromJson(allocator: std.mem.Allocator, expr: JSON.Expr) OOM!T { + var this = T.none.negatable(); + switch (expr.data) { + .e_array => |arr| { + const items = arr.slice(); + if (items.len > 0) { + for (items) |item| { + if (item.asString(allocator)) |value| { + this.apply(value); + } + } + } + }, + .e_string => |str| { + this.apply(str.data); + }, + else => {}, + } + + return this.combine(); + } + + /// writes to a one line json array with a trailing comma and space, or writes a string + pub fn toJson(field: T, writer: anytype) @TypeOf(writer).Error!void { + if (field == .none) { + // [] means everything, so unrecognized value + try writer.writeAll( + \\"none" + ); + return; + } + + const kvs = T.NameMap.kvs; + var removed: u8 = 0; + for (kvs) |kv| { + if (!field.has(kv.value)) { + removed += 1; + } + } + const included = kvs.len - removed; + const print_included = removed > kvs.len - removed; + + const one = (print_included and included == 1) or (!print_included and removed == 1); + + if (!one) { + try writer.writeAll("[ "); + } + + for (kvs) |kv| { + const has = field.has(kv.value); + if (has and print_included) { + try writer.print( + \\"{s}" + , .{kv.key}); + if (one) return; + try writer.writeAll(", "); + } else if (!has and !print_included) { + try writer.print( + \\"!{s}" + , .{kv.key}); + if (one) return; + try writer.writeAll(", "); + } + } + + try writer.writeByte(']'); + } }; } @@ -1759,69 +1832,15 @@ pub const PackageManifest = struct { var package_version: PackageVersion = empty_version; if (prop.value.?.asProperty("cpu")) |cpu_q| { - var cpu = Architecture.none.negatable(); - - switch (cpu_q.expr.data) { - .e_array => |arr| { - const items = arr.slice(); - if (items.len > 0) { - for (items) |item| { - if (item.asString(allocator)) |cpu_str_| { - cpu.apply(cpu_str_); - } - } - } - }, - .e_string => |stri| { - cpu.apply(stri.data); - }, - else => {}, - } - package_version.cpu = cpu.combine(); + package_version.cpu = try Negatable(Architecture).fromJson(allocator, cpu_q.expr); } if (prop.value.?.asProperty("os")) |os_q| { - var os = OperatingSystem.none.negatable(); - - switch (os_q.expr.data) { - .e_array => |arr| { - const items = arr.slice(); - if (items.len > 0) { - for (items) |item| { - if (item.asString(allocator)) |cpu_str_| { - os.apply(cpu_str_); - } - } - } - }, - .e_string => |stri| { - os.apply(stri.data); - }, - else => {}, - } - 
package_version.os = os.combine(); + package_version.os = try Negatable(OperatingSystem).fromJson(allocator, os_q.expr); } if (prop.value.?.asProperty("libc")) |libc| { - var libc_ = Libc.none.negatable(); - - switch (libc.expr.data) { - .e_array => |arr| { - const items = arr.slice(); - if (items.len > 0) { - for (items) |item| { - if (item.asString(allocator)) |libc_str_| { - libc_.apply(libc_str_); - } - } - } - }, - .e_string => |stri| { - libc_.apply(stri.data); - }, - else => {}, - } - package_version.libc = libc_.combine(); + package_version.libc = try Negatable(Libc).fromJson(allocator, libc.expr); } if (prop.value.?.asProperty("hasInstallScript")) |has_install_script| { @@ -1973,7 +1992,7 @@ pub const PackageManifest = struct { if (dist.expr.asProperty("integrity")) |shasum| { if (shasum.expr.asString(allocator)) |shasum_str| { - package_version.integrity = Integrity.parse(shasum_str) catch Integrity{}; + package_version.integrity = Integrity.parse(shasum_str); if (package_version.integrity.tag.isSupported()) break :integrity; } } diff --git a/src/install/patch_install.zig b/src/install/patch_install.zig index 3005a0c548e41c..cb8b932aa12960 100644 --- a/src/install/patch_install.zig +++ b/src/install/patch_install.zig @@ -9,6 +9,7 @@ const Environment = bun.Environment; const strings = bun.strings; const MutableString = bun.MutableString; const Progress = bun.Progress; +const String = bun.Semver.String; const logger = bun.logger; const Loc = logger.Loc; @@ -80,7 +81,7 @@ pub const PatchTask = struct { name_and_version_hash: u64, resolution: *const Resolution, patchfilepath: []const u8, - pkgname: []const u8, + pkgname: String, cache_dir: std.fs.Dir, cache_dir_subpath: stringZ, @@ -103,7 +104,6 @@ pub const PatchTask = struct { .apply => { this.manager.allocator.free(this.callback.apply.patchfilepath); this.manager.allocator.free(this.callback.apply.cache_dir_subpath); - this.manager.allocator.free(this.callback.apply.pkgname); if (this.callback.apply.install_context) |ictx| ictx.path.deinit(); this.callback.apply.logger.deinit(); }, @@ -564,7 +564,7 @@ pub const PatchTask = struct { .name_and_version_hash = name_and_version_hash, .cache_dir = stuff.cache_dir, .patchfilepath = patchfilepath, - .pkgname = pkg_manager.allocator.dupe(u8, pkg_name.slice(pkg_manager.lockfile.buffers.string_bytes.items)) catch bun.outOfMemory(), + .pkgname = pkg_name, .logger = logger.Log.init(pkg_manager.allocator), // need to dupe this as it's calculated using // `PackageManager.cached_package_folder_name_buf` which may be diff --git a/src/install/repository.zig b/src/install/repository.zig index 848731b7cbd6ee..8e016bf9656589 100644 --- a/src/install/repository.zig +++ b/src/install/repository.zig @@ -17,6 +17,7 @@ const strings = @import("../string_immutable.zig"); const GitSHA = String; const Path = bun.path; const File = bun.sys.File; +const OOM = bun.OOM; threadlocal var final_path_buf: bun.PathBuffer = undefined; threadlocal var ssh_path_buf: bun.PathBuffer = undefined; @@ -181,6 +182,51 @@ pub const Repository = extern struct { .{ "gitlab", ".com" }, }); + pub fn parseAppendGit(input: string, buf: *String.Buf) OOM!Repository { + var remain = input; + if (strings.hasPrefixComptime(remain, "git+")) { + remain = remain["git+".len..]; + } + if (strings.lastIndexOfChar(remain, '#')) |hash| { + return .{ + .repo = try buf.append(remain[0..hash]), + .committish = try buf.append(remain[hash + 1 ..]), + }; + } + return .{ + .repo = try buf.append(remain), + }; + } + + pub fn parseAppendGithub(input: 
string, buf: *String.Buf) OOM!Repository { + var remain = input; + if (strings.hasPrefixComptime(remain, "github:")) { + remain = remain["github:".len..]; + } + var hash: usize = 0; + var slash: usize = 0; + for (remain, 0..) |c, i| { + switch (c) { + '/' => slash = i, + '#' => hash = i, + else => {}, + } + } + + const repo = if (hash == 0) remain[slash + 1 ..] else remain[slash + 1 .. hash]; + + var result: Repository = .{ + .owner = try buf.append(remain[0..slash]), + .repo = try buf.append(repo), + }; + + if (hash != 0) { + result.committish = try buf.append(remain[hash + 1 ..]); + } + + return result; + } + pub fn createDependencyNameFromVersionLiteral( allocator: std.mem.Allocator, repository: *const Repository, @@ -260,6 +306,14 @@ pub const Repository = extern struct { return try formatter.format(layout, opts, writer); } + pub fn fmt(this: *const Repository, label: string, buf: []const u8) Formatter { + return .{ + .repository = this, + .buf = buf, + .label = label, + }; + } + pub const Formatter = struct { label: []const u8 = "", buf: []const u8, diff --git a/src/install/resolution.zig b/src/install/resolution.zig index 2fecc21aff94d2..d51829b1d40dbd 100644 --- a/src/install/resolution.zig +++ b/src/install/resolution.zig @@ -10,6 +10,9 @@ const strings = @import("../string_immutable.zig"); const VersionedURL = @import("./versioned_url.zig").VersionedURL; const bun = @import("root").bun; const Path = bun.path; +const JSON = bun.JSON; +const OOM = bun.OOM; +const Dependency = bun.install.Dependency; pub const Resolution = extern struct { tag: Tag = .uninitialized, @@ -32,6 +35,74 @@ pub const Resolution = extern struct { return this.tag.canEnqueueInstallTask(); } + const FromTextLockfileError = OOM || error{ + UnexpectedResolution, + InvalidSemver, + }; + + pub fn fromTextLockfile(res_str: string, string_buf: *String.Buf) FromTextLockfileError!Resolution { + if (strings.hasPrefixComptime(res_str, "root:")) { + return Resolution.init(.{ .root = {} }); + } + + if (strings.withoutPrefixIfPossibleComptime(res_str, "link:")) |link| { + return Resolution.init(.{ .symlink = try string_buf.append(link) }); + } + + if (strings.withoutPrefixIfPossibleComptime(res_str, "workspace:")) |workspace| { + return Resolution.init(.{ .workspace = try string_buf.append(workspace) }); + } + + if (strings.withoutPrefixIfPossibleComptime(res_str, "file:")) |folder| { + return Resolution.init(.{ .folder = try string_buf.append(folder) }); + } + + return switch (Dependency.Version.Tag.infer(res_str)) { + .git => Resolution.init(.{ .git = try Repository.parseAppendGit(res_str, string_buf) }), + .github => Resolution.init(.{ .github = try Repository.parseAppendGithub(res_str, string_buf) }), + .tarball => { + if (Dependency.isRemoteTarball(res_str)) { + return Resolution.init(.{ .remote_tarball = try string_buf.append(res_str) }); + } + + return Resolution.init(.{ .local_tarball = try string_buf.append(res_str) }); + }, + .npm => { + const version_literal = try string_buf.append(res_str); + const parsed = Semver.Version.parse(version_literal.sliced(string_buf.bytes.items)); + + if (!parsed.valid) { + return error.UnexpectedResolution; + } + + if (parsed.version.major == null or parsed.version.minor == null or parsed.version.patch == null) { + return error.UnexpectedResolution; + } + + return .{ + .tag = .npm, + .value = .{ + .npm = .{ + .version = parsed.version.min(), + + // will fill this later + .url = .{}, + }, + }, + }; + }, + + // covered above + .workspace => error.UnexpectedResolution, + .symlink => 
error.UnexpectedResolution, + .folder => error.UnexpectedResolution, + + // should not happen + .dist_tag => error.UnexpectedResolution, + .uninitialized => error.UnexpectedResolution, + }; + } + pub fn order( lhs: *const Resolution, rhs: *const Resolution, @@ -52,7 +123,6 @@ pub const Resolution = extern struct { .single_file_module => lhs.value.single_file_module.order(&rhs.value.single_file_module, lhs_buf, rhs_buf), .git => lhs.value.git.order(&rhs.value.git, lhs_buf, rhs_buf), .github => lhs.value.github.order(&rhs.value.github, lhs_buf, rhs_buf), - .gitlab => lhs.value.gitlab.order(&rhs.value.gitlab, lhs_buf, rhs_buf), else => .eq, }; } @@ -68,7 +138,6 @@ pub const Resolution = extern struct { .single_file_module => builder.count(this.value.single_file_module.slice(buf)), .git => this.value.git.count(buf, Builder, builder), .github => this.value.github.count(buf, Builder, builder), - .gitlab => this.value.gitlab.count(buf, Builder, builder), else => {}, } } @@ -102,9 +171,6 @@ pub const Resolution = extern struct { .github => Value.init(.{ .github = this.value.github.clone(buf, Builder, builder), }), - .gitlab => Value.init(.{ - .gitlab = this.value.gitlab.clone(buf, Builder, builder), - }), .root => Value.init(.{ .root = {} }), else => { std.debug.panic("Internal error: unexpected resolution tag: {}", .{this.tag}); @@ -180,11 +246,6 @@ pub const Resolution = extern struct { lhs_string_buf, rhs_string_buf, ), - .gitlab => lhs.value.gitlab.eql( - &rhs.value.gitlab, - lhs_string_buf, - rhs_string_buf, - ), else => unreachable, }; } @@ -204,7 +265,6 @@ pub const Resolution = extern struct { .remote_tarball => try writer.writeAll(value.remote_tarball.slice(formatter.buf)), .git => try value.git.formatAs("git+", formatter.buf, layout, opts, writer), .github => try value.github.formatAs("github:", formatter.buf, layout, opts, writer), - .gitlab => try value.gitlab.formatAs("gitlab:", formatter.buf, layout, opts, writer), .workspace => try std.fmt.format(writer, "workspace:{s}", .{value.workspace.slice(formatter.buf)}), .symlink => try std.fmt.format(writer, "link:{s}", .{value.symlink.slice(formatter.buf)}), .single_file_module => try std.fmt.format(writer, "module:{s}", .{value.single_file_module.slice(formatter.buf)}), @@ -228,7 +288,6 @@ pub const Resolution = extern struct { .remote_tarball => try writer.writeAll(value.remote_tarball.slice(buf)), .git => try value.git.formatAs("git+", buf, layout, opts, writer), .github => try value.github.formatAs("github:", buf, layout, opts, writer), - .gitlab => try value.gitlab.formatAs("gitlab:", buf, layout, opts, writer), .workspace => try std.fmt.format(writer, "workspace:{s}", .{bun.fmt.fmtPath(u8, value.workspace.slice(buf), .{ .path_sep = formatter.path_sep, })}), @@ -256,7 +315,6 @@ pub const Resolution = extern struct { .remote_tarball => try writer.writeAll(formatter.resolution.value.remote_tarball.slice(formatter.buf)), .git => try formatter.resolution.value.git.formatAs("git+", formatter.buf, layout, opts, writer), .github => try formatter.resolution.value.github.formatAs("github:", formatter.buf, layout, opts, writer), - .gitlab => try formatter.resolution.value.gitlab.formatAs("gitlab:", formatter.buf, layout, opts, writer), .workspace => try std.fmt.format(writer, "workspace:{s}", .{formatter.resolution.value.workspace.slice(formatter.buf)}), .symlink => try std.fmt.format(writer, "link:{s}", .{formatter.resolution.value.symlink.slice(formatter.buf)}), .single_file_module => try std.fmt.format(writer, "module:{s}", 
.{formatter.resolution.value.single_file_module.slice(formatter.buf)}), @@ -282,7 +340,6 @@ pub const Resolution = extern struct { git: Repository, github: Repository, - gitlab: Repository, workspace: String, @@ -306,7 +363,6 @@ pub const Resolution = extern struct { local_tarball = 8, github = 16, - gitlab = 24, git = 32, @@ -338,7 +394,7 @@ pub const Resolution = extern struct { _, pub fn isGit(this: Tag) bool { - return this == .git or this == .github or this == .gitlab; + return this == .git or this == .github; } pub fn canEnqueueInstallTask(this: Tag) bool { diff --git a/src/install/semver.zig b/src/install/semver.zig index fc90129b67d560..84c7fdba0d9cdb 100644 --- a/src/install/semver.zig +++ b/src/install/semver.zig @@ -12,6 +12,9 @@ const default_allocator = bun.default_allocator; const C = bun.C; const JSC = bun.JSC; const IdentityContext = @import("../identity_context.zig").IdentityContext; +const OOM = bun.OOM; +const TruncatedPackageNameHash = bun.install.TruncatedPackageNameHash; +const Lockfile = bun.install.Lockfile; /// String type that stores either an offset/length into an external buffer or a string inline directly pub const String = extern struct { @@ -35,6 +38,106 @@ pub const String = extern struct { return String.init(inlinable_buffer, inlinable_buffer); } + pub const Buf = struct { + bytes: std.ArrayList(u8), + pool: Builder.StringPool, + + pub fn init(allocator: std.mem.Allocator) Buf { + return .{ + .bytes = std.ArrayList(u8).init(allocator), + .pool = Builder.StringPool.init(allocator), + }; + } + + pub fn apply(this: *Buf, lockfile: *Lockfile) void { + lockfile.buffers.string_bytes = this.bytes.moveToUnmanaged(); + lockfile.string_pool = this.pool; + } + + pub fn append(this: *Buf, str: string) OOM!String { + if (canInline(str)) { + return String.initInline(str); + } + + const hash = Builder.stringHash(str); + const entry = try this.pool.getOrPut(hash); + if (entry.found_existing) { + return entry.value_ptr.*; + } + + // new entry + const new = try String.initAppend(&this.bytes, str); + entry.value_ptr.* = new; + return new; + } + + pub fn appendWithHash(this: *Buf, str: string, hash: u64) OOM!String { + if (canInline(str)) { + return initInline(str); + } + + const entry = try this.pool.getOrPut(hash); + if (entry.found_existing) { + return entry.value_ptr.*; + } + + // new entry + const new = try String.initAppend(&this.bytes, str); + entry.value_ptr.* = new; + return new; + } + + pub fn appendExternal(this: *Buf, str: string) OOM!ExternalString { + const hash = Builder.stringHash(str); + + if (canInline(str)) { + return .{ + .value = String.initInline(str), + .hash = hash, + }; + } + + const entry = try this.pool.getOrPut(hash); + if (entry.found_existing) { + return .{ + .value = entry.value_ptr.*, + .hash = hash, + }; + } + + const new = try String.initAppend(&this.bytes, str); + entry.value_ptr.* = new; + return .{ + .value = new, + .hash = hash, + }; + } + + pub fn appendExternalWithHash(this: *Buf, str: string, hash: u64) OOM!ExternalString { + if (canInline(str)) { + return .{ + .value = initInline(str), + .hash = hash, + }; + } + + const entry = try this.pool.getOrPut(hash); + if (entry.found_existing) { + return .{ + .value = entry.value_ptr.*, + .hash = hash, + }; + } + + const new = try String.initAppend(&this.bytes, str); + entry.value_ptr.* = new; + return .{ + .value = new, + .hash = hash, + }; + } + }; + pub const Tag = enum { small, big, @@ -187,6 +290,60 @@ pub const String = extern struct { }; } + pub fn initInline( + in: string, + ) String { 
+ bun.assertWithLocation(canInline(in), @src()); + return switch (in.len) { + 0 => .{}, + 1 => .{ .bytes = .{ in[0], 0, 0, 0, 0, 0, 0, 0 } }, + 2 => .{ .bytes = .{ in[0], in[1], 0, 0, 0, 0, 0, 0 } }, + 3 => .{ .bytes = .{ in[0], in[1], in[2], 0, 0, 0, 0, 0 } }, + 4 => .{ .bytes = .{ in[0], in[1], in[2], in[3], 0, 0, 0, 0 } }, + 5 => .{ .bytes = .{ in[0], in[1], in[2], in[3], in[4], 0, 0, 0 } }, + 6 => .{ .bytes = .{ in[0], in[1], in[2], in[3], in[4], in[5], 0, 0 } }, + 7 => .{ .bytes = .{ in[0], in[1], in[2], in[3], in[4], in[5], in[6], 0 } }, + 8 => .{ .bytes = .{ in[0], in[1], in[2], in[3], in[4], in[5], in[6], in[7] } }, + else => unreachable, + }; + } + + pub fn initAppendIfNeeded( + buf: *std.ArrayList(u8), + in: string, + ) OOM!String { + return switch (in.len) { + 0 => .{}, + 1 => .{ .bytes = .{ in[0], 0, 0, 0, 0, 0, 0, 0 } }, + 2 => .{ .bytes = .{ in[0], in[1], 0, 0, 0, 0, 0, 0 } }, + 3 => .{ .bytes = .{ in[0], in[1], in[2], 0, 0, 0, 0, 0 } }, + 4 => .{ .bytes = .{ in[0], in[1], in[2], in[3], 0, 0, 0, 0 } }, + 5 => .{ .bytes = .{ in[0], in[1], in[2], in[3], in[4], 0, 0, 0 } }, + 6 => .{ .bytes = .{ in[0], in[1], in[2], in[3], in[4], in[5], 0, 0 } }, + 7 => .{ .bytes = .{ in[0], in[1], in[2], in[3], in[4], in[5], in[6], 0 } }, + + max_inline_len => + // If they use the final bit, then it's a big string. + // This should only happen for non-ascii strings that are exactly 8 bytes. + // so that's an edge-case + if ((in[max_inline_len - 1]) >= 128) + try initAppend(buf, in) + else + .{ .bytes = .{ in[0], in[1], in[2], in[3], in[4], in[5], in[6], in[7] } }, + + else => try initAppend(buf, in), + }; + } + + pub fn initAppend( + buf: *std.ArrayList(u8), + in: string, + ) OOM!String { + try buf.appendSlice(in); + const in_buf = buf.items[buf.items.len - in.len ..]; + return @bitCast((@as(u64, 0) | @as(u64, @as(max_addressable_space, @truncate(@as(u64, @bitCast(Pointer.init(buf.items, in_buf))))))) | 1 << 63); + } + pub fn eql(this: String, that: String, this_buf: []const u8, that_buf: []const u8) bool { if (this.isInline() and that.isInline()) { return @as(u64, @bitCast(this.bytes)) == @as(u64, @bitCast(that.bytes)); diff --git a/src/install/versioned_url.zig b/src/install/versioned_url.zig index 0a06856d7ca9e6..ac31f98ece1521 100644 --- a/src/install/versioned_url.zig +++ b/src/install/versioned_url.zig @@ -13,10 +13,6 @@ pub const VersionedURL = extern struct { return this.version.order(other.version, lhs_buf, rhs_buf); } - pub fn fmt(this: VersionedURL, buf: []const u8) Semver.Version.Formatter { - return this.version.fmt(buf); - } - pub fn count(this: VersionedURL, buf: []const u8, comptime Builder: type, builder: Builder) void { this.version.count(buf, comptime Builder, builder); builder.count(this.url.slice(buf)); diff --git a/src/json_parser.zig b/src/json_parser.zig index 4d30b2d09018f2..22ef5f46329a79 100644 --- a/src/json_parser.zig +++ b/src/json_parser.zig @@ -34,7 +34,7 @@ const G = js_ast.G; const T = js_lexer.T; const E = js_ast.E; const Stmt = js_ast.Stmt; -const Expr = js_ast.Expr; +pub const Expr = js_ast.Expr; const Binding = js_ast.Binding; const Symbol = js_ast.Symbol; const Level = js_ast.Op.Level; @@ -347,14 +347,16 @@ fn JSONLikeParser_( }; } -// This is a special JSON parser that stops as soon as it finds +// This is a special JSON parser that stops as soon as it finds combinations of // { // "name": "NAME_IN_HERE", // "version": "VERSION_IN_HERE", +// "bin": ... or "directories": { "bin": ... } // } -// and then returns the name and version. 
-// More precisely, it stops as soon as it finds a top-level "name" and "version" property which are strings
-// In most cases, it should perform zero heap allocations because it does not create arrays or objects (It just skips them)
+// and then returns the name, version, and bin
+// More precisely, it stops as soon as it finds a top-level "name" and "version" (and/or "bin").
+// In most cases, it should perform zero heap allocations because it does not create arrays or objects (It just skips them).
+// If searching for "bin", objects are only created if the key is top level "bin". "bin" within "directories" can only be a string.
 pub const PackageJSONVersionChecker = struct {
     const Lexer = js_lexer.NewLexer(opts);
 
@@ -369,9 +371,14 @@ pub const PackageJSONVersionChecker = struct {
 
     found_name: []const u8 = "",
     found_version: []const u8 = "",
+    found_bin: union(enum) {
+        bin: Expr,
+        dir: Expr,
+    } = .{ .bin = Expr.empty },
 
     has_found_name: bool = false,
     has_found_version: bool = false,
+    has_found_bin: bool = false,
 
     name_loc: logger.Loc = logger.Loc.Empty,
 
@@ -382,21 +389,24 @@ pub const PackageJSONVersionChecker = struct {
         .allow_comments = true,
     };
 
-    pub fn init(allocator: std.mem.Allocator, source: *const logger.Source, log: *logger.Log) !Parser {
+    pub fn init(allocator: std.mem.Allocator, source: *const logger.Source, log: *logger.Log, checks: enum { check_for_bin, ignore_bin, only_bin }) !Parser {
         return Parser{
             .lexer = try Lexer.init(log, source.*, allocator),
             .allocator = allocator,
             .log = log,
             .source = source,
+            .has_found_bin = checks == .ignore_bin,
+            .has_found_name = checks == .only_bin,
+            .has_found_version = checks == .only_bin,
         };
     }
 
     const Parser = @This();
 
-    pub fn parseExpr(p: *Parser) anyerror!Expr {
+    pub fn parseExpr(p: *Parser, collect_props: bool, parent_is_directories: bool) anyerror!Expr {
         const loc = p.lexer.loc();
 
-        if (p.has_found_name and p.has_found_version) return newExpr(E.Missing{}, loc);
+        if (p.has_found_name and p.has_found_version and p.has_found_bin) return newExpr(E.Missing{}, loc);
 
         switch (p.lexer.token) {
             .t_false => {
@@ -443,7 +453,7 @@ pub const PackageJSONVersionChecker = struct {
                     }
                 }
 
-                _ = try p.parseExpr();
+                _ = try p.parseExpr(false, false);
                 has_exprs = true;
             }
 
@@ -455,6 +465,8 @@ pub const PackageJSONVersionChecker = struct {
                 p.depth += 1;
                 defer p.depth -= 1;
 
+                var properties = std.ArrayList(G.Property).init(p.allocator);
+
                 var has_properties = false;
                 while (p.lexer.token != .t_close_brace) {
                     if (has_properties) {
@@ -471,39 +483,95 @@ pub const PackageJSONVersionChecker = struct {
 
                     try p.lexer.expect(.t_colon);
 
-                    const value = try p.parseExpr();
+                    var collect_prop_props = false;
+                    var is_directories = false;
+
+                    if (!p.has_found_bin and
+                        p.depth == 1 and
+                        // next is going to be a top level property
+                        // with an object value. check if it is "bin"
+                        // or "directories"
+                        p.lexer.token == .t_open_brace and
+                        key.data == .e_string)
+                    {
+                        if (strings.eqlComptime(key.data.e_string.data, "bin")) {
+                            collect_prop_props = true;
+                        } else if (strings.eqlComptime(key.data.e_string.data, "directories")) {
+                            is_directories = true;
+                        }
+
+                        // if "bin" is inside "directories" it can only be a string,
+                        // so there is no need to set collect_prop_props when
+                        // depth == 2 and parent_is_directories == true.
+
+                    }
+
+                    const value = try p.parseExpr(collect_prop_props, is_directories);
 
                     if (p.depth == 1) {
                         // if you have multiple "name" fields in the package.json....
// first one wins - if (key.data == .e_string and value.data == .e_string) { - if (!p.has_found_name and strings.eqlComptime(key.data.e_string.data, "name")) { - const len = @min( - value.data.e_string.data.len, - p.found_name_buf.len, - ); - - bun.copy(u8, &p.found_name_buf, value.data.e_string.data[0..len]); - p.found_name = p.found_name_buf[0..len]; - p.has_found_name = true; - p.name_loc = value.loc; - } else if (!p.has_found_version and strings.eqlComptime(key.data.e_string.data, "version")) { - const len = @min( - value.data.e_string.data.len, - p.found_version_buf.len, - ); - bun.copy(u8, &p.found_version_buf, value.data.e_string.data[0..len]); - p.found_version = p.found_version_buf[0..len]; - p.has_found_version = true; + if (key.data == .e_string) { + if (value.data == .e_string) { + if (!p.has_found_name and strings.eqlComptime(key.data.e_string.data, "name")) { + const len = @min( + value.data.e_string.data.len, + p.found_name_buf.len, + ); + + bun.copy(u8, &p.found_name_buf, value.data.e_string.data[0..len]); + p.found_name = p.found_name_buf[0..len]; + p.has_found_name = true; + p.name_loc = value.loc; + } else if (!p.has_found_version and strings.eqlComptime(key.data.e_string.data, "version")) { + const len = @min( + value.data.e_string.data.len, + p.found_version_buf.len, + ); + bun.copy(u8, &p.found_version_buf, value.data.e_string.data[0..len]); + p.found_version = p.found_version_buf[0..len]; + p.has_found_version = true; + } + } + + if (!p.has_found_bin and strings.eqlComptime(key.data.e_string.data, "bin")) { + p.found_bin = .{ + .bin = value, + }; + p.has_found_bin = true; + } + } + } else if (parent_is_directories) { + if (key.data == .e_string) { + if (!p.has_found_bin and strings.eqlComptime(key.data.e_string.data, "bin")) { + p.found_bin = .{ + .dir = value, + }; + p.has_found_bin = true; } } } - if (p.has_found_name and p.has_found_version) return newExpr(E.Missing{}, loc); + if (p.has_found_name and p.has_found_version and p.has_found_bin) return newExpr(E.Missing{}, loc); + has_properties = true; + if (collect_props) { + properties.append(.{ + .key = key, + .value = value, + .kind = .normal, + .initializer = null, + }) catch bun.outOfMemory(); + } } try p.lexer.expect(.t_close_brace); + + if (collect_props) { + return newExpr(E.Object{ + .properties = G.Property.List.fromList(properties), + }, loc); + } return newExpr(E.Missing{}, loc); }, else => { @@ -775,41 +843,6 @@ pub fn parsePackageJSONUTF8( return try parser.parseExpr(false, true); } -pub fn parsePackageJSONUTF8AlwaysDecode( - source: *const logger.Source, - log: *logger.Log, - allocator: std.mem.Allocator, -) !Expr { - const len = source.contents.len; - - switch (len) { - // This is to be consisntent with how disabled JS files are handled - 0 => { - return Expr{ .loc = logger.Loc{ .start = 0 }, .data = empty_object_data }; - }, - // This is a fast pass I guess - 2 => { - if (strings.eqlComptime(source.contents[0..1], "\"\"") or strings.eqlComptime(source.contents[0..1], "''")) { - return Expr{ .loc = logger.Loc{ .start = 0 }, .data = empty_string_data }; - } else if (strings.eqlComptime(source.contents[0..1], "{}")) { - return Expr{ .loc = logger.Loc{ .start = 0 }, .data = empty_object_data }; - } else if (strings.eqlComptime(source.contents[0..1], "[]")) { - return Expr{ .loc = logger.Loc{ .start = 0 }, .data = empty_array_data }; - } - }, - else => {}, - } - - var parser = try JSONLikeParser(.{ - .is_json = true, - .allow_comments = true, - .allow_trailing_commas = true, - }).init(allocator, 
source.*, log); - bun.assert(parser.source().contents.len > 0); - - return try parser.parseExpr(false, true); -} - const JsonResult = struct { root: Expr, indentation: Indentation = .{}, diff --git a/src/string_mutable.zig b/src/string_mutable.zig index 042184d5014200..0a78d7335f94a0 100644 --- a/src/string_mutable.zig +++ b/src/string_mutable.zig @@ -306,18 +306,18 @@ pub const MutableString = struct { const max = 2048; - pub const Writer = std.io.Writer(*BufferedWriter, anyerror, BufferedWriter.writeAll); + pub const Writer = std.io.Writer(*BufferedWriter, OOM, BufferedWriter.writeAll); inline fn remain(this: *BufferedWriter) []u8 { return this.buffer[this.pos..]; } - pub fn flush(this: *BufferedWriter) !void { + pub fn flush(this: *BufferedWriter) OOM!void { _ = try this.context.writeAll(this.buffer[0..this.pos]); this.pos = 0; } - pub fn writeAll(this: *BufferedWriter, bytes: []const u8) anyerror!usize { + pub fn writeAll(this: *BufferedWriter, bytes: []const u8) OOM!usize { const pending = bytes; if (pending.len >= max) { @@ -342,7 +342,7 @@ pub const MutableString = struct { /// Write a E.String to the buffer. /// This automatically encodes UTF-16 into UTF-8 using /// the same code path as TextEncoder - pub fn writeString(this: *BufferedWriter, bytes: *E.String) anyerror!usize { + pub fn writeString(this: *BufferedWriter, bytes: *E.String) OOM!usize { if (bytes.isUTF8()) { return try this.writeAll(bytes.slice(this.context.allocator)); } @@ -353,7 +353,7 @@ pub const MutableString = struct { /// Write a UTF-16 string to the (UTF-8) buffer /// This automatically encodes UTF-16 into UTF-8 using /// the same code path as TextEncoder - pub fn writeAll16(this: *BufferedWriter, bytes: []const u16) anyerror!usize { + pub fn writeAll16(this: *BufferedWriter, bytes: []const u16) OOM!usize { const pending = bytes; if (pending.len >= max) { @@ -385,7 +385,7 @@ pub const MutableString = struct { return pending.len; } - pub fn writeHTMLAttributeValueString(this: *BufferedWriter, str: *E.String) anyerror!void { + pub fn writeHTMLAttributeValueString(this: *BufferedWriter, str: *E.String) OOM!void { if (str.isUTF8()) { try this.writeHTMLAttributeValue(str.slice(this.context.allocator)); return; @@ -394,7 +394,7 @@ pub const MutableString = struct { try this.writeHTMLAttributeValue16(str.slice16()); } - pub fn writeHTMLAttributeValue(this: *BufferedWriter, bytes: []const u8) anyerror!void { + pub fn writeHTMLAttributeValue(this: *BufferedWriter, bytes: []const u8) OOM!void { var items = bytes; while (items.len > 0) { // TODO: SIMD @@ -416,7 +416,7 @@ pub const MutableString = struct { } } - pub fn writeHTMLAttributeValue16(this: *BufferedWriter, bytes: []const u16) anyerror!void { + pub fn writeHTMLAttributeValue16(this: *BufferedWriter, bytes: []const u16) OOM!void { var items = bytes; while (items.len > 0) { if (strings.indexOfAny16(items, "\"<>")) |j| { From 667821c53a489b67402b9bf34504dd8b401a1f7a Mon Sep 17 00:00:00 2001 From: Ashcon Partovi Date: Wed, 11 Dec 2024 09:47:17 -0800 Subject: [PATCH 7/7] ci: Fix canary releases (#15713) --- .buildkite/bootstrap.yml | 17 +--- .buildkite/ci.mjs | 141 ++++++++++++++++----------- .buildkite/scripts/prepare-build.sh | 11 --- .buildkite/scripts/upload-release.sh | 7 +- cmake/Options.cmake | 8 +- scripts/utils.mjs | 2 +- 6 files changed, 87 insertions(+), 99 deletions(-) delete mode 100755 .buildkite/scripts/prepare-build.sh diff --git a/.buildkite/bootstrap.yml b/.buildkite/bootstrap.yml index b0b84616b3eb50..5a75106d5e6ea3 100644 --- 
a/.buildkite/bootstrap.yml +++ b/.buildkite/bootstrap.yml @@ -13,19 +13,4 @@ steps: agents: queue: "build-darwin" command: - - ".buildkite/scripts/prepare-build.sh" - - - if: "build.branch == 'main' && !build.pull_request.repository.fork" - label: ":github:" - agents: - queue: "test-darwin" - depends_on: - - "darwin-aarch64-build-bun" - - "darwin-x64-build-bun" - - "linux-aarch64-build-bun" - - "linux-x64-build-bun" - - "linux-x64-baseline-build-bun" - - "windows-x64-build-bun" - - "windows-x64-baseline-build-bun" - command: - - ".buildkite/scripts/upload-release.sh" + - "node .buildkite/ci.mjs" diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index c5b2d6f1f09d94..240ab9f16e8d7c 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -11,6 +11,7 @@ import { getBuildkiteEmoji, getBuildMetadata, getBuildNumber, + getCanaryRevision, getCommitMessage, getEmoji, getEnv, @@ -43,7 +44,6 @@ import { * @property {Arch} arch * @property {Abi} [abi] * @property {boolean} [baseline] - * @property {boolean} [canary] * @property {Profile} [profile] */ @@ -91,7 +91,6 @@ function getTargetLabel(target) { * @property {Arch} arch * @property {Abi} [abi] * @property {boolean} [baseline] - * @property {boolean} [canary] * @property {Profile} [profile] * @property {Distro} [distro] * @property {string} release @@ -195,13 +194,15 @@ function getImageLabel(platform) { /** * @param {Platform} platform - * @param {boolean} [dryRun] + * @param {PipelineOptions} options * @returns {string} */ -function getImageName(platform, dryRun) { +function getImageName(platform, options) { const { os, arch, distro, release } = platform; + const { buildImages, publishImages } = options; const name = distro ? `${os}-${arch}-${distro}-${release}` : `${os}-${arch}-${release}`; - if (dryRun) { + + if (buildImages && !publishImages) { return `${name}-build-${getBuildNumber()}`; } return `${name}-v${getBootstrapVersion(os)}`; @@ -256,12 +257,13 @@ function getPriority() { /** * @param {Platform} platform - * @param {Ec2Options} options + * @param {PipelineOptions} options + * @param {Ec2Options} ec2Options * @returns {Agent} */ -function getEc2Agent(platform, options) { +function getEc2Agent(platform, options, ec2Options) { const { os, arch, abi, distro, release } = platform; - const { instanceType, cpuCount, threadsPerCore } = options; + const { instanceType, cpuCount, threadsPerCore } = ec2Options; return { os, arch, @@ -272,7 +274,7 @@ function getEc2Agent(platform, options) { // https://github.com/oven-sh/robobun/blob/d46c07e0ac5ac0f9ffe1012f0e98b59e1a0d387a/src/robobun.ts#L1707 robobun: true, robobun2: true, - "image-name": getImageName(platform), + "image-name": getImageName(platform, options), "instance-type": instanceType, "cpu-count": cpuCount, "threads-per-core": threadsPerCore, @@ -282,9 +284,10 @@ function getEc2Agent(platform, options) { /** * @param {Platform} platform + * @param {PipelineOptions} options * @returns {string} */ -function getCppAgent(platform) { +function getCppAgent(platform, options) { const { os, arch } = platform; if (os === "darwin") { @@ -295,7 +298,7 @@ function getCppAgent(platform) { }; } - return getEc2Agent(platform, { + return getEc2Agent(platform, options, { instanceType: arch === "aarch64" ? 
"c8g.16xlarge" : "c7i.16xlarge", cpuCount: 32, threadsPerCore: 1, @@ -304,9 +307,10 @@ function getCppAgent(platform) { /** * @param {Platform} platform + * @param {PipelineOptions} options * @returns {Agent} */ -function getZigAgent(platform) { +function getZigAgent(platform, options) { const { arch } = platform; return { @@ -320,6 +324,7 @@ function getZigAgent(platform) { // distro: "debian", // release: "11", // }, + // options, // { // instanceType: arch === "aarch64" ? "c8g.2xlarge" : "c7i.2xlarge", // cpuCount: 4, @@ -330,9 +335,10 @@ function getZigAgent(platform) { /** * @param {Platform} platform + * @param {PipelineOptions} options * @returns {Agent} */ -function getTestAgent(platform) { +function getTestAgent(platform, options) { const { os, arch } = platform; if (os === "darwin") { @@ -345,7 +351,7 @@ function getTestAgent(platform) { // TODO: `dev-server-ssr-110.test.ts` and `next-build.test.ts` run out of memory at 8GB of memory, so use 16GB instead. if (os === "windows") { - return getEc2Agent(platform, { + return getEc2Agent(platform, options, { instanceType: "c7i.2xlarge", cpuCount: 2, threadsPerCore: 1, @@ -353,14 +359,14 @@ function getTestAgent(platform) { } if (arch === "aarch64") { - return getEc2Agent(platform, { + return getEc2Agent(platform, options, { instanceType: "c8g.xlarge", cpuCount: 2, threadsPerCore: 1, }); } - return getEc2Agent(platform, { + return getEc2Agent(platform, options, { instanceType: "c7i.xlarge", cpuCount: 2, threadsPerCore: 1, @@ -373,16 +379,20 @@ function getTestAgent(platform) { /** * @param {Target} target + * @param {PipelineOptions} options * @returns {Record} */ -function getBuildEnv(target) { - const { profile, baseline, canary, abi } = target; +function getBuildEnv(target, options) { + const { profile, baseline, abi } = target; const release = !profile || profile === "release"; + const { canary } = options; + const revision = typeof canary === "number" ? canary : 1; return { CMAKE_BUILD_TYPE: release ? "Release" : profile === "debug" ? "Debug" : "RelWithDebInfo", ENABLE_BASELINE: baseline ? "ON" : "OFF", - ENABLE_CANARY: canary ? "ON" : "OFF", + ENABLE_CANARY: revision > 0 ? "ON" : "OFF", + CANARY_REVISION: revision, ENABLE_ASSERTIONS: release ? "OFF" : "ON", ENABLE_LOGS: release ? "OFF" : "ON", ABI: abi === "musl" ? 
"musl" : undefined, @@ -391,34 +401,36 @@ function getBuildEnv(target) { /** * @param {Platform} platform + * @param {PipelineOptions} options * @returns {Step} */ -function getBuildVendorStep(platform) { +function getBuildVendorStep(platform, options) { return { key: `${getTargetKey(platform)}-build-vendor`, label: `${getTargetLabel(platform)} - build-vendor`, - agents: getCppAgent(platform), + agents: getCppAgent(platform, options), retry: getRetry(), cancel_on_build_failing: isMergeQueue(), - env: getBuildEnv(platform), + env: getBuildEnv(platform, options), command: "bun run build:ci --target dependencies", }; } /** * @param {Platform} platform + * @param {PipelineOptions} options * @returns {Step} */ -function getBuildCppStep(platform) { +function getBuildCppStep(platform, options) { return { key: `${getTargetKey(platform)}-build-cpp`, label: `${getTargetLabel(platform)} - build-cpp`, - agents: getCppAgent(platform), + agents: getCppAgent(platform, options), retry: getRetry(), cancel_on_build_failing: isMergeQueue(), env: { BUN_CPP_ONLY: "ON", - ...getBuildEnv(platform), + ...getBuildEnv(platform, options), }, command: "bun run build:ci --target bun", }; @@ -442,26 +454,28 @@ function getBuildToolchain(target) { /** * @param {Platform} platform + * @param {PipelineOptions} options * @returns {Step} */ -function getBuildZigStep(platform) { +function getBuildZigStep(platform, options) { const toolchain = getBuildToolchain(platform); return { key: `${getTargetKey(platform)}-build-zig`, label: `${getTargetLabel(platform)} - build-zig`, - agents: getZigAgent(platform), + agents: getZigAgent(platform, options), retry: getRetry(), cancel_on_build_failing: isMergeQueue(), - env: getBuildEnv(platform), + env: getBuildEnv(platform, options), command: `bun run build:ci --target bun-zig --toolchain ${toolchain}`, }; } /** * @param {Platform} platform + * @param {PipelineOptions} options * @returns {Step} */ -function getLinkBunStep(platform) { +function getLinkBunStep(platform, options) { return { key: `${getTargetKey(platform)}-build-bun`, label: `${getTargetLabel(platform)} - build-bun`, @@ -470,12 +484,12 @@ function getLinkBunStep(platform) { `${getTargetKey(platform)}-build-cpp`, `${getTargetKey(platform)}-build-zig`, ], - agents: getCppAgent(platform), + agents: getCppAgent(platform, options), retry: getRetry(), cancel_on_build_failing: isMergeQueue(), env: { BUN_LINK_ONLY: "ON", - ...getBuildEnv(platform), + ...getBuildEnv(platform, options), }, command: "bun run build:ci --target bun", }; @@ -483,16 +497,17 @@ function getLinkBunStep(platform) { /** * @param {Platform} platform + * @param {PipelineOptions} options * @returns {Step} */ -function getBuildBunStep(platform) { +function getBuildBunStep(platform, options) { return { key: `${getTargetKey(platform)}-build-bun`, label: `${getTargetLabel(platform)} - build-bun`, - agents: getCppAgent(platform), + agents: getCppAgent(platform, options), retry: getRetry(), cancel_on_build_failing: isMergeQueue(), - env: getBuildEnv(platform), + env: getBuildEnv(platform, options), command: "bun run build:ci", }; } @@ -506,12 +521,13 @@ function getBuildBunStep(platform) { /** * @param {Platform} platform - * @param {TestOptions} [options] + * @param {PipelineOptions} options + * @param {TestOptions} [testOptions] * @returns {Step} */ -function getTestBunStep(platform, options = {}) { +function getTestBunStep(platform, options, testOptions = {}) { const { os } = platform; - const { buildId, unifiedTests, testFiles } = options; + const { buildId, 
+  const { buildId, unifiedTests, testFiles } = testOptions;

   const args = [`--step=${getTargetKey(platform)}-build-bun`];
   if (buildId) {
@@ -530,7 +546,7 @@
     key: `${getPlatformKey(platform)}-test-bun`,
     label: `${getPlatformLabel(platform)} - test-bun`,
     depends_on: depends,
-    agents: getTestAgent(platform),
+    agents: getTestAgent(platform, options),
     cancel_on_build_failing: isMergeQueue(),
     retry: getRetry(),
     soft_fail: isMainBranch() ? true : [{ exit_status: 2 }],
@@ -544,12 +560,13 @@
 /**
  * @param {Platform} platform
- * @param {boolean} [dryRun]
+ * @param {PipelineOptions} options
  * @returns {Step}
  */
-function getBuildImageStep(platform, dryRun) {
+function getBuildImageStep(platform, options) {
   const { os, arch, distro, release } = platform;
-  const action = dryRun ? "create-image" : "publish-image";
+  const { publishImages } = options;
+  const action = publishImages ? "publish-image" : "create-image";
   const command = [
     "node",
     "./scripts/machine.mjs",
@@ -578,10 +595,14 @@
 }

 /**
- * @param {Platform[]} [buildPlatforms]
+ * @param {Platform[]} buildPlatforms
+ * @param {PipelineOptions} options
  * @returns {Step}
  */
-function getReleaseStep(buildPlatforms) {
+function getReleaseStep(buildPlatforms, options) {
+  const { canary } = options;
+  const revision = typeof canary === "number" ? canary : 1;
+
   return {
     key: "release",
     label: getBuildkiteEmoji("rocket"),
@@ -589,6 +610,9 @@
       queue: "test-darwin",
     },
     depends_on: buildPlatforms.map(platform => `${getTargetKey(platform)}-build-bun`),
+    env: {
+      CANARY: revision,
+    },
     command: ".buildkite/scripts/upload-release.sh",
   };
 }
@@ -678,7 +702,7 @@
  * @property {string | boolean} [forceTests]
  * @property {string | boolean} [buildImages]
  * @property {string | boolean} [publishImages]
- * @property {boolean} [canary]
+ * @property {number} [canary]
  * @property {Profile[]} [buildProfiles]
  * @property {Platform[]} [buildPlatforms]
  * @property {Platform[]} [testPlatforms]
@@ -896,6 +920,7 @@
     return;
   }

+  const canary = await getCanaryRevision();
   const buildPlatformsMap = new Map(buildPlatforms.map(platform => [getTargetKey(platform), platform]));
   const testPlatformsMap = new Map(testPlatforms.map(platform => [getPlatformKey(platform), platform]));

@@ -918,7 +943,7 @@
     const buildPlatformKeys = parseArray(options["build-platforms"]);
     const testPlatformKeys = parseArray(options["test-platforms"]);
     return {
-      canary: parseBoolean(options["canary"]),
+      canary: parseBoolean(options["canary"]) ? canary : 0,
       skipBuilds: parseBoolean(options["skip-builds"]),
       forceBuilds: parseBoolean(options["force-builds"]),
       skipTests: parseBoolean(options["skip-tests"]),
@@ -952,10 +977,11 @@
     return false;
   };

+  const isCanary =
+    !parseBoolean(getEnv("RELEASE", false) || "false") &&
+    !/\[(release|build release|release build)\]/i.test(commitMessage);
   return {
-    canary:
-      !parseBoolean(getEnv("RELEASE", false) || "false") &&
-      !/\[(release|build release|release build)\]/i.test(commitMessage),
+    canary: isCanary ? canary : 0,
     skipEverything: parseOption(/\[(skip ci|no ci)\]/i),
     skipBuilds: parseOption(/\[(skip builds?|no builds?|only tests?)\]/i),
     forceBuilds: parseOption(/\[(force builds?)\]/i),
@@ -1001,7 +1027,7 @@
     steps.push({
       key: "build-images",
       group: getBuildkiteEmoji("aws"),
-      steps: [...imagePlatforms.values()].map(platform => getBuildImageStep(platform, !publishImages)),
+      steps: [...imagePlatforms.values()].map(platform => getBuildImageStep(platform, options)),
     });
   }

@@ -1025,22 +1051,21 @@
       .flatMap(platform => buildProfiles.map(profile => ({ ...platform, profile })))
       .map(target => {
         const imageKey = getImageKey(target);
-        const imagePlatform = imagePlatforms.get(imageKey);

         return getStepWithDependsOn(
           {
             key: getTargetKey(target),
             group: getTargetLabel(target),
             steps: unifiedBuilds
-              ? [getBuildBunStep(target)]
+              ? [getBuildBunStep(target, options)]
               : [
-                  getBuildVendorStep(target),
-                  getBuildCppStep(target),
-                  getBuildZigStep(target),
-                  getLinkBunStep(target),
+                  getBuildVendorStep(target, options),
+                  getBuildCppStep(target, options),
+                  getBuildZigStep(target, options),
+                  getLinkBunStep(target, options),
                 ],
           },
-          imagePlatform ? `${imageKey}-build-image` : undefined,
+          imagePlatforms.has(imageKey) ? `${imageKey}-build-image` : undefined,
         );
       }),
   );
@@ -1055,14 +1080,14 @@
       .map(target => ({
         key: getTargetKey(target),
         group: getTargetLabel(target),
-        steps: [getTestBunStep(target, { unifiedTests, testFiles, buildId })],
+        steps: [getTestBunStep(target, options, { unifiedTests, testFiles, buildId })],
       })),
   );
   }
 }

 if (isMainBranch()) {
-  steps.push(getReleaseStep(buildPlatforms));
+  steps.push(getReleaseStep(buildPlatforms, options));
 }

 /** @type {Map} */

diff --git a/.buildkite/scripts/prepare-build.sh b/.buildkite/scripts/prepare-build.sh
deleted file mode 100755
index b0b3f9f37eaf51..00000000000000
--- a/.buildkite/scripts/prepare-build.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/bash
-
-set -eo pipefail
-
-function run_command() {
-  set -x
-  "$@"
-  { set +x; } 2>/dev/null
-}
-
-run_command node ".buildkite/ci.mjs" "$@"

diff --git a/.buildkite/scripts/upload-release.sh b/.buildkite/scripts/upload-release.sh
index b684dfb4a3d958..fa5a2db11a143b 100755
--- a/.buildkite/scripts/upload-release.sh
+++ b/.buildkite/scripts/upload-release.sh
@@ -3,10 +3,6 @@
 set -eo pipefail

 function assert_main() {
-  if [ "$RELEASE" == "1" ]; then
-    echo "info: Skipping canary release because this is a release build"
-    exit 0
-  fi
   if [ -z "$BUILDKITE_REPO" ]; then
     echo "error: Cannot find repository for this build"
     exit 1
@@ -237,8 +233,7 @@ function create_release() {
 }

 function assert_canary() {
-  local canary="$(buildkite-agent meta-data get canary 2>/dev/null)"
-  if [ -z "$canary" ] || [ "$canary" == "0" ]; then
+  if [ -z "$CANARY" ] || [ "$CANARY" == "0" ]; then
     echo "warn: Skipping release because this is not a canary build"
     exit 0
   fi

diff --git a/cmake/Options.cmake b/cmake/Options.cmake
index 201bf8c8e1536a..fe3219c2687686 100644
--- a/cmake/Options.cmake
+++ b/cmake/Options.cmake
@@ -67,13 +67,7 @@ optionx(ENABLE_ASSERTIONS BOOL "If debug assertions should be enabled" DEFAULT $

 optionx(ENABLE_CANARY BOOL "If canary features should be enabled" DEFAULT ON)

-if(ENABLE_CANARY AND BUILDKITE)
-  execute_process(
-    COMMAND buildkite-agent meta-data get "canary"
-    OUTPUT_VARIABLE DEFAULT_CANARY_REVISION
-    OUTPUT_STRIP_TRAILING_WHITESPACE
-  )
-elseif(ENABLE_CANARY)
+if(ENABLE_CANARY)
   set(DEFAULT_CANARY_REVISION "1")
 else()
   set(DEFAULT_CANARY_REVISION "0")
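
The net effect of the diffs above is that the canary revision is computed once, when ci.mjs generates the pipeline, and then travels as plain environment variables: ENABLE_CANARY and CANARY_REVISION into the CMake build, and CANARY into upload-release.sh, whose assert_canary now checks the variable instead of shelling out to buildkite-agent meta-data in every job. A condensed model of that flow (the revision value is a placeholder; the real helper lives in scripts/utils.mjs):

    // canary-flow.mjs: condensed model of the wiring introduced above.
    async function getCanaryRevision() {
      return 123; // placeholder; the real helper derives a number per build
    }

    export async function getCanaryEnv(isReleaseBuild) {
      // Release builds pin the revision to 0, which turns canary features off.
      const revision = isReleaseBuild ? 0 : await getCanaryRevision();
      return {
        ENABLE_CANARY: revision > 0 ? "ON" : "OFF", // read by cmake/Options.cmake
        CANARY_REVISION: String(revision),          // read by the build steps
        CANARY: String(revision),                   // upload-release.sh skips on "0"
      };
    }
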
diff --git a/scripts/utils.mjs b/scripts/utils.mjs
index 57ca7710504ca0..be3dcfc69d8057 100755
--- a/scripts/utils.mjs
+++ b/scripts/utils.mjs
@@ -2209,7 +2209,7 @@ export async function waitForPort(options) {
   return cause;
 }

 /**
- * @returns {Promise<string>}
+ * @returns {Promise<number>}
  */
 export async function getCanaryRevision() {
   if (isPullRequest() || isFork()) {