diff --git a/.clangd b/.clangd
new file mode 100644
index 00000000000000..35856fb41412f2
--- /dev/null
+++ b/.clangd
@@ -0,0 +1,3 @@
+Index:
+ Background: Skip # Disable slow background indexing of these files.
+
diff --git a/.github/workflows/bun-linux-build.yml b/.github/workflows/bun-linux-build.yml
index ba0a136acfc6e6..b758f7e71243b8 100644
--- a/.github/workflows/bun-linux-build.yml
+++ b/.github/workflows/bun-linux-build.yml
@@ -310,7 +310,7 @@ jobs:
mode: upsert
create_if_not_exists: false
message: |
- ✅ test failures on ${{ matrix.tag }} have been resolved.
+ ✅ test failures on ${{ matrix.tag }} have been resolved. Thank you.
[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})
- id: fail
diff --git a/.github/workflows/bun-mac-aarch64.yml b/.github/workflows/bun-mac-aarch64.yml
index 463db45841cdce..9a21861e452689 100644
--- a/.github/workflows/bun-mac-aarch64.yml
+++ b/.github/workflows/bun-mac-aarch64.yml
@@ -472,7 +472,7 @@ jobs:
mode: upsert
create_if_not_exists: false
message: |
- ✅ test failures on ${{ matrix.tag }} have been resolved.
+ ✅ test failures on ${{ matrix.tag }} have been resolved. Thank you.
[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})
- id: fail
diff --git a/.github/workflows/bun-mac-x64-baseline.yml b/.github/workflows/bun-mac-x64-baseline.yml
index 83ae54cc09c188..1111c592feb27a 100644
--- a/.github/workflows/bun-mac-x64-baseline.yml
+++ b/.github/workflows/bun-mac-x64-baseline.yml
@@ -463,7 +463,7 @@ jobs:
mode: upsert
create_if_not_exists: false
message: |
- ✅ test failures on ${{ matrix.tag }} have been resolved.
+ ✅ test failures on ${{ matrix.tag }} have been resolved. Thank you.
[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})
- id: fail
diff --git a/.github/workflows/bun-mac-x64.yml b/.github/workflows/bun-mac-x64.yml
index ffc3edfe7a4c54..9ccf09c7ae052a 100644
--- a/.github/workflows/bun-mac-x64.yml
+++ b/.github/workflows/bun-mac-x64.yml
@@ -458,7 +458,7 @@ jobs:
mode: upsert
create_if_not_exists: false
message: |
- ✅ test failures on ${{ matrix.tag }} have been resolved.
+ ✅ test failures on ${{ matrix.tag }} have been resolved. Thank you.
[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})
- id: fail
diff --git a/.github/workflows/bun-windows.yml b/.github/workflows/bun-windows.yml
index 35c3c1be76d9b3..f30b98b4bd4506 100644
--- a/.github/workflows/bun-windows.yml
+++ b/.github/workflows/bun-windows.yml
@@ -501,7 +501,7 @@ jobs:
mode: upsert
create_if_not_exists: false
message: |
- ✅🪟 Test regressions on Windows ${{ matrix.arch }}${{ matrix.cpu == 'nehalem' && ' Baseline' || '' }} have been resolved.
+ ✅🪟 Test regressions on Windows ${{ matrix.arch }}${{ matrix.cpu == 'nehalem' && ' Baseline' || '' }} have been resolved. Thank you.
- id: fail
name: Fail the build
if: steps.test.outputs.failing_tests != ''
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
new file mode 100644
index 00000000000000..e5e3a0aece2458
--- /dev/null
+++ b/.github/workflows/lint.yml
@@ -0,0 +1,86 @@
+name: lint
+
+permissions:
+ contents: read
+
+on:
+ workflow_dispatch:
+ pull_request:
+ push:
+ branches:
+ - main
+ - jarred/assert
+ paths:
+ - ".github/workflows/lint.yml"
+ - "src/**/*.zig"
+ - "src/*.zig"
+
+jobs:
+ format:
+ name: lint
+ runs-on: ${{ vars.RUNNER_LINUX_X64 || 'ubuntu-latest' }}
+ if: github.repository_owner == 'oven-sh'
+ permissions: write-all
+ outputs:
+ text_output: ${{ steps.lint.outputs.text_output }}
+ json_output: ${{ steps.lint.outputs.json_output }}
+ count: ${{ steps.lint.outputs.count }}
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Setup Bun
+ uses: ./.github/actions/setup-bun
+ with:
+ bun-version: "1.1.3"
+ - name: Install Dependencies
+ run: |
+ bun --cwd=./packages/bun-internal-test install
+ - name: Lint
+ id: lint
+ run: |
+ bun ./packages/bun-internal-test/src/linter.ts || true
+ - uses: sarisia/actions-status-discord@v1
+ if: always() && steps.lint.outputs.text_output != '' && github.event_name == 'pull_request'
+ with:
+ title: ""
+ webhook: ${{ secrets.DISCORD_WEBHOOK }}
+ status: "failure"
+ noprefix: true
+ nocontext: true
+ description: |
+ Pull Request
+ ### ❌ [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
+
+ @${{ github.actor }}, there are ${{ steps.lint.outputs.count }} lint errors on ${{ github.ref_name }}
+
+ ${{ steps.lint.outputs.text_output }}
+
+ **[View linter output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
+ - name: Comment on PR
+ if: steps.lint.outputs.text_output != '' && github.event_name == 'pull_request'
+ uses: thollander/actions-comment-pull-request@v2
+ with:
+ comment_tag: lint-failures
+ message: |
+ ❌ @${{ github.actor }} ${{ steps.lint.outputs.count }} lint errors
+
+ ${{ steps.lint.outputs.text_output }}
+
+ **[View linter output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
+
+ [#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})
+ - name: Update comment on PR
+ if: steps.lint.outputs.text_output == '' && github.event_name == 'pull_request'
+ uses: thollander/actions-comment-pull-request@v2
+ with:
+ comment_tag: lint-failures
+ mode: upsert
+ create_if_not_exists: false
+ message: |
+ ✅ lint failures have been resolved. Thank you.
+
+ [#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})
+ - id: fail
+ name: Fail the build
+ if: steps.lint.outputs.text_output != ''
+ run: exit 1
diff --git a/packages/bun-internal-test/src/banned.json b/packages/bun-internal-test/src/banned.json
new file mode 100644
index 00000000000000..2689404af4eb87
--- /dev/null
+++ b/packages/bun-internal-test/src/banned.json
@@ -0,0 +1,3 @@
+{
+ "std.debug.assert": "Use bun.assert instead"
+}
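Note on the format: each key in banned.json is the identifier the linter added below greps for under `src/`, and the value is the suggestion echoed next to every match in the report and in the GitHub annotation. For this entry, the expected fix is the mechanical swap applied throughout the rest of this diff; a minimal, self-contained sketch of a call site before and after (the `bun` namespace here is a stand-in so the example compiles on its own, outside the Bun tree):

```zig
const std = @import("std");

// Stand-in namespace so this sketch compiles outside the Bun tree; inside the
// repo the call sites reach the real function as `bun.assert`.
const bun = struct {
    pub inline fn assert(ok: bool) void {
        std.debug.assert(ok);
    }
};

pub fn main() void {
    var len: u32 = 0;
    const cap: u32 = 4;
    // Before (flagged by the linter): std.debug.assert(cap > len);
    // After (the suggested replacement):
    bun.assert(cap > len);
    len += 1;
    std.debug.print("len = {d}\n", .{len});
}
```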
diff --git a/packages/bun-internal-test/src/linter.ts b/packages/bun-internal-test/src/linter.ts
new file mode 100644
index 00000000000000..f6d82510db2bda
--- /dev/null
+++ b/packages/bun-internal-test/src/linter.ts
@@ -0,0 +1,68 @@
+import { $ } from "bun";
+import BANNED from "./banned.json";
+import * as action from "@actions/core";
+
+const IGNORED_FOLDERS = [
+ // list of folders to ignore
+ "windows-shim",
+];
+
+const ci = !!process.env["GITHUB_ACTIONS"];
+process.chdir(require("path").join(import.meta.dir, "../../../"));
+let bad = [];
+let report = "";
+const write = (text: string) => {
+ process.stdout.write(text);
+ report += text;
+};
+for (const [banned, suggestion] of Object.entries(BANNED)) {
+ // Run git grep to find occurrences of the banned pattern in .zig files.
+ // .nothrow() tolerates git grep's non-zero exit code when a pattern has no matches.
+ let stdout = await $`git grep -n "${banned}" "src/**/**.zig"`.nothrow().text();
+
+ stdout = stdout.trim();
+ if (stdout.length === 0) continue;
+
+ let lines = stdout.split("\n");
+ // Parse each line to extract filename and line number
+ const matches = lines
+ .filter(line => !IGNORED_FOLDERS.some(folder => line.includes(folder)))
+ .map(line => {
+ const [path, lineNumber, ...text] = line.split(":");
+ return { path, lineNumber, banned, suggestion, text: text.join(":") };
+ });
+ // Skip this pattern if nothing matched outside the ignored folders
+ if (matches.length === 0) continue;
+
+ write(`Banned **'${banned}'** found in the following locations:` + "\n");
+ matches.forEach(match => {
+ write(`${match.path}:${match.lineNumber}: ${match.text.trim()}` + "\n");
+ });
+ bad = bad.concat(matches);
+}
+
+if (report.length === 0) {
+ process.exit(0);
+}
+
+function link({ path, lineNumber, suggestion, banned }) {
+ action.error(`Lint failure: ${banned} is banned, ${suggestion}`, {
+ file: path,
+ startLine: Number(lineNumber),
+ endLine: Number(lineNumber),
+ });
+ return `[\`${path}:${lineNumber}\`](https://github.com/oven-sh/bun/blob/${process.env.GITHUB_SHA}/${path}#L${lineNumber})`;
+}
+
+if (ci) {
+ if (report.length > 0) {
+ action.setFailed(`${bad.length} lint failures`);
+ }
+ action.setOutput("count", bad.length);
+ action.setOutput("text_output", bad.map(m => `- ${link(m)}: ${m.banned} is banned, ${m.suggestion}`).join("\n"));
+ action.setOutput("json_output", JSON.stringify(bad));
+ action.summary.addRaw(report);
+ await action.summary.write();
+}
+
+process.exit(1);
diff --git a/src/ArenaAllocator.zig b/src/ArenaAllocator.zig
index bb7d356a2bb35d..c059626a69738f 100644
--- a/src/ArenaAllocator.zig
+++ b/src/ArenaAllocator.zig
@@ -1,5 +1,5 @@
const std = @import("std");
-const assert = std.debug.assert;
+const assert = @import("root").bun.assert;
const mem = std.mem;
const Allocator = std.mem.Allocator;
@@ -132,7 +132,7 @@ pub const ArenaAllocator = struct {
self.child_allocator.rawFree(alloc_buf, align_bits, @returnAddress());
it = next_it;
} else null;
- std.debug.assert(maybe_first_node == null or maybe_first_node.?.next == null);
+ assert(maybe_first_node == null or maybe_first_node.?.next == null);
// reset the state before we try resizing the buffers, so we definitely have reset the arena to 0.
self.state.end_index = 0;
if (maybe_first_node) |first_node| {
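The swapped-in `assert` resolves to `bun.assert` on the root module; its implementation is not part of this diff. As a rough, hypothetical sketch of what such a project-owned wrapper could look like (Bun's actual version may differ, for example by adding logging or crash reporting), the point is having a single place to customize the failure path instead of calling `std.debug.assert` at every site:

```zig
const std = @import("std");
const builtin = @import("builtin");

/// Hypothetical stand-in for bun.assert; not the implementation used by Bun.
pub inline fn assert(ok: bool) void {
    if (!ok) {
        if (comptime builtin.mode == .Debug) {
            // Centralizing the failure path lets the project add reporting
            // here later without touching any call site.
            std.debug.panic("assertion failure", .{});
        } else {
            unreachable;
        }
    }
}
```

Call sites then read `bun.assert(cond)`, or, as at the bottom of some files in this diff, declare `const assert = bun.assert;` once and use bare `assert(cond)`.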
diff --git a/src/StandaloneModuleGraph.zig b/src/StandaloneModuleGraph.zig
index 8e59529c5e8b02..7638c156c0e55d 100644
--- a/src/StandaloneModuleGraph.zig
+++ b/src/StandaloneModuleGraph.zig
@@ -487,7 +487,7 @@ pub const StandaloneModuleGraph = struct {
var outfile_buf: bun.OSPathBuffer = undefined;
const outfile_slice = brk: {
const outfile_w = bun.strings.toWPathNormalized(&outfile_buf, std.fs.path.basenameWindows(outfile));
- std.debug.assert(outfile_w.ptr == &outfile_buf);
+ bun.assert(outfile_w.ptr == &outfile_buf);
const outfile_buf_u16 = bun.reinterpretSlice(u16, &outfile_buf);
if (!bun.strings.endsWithComptime(outfile, ".exe")) {
// append .exe
@@ -652,7 +652,7 @@ pub const StandaloneModuleGraph = struct {
end -= offsets.byte_count;
@memcpy(to_read[0..offsets.byte_count], end[0..offsets.byte_count]);
if (comptime Environment.allow_assert) {
- std.debug.assert(bun.strings.eqlLong(to_read, end[0..offsets.byte_count], true));
+ bun.assert(bun.strings.eqlLong(to_read, end[0..offsets.byte_count], true));
}
}
diff --git a/src/StaticHashMap.zig b/src/StaticHashMap.zig
index 8065dfb8d2bf64..8cf093de64d3e1 100644
--- a/src/StaticHashMap.zig
+++ b/src/StaticHashMap.zig
@@ -6,7 +6,7 @@ const mem = std.mem;
const math = std.math;
const testing = std.testing;
-const assert = std.debug.assert;
+const assert = @import("root").bun.assert;
pub fn AutoHashMap(comptime K: type, comptime V: type, comptime max_load_percentage: comptime_int) type {
return HashMap(K, V, std.hash_map.AutoContext(K), max_load_percentage);
diff --git a/src/__global.zig b/src/__global.zig
index 4bad953bbabc67..9ea8f20dd648fe 100644
--- a/src/__global.zig
+++ b/src/__global.zig
@@ -162,26 +162,6 @@ pub fn panic(comptime fmt: string, args: anytype) noreturn {
}
}
-// std.debug.assert but happens at runtime
-pub fn invariant(condition: bool, comptime fmt: string, args: anytype) void {
- if (!condition) {
- _invariant(fmt, args);
- }
-}
-
-inline fn _invariant(comptime fmt: string, args: anytype) noreturn {
- @setCold(true);
-
- if (comptime Environment.isWasm) {
- Output.printErrorln(fmt, args);
- Output.flush();
- @panic(fmt);
- } else {
- Output.prettyErrorln(fmt, args);
- Global.exit(1);
- }
-}
-
pub fn notimpl() noreturn {
@setCold(true);
Global.panic("Not implemented yet!!!!!", .{});
diff --git a/src/allocators.zig b/src/allocators.zig
index 06caa08ac0d20a..66d37f2b43fd39 100644
--- a/src/allocators.zig
+++ b/src/allocators.zig
@@ -122,7 +122,7 @@ pub fn OverflowList(comptime ValueType: type, comptime count: comptime_int) type
}
pub fn append(block: *Block, value: ValueType) *ValueType {
- if (comptime Environment.allow_assert) std.debug.assert(block.used < count);
+ if (comptime Environment.allow_assert) bun.assert(block.used < count);
const index = block.used;
block.items[index] = value;
block.used +%= 1;
@@ -155,9 +155,9 @@ pub fn OverflowList(comptime ValueType: type, comptime count: comptime_int) type
else
0;
- if (comptime Environment.allow_assert) std.debug.assert(index.is_overflow);
- if (comptime Environment.allow_assert) std.debug.assert(this.list.used >= block_id);
- if (comptime Environment.allow_assert) std.debug.assert(this.list.ptrs[block_id].used > (index.index % count));
+ if (comptime Environment.allow_assert) bun.assert(index.is_overflow);
+ if (comptime Environment.allow_assert) bun.assert(this.list.used >= block_id);
+ if (comptime Environment.allow_assert) bun.assert(this.list.ptrs[block_id].used > (index.index % count));
return &this.list.ptrs[block_id].items[index.index % count];
}
@@ -168,9 +168,9 @@ pub fn OverflowList(comptime ValueType: type, comptime count: comptime_int) type
else
0;
- if (comptime Environment.allow_assert) std.debug.assert(index.is_overflow);
- if (comptime Environment.allow_assert) std.debug.assert(this.list.used >= block_id);
- if (comptime Environment.allow_assert) std.debug.assert(this.list.ptrs[block_id].used > (index.index % count));
+ if (comptime Environment.allow_assert) bun.assert(index.is_overflow);
+ if (comptime Environment.allow_assert) bun.assert(this.list.used >= block_id);
+ if (comptime Environment.allow_assert) bun.assert(this.list.ptrs[block_id].used > (index.index % count));
return &this.list.ptrs[block_id].items[index.index % count];
}
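Several hunks in this file keep the existing `if (comptime Environment.allow_assert)` guard in front of the new call. Because the guard is a comptime-known boolean, the whole branch is compiled out when it is false, so these checks cost nothing in release builds. A small standalone sketch of the same gating pattern, with a local `allow_assert` constant standing in for Bun's `Environment.allow_assert` (the sketch uses `std.debug.assert` only because it lives outside the repo and its lint):

```zig
const std = @import("std");
const builtin = @import("builtin");

// Stand-in for Environment.allow_assert: enabled only in safe build modes.
const allow_assert = builtin.mode == .Debug or builtin.mode == .ReleaseSafe;

fn appendIndex(used: *u32, count: u32) void {
    // Comptime-known condition: in ReleaseFast/ReleaseSmall the branch, and
    // the assert inside it, disappear from the compiled output.
    if (comptime allow_assert) std.debug.assert(used.* < count);
    used.* += 1;
}

pub fn main() void {
    var used: u32 = 0;
    appendIndex(&used, 4);
    std.debug.print("used = {d}\n", .{used});
}
```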
diff --git a/src/ast/base.zig b/src/ast/base.zig
index 87c64e80433afa..d9bcda83d3179c 100644
--- a/src/ast/base.zig
+++ b/src/ast/base.zig
@@ -262,7 +262,7 @@ pub const Ref = packed struct(u64) {
}
pub fn initSourceEnd(old: Ref) Ref {
- std.debug.assert(old.tag != .invalid);
+ bun.assert(old.tag != .invalid);
return init(old.inner_index, old.source_index, old.tag == .source_contents_slice);
}
diff --git a/src/async/posix_event_loop.zig b/src/async/posix_event_loop.zig
index ee7136a13e793a..2d31c2f5fdc609 100644
--- a/src/async/posix_event_loop.zig
+++ b/src/async/posix_event_loop.zig
@@ -232,7 +232,7 @@ pub const FilePoll = struct {
log("onKQueueEvent: {}", .{poll});
if (KQueueGenerationNumber != u0)
- std.debug.assert(poll.generation_number == kqueue_event.ext[0]);
+ bun.assert(poll.generation_number == kqueue_event.ext[0]);
poll.onUpdate(kqueue_event.data);
}
@@ -335,7 +335,7 @@ pub const FilePoll = struct {
}
const ptr = poll.owner;
- std.debug.assert(!ptr.isNull());
+ bun.assert(!ptr.isNull());
switch (ptr.tag()) {
// @field(Owner.Tag, bun.meta.typeBaseName(@typeName(FIFO))) => {
@@ -567,22 +567,22 @@ pub const FilePoll = struct {
return;
}
- std.debug.assert(poll.next_to_free == null);
+ bun.assert(poll.next_to_free == null);
if (this.pending_free_tail) |tail| {
- std.debug.assert(this.pending_free_head != null);
- std.debug.assert(tail.next_to_free == null);
+ bun.assert(this.pending_free_head != null);
+ bun.assert(tail.next_to_free == null);
tail.next_to_free = poll;
}
if (this.pending_free_head == null) {
this.pending_free_head = poll;
- std.debug.assert(this.pending_free_tail == null);
+ bun.assert(this.pending_free_tail == null);
}
poll.flags.insert(.ignore_updates);
this.pending_free_tail = poll;
- std.debug.assert(vm.after_event_loop_callback == null or vm.after_event_loop_callback == @as(?JSC.OpaqueCallback, @ptrCast(&processDeferredFrees)));
+ bun.assert(vm.after_event_loop_callback == null or vm.after_event_loop_callback == @as(?JSC.OpaqueCallback, @ptrCast(&processDeferredFrees)));
vm.after_event_loop_callback = @ptrCast(&processDeferredFrees);
vm.after_event_loop_callback_ctx = this;
}
@@ -790,7 +790,7 @@ pub const FilePoll = struct {
log("register: FilePoll(0x{x}, generation_number={d}) {s} ({})", .{ @intFromPtr(this), this.generation_number, @tagName(flag), fd });
- std.debug.assert(fd != invalid_fd);
+ bun.assert(fd != invalid_fd);
if (one_shot != .none) {
this.flags.insert(.one_shot);
@@ -945,7 +945,7 @@ pub const FilePoll = struct {
pub fn unregisterWithFd(this: *FilePoll, loop: *Loop, fd: bun.FileDescriptor, force_unregister: bool) JSC.Maybe(void) {
if (Environment.allow_assert) {
- std.debug.assert(fd.int() >= 0 and fd != bun.invalid_fd);
+ bun.assert(fd.int() >= 0 and fd != bun.invalid_fd);
}
defer this.deactivate(loop);
@@ -955,7 +955,7 @@ pub const FilePoll = struct {
return JSC.Maybe(void).success;
}
- std.debug.assert(fd != invalid_fd);
+ bun.assert(fd != invalid_fd);
const watcher_fd = loop.fd;
const flag: Flags = brk: {
if (this.flags.contains(.poll_readable))
@@ -1073,7 +1073,7 @@ pub const FilePoll = struct {
this.flags.remove(.needs_rearm);
this.flags.remove(.one_shot);
// we don't support both right now
- std.debug.assert(!(this.flags.contains(.poll_readable) and this.flags.contains(.poll_writable)));
+ bun.assert(!(this.flags.contains(.poll_readable) and this.flags.contains(.poll_writable)));
this.flags.remove(.poll_readable);
this.flags.remove(.poll_writable);
this.flags.remove(.poll_process);
@@ -1096,7 +1096,7 @@ pub const Closer = struct {
/// for compatibiltiy with windows version
_: anytype,
) void {
- std.debug.assert(fd != bun.invalid_fd);
+ bun.assert(fd != bun.invalid_fd);
JSC.WorkPool.schedule(&Closer.new(.{ .fd = fd }).task);
}
diff --git a/src/async/windows_event_loop.zig b/src/async/windows_event_loop.zig
index 9bcbfa000e06be..978bbd69a3a9c4 100644
--- a/src/async/windows_event_loop.zig
+++ b/src/async/windows_event_loop.zig
@@ -257,7 +257,7 @@ pub const FilePoll = struct {
/// Only intended to be used from EventLoop.Pollable
pub fn deactivate(this: *FilePoll, loop: *Loop) void {
- std.debug.assert(this.flags.contains(.has_incremented_poll_count));
+ bun.assert(this.flags.contains(.has_incremented_poll_count));
loop.active_handles -= @as(u32, @intFromBool(this.flags.contains(.has_incremented_poll_count)));
log("deactivate - {d}", .{loop.active_handles});
this.flags.remove(.has_incremented_poll_count);
@@ -346,22 +346,22 @@ pub const FilePoll = struct {
return;
}
- std.debug.assert(poll.next_to_free == null);
+ bun.assert(poll.next_to_free == null);
if (this.pending_free_tail) |tail| {
- std.debug.assert(this.pending_free_head != null);
- std.debug.assert(tail.next_to_free == null);
+ bun.assert(this.pending_free_head != null);
+ bun.assert(tail.next_to_free == null);
tail.next_to_free = poll;
}
if (this.pending_free_head == null) {
this.pending_free_head = poll;
- std.debug.assert(this.pending_free_tail == null);
+ bun.assert(this.pending_free_tail == null);
}
poll.flags.insert(.ignore_updates);
this.pending_free_tail = poll;
- std.debug.assert(vm.after_event_loop_callback == null or vm.after_event_loop_callback == @as(?JSC.OpaqueCallback, @ptrCast(&processDeferredFrees)));
+ bun.assert(vm.after_event_loop_callback == null or vm.after_event_loop_callback == @as(?JSC.OpaqueCallback, @ptrCast(&processDeferredFrees)));
vm.after_event_loop_callback = @ptrCast(&processDeferredFrees);
vm.after_event_loop_callback_ctx = this;
}
@@ -409,7 +409,7 @@ pub const Closer = struct {
fn onClose(req: *uv.fs_t) callconv(.C) void {
var closer = @fieldParentPtr(Closer, "io_request", req);
- std.debug.assert(closer == @as(*Closer, @alignCast(@ptrCast(req.data.?))));
+ bun.assert(closer == @as(*Closer, @alignCast(@ptrCast(req.data.?))));
bun.sys.syslog("uv_fs_close({}) = {}", .{ bun.toFD(req.file.fd), req.result });
if (comptime Environment.allow_assert) {
diff --git a/src/baby_list.zig b/src/baby_list.zig
index a8ca654d3c8b7f..46cf242e89f10d 100644
--- a/src/baby_list.zig
+++ b/src/baby_list.zig
@@ -75,7 +75,7 @@ pub fn BabyList(comptime Type: type) type {
}
pub fn appendAssumeCapacity(this: *@This(), value: Type) void {
- std.debug.assert(this.cap > this.len);
+ bun.assert(this.cap > this.len);
this.ptr[this.len] = value;
this.len += 1;
}
@@ -91,10 +91,10 @@ pub fn BabyList(comptime Type: type) type {
pub fn appendSliceAssumeCapacity(this: *@This(), values: []const Type) void {
const tail = this.ptr[this.len .. this.len + values.len];
- std.debug.assert(this.cap >= this.len + @as(u32, @truncate(values.len)));
+ bun.assert(this.cap >= this.len + @as(u32, @truncate(values.len)));
bun.copy(Type, tail, values);
this.len += @as(u32, @truncate(values.len));
- std.debug.assert(this.cap >= this.len);
+ bun.assert(this.cap >= this.len);
}
pub fn initCapacity(allocator: std.mem.Allocator, len: usize) !ListType {
@@ -128,7 +128,7 @@ pub fn BabyList(comptime Type: type) type {
}
if (comptime Environment.allow_assert) {
- std.debug.assert(list_.items.len <= list_.capacity);
+ bun.assert(list_.items.len <= list_.capacity);
}
return ListType{
@@ -163,7 +163,7 @@ pub fn BabyList(comptime Type: type) type {
};
if (comptime Environment.allow_assert) {
- std.debug.assert(this.len <= this.cap);
+ bun.assert(this.len <= this.cap);
}
}
@@ -195,12 +195,12 @@ pub fn BabyList(comptime Type: type) type {
}
pub inline fn at(this: ListType, index: usize) *const Type {
- std.debug.assert(index < this.len);
+ bun.assert(index < this.len);
return &this.ptr[index];
}
pub inline fn mut(this: ListType, index: usize) *Type {
- std.debug.assert(index < this.len);
+ bun.assert(index < this.len);
return &this.ptr[index];
}
@@ -304,7 +304,7 @@ pub fn BabyList(comptime Type: type) type {
pub fn writeTypeAsBytesAssumeCapacity(this: *@This(), comptime Int: type, int: Int) void {
if (comptime Type != u8)
@compileError("Unsupported for type " ++ @typeName(Type));
- std.debug.assert(this.cap >= this.len + @sizeOf(Int));
+ bun.assert(this.cap >= this.len + @sizeOf(Int));
@as([*]align(1) Int, @ptrCast(this.ptr[this.len .. this.len + @sizeOf(Int)]))[0] = int;
this.len += @sizeOf(Int);
}
diff --git a/src/base64/base64.zig b/src/base64/base64.zig
index 7b6a040a9e65d7..8345350969704a 100644
--- a/src/base64/base64.zig
+++ b/src/base64/base64.zig
@@ -82,7 +82,7 @@ pub fn encodeURLSafe(dest: []u8, source: []const u8) usize {
}
const zig_base64 = struct {
- const assert = std.debug.assert;
+ const assert = bun.assert;
const testing = std.testing;
const mem = std.mem;
diff --git a/src/bit_set.zig b/src/bit_set.zig
index 6c1875a61420df..72794562770c9d 100644
--- a/src/bit_set.zig
+++ b/src/bit_set.zig
@@ -86,7 +86,7 @@ pub fn IntegerBitSet(comptime size: u16) type {
/// Returns true if the bit at the specified index
/// is present in the set, false otherwise.
pub fn isSet(self: Self, index: usize) bool {
- if (comptime Environment.allow_assert) std.debug.assert(index < bit_length);
+ if (comptime Environment.allow_assert) bun.assert(index < bit_length);
return (self.mask & maskBit(index)) != 0;
}
@@ -98,7 +98,7 @@ pub fn IntegerBitSet(comptime size: u16) type {
/// Changes the value of the specified bit of the bit
/// set to match the passed boolean.
pub fn setValue(self: *Self, index: usize, value: bool) void {
- if (comptime Environment.allow_assert) std.debug.assert(index < bit_length);
+ if (comptime Environment.allow_assert) bun.assert(index < bit_length);
if (MaskInt == u0) return;
const bit = maskBit(index);
const new_bit = bit & std.math.boolMask(MaskInt, value);
@@ -107,7 +107,7 @@ pub fn IntegerBitSet(comptime size: u16) type {
/// Adds a specific bit to the bit set
pub fn set(self: *Self, index: usize) void {
- if (comptime Environment.allow_assert) std.debug.assert(index < bit_length);
+ if (comptime Environment.allow_assert) bun.assert(index < bit_length);
self.mask |= maskBit(index);
}
@@ -115,8 +115,8 @@ pub fn IntegerBitSet(comptime size: u16) type {
/// match the passed boolean.
pub fn setRangeValue(self: *Self, range: Range, value: bool) void {
if (comptime Environment.allow_assert) {
- std.debug.assert(range.end <= bit_length);
- std.debug.assert(range.start <= range.end);
+ bun.assert(range.end <= bit_length);
+ bun.assert(range.start <= range.end);
}
if (range.start == range.end) return;
if (MaskInt == u0) return;
@@ -140,7 +140,7 @@ pub fn IntegerBitSet(comptime size: u16) type {
/// Removes a specific bit from the bit set
pub fn unset(self: *Self, index: usize) void {
- if (comptime Environment.allow_assert) std.debug.assert(index < bit_length);
+ if (comptime Environment.allow_assert) bun.assert(index < bit_length);
// Workaround for #7953
if (MaskInt == u0) return;
self.mask &= ~maskBit(index);
@@ -148,7 +148,7 @@ pub fn IntegerBitSet(comptime size: u16) type {
/// Flips a specific bit in the bit set
pub fn toggle(self: *Self, index: usize) void {
- if (comptime Environment.allow_assert) std.debug.assert(index < bit_length);
+ if (comptime Environment.allow_assert) bun.assert(index < bit_length);
self.mask ^= maskBit(index);
}
@@ -403,7 +403,7 @@ pub fn ArrayBitSet(comptime MaskIntType: type, comptime size: usize) type {
/// Returns true if the bit at the specified index
/// is present in the set, false otherwise.
pub inline fn isSet(self: *const Self, index: usize) bool {
- if (comptime Environment.allow_assert) std.debug.assert(index < bit_length);
+ if (comptime Environment.allow_assert) bun.assert(index < bit_length);
if (num_masks == 0) return false; // doesn't compile in this case
return (self.masks[maskIndex(index)] & maskBit(index)) != 0;
}
@@ -420,7 +420,7 @@ pub fn ArrayBitSet(comptime MaskIntType: type, comptime size: usize) type {
/// Changes the value of the specified bit of the bit
/// set to match the passed boolean.
pub fn setValue(self: *Self, index: usize, value: bool) void {
- if (comptime Environment.allow_assert) std.debug.assert(index < bit_length);
+ if (comptime Environment.allow_assert) bun.assert(index < bit_length);
if (num_masks == 0) return; // doesn't compile in this case
const bit = maskBit(index);
const mask_index = maskIndex(index);
@@ -430,7 +430,7 @@ pub fn ArrayBitSet(comptime MaskIntType: type, comptime size: usize) type {
/// Adds a specific bit to the bit set
pub fn set(self: *Self, index: usize) void {
- if (comptime Environment.allow_assert) std.debug.assert(index < bit_length);
+ if (comptime Environment.allow_assert) bun.assert(index < bit_length);
if (num_masks == 0) return; // doesn't compile in this case
self.masks[maskIndex(index)] |= maskBit(index);
}
@@ -439,8 +439,8 @@ pub fn ArrayBitSet(comptime MaskIntType: type, comptime size: usize) type {
/// match the passed boolean.
pub fn setRangeValue(self: *Self, range: Range, value: bool) void {
if (comptime Environment.allow_assert) {
- std.debug.assert(range.end <= bit_length);
- std.debug.assert(range.start <= range.end);
+ bun.assert(range.end <= bit_length);
+ bun.assert(range.start <= range.end);
}
if (range.start == range.end) return;
if (num_masks == 0) return;
@@ -484,14 +484,14 @@ pub fn ArrayBitSet(comptime MaskIntType: type, comptime size: usize) type {
/// Removes a specific bit from the bit set
pub fn unset(self: *Self, index: usize) void {
- if (comptime Environment.allow_assert) std.debug.assert(index < bit_length);
+ if (comptime Environment.allow_assert) bun.assert(index < bit_length);
if (num_masks == 0) return; // doesn't compile in this case
self.masks[maskIndex(index)] &= ~maskBit(index);
}
/// Flips a specific bit in the bit set
pub fn toggle(self: *Self, index: usize) void {
- if (comptime Environment.allow_assert) std.debug.assert(index < bit_length);
+ if (comptime Environment.allow_assert) bun.assert(index < bit_length);
if (num_masks == 0) return; // doesn't compile in this case
self.masks[maskIndex(index)] ^= maskBit(index);
}
@@ -772,7 +772,7 @@ pub const DynamicBitSetUnmanaged = struct {
const old_allocation = (self.masks - 1)[0..(self.masks - 1)[0]];
if (new_masks == 0) {
- if (comptime Environment.allow_assert) std.debug.assert(new_len == 0);
+ if (comptime Environment.allow_assert) bun.assert(new_len == 0);
allocator.free(old_allocation);
self.masks = empty_masks_ptr;
self.bit_length = 0;
@@ -847,7 +847,7 @@ pub const DynamicBitSetUnmanaged = struct {
/// Returns true if the bit at the specified index
/// is present in the set, false otherwise.
pub fn isSet(self: Self, index: usize) bool {
- if (comptime Environment.allow_assert) std.debug.assert(index < self.bit_length);
+ if (comptime Environment.allow_assert) bun.assert(index < self.bit_length);
return (self.masks[maskIndex(index)] & maskBit(index)) != 0;
}
@@ -878,7 +878,7 @@ pub const DynamicBitSetUnmanaged = struct {
/// Changes the value of the specified bit of the bit
/// set to match the passed boolean.
pub fn setValue(self: *Self, index: usize, value: bool) void {
- if (comptime Environment.allow_assert) std.debug.assert(index < self.bit_length);
+ if (comptime Environment.allow_assert) bun.assert(index < self.bit_length);
const bit = maskBit(index);
const mask_index = maskIndex(index);
const new_bit = bit & std.math.boolMask(MaskInt, value);
@@ -887,15 +887,15 @@ pub const DynamicBitSetUnmanaged = struct {
/// Adds a specific bit to the bit set
pub fn set(self: *Self, index: usize) void {
- if (comptime Environment.allow_assert) std.debug.assert(index < self.bit_length);
+ if (comptime Environment.allow_assert) bun.assert(index < self.bit_length);
self.masks[maskIndex(index)] |= maskBit(index);
}
/// Changes the value of all bits in the specified range to
/// match the passed boolean.
pub fn setRangeValue(self: *Self, range: Range, value: bool) void {
- if (comptime Environment.allow_assert) std.debug.assert(range.end <= self.bit_length);
- if (comptime Environment.allow_assert) std.debug.assert(range.start <= range.end);
+ if (comptime Environment.allow_assert) bun.assert(range.end <= self.bit_length);
+ if (comptime Environment.allow_assert) bun.assert(range.start <= range.end);
if (range.start == range.end) return;
const start_mask_index = maskIndex(range.start);
@@ -937,13 +937,13 @@ pub const DynamicBitSetUnmanaged = struct {
/// Removes a specific bit from the bit set
pub fn unset(self: *Self, index: usize) void {
- if (comptime Environment.allow_assert) std.debug.assert(index < self.bit_length);
+ if (comptime Environment.allow_assert) bun.assert(index < self.bit_length);
self.masks[maskIndex(index)] &= ~maskBit(index);
}
/// Flips a specific bit in the bit set
pub fn toggle(self: *Self, index: usize) void {
- if (comptime Environment.allow_assert) std.debug.assert(index < self.bit_length);
+ if (comptime Environment.allow_assert) bun.assert(index < self.bit_length);
self.masks[maskIndex(index)] ^= maskBit(index);
}
@@ -951,7 +951,7 @@ pub const DynamicBitSetUnmanaged = struct {
/// in the toggles bit set. Both sets must have the
/// same bit_length.
pub fn toggleSet(self: *Self, toggles: Self) void {
- if (comptime Environment.allow_assert) std.debug.assert(toggles.bit_length == self.bit_length);
+ if (comptime Environment.allow_assert) bun.assert(toggles.bit_length == self.bit_length);
const bit_length = self.bit_length;
if (bit_length == 0) return;
const num_masks = numMasks(self.bit_length);
@@ -1013,7 +1013,7 @@ pub const DynamicBitSetUnmanaged = struct {
/// set if the corresponding bits were set in either input.
/// The two sets must both be the same bit_length.
pub fn setUnion(self: *Self, other: Self) void {
- if (comptime Environment.allow_assert) std.debug.assert(other.bit_length == self.bit_length);
+ if (comptime Environment.allow_assert) bun.assert(other.bit_length == self.bit_length);
const num_masks = numMasks(self.bit_length);
for (self.masks[0..num_masks], other.masks) |*mask, other_mask| {
mask.* |= other_mask;
@@ -1025,7 +1025,7 @@ pub const DynamicBitSetUnmanaged = struct {
/// set if the corresponding bits were set in both inputs.
/// The two sets must both be the same bit_length.
pub fn setIntersection(self: *Self, other: Self) void {
- if (comptime Environment.allow_assert) std.debug.assert(other.bit_length == self.bit_length);
+ if (comptime Environment.allow_assert) bun.assert(other.bit_length == self.bit_length);
const num_masks = numMasks(self.bit_length);
for (self.masks[0..num_masks], other.masks) |*mask, other_mask| {
mask.* &= other_mask;
@@ -1033,7 +1033,7 @@ pub const DynamicBitSetUnmanaged = struct {
}
pub fn setExcludeTwo(self: *Self, other: Self, third: Self) void {
- if (comptime Environment.allow_assert) std.debug.assert(other.bit_length == self.bit_length);
+ if (comptime Environment.allow_assert) bun.assert(other.bit_length == self.bit_length);
const num_masks = numMasks(self.bit_length);
for (self.masks[0..num_masks], other.masks[0..num_masks], third.masks[0..num_masks]) |*mask, other_mask, third_mask| {
mask.* &= ~other_mask;
@@ -1042,7 +1042,7 @@ pub const DynamicBitSetUnmanaged = struct {
}
pub fn setExclude(self: *Self, other: Self) void {
- if (comptime Environment.allow_assert) std.debug.assert(other.bit_length == self.bit_length);
+ if (comptime Environment.allow_assert) bun.assert(other.bit_length == self.bit_length);
const num_masks = numMasks(self.bit_length);
for (self.masks[0..num_masks], other.masks) |*mask, other_mask| {
mask.* &= ~other_mask;
diff --git a/src/boringssl.zig b/src/boringssl.zig
index 538ac9f0b9d189..de03293d3178a4 100644
--- a/src/boringssl.zig
+++ b/src/boringssl.zig
@@ -12,7 +12,7 @@ pub fn load() void {
if (loaded) return;
loaded = true;
boring.CRYPTO_library_init();
- std.debug.assert(boring.SSL_library_init() > 0);
+ bun.assert(boring.SSL_library_init() > 0);
boring.SSL_load_error_strings();
boring.ERR_load_BIO_strings();
boring.OpenSSL_add_all_algorithms();
diff --git a/src/brotli.zig b/src/brotli.zig
index c71d45094a06e4..755f8ec794ea48 100644
--- a/src/brotli.zig
+++ b/src/brotli.zig
@@ -65,7 +65,7 @@ pub const BrotliReaderArrayList = struct {
if (options.params.DISABLE_RING_BUFFER_REALLOCATION)
_ = brotli.setParameter(c.BrotliDecoderParameter.DISABLE_RING_BUFFER_REALLOCATION, 1);
- std.debug.assert(list.items.ptr != input.ptr);
+ bun.assert(list.items.ptr != input.ptr);
return BrotliReaderArrayList.new(
.{
@@ -91,7 +91,7 @@ pub const BrotliReaderArrayList = struct {
return;
}
- std.debug.assert(this.list.items.ptr != this.input.ptr);
+ bun.assert(this.list.items.ptr != this.input.ptr);
while (this.state == State.Uninitialized or this.state == State.Inflating) {
var unused_capacity = this.list.unusedCapacitySlice();
@@ -101,7 +101,7 @@ pub const BrotliReaderArrayList = struct {
unused_capacity = this.list.unusedCapacitySlice();
}
- std.debug.assert(unused_capacity.len > 0);
+ bun.assert(unused_capacity.len > 0);
var next_in = this.input[this.total_in..];
@@ -126,7 +126,7 @@ pub const BrotliReaderArrayList = struct {
switch (result) {
.success => {
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(this.brotli.isFinished());
+ bun.assert(this.brotli.isFinished());
}
this.end();
diff --git a/src/bun.js/ConsoleObject.zig b/src/bun.js/ConsoleObject.zig
index fd9009f8b36f3e..cb723aba418472 100644
--- a/src/bun.js/ConsoleObject.zig
+++ b/src/bun.js/ConsoleObject.zig
@@ -2876,7 +2876,7 @@ pub const Formatter = struct {
writer.writeAll(" />");
},
.Object => {
- std.debug.assert(value.isCell());
+ bun.assert(value.isCell());
const prev_quote_strings = this.quote_strings;
this.quote_strings = true;
defer this.quote_strings = prev_quote_strings;
@@ -3058,7 +3058,7 @@ pub const Formatter = struct {
const target = value.getProxyInternalField(.target);
if (Environment.allow_assert) {
// Proxy does not allow non-objects here.
- std.debug.assert(target.isCell());
+ bun.assert(target.isCell());
}
// TODO: if (options.showProxy), print like `Proxy { target: ..., handlers: ... }`
// this is default off so it is not used.
diff --git a/src/bun.js/RuntimeTranspilerCache.zig b/src/bun.js/RuntimeTranspilerCache.zig
index 35e7be07961083..d6dff34efb9493 100644
--- a/src/bun.js/RuntimeTranspilerCache.zig
+++ b/src/bun.js/RuntimeTranspilerCache.zig
@@ -203,7 +203,7 @@ pub const RuntimeTranspilerCache = struct {
var metadata_stream2 = std.io.fixedBufferStream(metadata_buf[0..Metadata.size]);
var metadata2 = Metadata{};
metadata2.decode(metadata_stream2.reader()) catch |err| bun.Output.panic("Metadata did not rountrip encode -> decode successfully: {s}", .{@errorName(err)});
- std.debug.assert(std.meta.eql(metadata, metadata2));
+ bun.assert(std.meta.eql(metadata, metadata2));
}
break :brk metadata_buf[0..metadata_stream.pos];
@@ -228,16 +228,16 @@ pub const RuntimeTranspilerCache = struct {
var total: usize = 0;
for (vecs) |v| {
if (comptime bun.Environment.isWindows) {
- std.debug.assert(v.len > 0);
+ bun.assert(v.len > 0);
total += v.len;
} else {
- std.debug.assert(v.iov_len > 0);
+ bun.assert(v.iov_len > 0);
total += v.iov_len;
}
}
- std.debug.assert(end_position == total);
+ bun.assert(end_position == total);
}
- std.debug.assert(end_position == @as(i64, @intCast(sourcemap.len + output_bytes.len + Metadata.size)));
+ bun.assert(end_position == @as(i64, @intCast(sourcemap.len + output_bytes.len + Metadata.size)));
bun.C.preallocate_file(tmpfile.fd.cast(), 0, @intCast(end_position)) catch {};
while (position < end_position) {
@@ -264,7 +264,7 @@ pub const RuntimeTranspilerCache = struct {
return error.MissingData;
}
- std.debug.assert(this.output_code == .utf8 and this.output_code.utf8.len == 0); // this should be the default value
+ bun.assert(this.output_code == .utf8 and this.output_code.utf8.len == 0); // this should be the default value
this.output_code = if (this.metadata.output_byte_length == 0)
.{ .string = bun.String.empty }
@@ -456,7 +456,7 @@ pub const RuntimeTranspilerCache = struct {
var cache_file_path_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
const cache_file_path = try getCacheFilePath(&cache_file_path_buf, input_hash);
- std.debug.assert(cache_file_path.len > 0);
+ bun.assert(cache_file_path.len > 0);
return fromFileWithCacheFilePath(
bun.PathString.init(cache_file_path),
input_hash,
@@ -626,7 +626,7 @@ pub const RuntimeTranspilerCache = struct {
if (this.input_hash == null or is_disabled) {
return;
}
- std.debug.assert(this.entry == null);
+ bun.assert(this.entry == null);
const output_code = bun.String.createLatin1(output_code_bytes);
this.output_code = output_code;
diff --git a/src/bun.js/api/BunObject.zig b/src/bun.js/api/BunObject.zig
index 954fde690717b3..320d58fd4592f9 100644
--- a/src/bun.js/api/BunObject.zig
+++ b/src/bun.js/api/BunObject.zig
@@ -1959,7 +1959,7 @@ pub const Crypto = struct {
algorithm: PasswordObject.Algorithm.Value,
comptime sync: bool,
) JSC.JSValue {
- std.debug.assert(password.len > 0); // caller must check
+ assert(password.len > 0); // caller must check
if (comptime sync) {
const value = HashJob.getValue(password, algorithm);
@@ -2001,7 +2001,7 @@ pub const Crypto = struct {
algorithm: ?PasswordObject.Algorithm,
comptime sync: bool,
) JSC.JSValue {
- std.debug.assert(password.len > 0); // caller must check
+ assert(password.len > 0); // caller must check
if (comptime sync) {
const value = VerifyJob.getValue(password, prev_hash, algorithm);
@@ -2967,7 +2967,7 @@ pub fn serve(
}
pub export fn Bun__escapeHTML16(globalObject: *JSC.JSGlobalObject, input_value: JSValue, ptr: [*]const u16, len: usize) JSValue {
- std.debug.assert(len > 0);
+ assert(len > 0);
const input_slice = ptr[0..len];
const escaped = strings.escapeHTMLForUTF16Input(globalObject.bunVM().allocator, input_slice) catch {
globalObject.vm().throwError(globalObject, ZigString.init("Out of memory").toValue(globalObject));
@@ -2982,7 +2982,7 @@ pub export fn Bun__escapeHTML16(globalObject: *JSC.JSGlobalObject, input_value:
.allocated => |escaped_html| {
if (comptime Environment.allow_assert) {
// assert that re-encoding the string produces the same result
- std.debug.assert(
+ assert(
std.mem.eql(
u16,
(strings.toUTF16Alloc(bun.default_allocator, strings.toUTF8Alloc(bun.default_allocator, escaped_html) catch unreachable, false, false) catch unreachable).?,
@@ -2991,7 +2991,7 @@ pub export fn Bun__escapeHTML16(globalObject: *JSC.JSGlobalObject, input_value:
);
// assert we do not allocate a new string unnecessarily
- std.debug.assert(
+ assert(
!std.mem.eql(
u16,
input_slice,
@@ -3000,7 +3000,7 @@ pub export fn Bun__escapeHTML16(globalObject: *JSC.JSGlobalObject, input_value:
);
// the output should always be longer than the input
- std.debug.assert(escaped_html.len > input_slice.len);
+ assert(escaped_html.len > input_slice.len);
}
return ZigString.from16(escaped_html.ptr, escaped_html.len).toExternalValue(globalObject);
@@ -3009,7 +3009,7 @@ pub export fn Bun__escapeHTML16(globalObject: *JSC.JSGlobalObject, input_value:
}
pub export fn Bun__escapeHTML8(globalObject: *JSC.JSGlobalObject, input_value: JSValue, ptr: [*]const u8, len: usize) JSValue {
- std.debug.assert(len > 0);
+ assert(len > 0);
const input_slice = ptr[0..len];
var stack_allocator = std.heap.stackFallback(256, globalObject.bunVM().allocator);
@@ -3028,10 +3028,10 @@ pub export fn Bun__escapeHTML8(globalObject: *JSC.JSGlobalObject, input_value: J
.allocated => |escaped_html| {
if (comptime Environment.allow_assert) {
// the output should always be longer than the input
- std.debug.assert(escaped_html.len > input_slice.len);
+ assert(escaped_html.len > input_slice.len);
// assert we do not allocate a new string unnecessarily
- std.debug.assert(
+ assert(
!std.mem.eql(
u8,
input_slice,
@@ -3913,7 +3913,7 @@ pub const Timer = struct {
if (comptime Environment.allow_assert)
// If this is ever -1, it's invalid.
// It should always be at least 1.
- std.debug.assert(this.interval > 0);
+ assert(this.interval > 0);
if (!Timeout.runFromConcurrentTask(timer_id, vm, this, reschedule) or this.cancelled) {
this.deinit();
@@ -3944,7 +3944,7 @@ pub const Timer = struct {
}
pub fn schedule(this: *TimerReference, interval: ?i32) void {
- std.debug.assert(!this.cancelled);
+ assert(!this.cancelled);
_ = this.scheduled_count.fetchAdd(1, .Monotonic);
const ms: usize = @max(interval orelse this.interval, 1);
if (Environment.isWindows) {
@@ -4774,7 +4774,7 @@ pub const FFIObject = struct {
var addr: usize = @intFromPtr(array_buffer.ptr);
// const Sizes = @import("../bindings/sizes.zig");
- // std.debug.assert(addr == @intFromPtr(value.asEncoded().ptr) + Sizes.Bun_FFI_PointerOffsetToTypedArrayVector);
+ // assert(addr == @intFromPtr(value.asEncoded().ptr) + Sizes.Bun_FFI_PointerOffsetToTypedArrayVector);
if (byteOffset) |off| {
if (!off.isEmptyOrUndefinedOrNull()) {
@@ -4808,7 +4808,7 @@ pub const FFIObject = struct {
}
if (comptime Environment.allow_assert) {
- std.debug.assert(JSC.JSValue.fromPtrAddress(addr).asPtrAddress() == addr);
+ assert(JSC.JSValue.fromPtrAddress(addr).asPtrAddress() == addr);
}
return JSC.JSValue.fromPtrAddress(addr);
@@ -5281,3 +5281,5 @@ comptime {
_ = Crypto.JSPasswordObject.JSPasswordObject__create;
BunObject.exportAll();
}
+
+const assert = bun.assert;
diff --git a/src/bun.js/api/bun/dns_resolver.zig b/src/bun.js/api/bun/dns_resolver.zig
index 125571df52d899..0095bc99de6106 100644
--- a/src/bun.js/api/bun/dns_resolver.zig
+++ b/src/bun.js/api/bun/dns_resolver.zig
@@ -119,9 +119,9 @@ const LibInfo = struct {
return promise_value;
}
- std.debug.assert(request.backend.libinfo.machport != null);
+ bun.assert(request.backend.libinfo.machport != null);
request.backend.libinfo.file_poll = bun.Async.FilePoll.init(this.vm, bun.toFD(std.math.maxInt(i32) - 1), .{}, GetAddrInfoRequest, request);
- std.debug.assert(
+ bun.assert(
request.backend.libinfo.file_poll.?.registerWithFd(
this.vm.event_loop_handle.?,
.machport,
@@ -1294,7 +1294,7 @@ pub const GetAddrInfoRequest = struct {
pub fn onLibUVComplete(uv_info: *libuv.uv_getaddrinfo_t) void {
log("onLibUVComplete: status={d}", .{uv_info.retcode.int()});
const this: *GetAddrInfoRequest = @alignCast(@ptrCast(uv_info.data));
- std.debug.assert(uv_info == &this.backend.libc.uv);
+ bun.assert(uv_info == &this.backend.libc.uv);
if (this.backend == .libinfo) {
if (this.backend.libinfo.file_poll) |poll| poll.deinit();
}
@@ -1650,7 +1650,7 @@ pub const DNSResolver = struct {
fn getKey(this: *DNSResolver, index: u8, comptime cache_name: []const u8, comptime request_type: type) request_type.PendingCacheKey {
var cache = &@field(this, cache_name);
- std.debug.assert(!cache.available.isSet(index));
+ bun.assert(!cache.available.isSet(index));
const entry = cache.buffer[index];
cache.buffer[index] = undefined;
diff --git a/src/bun.js/api/bun/process.zig b/src/bun.js/api/bun/process.zig
index 9cdddf25dd81be..41c239b14dfd27 100644
--- a/src/bun.js/api/bun/process.zig
+++ b/src/bun.js/api/bun/process.zig
@@ -633,7 +633,7 @@ pub const PollerWindows = union(enum) {
pub fn deinit(this: *PollerWindows) void {
if (this.* == .uv) {
- std.debug.assert(this.uv.isClosed());
+ bun.assert(this.uv.isClosed());
}
}
@@ -846,7 +846,7 @@ const WaiterThreadPosix = struct {
const stack_size = 512 * 1024;
pub var instance: WaiterThread = .{};
pub fn init() !void {
- std.debug.assert(should_use_waiter_thread);
+ bun.assert(should_use_waiter_thread);
if (instance.started.fetchMax(1, .Monotonic) > 0) {
return;
@@ -1614,7 +1614,7 @@ pub fn spawnProcessWindows(
defer {
if (dup_src != null) {
- if (Environment.allow_assert) std.debug.assert(dup_src != null and dup_tgt != null);
+ if (Environment.allow_assert) bun.assert(dup_src != null and dup_tgt != null);
}
if (failed) {
@@ -1635,7 +1635,7 @@ pub fn spawnProcessWindows(
}
process.pid = process.poller.uv.pid;
- std.debug.assert(process.poller.uv.exit_cb == &Process.onExitUV);
+ bun.assert(process.poller.uv.exit_cb == &Process.onExitUV);
var result = WindowsSpawnResult{
.process_ = process,
diff --git a/src/bun.js/api/bun/socket.zig b/src/bun.js/api/bun/socket.zig
index f28f07cf6ac99b..835052d680b19f 100644
--- a/src/bun.js/api/bun/socket.zig
+++ b/src/bun.js/api/bun/socket.zig
@@ -32,8 +32,8 @@ const Async = bun.Async;
// this.holder = owner;
// if (this.ptr == null) {
// this.ptr = bun.default_allocator.alloc(u8, 16384) catch @panic("Out of memory allocating corker");
-// std.debug.assert(this.list.cap == 0);
-// std.debug.assert(this.list.len == 0);
+// bun.assert(this.list.cap == 0);
+// bun.assert(this.list.len == 0);
// this.list.cap = 16384;
// this.list.ptr = this.ptr.?;
// this.list.len = 0;
@@ -298,7 +298,7 @@ const Handlers = struct {
pub fn unprotect(this: *Handlers) void {
if (comptime Environment.allow_assert) {
- std.debug.assert(this.protection_count > 0);
+ bun.assert(this.protection_count > 0);
this.protection_count -= 1;
}
this.onOpen.unprotect();
@@ -790,7 +790,7 @@ pub const Listener = struct {
log("onCreate", .{});
var listener: *Listener = socket.context().?.ext(ssl, *Listener).?.*;
const Socket = NewSocket(ssl);
- std.debug.assert(ssl == listener.ssl);
+ bun.assert(ssl == listener.ssl);
var this_socket = listener.handlers.vm.allocator.create(Socket) catch @panic("Out of memory");
this_socket.* = Socket{
@@ -856,8 +856,8 @@ pub const Listener = struct {
this.strong_self.deinit();
this.strong_data.deinit();
this.poll_ref.unref(this.handlers.vm);
- std.debug.assert(this.listener == null);
- std.debug.assert(this.handlers.active_connections == 0);
+ bun.assert(this.listener == null);
+ bun.assert(this.handlers.active_connections == 0);
this.handlers.unprotect();
if (this.socket_context) |ctx| {
@@ -2142,7 +2142,7 @@ fn NewSocket(comptime ssl: bool) type {
var buffer_ptr = @as([*c]u8, @ptrCast(buffer.asArrayBuffer(globalObject).?.ptr));
const result_size = BoringSSL.i2d_SSL_SESSION(session, &buffer_ptr);
- std.debug.assert(result_size == size);
+ bun.assert(result_size == size);
return buffer;
}
@@ -2409,7 +2409,7 @@ fn NewSocket(comptime ssl: bool) type {
const buffer_ptr = @as(*anyopaque, @ptrCast(buffer.asArrayBuffer(globalObject).?.ptr));
const result_size = BoringSSL.SSL_get_peer_finished(ssl_ptr, buffer_ptr, buffer_size);
- std.debug.assert(result_size == size);
+ bun.assert(result_size == size);
return buffer;
}
@@ -2441,7 +2441,7 @@ fn NewSocket(comptime ssl: bool) type {
const buffer_ptr = @as(*anyopaque, @ptrCast(buffer.asArrayBuffer(globalObject).?.ptr));
const result_size = BoringSSL.SSL_get_finished(ssl_ptr, buffer_ptr, buffer_size);
- std.debug.assert(result_size == size);
+ bun.assert(result_size == size);
return buffer;
}
diff --git a/src/bun.js/api/bun/subprocess.zig b/src/bun.js/api/bun/subprocess.zig
index df57bce7d35f19..7aba7f26e796c1 100644
--- a/src/bun.js/api/bun/subprocess.zig
+++ b/src/bun.js/api/bun/subprocess.zig
@@ -31,7 +31,7 @@ pub inline fn assertStdioResult(result: StdioResult) void {
if (comptime Environment.allow_assert) {
if (Environment.isPosix) {
if (result) |fd| {
- std.debug.assert(fd != bun.invalid_fd);
+ bun.assert(fd != bun.invalid_fd);
}
}
}
@@ -1055,11 +1055,11 @@ pub const Subprocess = struct {
fn deinit(this: *PipeReader) void {
if (comptime Environment.isPosix) {
- std.debug.assert(this.reader.isDone());
+ bun.assert(this.reader.isDone());
}
if (comptime Environment.isWindows) {
- std.debug.assert(this.reader.source == null or this.reader.source.?.isClosed());
+ bun.assert(this.reader.source == null or this.reader.source.?.isClosed());
}
if (this.state == .done) {
@@ -1238,7 +1238,7 @@ pub const Subprocess = struct {
};
},
.memfd => |memfd| {
- std.debug.assert(memfd != bun.invalid_fd);
+ bun.assert(memfd != bun.invalid_fd);
return Writable{ .memfd = memfd };
},
.fd => {
@@ -1485,7 +1485,7 @@ pub const Subprocess = struct {
// access GC'd values during the finalizer
this.this_jsvalue = .zero;
- std.debug.assert(!this.hasPendingActivity() or JSC.VirtualMachine.get().isShuttingDown());
+ bun.assert(!this.hasPendingActivity() or JSC.VirtualMachine.get().isShuttingDown());
this.finalizeStreams();
this.process.detach();
diff --git a/src/bun.js/api/bun/x509.zig b/src/bun.js/api/bun/x509.zig
index 48d8bb87916ea5..14e50d75165bd9 100644
--- a/src/bun.js/api/bun/x509.zig
+++ b/src/bun.js/api/bun/x509.zig
@@ -421,7 +421,7 @@ fn getRawDERCertificate(cert: *BoringSSL.X509, globalObject: *JSGlobalObject) JS
var buffer = JSValue.createBufferFromLength(globalObject, @as(usize, @intCast(size)));
var buffer_ptr = buffer.asArrayBuffer(globalObject).?.ptr;
const result_size = BoringSSL.i2d_X509(cert, &buffer_ptr);
- std.debug.assert(result_size == size);
+ bun.assert(result_size == size);
return buffer;
}
@@ -523,7 +523,7 @@ pub fn toJS(cert: *BoringSSL.X509, globalObject: *JSGlobalObject) JSValue {
const buffer_ptr = @as([*c]u8, @ptrCast(buffer.asArrayBuffer(globalObject).?.ptr));
const result_size = BoringSSL.EC_POINT_point2oct(group, point, form, buffer_ptr, size, null);
- std.debug.assert(result_size == size);
+ bun.assert(result_size == size);
result.put(globalObject, ZigString.static("pubkey"), buffer);
} else {
result.put(globalObject, ZigString.static("pubkey"), JSValue.jsUndefined());
diff --git a/src/bun.js/api/filesystem_router.zig b/src/bun.js/api/filesystem_router.zig
index d1b69863228e3f..fdc9f7319e4773 100644
--- a/src/bun.js/api/filesystem_router.zig
+++ b/src/bun.js/api/filesystem_router.zig
@@ -525,7 +525,7 @@ pub const MatchedRoute = struct {
const entry_name = entry.name;
var str = ZigString.init(entry_name).withEncoding();
- std.debug.assert(entry.values.len > 0);
+ bun.assert(entry.values.len > 0);
if (entry.values.len > 1) {
var values = query_string_value_refs_buf[0..entry.values.len];
for (entry.values, 0..) |value, i| {
diff --git a/src/bun.js/api/server.zig b/src/bun.js/api/server.zig
index 724b91703cff66..5c3dbb49916fc3 100644
--- a/src/bun.js/api/server.zig
+++ b/src/bun.js/api/server.zig
@@ -1333,7 +1333,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp
return;
};
ctx.response_jsvalue = value;
- std.debug.assert(!ctx.flags.response_protected);
+ assert(!ctx.flags.response_protected);
ctx.flags.response_protected = true;
JSC.C.JSValueProtect(ctx.server.globalThis, value.asObjectRef());
@@ -1574,7 +1574,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp
pub fn onWritableResponseBuffer(this: *RequestContext, _: u64, resp: *App.Response) callconv(.C) bool {
ctxLog("onWritableResponseBuffer", .{});
- std.debug.assert(this.resp == resp);
+ assert(this.resp == resp);
if (this.flags.aborted) {
this.finalizeForAbort();
return false;
@@ -1587,7 +1587,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp
// TODO: should we cork?
pub fn onWritableCompleteResponseBufferAndMetadata(this: *RequestContext, write_offset: u64, resp: *App.Response) callconv(.C) bool {
ctxLog("onWritableCompleteResponseBufferAndMetadata", .{});
- std.debug.assert(this.resp == resp);
+ assert(this.resp == resp);
if (this.flags.aborted) {
this.finalizeForAbort();
@@ -1609,7 +1609,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp
pub fn onWritableCompleteResponseBuffer(this: *RequestContext, write_offset: u64, resp: *App.Response) callconv(.C) bool {
ctxLog("onWritableCompleteResponseBuffer", .{});
- std.debug.assert(this.resp == resp);
+ assert(this.resp == resp);
if (this.flags.aborted) {
this.finalizeForAbort();
return false;
@@ -1646,8 +1646,8 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp
}
pub fn onAbort(this: *RequestContext, resp: *App.Response) void {
- std.debug.assert(this.resp == resp);
- std.debug.assert(!this.flags.aborted);
+ assert(this.resp == resp);
+ assert(!this.flags.aborted);
// mark request as aborted
this.flags.aborted = true;
var any_js_calls = false;
@@ -1823,10 +1823,10 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp
ctxLog("deinit ({*})", .{this});
if (comptime Environment.allow_assert)
- std.debug.assert(this.flags.has_finalized);
+ assert(this.flags.has_finalized);
if (comptime Environment.allow_assert)
- std.debug.assert(this.flags.has_marked_complete);
+ assert(this.flags.has_marked_complete);
var server = this.server;
this.request_body_buf.clearAndFree(this.allocator);
@@ -1855,7 +1855,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp
pub fn writeStatus(this: *RequestContext, status: u16) void {
var status_text_buf: [48]u8 = undefined;
- std.debug.assert(!this.flags.has_written_status);
+ assert(!this.flags.has_written_status);
this.flags.has_written_status = true;
if (this.resp) |resp| {
@@ -1951,7 +1951,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp
pub fn onWritableBytes(this: *RequestContext, write_offset: u64, resp: *App.Response) callconv(.C) bool {
ctxLog("onWritableBytes", .{});
- std.debug.assert(this.resp == resp);
+ assert(this.resp == resp);
if (this.flags.aborted) {
this.finalizeForAbort();
return false;
@@ -1966,7 +1966,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp
}
pub fn sendWritableBytesForBlob(this: *RequestContext, bytes_: []const u8, write_offset_: u64, resp: *App.Response) bool {
- std.debug.assert(this.resp == resp);
+ assert(this.resp == resp);
const write_offset: usize = write_offset_;
const bytes = bytes_[@min(bytes_.len, @as(usize, @truncate(write_offset)))..];
@@ -1982,7 +1982,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp
pub fn sendWritableBytesForCompleteResponseBuffer(this: *RequestContext, bytes_: []const u8, write_offset_: u64, resp: *App.Response) bool {
const write_offset: usize = write_offset_;
- std.debug.assert(this.resp == resp);
+ assert(this.resp == resp);
const bytes = bytes_[@min(bytes_.len, @as(usize, @truncate(write_offset)))..];
if (resp.tryEnd(bytes, bytes_.len, this.shouldCloseConnection())) {
@@ -2151,7 +2151,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp
const is_temporary = result.result.is_temporary;
if (comptime Environment.allow_assert) {
- std.debug.assert(this.blob == .Blob);
+ assert(this.blob == .Blob);
}
if (!is_temporary) {
@@ -2248,7 +2248,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp
// explicitly set it to a dead pointer
// we use this memory address to disable signals being sent
signal.clear();
- std.debug.assert(signal.isDead());
+ assert(signal.isDead());
// We are already corked!
const assignment_result: JSValue = ResponseStream.JSSink.assignToStream(
@@ -2261,7 +2261,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp
assignment_result.ensureStillAlive();
// assert that it was updated
- std.debug.assert(!signal.isDead());
+ assert(!signal.isDead());
if (comptime Environment.allow_assert) {
if (resp.hasResponded()) {
@@ -2747,8 +2747,8 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp
},
.Bytes => |byte_stream| {
- std.debug.assert(byte_stream.pipe.ctx == null);
- std.debug.assert(this.byte_stream == null);
+ assert(byte_stream.pipe.ctx == null);
+ assert(this.byte_stream == null);
if (this.resp == null) {
// we don't have a response, so we can discard the stream
stream.done(this.server.globalThis);
@@ -3213,7 +3213,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp
pub fn onBufferedBodyChunk(this: *RequestContext, resp: *App.Response, chunk: []const u8, last: bool) void {
ctxLog("onBufferedBodyChunk {} {}", .{ chunk.len, last });
- std.debug.assert(this.resp == resp);
+ assert(this.resp == resp);
this.flags.is_waiting_for_request_body = last == false;
if (this.flags.aborted or this.flags.has_marked_complete) return;
@@ -3230,7 +3230,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp
if (body.value == .Locked) {
if (body.value.Locked.readable.get()) |readable| {
if (readable.ptr == .Bytes) {
- std.debug.assert(this.request_body_buf.items.len == 0);
+ assert(this.request_body_buf.items.len == 0);
var vm = this.server.vm;
vm.eventLoop().enter();
defer vm.eventLoop().exit();
@@ -6179,3 +6179,5 @@ pub const DebugHTTPSServer = NewServer(JSC.Codegen.JSDebugHTTPSServer, true, tru
const welcome_page_html_gz = @embedFile("welcome-page.html.gz");
extern fn Bun__addInspector(bool, *anyopaque, *JSC.JSGlobalObject) void;
+
+const assert = bun.assert;
diff --git a/src/bun.js/base.zig b/src/bun.js/base.zig
index 18b98c253d4a85..3248208a5518b1 100644
--- a/src/bun.js/base.zig
+++ b/src/bun.js/base.zig
@@ -414,7 +414,7 @@ pub const ArrayBuffer = extern struct {
pub fn fromTypedArray(ctx: JSC.C.JSContextRef, value: JSC.JSValue) ArrayBuffer {
var out = std.mem.zeroes(ArrayBuffer);
- std.debug.assert(value.asArrayBuffer_(ctx.ptr(), &out));
+ bun.assert(value.asArrayBuffer_(ctx.ptr(), &out));
out.value = value;
return out;
}
@@ -743,7 +743,7 @@ pub export fn MarkedArrayBuffer_deallocator(bytes_: *anyopaque, _: *anyopaque) v
// mimalloc knows the size of things
// but we don't
// if (comptime Environment.allow_assert) {
- // std.debug.assert(mimalloc.mi_check_owned(bytes_) or
+ // bun.assert(mimalloc.mi_check_owned(bytes_) or
// mimalloc.mi_heap_check_owned(JSC.VirtualMachine.get().arena.heap.?, bytes_));
// }
@@ -1690,7 +1690,7 @@ pub const MemoryReportingAllocator = struct {
_ = prev;
if (comptime Environment.allow_assert) {
// check for overflow, racily
- // std.debug.assert(prev > this.memory_cost.load(.Monotonic));
+ // bun.assert(prev > this.memory_cost.load(.Monotonic));
log("free({d}) = {d}", .{ buf.len, this.memory_cost.raw });
}
}
diff --git a/src/bun.js/bindings/bindings.zig b/src/bun.js/bindings/bindings.zig
index d5e81f726f2aa4..2793faff49aa1a 100644
--- a/src/bun.js/bindings/bindings.zig
+++ b/src/bun.js/bindings/bindings.zig
@@ -850,7 +850,7 @@ pub const ZigString = extern struct {
inline fn assertGlobal(this: *const ZigString) void {
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(this.len == 0 or
+ bun.assert(this.len == 0 or
bun.Mimalloc.mi_is_in_heap_region(untagged(this._unsafe_ptr_do_not_use)) or
bun.Mimalloc.mi_check_owned(untagged(this._unsafe_ptr_do_not_use)));
}
@@ -1705,14 +1705,14 @@ pub const JSCell = extern struct {
pub fn getGetterSetter(this: *JSCell) *GetterSetter {
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(JSValue.fromCell(this).isGetterSetter());
+ bun.assert(JSValue.fromCell(this).isGetterSetter());
}
return @as(*GetterSetter, @ptrCast(@alignCast(this)));
}
pub fn getCustomGetterSetter(this: *JSCell) *CustomGetterSetter {
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(JSValue.fromCell(this).isCustomGetterSetter());
+ bun.assert(JSValue.fromCell(this).isCustomGetterSetter());
}
return @as(*CustomGetterSetter, @ptrCast(@alignCast(this)));
}
@@ -3038,7 +3038,7 @@ pub const JSGlobalObject = extern struct {
// make bindings -j10
const assertion = this.bunVMUnsafe() == @as(*anyopaque, @ptrCast(JSC.VirtualMachine.get()));
if (!assertion) @breakpoint();
- std.debug.assert(assertion);
+ bun.assert(assertion);
}
return @as(*JSC.VirtualMachine, @ptrCast(@alignCast(this.bunVMUnsafe())));
}
@@ -3717,7 +3717,7 @@ pub const JSValue = enum(JSValueReprInt) {
this: JSValue,
) JSType {
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(!this.isEmpty());
+ bun.assert(!this.isEmpty());
}
return cppFn("jsType", .{this});
}
@@ -4684,7 +4684,7 @@ pub const JSValue = enum(JSValueReprInt) {
pub fn then(this: JSValue, global: *JSGlobalObject, ctx: ?*anyopaque, resolve: JSNativeFn, reject: JSNativeFn) void {
if (comptime bun.Environment.allow_assert)
- std.debug.assert(JSValue.fromPtr(ctx).asPtr(anyopaque) == ctx.?);
+ bun.assert(JSValue.fromPtr(ctx).asPtr(anyopaque) == ctx.?);
return this._then(global, JSValue.fromPtr(ctx), resolve, reject);
}
@@ -4723,7 +4723,7 @@ pub const JSValue = enum(JSValueReprInt) {
}
pub fn implementsToString(this: JSValue, global: *JSGlobalObject) bool {
- std.debug.assert(this.isCell());
+ bun.assert(this.isCell());
const function = this.fastGet(global, BuiltinName.toString) orelse return false;
return function.isCell() and function.isCallable(global.vm());
}
@@ -5090,7 +5090,7 @@ pub const JSValue = enum(JSValueReprInt) {
pub inline fn asInt52(this: JSValue) i64 {
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(this.isNumber());
+ bun.assert(this.isNumber());
}
return coerceJSValueDoubleTruncatingTT(i52, i64, this.asNumber());
}
@@ -5105,8 +5105,8 @@ pub const JSValue = enum(JSValueReprInt) {
}
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(!this.isString()); // use coerce() instead
- std.debug.assert(!this.isCell()); // use coerce() instead
+ bun.assert(!this.isString()); // use coerce() instead
+ bun.assert(!this.isCell()); // use coerce() instead
}
// TODO: this shouldn't be reachable.
@@ -5122,13 +5122,13 @@ pub const JSValue = enum(JSValueReprInt) {
// case but is bad code practice to misuse JSValue casts.
//
// if (bun.Environment.allow_assert) {
- // std.debug.assert(this.isInt32());
+ // bun.assert(this.isInt32());
// }
return FFI.JSVALUE_TO_INT32(.{ .asJSValue = this });
}
pub fn asFileDescriptor(this: JSValue) bun.FileDescriptor {
- std.debug.assert(this.isNumber());
+ bun.assert(this.isNumber());
return bun.FDImpl.fromUV(this.toInt32()).encode();
}
@@ -5457,7 +5457,7 @@ pub const JSValue = enum(JSValueReprInt) {
// we assume the class name is ASCII text
const data = out.latin1();
if (bun.Environment.allow_assert) {
- std.debug.assert(bun.strings.isAllASCII(data));
+ bun.assert(bun.strings.isAllASCII(data));
}
return data;
}
diff --git a/src/bun.js/bindings/exports.zig b/src/bun.js/bindings/exports.zig
index a70c1173f4f0a1..e7ba72d48a5479 100644
--- a/src/bun.js/bindings/exports.zig
+++ b/src/bun.js/bindings/exports.zig
@@ -216,7 +216,7 @@ export fn ZigString__free(raw: [*]const u8, len: usize, allocator_: ?*anyopaque)
var allocator: std.mem.Allocator = @as(*std.mem.Allocator, @ptrCast(@alignCast(allocator_ orelse return))).*;
var ptr = ZigString.init(raw[0..len]).slice().ptr;
if (comptime Environment.allow_assert) {
- std.debug.assert(Mimalloc.mi_is_in_heap_region(ptr));
+ bun.assert(Mimalloc.mi_is_in_heap_region(ptr));
}
const str = ptr[0..len];
@@ -226,7 +226,7 @@ export fn ZigString__free(raw: [*]const u8, len: usize, allocator_: ?*anyopaque)
export fn ZigString__free_global(ptr: [*]const u8, len: usize) void {
const untagged = @as(*anyopaque, @ptrFromInt(@intFromPtr(ZigString.init(ptr[0..len]).slice().ptr)));
if (comptime Environment.allow_assert) {
- std.debug.assert(Mimalloc.mi_is_in_heap_region(ptr));
+ bun.assert(Mimalloc.mi_is_in_heap_region(ptr));
}
// we must untag the string pointer
Mimalloc.mi_free(untagged);
diff --git a/src/bun.js/event_loop.zig b/src/bun.js/event_loop.zig
index 24fd49265eab1f..7bb86e3714904e 100644
--- a/src/bun.js/event_loop.zig
+++ b/src/bun.js/event_loop.zig
@@ -1789,7 +1789,7 @@ pub const MiniEventLoop = struct {
pub fn deinit(this: *MiniEventLoop) void {
this.tasks.deinit();
- std.debug.assert(this.concurrent_tasks.isEmpty());
+ bun.assert(this.concurrent_tasks.isEmpty());
}
pub fn tickConcurrentWithCount(this: *MiniEventLoop) usize {
diff --git a/src/bun.js/ipc.zig b/src/bun.js/ipc.zig
index 7a4590ef77fa99..5d62483e2ed6c0 100644
--- a/src/bun.js/ipc.zig
+++ b/src/bun.js/ipc.zig
@@ -242,7 +242,7 @@ const SocketIPCData = struct {
pub fn writeVersionPacket(this: *SocketIPCData) void {
if (Environment.allow_assert) {
- std.debug.assert(this.has_written_version == 0);
+ bun.assert(this.has_written_version == 0);
}
const bytes = getVersionPacket(this.mode);
if (bytes.len > 0) {
@@ -258,7 +258,7 @@ const SocketIPCData = struct {
pub fn serializeAndSend(ipc_data: *SocketIPCData, global: *JSGlobalObject, value: JSValue) bool {
if (Environment.allow_assert) {
- std.debug.assert(ipc_data.has_written_version == 1);
+ bun.assert(ipc_data.has_written_version == 1);
}
// TODO: probably we should not direct access ipc_data.outgoing.list.items here
@@ -267,10 +267,10 @@ const SocketIPCData = struct {
const payload_length = serialize(ipc_data, &ipc_data.outgoing, global, value) catch
return false;
- std.debug.assert(ipc_data.outgoing.list.items.len == start_offset + payload_length);
+ bun.assert(ipc_data.outgoing.list.items.len == start_offset + payload_length);
if (start_offset == 0) {
- std.debug.assert(ipc_data.outgoing.cursor == 0);
+ bun.assert(ipc_data.outgoing.cursor == 0);
const n = ipc_data.socket.write(ipc_data.outgoing.list.items.ptr[start_offset..payload_length], false);
if (n == payload_length) {
ipc_data.outgoing.reset();
@@ -346,7 +346,7 @@ const NamedPipeIPCData = struct {
pub fn writeVersionPacket(this: *NamedPipeIPCData) void {
if (Environment.allow_assert) {
- std.debug.assert(this.has_written_version == 0);
+ bun.assert(this.has_written_version == 0);
}
const bytes = getVersionPacket(this.mode);
if (bytes.len > 0) {
@@ -364,7 +364,7 @@ const NamedPipeIPCData = struct {
pub fn serializeAndSend(this: *NamedPipeIPCData, global: *JSGlobalObject, value: JSValue) bool {
if (Environment.allow_assert) {
- std.debug.assert(this.has_written_version == 1);
+ bun.assert(this.has_written_version == 1);
}
const start_offset = this.writer.outgoing.list.items.len;
@@ -372,10 +372,10 @@ const NamedPipeIPCData = struct {
const payload_length: usize = serialize(this, &this.writer.outgoing, global, value) catch
return false;
- std.debug.assert(this.writer.outgoing.list.items.len == start_offset + payload_length);
+ bun.assert(this.writer.outgoing.list.items.len == start_offset + payload_length);
if (start_offset == 0) {
- std.debug.assert(this.writer.outgoing.cursor == 0);
+ bun.assert(this.writer.outgoing.cursor == 0);
if (this.connected) {
_ = this.writer.flush();
}
@@ -634,8 +634,8 @@ fn NewNamedPipeIPCHandler(comptime Context: type) type {
ipc.incoming.len += @as(u32, @truncate(buffer.len));
var slice = ipc.incoming.slice();
- std.debug.assert(ipc.incoming.len <= ipc.incoming.cap);
- std.debug.assert(bun.isSliceInBuffer(buffer, ipc.incoming.allocatedSlice()));
+ bun.assert(ipc.incoming.len <= ipc.incoming.cap);
+ bun.assert(bun.isSliceInBuffer(buffer, ipc.incoming.allocatedSlice()));
const globalThis = switch (@typeInfo(@TypeOf(this.globalThis))) {
.Pointer => this.globalThis,
diff --git a/src/bun.js/javascript.zig b/src/bun.js/javascript.zig
index 25ce1c407b9243..c1b2ca7f5a6430 100644
--- a/src/bun.js/javascript.zig
+++ b/src/bun.js/javascript.zig
@@ -1065,7 +1065,7 @@ pub const VirtualMachine = struct {
pub export fn Debugger__didConnect() void {
var this = VirtualMachine.get();
- std.debug.assert(this.debugger.?.wait_for_connection);
+ bun.assert(this.debugger.?.wait_for_connection);
this.debugger.?.wait_for_connection = false;
this.debugger.?.poll_ref.unref(this);
}
@@ -1600,7 +1600,7 @@ pub const VirtualMachine = struct {
pub fn refCountedStringWithWasNew(this: *VirtualMachine, new: *bool, input_: []const u8, hash_: ?u32, comptime dupe: bool) *JSC.RefString {
JSC.markBinding(@src());
- std.debug.assert(input_.len > 0);
+ bun.assert(input_.len > 0);
const hash = hash_ orelse JSC.RefString.computeHash(input_);
this.ref_strings_mutex.lock();
defer this.ref_strings_mutex.unlock();
@@ -1629,7 +1629,7 @@ pub const VirtualMachine = struct {
}
pub fn refCountedString(this: *VirtualMachine, input_: []const u8, hash_: ?u32, comptime dupe: bool) *JSC.RefString {
- std.debug.assert(input_.len > 0);
+ bun.assert(input_.len > 0);
var _was_new = false;
return this.refCountedStringWithWasNew(&_was_new, input_, hash_, comptime dupe);
}
@@ -1642,7 +1642,7 @@ pub const VirtualMachine = struct {
log: *logger.Log,
comptime flags: FetchFlags,
) anyerror!ResolvedSource {
- std.debug.assert(VirtualMachine.isLoaded());
+ bun.assert(VirtualMachine.isLoaded());
if (try ModuleLoader.fetchBuiltinModule(jsc_vm, _specifier)) |builtin| {
return builtin;
@@ -1726,7 +1726,7 @@ pub const VirtualMachine = struct {
is_esm: bool,
comptime is_a_file_path: bool,
) !void {
- std.debug.assert(VirtualMachine.isLoaded());
+ bun.assert(VirtualMachine.isLoaded());
// macOS threadlocal vars are very slow
// we won't change threads in this function
// so we can copy it here
diff --git a/src/bun.js/javascript_core_c_api.zig b/src/bun.js/javascript_core_c_api.zig
index 00a7be2ded6cb4..9f02acf6090e0d 100644
--- a/src/bun.js/javascript_core_c_api.zig
+++ b/src/bun.js/javascript_core_c_api.zig
@@ -33,14 +33,14 @@ pub const OpaqueJSString = opaque {
pub fn characters16(this: *OpaqueJSString) UTF16Ptr {
if (comptime bun.Environment.allow_assert)
- std.debug.assert(this.is16Bit());
+ bun.assert(this.is16Bit());
return JSStringGetCharactersPtr(this);
}
pub fn characters8(this: *OpaqueJSString) UTF8Ptr {
if (comptime bun.Environment.allow_assert)
- std.debug.assert(!this.is16Bit());
+ bun.assert(!this.is16Bit());
return JSStringGetCharacters8Ptr(this);
}
diff --git a/src/bun.js/module_loader.zig b/src/bun.js/module_loader.zig
index 78ba52108a5c29..9f87dcb193a833 100644
--- a/src/bun.js/module_loader.zig
+++ b/src/bun.js/module_loader.zig
@@ -711,7 +711,7 @@ pub const ModuleLoader = struct {
/// This must be called after calling transpileSourceCode
pub fn resetArena(this: *ModuleLoader, jsc_vm: *VirtualMachine) void {
- std.debug.assert(&jsc_vm.module_loader == this);
+ bun.assert(&jsc_vm.module_loader == this);
if (this.transpile_source_code_arena) |arena| {
if (jsc_vm.smol) {
_ = arena.reset(.free_all);
@@ -991,7 +991,7 @@ pub const ModuleLoader = struct {
}
const package = pm.lockfile.packages.get(package_id);
- std.debug.assert(package.resolution.tag != .root);
+ bun.assert(package.resolution.tag != .root);
switch (pm.determinePreinstallState(package, pm.lockfile)) {
.done => {
@@ -2277,7 +2277,7 @@ pub const ModuleLoader = struct {
FetchFlags.transpile,
) catch |err| {
if (err == error.AsyncModule) {
- std.debug.assert(promise != null);
+ bun.assert(promise != null);
return promise;
}
@@ -2483,7 +2483,7 @@ pub const ModuleLoader = struct {
) bool {
JSC.markBinding(@src());
const jsc_vm = globalObject.bunVM();
- std.debug.assert(jsc_vm.plugin_runner != null);
+ bun.assert(jsc_vm.plugin_runner != null);
var specifier_slice = specifier_ptr.toUTF8(jsc_vm.allocator);
const specifier = specifier_slice.slice();
diff --git a/src/bun.js/node/node_fs.zig b/src/bun.js/node/node_fs.zig
index 13fac010b89138..a41af318cfd487 100644
--- a/src/bun.js/node/node_fs.zig
+++ b/src/bun.js/node/node_fs.zig
@@ -447,7 +447,7 @@ pub const AsyncReaddirRecursiveTask = struct {
.basename = bun.PathString.init(bun.default_allocator.dupeZ(u8, basename) catch bun.outOfMemory()),
},
);
- std.debug.assert(readdir_task.subtask_count.fetchAdd(1, .Monotonic) > 0);
+ bun.assert(readdir_task.subtask_count.fetchAdd(1, .Monotonic) > 0);
JSC.WorkPool.schedule(&task.task);
}
@@ -570,7 +570,7 @@ pub const AsyncReaddirRecursiveTask = struct {
return;
}
- std.debug.assert(this.subtask_count.load(.Monotonic) == 0);
+ bun.assert(this.subtask_count.load(.Monotonic) == 0);
const root_fd = this.root_fd;
if (root_fd != bun.invalid_fd) {
@@ -672,7 +672,7 @@ pub const AsyncReaddirRecursiveTask = struct {
}
pub fn deinit(this: *AsyncReaddirRecursiveTask) void {
- std.debug.assert(this.root_fd == bun.invalid_fd); // should already have closed it
+ bun.assert(this.root_fd == bun.invalid_fd); // should already have closed it
if (this.pending_err) |*err| {
bun.default_allocator.free(err.path);
}
@@ -4559,7 +4559,7 @@ pub const NodeFS = struct {
}
fn _read(_: *NodeFS, args: Arguments.Read, comptime _: Flavor) Maybe(Return.Read) {
- if (Environment.allow_assert) std.debug.assert(args.position == null);
+ if (Environment.allow_assert) bun.assert(args.position == null);
var buf = args.buffer.slice();
buf = buf[@min(args.offset, buf.len)..];
buf = buf[0..@min(buf.len, args.length)];
@@ -5536,7 +5536,7 @@ pub const NodeFS = struct {
} };
// Seems like `rc` does not contain the errno?
- std.debug.assert(rc.errEnum() == null);
+ bun.assert(rc.errEnum() == null);
const buf = bun.span(req.ptrAs([*:0]u8));
return .{
@@ -5559,7 +5559,7 @@ pub const NodeFS = struct {
var outbuf: [bun.MAX_PATH_BYTES]u8 = undefined;
var inbuf = &this.sync_error_buf;
- if (comptime Environment.allow_assert) std.debug.assert(FileSystem.instance_loaded);
+ if (comptime Environment.allow_assert) bun.assert(FileSystem.instance_loaded);
const path_slice = args.path.slice();
@@ -5863,7 +5863,7 @@ pub const NodeFS = struct {
}
pub fn watchFile(_: *NodeFS, args: Arguments.WatchFile, comptime flavor: Flavor) Maybe(Return.WatchFile) {
- std.debug.assert(flavor == .sync);
+ bun.assert(flavor == .sync);
const watcher = args.createStatWatcher() catch |err| {
const buf = std.fmt.allocPrint(bun.default_allocator, "Failed to watch file {}", .{bun.fmt.QuotedFormatter{ .text = args.path.slice() }}) catch bun.outOfMemory();
@@ -5903,8 +5903,8 @@ pub const NodeFS = struct {
Maybe(Return.Utimes).success;
}
- std.debug.assert(args.mtime.tv_nsec <= 1e9);
- std.debug.assert(args.atime.tv_nsec <= 1e9);
+ bun.assert(args.mtime.tv_nsec <= 1e9);
+ bun.assert(args.atime.tv_nsec <= 1e9);
var times = [2]std.c.timeval{
.{
.tv_sec = args.atime.tv_sec,
@@ -5943,8 +5943,8 @@ pub const NodeFS = struct {
Maybe(Return.Utimes).success;
}
- std.debug.assert(args.mtime.tv_nsec <= 1e9);
- std.debug.assert(args.atime.tv_nsec <= 1e9);
+ bun.assert(args.mtime.tv_nsec <= 1e9);
+ bun.assert(args.atime.tv_nsec <= 1e9);
var times = [2]std.c.timeval{
.{
.tv_sec = args.atime.tv_sec,
@@ -5988,7 +5988,7 @@ pub const NodeFS = struct {
/// This function is `cpSync`, but only if you pass `{ recursive: ..., force: ..., errorOnExist: ..., mode: ... }'
/// The other options like `filter` use a JS fallback, see `src/js/internal/fs/cp.ts`
pub fn cp(this: *NodeFS, args: Arguments.Cp, comptime flavor: Flavor) Maybe(Return.Cp) {
- comptime std.debug.assert(flavor == .sync);
+ comptime bun.assert(flavor == .sync);
var src_buf: bun.PathBuffer = undefined;
var dest_buf: bun.PathBuffer = undefined;
diff --git a/src/bun.js/node/node_fs_stat_watcher.zig b/src/bun.js/node/node_fs_stat_watcher.zig
index d546143e882478..866aef3db9dda7 100644
--- a/src/bun.js/node/node_fs_stat_watcher.zig
+++ b/src/bun.js/node/node_fs_stat_watcher.zig
@@ -48,8 +48,8 @@ pub const StatWatcherScheduler = struct {
pub fn append(this: *StatWatcherScheduler, watcher: *StatWatcher) void {
log("append new watcher {s}", .{watcher.path});
- std.debug.assert(watcher.closed == false);
- std.debug.assert(watcher.next == null);
+ bun.assert(watcher.closed == false);
+ bun.assert(watcher.next == null);
if (this.head.swap(watcher, .Monotonic)) |head| {
watcher.next = head;
@@ -173,7 +173,7 @@ pub const StatWatcher = struct {
pub fn deinit(this: *StatWatcher) void {
log("deinit\n", .{});
- std.debug.assert(!this.hasPendingActivity());
+ bun.assert(!this.hasPendingActivity());
if (this.persistent) {
this.persistent = false;
diff --git a/src/bun.js/node/node_fs_watcher.zig b/src/bun.js/node/node_fs_watcher.zig
index b3a13a8915ab59..5f1ed8f6d7f833 100644
--- a/src/bun.js/node/node_fs_watcher.zig
+++ b/src/bun.js/node/node_fs_watcher.zig
@@ -147,7 +147,7 @@ pub const FSWatcher = struct {
pub fn deinit(this: *FSWatchTask) void {
this.cleanEntries();
if (comptime Environment.allow_assert) {
- std.debug.assert(&this.ctx.current_task != this);
+ bun.assert(&this.ctx.current_task != this);
}
this.destroy();
}
diff --git a/src/bun.js/node/node_os.zig b/src/bun.js/node/node_os.zig
index d48f99c9d76d22..800cad45acb06d 100644
--- a/src/bun.js/node/node_os.zig
+++ b/src/bun.js/node/node_os.zig
@@ -865,7 +865,7 @@ pub const OS = struct {
/// `@TypeOf(mask)` must be one of u32 (IPv4) or u128 (IPv6)
fn netmaskToCIDRSuffix(mask: anytype) ?u8 {
const T = @TypeOf(mask);
- comptime std.debug.assert(T == u32 or T == u128);
+ comptime bun.assert(T == u32 or T == u128);
const mask_bits = @byteSwap(mask);
diff --git a/src/bun.js/node/win_watcher.zig b/src/bun.js/node/win_watcher.zig
index 12225daab53be6..a7798b09b1093a 100644
--- a/src/bun.js/node/win_watcher.zig
+++ b/src/bun.js/node/win_watcher.zig
@@ -44,7 +44,7 @@ pub const PathWatcherManager = struct {
if (std.mem.indexOfScalar(*PathWatcher, this.watchers.values(), watcher)) |index| {
if (comptime bun.Environment.isDebug) {
if (path.len > 0)
- std.debug.assert(bun.strings.eql(this.watchers.keys()[index], path));
+ bun.assert(bun.strings.eql(this.watchers.keys()[index], path));
}
bun.default_allocator.free(this.watchers.keys()[index]);
@@ -118,7 +118,7 @@ pub const PathWatcher = struct {
}
const this: *PathWatcher = @alignCast(@fieldParentPtr(PathWatcher, "handle", event));
if (comptime bun.Environment.isDebug) {
- std.debug.assert(event.data == @as(?*anyopaque, @ptrCast(this)));
+ bun.assert(event.data == @as(?*anyopaque, @ptrCast(this)));
}
const timestamp = event.loop.time;
diff --git a/src/bun.js/test/expect.zig b/src/bun.js/test/expect.zig
index 653bc613572fe9..ad65756da0f9be 100644
--- a/src/bun.js/test/expect.zig
+++ b/src/bun.js/test/expect.zig
@@ -2130,7 +2130,7 @@ pub const Expect = struct {
}
// If it's not an object, we are going to crash here.
- std.debug.assert(expected_value.isObject());
+ assert(expected_value.isObject());
if (expected_value.get(globalObject, "message")) |expected_message| {
if (_received_message) |received_message| {
@@ -4155,7 +4155,7 @@ pub const Expect = struct {
// call the custom matcher implementation
var result = matcher_fn.callWithThis(globalObject, matcher_context_jsvalue, args);
- std.debug.assert(!result.isEmpty());
+ assert(!result.isEmpty());
if (result.toError()) |err| {
globalObject.throwValue(err);
return false;
@@ -4177,7 +4177,7 @@ pub const Expect = struct {
}
result = promise.result(vm);
result.ensureStillAlive();
- std.debug.assert(!result.isEmpty());
+ assert(!result.isEmpty());
switch (promise.status(vm)) {
.Pending => unreachable,
.Fulfilled => {},
@@ -4231,10 +4231,10 @@ pub const Expect = struct {
message_text = message.toBunString(globalObject);
} else {
if (comptime Environment.allow_assert)
- std.debug.assert(message.isCallable(globalObject.vm())); // checked above
+ assert(message.isCallable(globalObject.vm())); // checked above
var message_result = message.callWithGlobalThis(globalObject, &[_]JSValue{});
- std.debug.assert(!message_result.isEmpty());
+ assert(!message_result.isEmpty());
if (message_result.toError()) |err| {
globalObject.throwValue(err);
return false;
@@ -5153,3 +5153,5 @@ comptime {
fn incrementExpectCallCounter() void {
active_test_expectation_counter.actual += 1;
}
+
+const assert = bun.assert;
diff --git a/src/bun.js/test/jest.zig b/src/bun.js/test/jest.zig
index 8aad1acdbdcde8..518d4e86070883 100644
--- a/src/bun.js/test/jest.zig
+++ b/src/bun.js/test/jest.zig
@@ -824,8 +824,8 @@ pub const DescribeScope = struct {
if (comptime is_bindgen) return;
if (new.parent) |scope| {
if (comptime Environment.allow_assert) {
- std.debug.assert(DescribeScope.active != new);
- std.debug.assert(scope == DescribeScope.active);
+ assert(DescribeScope.active != new);
+ assert(scope == DescribeScope.active);
}
} else if (DescribeScope.active) |scope| {
// calling Bun.jest() within (already active) module
@@ -836,7 +836,7 @@ pub const DescribeScope = struct {
pub fn pop(this: *DescribeScope) void {
if (comptime is_bindgen) return;
- if (comptime Environment.allow_assert) std.debug.assert(DescribeScope.active == this);
+ if (comptime Environment.allow_assert) assert(DescribeScope.active == this);
DescribeScope.active = this.parent;
}
@@ -917,8 +917,8 @@ pub const DescribeScope = struct {
for (hooks.items) |cb| {
if (comptime Environment.allow_assert) {
- std.debug.assert(cb.isObject());
- std.debug.assert(cb.isCallable(globalObject.vm()));
+ assert(cb.isObject());
+ assert(cb.isCallable(globalObject.vm()));
}
defer {
if (comptime hook == .beforeAll or hook == .afterAll) {
@@ -974,8 +974,8 @@ pub const DescribeScope = struct {
for (hooks.items) |cb| {
if (comptime Environment.allow_assert) {
- std.debug.assert(cb.isObject());
- std.debug.assert(cb.isCallable(globalThis.vm()));
+ assert(cb.isObject());
+ assert(cb.isCallable(globalThis.vm()));
}
defer {
if (comptime hook == .beforeAll or hook == .afterAll) {
@@ -1379,7 +1379,7 @@ pub const TestRunnerTask = struct {
}
pub fn timeout(this: *TestRunnerTask) void {
- if (comptime Environment.allow_assert) std.debug.assert(!this.reported);
+ if (comptime Environment.allow_assert) assert(!this.reported);
this.ref.unref(this.globalThis.bunVM());
this.globalThis.throwTerminationException();
@@ -1389,7 +1389,7 @@ pub const TestRunnerTask = struct {
pub fn handleResult(this: *TestRunnerTask, result: Result, comptime from: @Type(.EnumLiteral)) void {
switch (comptime from) {
.promise => {
- if (comptime Environment.allow_assert) std.debug.assert(this.promise_state == .pending);
+ if (comptime Environment.allow_assert) assert(this.promise_state == .pending);
this.promise_state = .fulfilled;
if (this.done_callback_state == .pending and result == .pass) {
@@ -1397,7 +1397,7 @@ pub const TestRunnerTask = struct {
}
},
.callback => {
- if (comptime Environment.allow_assert) std.debug.assert(this.done_callback_state == .pending);
+ if (comptime Environment.allow_assert) assert(this.done_callback_state == .pending);
this.done_callback_state = .fulfilled;
if (this.promise_state == .pending and result == .pass) {
@@ -1405,7 +1405,7 @@ pub const TestRunnerTask = struct {
}
},
.sync => {
- if (comptime Environment.allow_assert) std.debug.assert(this.sync_state == .pending);
+ if (comptime Environment.allow_assert) assert(this.sync_state == .pending);
this.sync_state = .fulfilled;
},
.timeout, .unhandledRejection => {},
@@ -2019,3 +2019,5 @@ fn callJSFunctionForTestRunner(vm: *JSC.VirtualMachine, globalObject: *JSC.JSGlo
return result;
}
+
+const assert = bun.assert;
diff --git a/src/bun.js/unbounded_queue.zig b/src/bun.js/unbounded_queue.zig
index bc609bc3b0254b..85287a899cc86f 100644
--- a/src/bun.js/unbounded_queue.zig
+++ b/src/bun.js/unbounded_queue.zig
@@ -7,7 +7,7 @@ const atomic = std.atomic;
const builtin = std.builtin;
const testing = std.testing;
-const assert = std.debug.assert;
+const assert = @import("root").bun.assert;
const mpsc = @This();
diff --git a/src/bun.js/web_worker.zig b/src/bun.js/web_worker.zig
index 3f03db1fcb20c9..083abca8cb6dce 100644
--- a/src/bun.js/web_worker.zig
+++ b/src/bun.js/web_worker.zig
@@ -150,8 +150,8 @@ pub const WebWorker = struct {
return;
}
- std.debug.assert(this.status.load(.Acquire) == .start);
- std.debug.assert(this.vm == null);
+ assert(this.status.load(.Acquire) == .start);
+ assert(this.vm == null);
this.arena = try bun.MimallocArena.init();
var vm = try JSC.VirtualMachine.initWorker(this, .{
.allocator = this.arena.allocator(),
@@ -262,7 +262,7 @@ pub const WebWorker = struct {
log("[{d}] spin start", .{this.execution_context_id});
var vm = this.vm.?;
- std.debug.assert(this.status.load(.Acquire) == .start);
+ assert(this.status.load(.Acquire) == .start);
this.setStatus(.starting);
var promise = vm.loadEntryPointForWebWorker(this.specifier) catch {
@@ -390,3 +390,5 @@ pub const WebWorker = struct {
}
}
};
+
+const assert = bun.assert;
diff --git a/src/bun.js/webcore.zig b/src/bun.js/webcore.zig
index 0bfc83753d3aa5..2e4b9ff7114b93 100644
--- a/src/bun.js/webcore.zig
+++ b/src/bun.js/webcore.zig
@@ -322,8 +322,8 @@ pub const Prompt = struct {
}
if (comptime Environment.allow_assert) {
- std.debug.assert(input.items.len > 0);
- std.debug.assert(input.items[input.items.len - 1] != '\r');
+ bun.assert(input.items.len > 0);
+ bun.assert(input.items[input.items.len - 1] != '\r');
}
// 8. Let result be null if the user aborts, or otherwise the string
diff --git a/src/bun.js/webcore/blob.zig b/src/bun.js/webcore/blob.zig
index 3537cd9f994afd..db20eb2ba79ad9 100644
--- a/src/bun.js/webcore/blob.zig
+++ b/src/bun.js/webcore/blob.zig
@@ -478,7 +478,7 @@ pub const Blob = struct {
const blob = _onStructuredCloneDeserialize(globalThis, @TypeOf(reader), reader) catch return .zero;
if (Environment.allow_assert) {
- std.debug.assert(total_length - reader.context.pos == reserved_space_for_serialization);
+ assert(total_length - reader.context.pos == reserved_space_for_serialization);
}
return blob;
@@ -653,7 +653,7 @@ pub const Blob = struct {
);
} else {
if (Environment.allow_assert) {
- comptime std.debug.assert(Environment.isWindows);
+ comptime assert(Environment.isWindows);
@panic("this shouldn't be reachable.");
}
try writer.print(
@@ -1602,7 +1602,7 @@ pub const Blob = struct {
};
pub fn ref(this: *Store) void {
- std.debug.assert(this.ref_count > 0);
+ assert(this.ref_count > 0);
this.ref_count += 1;
}
@@ -1658,7 +1658,7 @@ pub const Blob = struct {
}
pub fn deref(this: *Blob.Store) void {
- std.debug.assert(this.ref_count >= 1);
+ assert(this.ref_count >= 1);
this.ref_count -= 1;
if (this.ref_count == 0) {
this.deinit();
@@ -2036,7 +2036,7 @@ pub const Blob = struct {
fn onCopyFile(req: *libuv.fs_t) callconv(.C) void {
var this: *CopyFileWindows = @fieldParentPtr(CopyFileWindows, "io_request", req);
- std.debug.assert(req.data == @as(?*anyopaque, @ptrCast(this)));
+ assert(req.data == @as(?*anyopaque, @ptrCast(this)));
var event_loop = this.event_loop;
event_loop.virtual_machine.event_loop_handle.?.unrefConcurrently();
const rc = req.result;
@@ -2139,7 +2139,7 @@ pub const Blob = struct {
fn onMkdirpCompleteConcurrent(this: *CopyFileWindows, err_: JSC.Maybe(void)) void {
bun.sys.syslog("mkdirp complete", .{});
- std.debug.assert(this.err == null);
+ assert(this.err == null);
this.err = if (err_ == .err) err_.err else null;
this.event_loop.enqueueTaskConcurrent(JSC.ConcurrentTask.create(JSC.ManagedTask.New(CopyFileWindows, onMkdirpComplete).init(this)));
}
@@ -2615,8 +2615,8 @@ pub const Blob = struct {
return;
}
- std.debug.assert(this.destination_fd != invalid_fd);
- std.debug.assert(this.source_fd != invalid_fd);
+ assert(this.destination_fd != invalid_fd);
+ assert(this.source_fd != invalid_fd);
if (this.destination_file_store.pathlike == .fd) {}
@@ -3604,7 +3604,7 @@ pub const Blob = struct {
pub fn toJS(this: *Blob, globalObject: *JSC.JSGlobalObject) JSC.JSValue {
// if (comptime Environment.allow_assert) {
- // std.debug.assert(this.allocator != null);
+ // assert(this.allocator != null);
// }
this.calculateEstimatedByteSize();
@@ -3697,7 +3697,7 @@ pub const Blob = struct {
},
.transfer => {
const store = this.store.?;
- std.debug.assert(store.data == .bytes);
+ assert(store.data == .bytes);
this.transfer();
// we don't need to worry about UTF-8 BOM in this case because the store owns the memory.
return ZigString.init(buf).external(global, store, Store.external);
@@ -4498,7 +4498,7 @@ pub const InlineBlob = extern struct {
pub fn concat(first: []const u8, second: []const u8) InlineBlob {
const total = first.len + second.len;
- std.debug.assert(total <= available_bytes);
+ assert(total <= available_bytes);
var inline_blob: JSC.WebCore.InlineBlob = .{};
var bytes_slice = inline_blob.bytes[0..total];
@@ -4514,7 +4514,7 @@ pub const InlineBlob = extern struct {
}
fn internalInit(data: []const u8, was_string: bool) InlineBlob {
- std.debug.assert(data.len <= available_bytes);
+ assert(data.len <= available_bytes);
var blob = InlineBlob{
.len = @as(IntSize, @intCast(data.len)),
@@ -4574,3 +4574,5 @@ pub const InlineBlob = extern struct {
pub fn clearAndFree(_: *@This()) void {}
};
+
+const assert = bun.assert;
diff --git a/src/bun.js/webcore/blob/WriteFile.zig b/src/bun.js/webcore/blob/WriteFile.zig
index ffc13161d671ef..25a724a05a714e 100644
--- a/src/bun.js/webcore/blob/WriteFile.zig
+++ b/src/bun.js/webcore/blob/WriteFile.zig
@@ -137,7 +137,7 @@ pub const WriteFile = struct {
wrote: *usize,
) bool {
const fd = this.opened_fd;
- std.debug.assert(fd != invalid_fd);
+ bun.assert(fd != invalid_fd);
const result: JSC.Maybe(usize) =
// We do not use pwrite() because the file may not be
@@ -446,7 +446,7 @@ pub const WriteFileWindows = struct {
// libuv always returns 0 when a callback is specified
if (rc.errEnum()) |err| {
- std.debug.assert(err != .NOENT);
+ bun.assert(err != .NOENT);
this.throw(.{
.errno = @intFromEnum(err),
@@ -460,7 +460,7 @@ pub const WriteFileWindows = struct {
pub fn onOpen(req: *uv.fs_t) callconv(.C) void {
var this: *WriteFileWindows = @fieldParentPtr(WriteFileWindows, "io_request", req);
- std.debug.assert(this == @as(*WriteFileWindows, @alignCast(@ptrCast(req.data.?))));
+ bun.assert(this == @as(*WriteFileWindows, @alignCast(@ptrCast(req.data.?))));
const rc = this.io_request.result;
if (comptime Environment.allow_assert)
log("onOpen({s}) = {}", .{ this.file_blob.store.?.data.file.pathlike.path.slice(), rc });
@@ -518,14 +518,14 @@ pub const WriteFileWindows = struct {
fn onMkdirpCompleteConcurrent(this: *WriteFileWindows, err_: JSC.Maybe(void)) void {
log("mkdirp complete", .{});
- std.debug.assert(this.err == null);
+ bun.assert(this.err == null);
this.err = if (err_ == .err) err_.err else null;
this.event_loop.enqueueTaskConcurrent(JSC.ConcurrentTask.create(JSC.ManagedTask.New(WriteFileWindows, onMkdirpComplete).init(this)));
}
fn onWriteComplete(req: *uv.fs_t) callconv(.C) void {
var this: *WriteFileWindows = @fieldParentPtr(WriteFileWindows, "io_request", req);
- std.debug.assert(this == @as(*WriteFileWindows, @alignCast(@ptrCast(req.data.?))));
+ bun.assert(this == @as(*WriteFileWindows, @alignCast(@ptrCast(req.data.?))));
const rc = this.io_request.result;
if (rc.errno()) |err| {
this.throw(.{
@@ -566,7 +566,7 @@ pub const WriteFileWindows = struct {
}
pub fn throw(this: *WriteFileWindows, err: bun.sys.Error) void {
- std.debug.assert(this.err == null);
+ bun.assert(this.err == null);
this.err = err;
this.onFinish();
}
diff --git a/src/bun.js/webcore/body.zig b/src/bun.js/webcore/body.zig
index 3ece0a37ed76b0..ba30e8a4f2763b 100644
--- a/src/bun.js/webcore/body.zig
+++ b/src/bun.js/webcore/body.zig
@@ -506,7 +506,7 @@ pub const Body = struct {
};
}
- std.debug.assert(str.tag == .WTFStringImpl);
+ assert(str.tag == .WTFStringImpl);
return Body.Value{
.WTFStringImpl = str.value.WTFStringImpl,
@@ -703,7 +703,7 @@ pub const Body = struct {
switch (this.*) {
.Blob => {
const new_blob = this.Blob;
- std.debug.assert(new_blob.allocator == null); // owned by Body
+ assert(new_blob.allocator == null); // owned by Body
this.* = .{ .Used = {} };
return new_blob;
},
@@ -943,7 +943,7 @@ pub const Body = struct {
body.value = Value.fromJS(globalThis, value) orelse return null;
if (body.value == .Blob)
- std.debug.assert(body.value.Blob.allocator == null); // owned by Body
+ assert(body.value.Blob.allocator == null); // owned by Body
return body;
}
@@ -1348,7 +1348,7 @@ pub const BodyValueBufferer = struct {
// explicitly set it to a dead pointer
// we use this memory address to disable signals being sent
signal.clear();
- std.debug.assert(signal.isDead());
+ assert(signal.isDead());
const assignment_result: JSValue = ArrayBufferSink.JSSink.assignToStream(
globalThis,
@@ -1360,7 +1360,7 @@ pub const BodyValueBufferer = struct {
assignment_result.ensureStillAlive();
// assert that it was updated
- std.debug.assert(!signal.isDead());
+ assert(!signal.isDead());
if (assignment_result.isError()) {
return error.PipeFailed;
@@ -1398,7 +1398,7 @@ pub const BodyValueBufferer = struct {
}
fn bufferLockedBodyValue(sink: *@This(), value: *JSC.WebCore.Body.Value) !void {
- std.debug.assert(value.* == .Locked);
+ assert(value.* == .Locked);
const locked = &value.Locked;
if (locked.readable.get()) |stream| {
// keep the stream alive until we're done with it
@@ -1421,8 +1421,8 @@ pub const BodyValueBufferer = struct {
return error.UnsupportedStreamType;
},
.Bytes => |byte_stream| {
- std.debug.assert(byte_stream.pipe.ctx == null);
- std.debug.assert(sink.byte_stream == null);
+ assert(byte_stream.pipe.ctx == null);
+ assert(sink.byte_stream == null);
const bytes = byte_stream.buffer.items;
// If we've received the complete body by the time this function is called
@@ -1496,3 +1496,5 @@ pub const BodyValueBufferer = struct {
}
}
};
+
+const assert = bun.assert;
diff --git a/src/bun.js/webcore/encoding.zig b/src/bun.js/webcore/encoding.zig
index 690be5a8553ef9..f24dfbf8f0432a 100644
--- a/src/bun.js/webcore/encoding.zig
+++ b/src/bun.js/webcore/encoding.zig
@@ -61,17 +61,17 @@ pub const TextEncoder = struct {
if (slice.len <= buf.len / 2) {
const result = strings.copyLatin1IntoUTF8(&buf, []const u8, slice);
const uint8array = JSC.JSValue.createUninitializedUint8Array(globalThis, result.written);
- std.debug.assert(result.written <= buf.len);
- std.debug.assert(result.read == slice.len);
+ bun.assert(result.written <= buf.len);
+ bun.assert(result.read == slice.len);
const array_buffer = uint8array.asArrayBuffer(globalThis).?;
- std.debug.assert(result.written == array_buffer.len);
+ bun.assert(result.written == array_buffer.len);
@memcpy(array_buffer.byteSlice()[0..result.written], buf[0..result.written]);
return uint8array;
} else {
const bytes = strings.allocateLatin1IntoUTF8(globalThis.bunVM().allocator, []const u8, slice) catch {
return JSC.toInvalidArguments("Out of memory", .{}, globalThis);
};
- std.debug.assert(bytes.len >= slice.len);
+ bun.assert(bytes.len >= slice.len);
return ArrayBuffer.fromBytes(bytes, .Uint8Array).toJSUnchecked(globalThis, null);
}
}
@@ -104,10 +104,10 @@ pub const TextEncoder = struct {
return uint8array;
}
const uint8array = JSC.JSValue.createUninitializedUint8Array(globalThis, result.written);
- std.debug.assert(result.written <= buf.len);
- std.debug.assert(result.read == slice.len);
+ bun.assert(result.written <= buf.len);
+ bun.assert(result.read == slice.len);
const array_buffer = uint8array.asArrayBuffer(globalThis).?;
- std.debug.assert(result.written == array_buffer.len);
+ bun.assert(result.written == array_buffer.len);
@memcpy(array_buffer.slice()[0..result.written], buf[0..result.written]);
return uint8array;
} else {
@@ -178,7 +178,7 @@ pub const TextEncoder = struct {
globalThis: *JSGlobalObject,
rope_str: *JSC.JSString,
) JSValue {
- if (comptime Environment.allow_assert) std.debug.assert(rope_str.is8Bit());
+ if (comptime Environment.allow_assert) bun.assert(rope_str.is8Bit());
var stack_buf: [2048]u8 = undefined;
var buf_to_use: []u8 = &stack_buf;
const length = rope_str.length();
@@ -1008,7 +1008,7 @@ pub const Encoder = struct {
defer str.deref();
const wrote = strings.encodeBytesToHex(chars, input);
- std.debug.assert(wrote == chars.len);
+ bun.assert(wrote == chars.len);
return str.toJS(global);
},
diff --git a/src/bun.js/webcore/request.zig b/src/bun.js/webcore/request.zig
index 2817348045224f..28ad41fad8dc2d 100644
--- a/src/bun.js/webcore/request.zig
+++ b/src/bun.js/webcore/request.zig
@@ -372,7 +372,7 @@ pub const Request = struct {
});
if (comptime Environment.allow_assert) {
- std.debug.assert(this.sizeOfURL() == url_bytelength);
+ bun.assert(this.sizeOfURL() == url_bytelength);
}
if (url_bytelength < 128) {
@@ -384,7 +384,7 @@ pub const Request = struct {
}) catch @panic("Unexpected error while printing URL");
if (comptime Environment.allow_assert) {
- std.debug.assert(this.sizeOfURL() == url.len);
+ bun.assert(this.sizeOfURL() == url.len);
}
var href = bun.JSC.URL.hrefFromString(bun.String.fromBytes(url));
@@ -434,7 +434,7 @@ pub const Request = struct {
}
if (comptime Environment.allow_assert) {
- std.debug.assert(this.sizeOfURL() == req_url.len);
+ bun.assert(this.sizeOfURL() == req_url.len);
}
this.url = bun.String.createUTF8(req_url);
}
diff --git a/src/bun.js/webcore/response.zig b/src/bun.js/webcore/response.zig
index 3531f11d3fd96b..4f794b672235ac 100644
--- a/src/bun.js/webcore/response.zig
+++ b/src/bun.js/webcore/response.zig
@@ -470,7 +470,7 @@ pub const Response = struct {
break :brk Init.init(bun.default_allocator, globalThis, arguments[1]) catch null;
}
- std.debug.assert(!arguments[1].isEmptyOrUndefinedOrNull());
+ bun.assert(!arguments[1].isEmptyOrUndefinedOrNull());
const err = globalThis.createTypeErrorInstance("Expected options to be one of: null, undefined, or object", .{});
globalThis.throwValue(err);
@@ -1204,7 +1204,7 @@ pub const Fetch = struct {
}
pub fn onReject(this: *FetchTasklet) JSValue {
- std.debug.assert(this.result.fail != null);
+ bun.assert(this.result.fail != null);
log("onReject", .{});
if (this.getAbortError()) |err| {
@@ -1406,7 +1406,7 @@ pub const Fetch = struct {
fn toResponse(this: *FetchTasklet) Response {
log("toResponse", .{});
- std.debug.assert(this.metadata != null);
+ bun.assert(this.metadata != null);
// at this point we always should have metadata
const metadata = this.metadata.?;
const http_response = metadata.response;
@@ -1539,8 +1539,8 @@ pub const Fetch = struct {
fetch_tasklet.signal_store.header_progress.store(true, .Monotonic);
if (fetch_tasklet.request_body == .Sendfile) {
- std.debug.assert(fetch_options.url.isHTTP());
- std.debug.assert(fetch_options.proxy == null);
+ bun.assert(fetch_options.url.isHTTP());
+ bun.assert(fetch_options.proxy == null);
fetch_tasklet.http.?.request_body = .{ .sendfile = fetch_tasklet.request_body.Sendfile };
}
@@ -1618,7 +1618,7 @@ pub const Fetch = struct {
// metadata should be provided only once so we preserve it until we consume it
if (result.metadata) |metadata| {
log("added callback metadata", .{});
- std.debug.assert(task.metadata == null);
+ bun.assert(task.metadata == null);
task.metadata = metadata;
}
task.body_size = result.body_size;
diff --git a/src/bun.js/webcore/streams.zig b/src/bun.js/webcore/streams.zig
index 01df75471c52f3..11cfadf62cbdb5 100644
--- a/src/bun.js/webcore/streams.zig
+++ b/src/bun.js/webcore/streams.zig
@@ -42,7 +42,7 @@ const uws = bun.uws;
const Blob = JSC.WebCore.Blob;
const Response = JSC.WebCore.Response;
const Request = JSC.WebCore.Request;
-const assert = std.debug.assert;
+const assert = bun.assert;
const Syscall = bun.sys;
const uv = bun.windows.libuv;
@@ -115,7 +115,7 @@ pub const ReadableStream = struct {
var blob = JSC.WebCore.Blob.initWithStore(blobby.lazy.blob, globalThis);
blob.store.?.ref();
// it should be lazy, file shouldn't have opened yet.
- std.debug.assert(!blobby.started);
+ bun.assert(!blobby.started);
stream.done(globalThis);
return AnyBlob{ .Blob = blob };
}
@@ -1122,8 +1122,8 @@ pub const Sink = struct {
if (stack_size >= str.len * 2) {
var buf: [stack_size]u8 = undefined;
const copied = strings.copyUTF16IntoUTF8(&buf, []const u16, str, true);
- std.debug.assert(copied.written <= stack_size);
- std.debug.assert(copied.read <= stack_size);
+ bun.assert(copied.written <= stack_size);
+ bun.assert(copied.read <= stack_size);
if (input.isDone()) {
const result = writeFn(ctx, .{ .temporary_and_done = bun.ByteList.init(buf[0..copied.written]) });
return result;
@@ -1264,7 +1264,7 @@ pub const ArrayBufferSink = struct {
as_uint8array: bool = false,
pub fn connect(this: *ArrayBufferSink, signal: Signal) void {
- std.debug.assert(this.reader == null);
+ bun.assert(this.reader == null);
this.signal = signal;
}
@@ -1411,7 +1411,7 @@ pub const ArrayBufferSink = struct {
return .{ .result = ArrayBuffer.fromBytes(&[_]u8{}, .ArrayBuffer) };
}
- std.debug.assert(this.next == null);
+ bun.assert(this.next == null);
var list = this.bytes.listManaged(this.allocator);
this.bytes = bun.ByteList.init("");
this.done = true;
@@ -1446,15 +1446,15 @@ const AutoFlusher = struct {
}
pub fn unregisterDeferredMicrotaskWithTypeUnchecked(comptime Type: type, this: *Type, vm: *JSC.VirtualMachine) void {
- std.debug.assert(this.auto_flusher.registered);
- std.debug.assert(vm.eventLoop().deferred_tasks.unregisterTask(this));
+ bun.assert(this.auto_flusher.registered);
+ bun.assert(vm.eventLoop().deferred_tasks.unregisterTask(this));
this.auto_flusher.registered = false;
}
pub fn registerDeferredMicrotaskWithTypeUnchecked(comptime Type: type, this: *Type, vm: *JSC.VirtualMachine) void {
- std.debug.assert(!this.auto_flusher.registered);
+ bun.assert(!this.auto_flusher.registered);
this.auto_flusher.registered = true;
- std.debug.assert(!vm.eventLoop().deferred_tasks.postTask(this, @ptrCast(&Type.onAutoFlush)));
+ bun.assert(!vm.eventLoop().deferred_tasks.postTask(this, @ptrCast(&Type.onAutoFlush)));
}
};
@@ -1990,7 +1990,7 @@ pub fn HTTPServerWritable(comptime ssl: bool) type {
return this.has_backpressure and this.end_len > 0;
}
fn sendWithoutAutoFlusher(this: *@This(), buf: []const u8) bool {
- std.debug.assert(!this.done);
+ bun.assert(!this.done);
defer log("send: {d} bytes (backpressure: {any})", .{ buf.len, this.has_backpressure });
if (this.requested_end and !this.res.state().isHttpWriteCalled()) {
@@ -2103,7 +2103,7 @@ pub fn HTTPServerWritable(comptime ssl: bool) type {
this.flushPromise();
if (this.buffer.cap == 0) {
- std.debug.assert(this.pooled_buffer == null);
+ bun.assert(this.pooled_buffer == null);
if (comptime FeatureFlags.http_buffer_pooling) {
if (ByteListPool.getIfExists()) |pooled_node| {
this.pooled_buffer = pooled_node;
@@ -2675,7 +2675,7 @@ pub fn ReadableStreamSource(
if (@hasDecl(Context, "setRawMode")) {
const flag = call_frame.argument(0);
if (Environment.allow_assert) {
- std.debug.assert(flag.isBoolean());
+ bun.assert(flag.isBoolean());
}
return switch (this.context.setRawMode(flag == .true)) {
.result => .undefined,
@@ -2906,7 +2906,7 @@ pub const FileSink = struct {
pub const Poll = IOWriter;
fn Bun__ForceFileSinkToBeSynchronousOnWindows(globalObject: *JSC.JSGlobalObject, jsvalue: JSC.JSValue) callconv(.C) void {
- comptime std.debug.assert(Environment.isWindows);
+ comptime bun.assert(Environment.isWindows);
var this: *FileSink = @alignCast(@ptrCast(JSSink.fromJS(globalObject, jsvalue) orelse return));
this.force_sync_on_windows = true;
@@ -3573,7 +3573,7 @@ pub const FileReader = struct {
return .{ .err = err };
},
.result => |opened| {
- std.debug.assert(opened.fd.isValid());
+ bun.assert(opened.fd.isValid());
this.fd = opened.fd;
pollable = opened.pollable;
file_type = opened.file_type;
@@ -3917,7 +3917,7 @@ pub const FileReader = struct {
const out = bun.ByteList.init(this.buffered.items);
this.buffered = .{};
if (comptime Environment.allow_assert) {
- std.debug.assert(this.reader.buffer().items.ptr != out.ptr);
+ bun.assert(this.reader.buffer().items.ptr != out.ptr);
}
return out;
}
@@ -4077,7 +4077,7 @@ pub const ByteBlobLoader = struct {
this.remain -|= copied;
this.offset +|= copied;
- std.debug.assert(buffer.ptr != temporary.ptr);
+ bun.assert(buffer.ptr != temporary.ptr);
@memcpy(buffer[0..temporary.len], temporary);
if (this.remain == 0) {
return .{ .into_array_and_done = .{ .value = array, .len = copied } };
@@ -4240,7 +4240,7 @@ pub const ByteStream = struct {
return;
}
- std.debug.assert(!this.has_received_last_chunk);
+ bun.assert(!this.has_received_last_chunk);
this.has_received_last_chunk = stream.isDone();
if (this.pipe.ctx) |ctx| {
@@ -4251,10 +4251,10 @@ pub const ByteStream = struct {
const chunk = stream.slice();
if (this.pending.state == .pending) {
- std.debug.assert(this.buffer.items.len == 0);
+ bun.assert(this.buffer.items.len == 0);
const to_copy = this.pending_buffer[0..@min(chunk.len, this.pending_buffer.len)];
const pending_buffer_len = this.pending_buffer.len;
- std.debug.assert(to_copy.ptr != chunk.ptr);
+ bun.assert(to_copy.ptr != chunk.ptr);
@memcpy(to_copy, chunk[0..to_copy.len]);
this.pending_buffer = &.{};
@@ -4358,10 +4358,10 @@ pub const ByteStream = struct {
pub fn onPull(this: *@This(), buffer: []u8, view: JSC.JSValue) StreamResult {
JSC.markBinding(@src());
- std.debug.assert(buffer.len > 0);
+ bun.assert(buffer.len > 0);
if (this.buffer.items.len > 0) {
- std.debug.assert(this.value() == .zero);
+ bun.assert(this.value() == .zero);
const to_write = @min(
this.buffer.items.len - this.offset,
buffer.len,
@@ -4552,8 +4552,8 @@ pub fn NewReadyWatcher(
@panic("TODO on Windows");
}
- std.debug.assert(this.poll_ref.?.fd == fd_);
- std.debug.assert(
+ bun.assert(this.poll_ref.?.fd == fd_);
+ bun.assert(
this.poll_ref.?.unregister(JSC.VirtualMachine.get().event_loop_handle.?, false) == .result,
);
this.poll_ref.?.disableKeepingProcessAlive(JSC.VirtualMachine.get());
@@ -4594,8 +4594,8 @@ pub fn NewReadyWatcher(
);
break :brk this.poll_ref.?;
};
- std.debug.assert(poll_ref.fd == fd);
- std.debug.assert(!this.isWatching());
+ bun.assert(poll_ref.fd == fd);
+ bun.assert(!this.isWatching());
switch (poll_ref.register(JSC.VirtualMachine.get().event_loop_handle.?, flag, true)) {
.err => |err| {
std.debug.panic("FilePoll.register failed: {d}", .{err.errno});
diff --git a/src/bun.zig b/src/bun.zig
index 52ad0fc2c71bbf..d2f6ce5ee9eda9 100644
--- a/src/bun.zig
+++ b/src/bun.zig
@@ -129,7 +129,7 @@ pub const FileDescriptor = enum(FileDescriptorInt) {
}
pub fn assertKind(fd: FileDescriptor, kind: FDImpl.Kind) void {
- std.debug.assert(FDImpl.decode(fd).kind == kind);
+ assert(FDImpl.decode(fd).kind == kind);
}
pub fn cwd() FileDescriptor {
@@ -262,7 +262,7 @@ pub fn len(value: anytype) usize {
return indexOfSentinel(info.child, sentinel, value);
},
.C => {
- std.debug.assert(value != null);
+ assert(value != null);
return indexOfSentinel(info.child, 0, value);
},
.Slice => value.len,
@@ -398,14 +398,14 @@ pub inline fn range(comptime min: anytype, comptime max: anytype) [max - min]usi
}
pub fn copy(comptime Type: type, dest: []Type, src: []const Type) void {
- if (comptime Environment.allow_assert) std.debug.assert(dest.len >= src.len);
+ if (comptime Environment.allow_assert) assert(dest.len >= src.len);
if (@intFromPtr(src.ptr) == @intFromPtr(dest.ptr) or src.len == 0) return;
const input: []const u8 = std.mem.sliceAsBytes(src);
const output: []u8 = std.mem.sliceAsBytes(dest);
- std.debug.assert(input.len > 0);
- std.debug.assert(output.len > 0);
+ assert(input.len > 0);
+ assert(output.len > 0);
const does_input_or_output_overlap = (@intFromPtr(input.ptr) < @intFromPtr(output.ptr) and
@intFromPtr(input.ptr) + input.len > @intFromPtr(output.ptr)) or
@@ -515,7 +515,7 @@ pub fn rand(bytes: []u8) void {
pub const ObjectPool = @import("./pool.zig").ObjectPool;
pub fn assertNonBlocking(fd: anytype) void {
- std.debug.assert(
+ assert(
(std.os.fcntl(fd, std.os.F.GETFL, 0) catch unreachable) & std.os.O.NONBLOCK != 0,
);
}
@@ -530,7 +530,7 @@ pub fn isReadable(fd: FileDescriptor) PollFlag {
if (comptime Environment.isWindows) {
@panic("TODO on Windows");
}
- std.debug.assert(fd != invalid_fd);
+ assert(fd != invalid_fd);
var polls = [_]std.os.pollfd{
.{
.fd = fd.cast(),
@@ -577,7 +577,7 @@ pub fn isWritable(fd: FileDescriptor) PollFlag {
}
return;
}
- std.debug.assert(fd != invalid_fd);
+ assert(fd != invalid_fd);
var polls = [_]std.os.pollfd{
.{
@@ -675,7 +675,7 @@ pub fn rangeOfSliceInBuffer(slice: []const u8, buffer: []const u8) ?[2]u32 {
@as(u32, @truncate(slice.len)),
};
if (comptime Environment.allow_assert)
- std.debug.assert(strings.eqlLong(slice, buffer[r[0]..][0..r[1]], false));
+ assert(strings.eqlLong(slice, buffer[r[0]..][0..r[1]], false));
return r;
}
@@ -799,7 +799,7 @@ pub const FDHashMapContext = struct {
// a file descriptor is i32 on linux, u64 on windows
// the goal here is to do zero work and widen the 32 bit type to 64
// this should compile error if FileDescriptor somehow is larger than 64 bits.
- comptime std.debug.assert(@bitSizeOf(FileDescriptor) <= 64);
+ comptime assert(@bitSizeOf(FileDescriptor) <= 64);
return @intCast(fd.int());
}
pub fn eql(_: @This(), a: FileDescriptor, b: FileDescriptor) bool {
@@ -1334,7 +1334,7 @@ fn lenSliceTo(ptr: anytype, comptime end: meta.Elem(@TypeOf(ptr))) usize {
return i;
},
.C => {
- std.debug.assert(ptr != null);
+ assert(ptr != null);
return indexOfSentinel(ptr_info.child, end, ptr);
},
.Slice => {
@@ -1395,7 +1395,7 @@ fn SliceTo(comptime T: type, comptime end: meta.Elem(T)) type {
.C => {
new_ptr_info.sentinel = &end;
// C pointers are always allowzero, but we don't want the return type to be.
- std.debug.assert(new_ptr_info.is_allowzero);
+ assert(new_ptr_info.is_allowzero);
new_ptr_info.is_allowzero = false;
},
}
@@ -1434,7 +1434,7 @@ pub fn cstring(input: []const u8) [:0]const u8 {
return "";
if (comptime Environment.allow_assert) {
- std.debug.assert(
+ assert(
input.ptr[input.len] == 0,
);
}
@@ -2806,7 +2806,7 @@ pub fn NewRefCounted(comptime T: type, comptime deinit_fn: ?fn (self: *T) void)
pub fn destroy(self: *T) void {
if (comptime Environment.allow_assert) {
- std.debug.assert(self.ref_count == 0);
+ assert(self.ref_count == 0);
allocation_logger("destroy() = {*}", .{self});
}
@@ -2854,7 +2854,7 @@ pub fn NewRefCounted(comptime T: type, comptime deinit_fn: ?fn (self: *T) void)
ptr.* = t;
if (comptime Environment.allow_assert) {
- std.debug.assert(ptr.ref_count == 1);
+ assert(ptr.ref_count == 1);
allocation_logger("new() = {*}", .{ptr});
}
@@ -2928,7 +2928,7 @@ pub fn errnoToZigErr(err: anytype) anyerror {
err;
if (Environment.allow_assert) {
- std.debug.assert(num != 0);
+ assert(num != 0);
}
if (Environment.os == .windows) {
@@ -2936,7 +2936,7 @@ pub fn errnoToZigErr(err: anytype) anyerror {
num = @abs(num);
} else {
if (Environment.allow_assert) {
- std.debug.assert(num > 0);
+ assert(num > 0);
}
}
@@ -3069,3 +3069,48 @@ pub fn SliceIterator(comptime T: type) type {
}
pub const Futex = @import("./futex.zig");
+
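+/// Shared failure path for `assert`. Kept `noinline` so assertion call sites stay
+/// small; at comptime it becomes a compile error, at runtime it panics via `Output`.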
+noinline fn assertionFailure() noreturn {
+ if (@inComptime()) {
+ @compileError("assertion failure");
+ }
+
+ @setCold(true);
+ Output.panic("Internal assertion failure. This is a bug in Bun.", .{});
+}
+
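+/// Assertion that is only checked in debug builds; in all other builds the check
+/// compiles away.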
+pub inline fn debugAssert(cheap_value_only_plz: bool) void {
+ if (comptime !Environment.isDebug) {
+ return;
+ }
+
+ if (!cheap_value_only_plz) {
+ unreachable;
+ }
+}
+
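+/// Drop-in replacement for `std.debug.assert`: enabled whenever
+/// `Environment.allow_assert` is set, trips `unreachable` in debug builds so a
+/// debugger stops at the call site, and otherwise reports via `assertionFailure`.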
+pub inline fn assert(value: bool) void {
+ if (comptime !Environment.allow_assert) {
+ return;
+ }
+
+ if (!value) {
+ if (comptime Environment.isDebug) unreachable;
+ assertionFailure();
+ }
+}
+
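+/// Always-on variant: reaches `unreachable` when the condition is false, which is
+/// a safety panic in safe builds and undefined behavior in unsafe release builds.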
+pub inline fn unsafeAssert(condition: bool) void {
+ if (!condition) {
+ unreachable;
+ }
+}
diff --git a/src/bundler.zig b/src/bundler.zig
index 138b5777cf38c6..8383c92d7fe815 100644
--- a/src/bundler.zig
+++ b/src/bundler.zig
@@ -1719,7 +1719,7 @@ pub const Bundler = struct {
bun.copy(u8, tmp_buildfile_buf2[len..], absolute_pathname.ext);
len += absolute_pathname.ext.len;
- if (comptime Environment.allow_assert) std.debug.assert(len > 0);
+ if (comptime Environment.allow_assert) bun.assert(len > 0);
const decoded_entry_point_path = tmp_buildfile_buf2[0..len];
break :brk try bundler.resolver.resolve(bundler.fs.top_level_dir, decoded_entry_point_path, .entry_point);
diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig
index d1b621d4dfa118..44b394bfcc0cea 100644
--- a/src/bundler/bundle_v2.zig
+++ b/src/bundler/bundle_v2.zig
@@ -946,7 +946,7 @@ pub const BundleV2 = struct {
while (iter.next()) |index| {
builder.addClientComponent(index);
}
- std.debug.assert(builder.resolved_source_indices.items.len > 0);
+ bun.assert(builder.resolved_source_indices.items.len > 0);
const path = Fs.Path.initWithNamespace(
std.fmt.allocPrint(
@@ -2905,7 +2905,7 @@ pub const ParseTask = struct {
}
// never a react client component if RSC is not enabled.
- std.debug.assert(use_directive == .none or bundler.options.react_server_components);
+ bun.assert(use_directive == .none or bundler.options.react_server_components);
step.* = .resolve;
ast.target = target;
@@ -2939,7 +2939,7 @@ pub const ParseTask = struct {
defer worker.unget();
var step: ParseTask.Result.Error.Step = .pending;
var log = Logger.Log.init(worker.allocator);
- std.debug.assert(this.source_index.isValid()); // forgot to set source_index
+ bun.assert(this.source_index.isValid()); // forgot to set source_index
const result = bun.default_allocator.create(Result) catch unreachable;
result.* = .{
@@ -3485,7 +3485,7 @@ const LinkerGraph = struct {
}
// null ref shouldn't be there.
- std.debug.assert(!ref.isEmpty());
+ bun.assert(!ref.isEmpty());
// Track that this specific symbol was imported
if (source_index_to_import_from.get() != source_index) {
@@ -3560,7 +3560,7 @@ const LinkerGraph = struct {
for (entry_points, path_strings, source_indices) |i, *path_string, *source_index| {
const source = sources[i.get()];
if (comptime Environment.allow_assert) {
- std.debug.assert(source.index.get() == i.get());
+ bun.assert(source.index.get() == i.get());
}
entry_point_kinds[source.index.get()] = EntryPoint.Kind.user_specified;
path_string.* = bun.PathString.init(source.path.text);
@@ -3568,7 +3568,7 @@ const LinkerGraph = struct {
}
for (dynamic_import_entry_points) |id| {
- std.debug.assert(this.code_splitting); // this should never be a thing without code splitting
+ bun.assert(this.code_splitting); // this should never be a thing without code splitting
if (entry_point_kinds[id] != .none) {
// You could dynamic import a file that is already an entry point
@@ -4731,7 +4731,7 @@ const LinkerContext = struct {
// for entry point files in CommonJS format (or when in pass-through mode).
if (kind == .cjs and (!entry_point_kinds[id].isEntryPoint() or output_format == .iife or output_format == .esm)) {
flags[id].wrap = .cjs;
- std.debug.assert(kind == .cjs);
+ bun.assert(kind == .cjs);
}
}
@@ -5015,7 +5015,7 @@ const LinkerContext = struct {
.ptr = string_buffer.ptr,
};
- defer std.debug.assert(builder.len == builder.cap); // ensure we used all of it
+ defer bun.assert(builder.len == builder.cap); // ensure we used all of it
// Pre-generate symbols for re-exports CommonJS symbols in case they
// are necessary later. This is done now because the symbols map cannot be
@@ -5076,7 +5076,7 @@ const LinkerContext = struct {
runtime_export_symbol_ref = this.runtimeFunction("__export");
}
- std.debug.assert(runtime_export_symbol_ref.isValid());
+ bun.assert(runtime_export_symbol_ref.isValid());
this.graph.generateSymbolImportAndUse(
id,
@@ -5275,7 +5275,7 @@ const LinkerContext = struct {
continue;
}
- std.debug.assert(@as(usize, @intCast(other_id)) < this.graph.meta.len);
+ bun.assert(@as(usize, @intCast(other_id)) < this.graph.meta.len);
const other_flags = flags[other_id];
const other_export_kind = exports_kind[other_id];
const other_source_index = other_id;
@@ -5380,7 +5380,7 @@ const LinkerContext = struct {
if (record.source_index.isValid()) {
const other_source_index = record.source_index.get();
const other_id = other_source_index;
- std.debug.assert(@as(usize, @intCast(other_id)) < this.graph.meta.len);
+ bun.assert(@as(usize, @intCast(other_id)) < this.graph.meta.len);
const other_export_kind = exports_kind[other_id];
if (other_source_index != source_index and other_export_kind.isDynamic()) {
happens_at_runtime = true;
@@ -5567,7 +5567,7 @@ const LinkerContext = struct {
const all_export_stmts: []js_ast.Stmt = stmts.head[0 .. @as(usize, @intFromBool(needs_exports_variable)) + @as(usize, @intFromBool(properties.items.len > 0))];
stmts.head = stmts.head[all_export_stmts.len..];
var remaining_stmts = all_export_stmts;
- defer std.debug.assert(remaining_stmts.len == 0); // all must be used
+ defer bun.assert(remaining_stmts.len == 0); // all must be used
// Prefix this part with "var exports = {}" if this isn't a CommonJS entry point
if (needs_exports_variable) {
@@ -5795,7 +5795,7 @@ const LinkerContext = struct {
// Now that we know this, we can determine cross-part dependencies
for (symbol_uses, 0..) |ref, j| {
if (comptime Environment.allow_assert) {
- std.debug.assert(part.symbol_uses.values()[j].count_estimate > 0);
+ bun.assert(part.symbol_uses.values()[j].count_estimate > 0);
}
const other_parts = c.topLevelSymbolsToParts(id, ref);
@@ -5898,7 +5898,7 @@ const LinkerContext = struct {
}
} else if (file_entry_bits.len > 0) {
// assert that the tag is correct
- std.debug.assert(file_entry_bits[0] == .static);
+ bun.assert(file_entry_bits[0] == .static);
}
// Code splitting: Determine which entry points can reach which files. This
@@ -6171,7 +6171,7 @@ const LinkerContext = struct {
// imports because of export alias renaming, which must consider all export
// aliases simultaneously to avoid collisions.
{
- std.debug.assert(chunk_metas.len == chunks.len);
+ bun.assert(chunk_metas.len == chunks.len);
var r = renamer.ExportRenamer.init(c.allocator);
defer r.deinit();
debug("Generating cross-chunk exports", .{});
@@ -6689,7 +6689,7 @@ const LinkerContext = struct {
_ = chunk_index;
const c = ctx.c;
- std.debug.assert(chunk.content == .javascript);
+ bun.assert(chunk.content == .javascript);
js_ast.Expr.Data.Store.create(bun.default_allocator);
js_ast.Stmt.Data.Store.create(bun.default_allocator);
@@ -8239,7 +8239,7 @@ const LinkerContext = struct {
);
stmt.data.s_local.is_export = false;
} else if (FeatureFlags.unwrap_commonjs_to_esm and s.was_commonjs_export and wrap == .cjs) {
- std.debug.assert(stmt.data.s_local.decls.len == 1);
+ bun.assert(stmt.data.s_local.decls.len == 1);
const decl = stmt.data.s_local.decls.ptr[0];
if (decl.value) |decl_value| {
stmt = Stmt.alloc(
@@ -8500,7 +8500,7 @@ const LinkerContext = struct {
// };
//
if (index == part_index_for_lazy_default_export) {
- std.debug.assert(index != std.math.maxInt(u32));
+ bun.assert(index != std.math.maxInt(u32));
const stmt = part_stmts[0];
@@ -10312,7 +10312,7 @@ const LinkerContext = struct {
.dependencies = Dependency.List.init(dependencies),
},
) catch unreachable;
- std.debug.assert(part_index != js_ast.namespace_export_part_index);
+ bun.assert(part_index != js_ast.namespace_export_part_index);
wrapper_part_index.* = Index.part(part_index);
c.graph.generateSymbolImportAndUse(
source_index,
@@ -10361,7 +10361,7 @@ const LinkerContext = struct {
.dependencies = Dependency.List.init(dependencies),
},
) catch unreachable;
- std.debug.assert(part_index != js_ast.namespace_export_part_index);
+ bun.assert(part_index != js_ast.namespace_export_part_index);
wrapper_part_index.* = Index.part(part_index);
c.graph.generateSymbolImportAndUse(
source_index,
@@ -11203,8 +11203,8 @@ pub const Chunk = struct {
) catch unreachable).len..];
}
- std.debug.assert(remain.len == 0);
- std.debug.assert(total_buf.len == count + debug_id_len);
+ bun.assert(remain.len == 0);
+ bun.assert(total_buf.len == count + debug_id_len);
return .{
.buffer = total_buf,
@@ -11269,7 +11269,7 @@ pub const Chunk = struct {
count += piece.data_len;
if (Environment.allow_assert) {
- std.debug.assert(piece.data().len == piece.data_len);
+ bun.assert(piece.data().len == piece.data_len);
}
switch (piece.index.kind) {
@@ -11322,7 +11322,7 @@ pub const Chunk = struct {
const file_path = switch (piece.index.kind) {
.asset => brk: {
const files = additional_files[index];
- std.debug.assert(files.len > 0);
+ bun.assert(files.len > 0);
const output_file = files.last().?.output_file;
@@ -11354,8 +11354,8 @@ pub const Chunk = struct {
}
}
- std.debug.assert(remain.len == 0);
- std.debug.assert(total_buf.len == count);
+ bun.assert(remain.len == 0);
+ bun.assert(total_buf.len == count);
return .{
.buffer = total_buf,
@@ -11485,7 +11485,7 @@ pub const CrossChunkImport = struct {
// TODO: do we need to clone this array?
for (import_items.slice()) |*item| {
item.export_alias = exports_to_other_chunks.get(item.ref).?;
- std.debug.assert(item.export_alias.len > 0);
+ bun.assert(item.export_alias.len > 0);
}
std.sort.pdq(CrossChunkImport.Item, import_items.slice(), {}, CrossChunkImport.Item.lessThan);
diff --git a/src/cli.zig b/src/cli.zig
index 2d30e64852a77c..660791a44ed37a 100644
--- a/src/cli.zig
+++ b/src/cli.zig
@@ -1714,7 +1714,7 @@ pub const Command = struct {
.RunAsNodeCommand => {
if (comptime bun.fast_debug_build_mode and bun.fast_debug_build_cmd != .RunAsNodeCommand) unreachable;
const ctx = try Command.Context.create(allocator, log, .RunAsNodeCommand);
- std.debug.assert(pretend_to_be_node);
+ bun.assert(pretend_to_be_node);
try RunCommand.execAsIfNode(ctx);
},
.UpgradeCommand => {
diff --git a/src/cli/bunx_command.zig b/src/cli/bunx_command.zig
index f8efa60d4be266..d86431ab48c28f 100644
--- a/src/cli/bunx_command.zig
+++ b/src/cli/bunx_command.zig
@@ -269,7 +269,7 @@ pub const BunxCommand = struct {
exitWithUsage();
}
- std.debug.assert(update_requests.len == 1); // One positional cannot parse to multiple requests
+ bun.assert(update_requests.len == 1); // One positional cannot parse to multiple requests
var update_request = update_requests[0];
// if you type "tsc" and TypeScript is not installed:
@@ -531,7 +531,7 @@ pub const BunxCommand = struct {
// 2. The "bin" is possibly not the same as the package name, so we load the package.json to figure out what "bin" to use
const root_dir_fd = root_dir_info.getFileDescriptor();
- std.debug.assert(root_dir_fd != .zero);
+ bun.assert(root_dir_fd != .zero);
if (getBinName(&this_bundler, root_dir_fd, bunx_cache_dir, initial_bin_name)) |package_name_for_bin| {
// if we check the bin name and its actually the same, we don't need to check $PATH here again
if (!strings.eqlLong(package_name_for_bin, initial_bin_name, true)) {
diff --git a/src/cli/install_completions_command.zig b/src/cli/install_completions_command.zig
index 6d5a923d925efe..13221932369820 100644
--- a/src/cli/install_completions_command.zig
+++ b/src/cli/install_completions_command.zig
@@ -442,7 +442,7 @@ pub const InstallCompletionsCommand = struct {
else => unreachable,
};
- std.debug.assert(completions_dir.len > 0);
+ bun.assert(completions_dir.len > 0);
var output_file = output_dir.createFileZ(filename, .{
.truncate = true,
diff --git a/src/cli/pm_trusted_command.zig b/src/cli/pm_trusted_command.zig
index a13a3fbc8d1d44..c8d980961297c7 100644
--- a/src/cli/pm_trusted_command.zig
+++ b/src/cli/pm_trusted_command.zig
@@ -264,7 +264,7 @@ pub const TrustCommand = struct {
const alias = dep.name.slice(buf);
const package_id = pm.lockfile.buffers.resolutions.items[dep_id];
if (comptime Environment.allow_assert) {
- std.debug.assert(package_id != Install.invalid_package_id);
+ bun.assert(package_id != Install.invalid_package_id);
}
const resolution = &resolutions[package_id];
var package_scripts = scripts[package_id];
@@ -334,7 +334,7 @@ pub const TrustCommand = struct {
depth -= 1;
const _entry = scripts_at_depth.get(depth);
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(_entry != null);
+ bun.assert(_entry != null);
}
if (_entry) |entry| {
for (entry.items) |info| {
@@ -387,7 +387,7 @@ pub const TrustCommand = struct {
// now add the package names to lockfile.trustedDependencies and package.json `trustedDependencies`
const names = package_names_to_add.keys();
if (comptime Environment.allow_assert) {
- std.debug.assert(names.len > 0);
+ bun.assert(names.len > 0);
}
// could be null if these are the first packages to be trusted
@@ -443,7 +443,7 @@ pub const TrustCommand = struct {
pm.root_package_json_file.close();
if (comptime Environment.allow_assert) {
- std.debug.assert(total_scripts_ran > 0);
+ bun.assert(total_scripts_ran > 0);
}
Output.pretty(" {d} script{s} ran across {d} package{s} ", .{
diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig
index eb9a87ea55cb43..6c83424d1cce47 100644
--- a/src/cli/run_command.zig
+++ b/src/cli/run_command.zig
@@ -458,15 +458,15 @@ pub const RunCommand = struct {
// a relative lookup, because in the case we do find it, we have to
// generate this full path anyways.
if (Environment.isWindows and bun.FeatureFlags.windows_bunx_fast_path and bun.strings.hasSuffixComptime(executable, ".exe")) {
- std.debug.assert(std.fs.path.isAbsolute(executable));
+ bun.assert(std.fs.path.isAbsolute(executable));
// Using @constCast is safe because we know that
// `direct_launch_buffer` is the data destination; that assumption is
// backed by the assertion that immediately follows.
var wpath = @constCast(bun.strings.toNTPath(&BunXFastPath.direct_launch_buffer, executable));
- std.debug.assert(bun.isSliceInBufferT(u16, wpath, &BunXFastPath.direct_launch_buffer));
+ bun.assert(bun.isSliceInBufferT(u16, wpath, &BunXFastPath.direct_launch_buffer));
- std.debug.assert(wpath.len > bun.windows.nt_object_prefix.len + ".exe".len);
+ bun.assert(wpath.len > bun.windows.nt_object_prefix.len + ".exe".len);
wpath.len += ".bunx".len - ".exe".len;
@memcpy(wpath[wpath.len - "bunx".len ..], comptime bun.strings.w("bunx"));
@@ -806,7 +806,7 @@ pub const RunCommand = struct {
.ALREADY_EXISTS => {},
else => {
{
- std.debug.assert(target_path_buffer[dir_slice.len] == '\\');
+ bun.assert(target_path_buffer[dir_slice.len] == '\\');
target_path_buffer[dir_slice.len] = 0;
std.os.mkdirW(target_path_buffer[0..dir_slice.len :0], 0) catch {};
target_path_buffer[dir_slice.len] = '\\';
@@ -1615,7 +1615,7 @@ pub const RunCommand = struct {
}
pub fn execAsIfNode(ctx: Command.Context) !void {
- std.debug.assert(CLI.pretend_to_be_node);
+ bun.assert(CLI.pretend_to_be_node);
if (ctx.runtime_options.eval.script.len > 0) {
const trigger = bun.pathLiteral("/[eval]");
@@ -1673,12 +1673,12 @@ pub const BunXFastPath = struct {
if (!bun.FeatureFlags.windows_bunx_fast_path) return;
var ctx = ctx_const;
- std.debug.assert(bun.isSliceInBufferT(u16, path_to_use, &BunXFastPath.direct_launch_buffer));
+ bun.assert(bun.isSliceInBufferT(u16, path_to_use, &BunXFastPath.direct_launch_buffer));
var command_line = BunXFastPath.direct_launch_buffer[path_to_use.len..];
debug("Attempting to find and load bunx file: '{}'", .{bun.fmt.utf16(path_to_use)});
if (Environment.allow_assert) {
- std.debug.assert(std.fs.path.isAbsoluteWindowsWTF16(path_to_use));
+ bun.assert(std.fs.path.isAbsoluteWindowsWTF16(path_to_use));
}
const handle = (bun.sys.openFileAtWindows(
bun.invalid_fd, // absolute path is given
diff --git a/src/cli/test_command.zig b/src/cli/test_command.zig
index 034bf47d458d03..901ced109e7be0 100644
--- a/src/cli/test_command.zig
+++ b/src/cli/test_command.zig
@@ -411,7 +411,7 @@ const Scanner = struct {
if (@as(FileSystem.RealFS.EntriesOption.Tag, root.*) == .entries) {
var iter = root.entries.data.iterator();
const fd = root.entries.fd;
- std.debug.assert(fd != bun.invalid_fd);
+ bun.assert(fd != bun.invalid_fd);
while (iter.next()) |entry| {
this.next(entry.value_ptr.*, fd);
}
@@ -421,7 +421,7 @@ const Scanner = struct {
while (this.dirs_to_scan.readItem()) |entry| {
if (!Environment.isWindows) {
const dir = entry.relative_dir.asDir();
- std.debug.assert(bun.toFD(dir.fd) != bun.invalid_fd);
+ bun.assert(bun.toFD(dir.fd) != bun.invalid_fd);
const parts2 = &[_]string{ entry.dir_path, entry.name.slice() };
var path2 = this.fs.absBuf(parts2, &this.open_dir_buf);
@@ -433,7 +433,7 @@ const Scanner = struct {
_ = this.readDirWithName(path2, child_dir) catch continue;
} else {
const dir = entry.relative_dir.asDir();
- std.debug.assert(bun.toFD(dir.fd) != bun.invalid_fd);
+ bun.assert(bun.toFD(dir.fd) != bun.invalid_fd);
const parts2 = &[_]string{ entry.dir_path, entry.name.slice() };
var path2 = this.fs.absBuf(parts2, &this.open_dir_buf);
@@ -532,7 +532,7 @@ const Scanner = struct {
}
if (comptime Environment.allow_assert)
- std.debug.assert(!strings.contains(name, std.fs.path.sep_str ++ "node_modules" ++ std.fs.path.sep_str));
+ bun.assert(!strings.contains(name, std.fs.path.sep_str ++ "node_modules" ++ std.fs.path.sep_str));
for (this.exclusion_names) |exclude_name| {
if (strings.eql(exclude_name, name)) return;
@@ -968,7 +968,7 @@ pub const TestCommand = struct {
const vm = this.vm;
var files = this.files;
const allocator = this.allocator;
- std.debug.assert(files.len > 0);
+ bun.assert(files.len > 0);
if (files.len > 1) {
for (files[0 .. files.len - 1]) |file_name| {
diff --git a/src/codegen/generate-classes.ts b/src/codegen/generate-classes.ts
index 57ebdcf04c5a2e..8aebf57347ffdc 100644
--- a/src/codegen/generate-classes.ts
+++ b/src/codegen/generate-classes.ts
@@ -1741,7 +1741,7 @@ pub const ${className(typeName)} = struct {
JSC.markBinding(@src());
if (comptime Environment.allow_assert) {
const value__ = ${symbolName(typeName, "create")}(globalObject, this);
- std.debug.assert(value__.as(${typeName}).? == this); // If this fails, likely a C ABI issue.
+ @import("root").bun.assert(value__.as(${typeName}).? == this); // If this fails, likely a C ABI issue.
return value__;
} else {
return ${symbolName(typeName, "create")}(globalObject, this);
@@ -1759,7 +1759,7 @@ pub const ${className(typeName)} = struct {
/// Detach the ptr from the thisValue
pub fn detachPtr(_: *${typeName}, value: JSC.JSValue) void {
JSC.markBinding(@src());
- std.debug.assert(${symbolName(typeName, "dangerouslySetPtr")}(value, null));
+ bun.assert(${symbolName(typeName, "dangerouslySetPtr")}(value, null));
}
extern fn ${symbolName(typeName, "fromJS")}(JSC.JSValue) ?*${typeName};
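One detail worth noting in this template: the round-trip assertion reaches the helper through `@import("root").bun.assert` rather than a bare `bun` identifier. `@import("root")` always resolves to the root source file of the compilation, so generated code can call the assert without relying on a local `bun` alias being in scope. A two-file sketch (the file names and the re-export are hypothetical):

```zig
// root.zig (hypothetical): the root module re-exports the `bun` namespace.
pub const bun = @import("bun.zig");

// generated.zig (hypothetical): no local `const bun = ...` declaration is
// needed, because @import("root") resolves to the compilation's root module.
fn assertSameInstance(ok: bool) void {
    @import("root").bun.assert(ok);
}
```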
diff --git a/src/css_scanner.zig b/src/css_scanner.zig
index d0424aef4829fe..03ad54e3bb6e92 100644
--- a/src/css_scanner.zig
+++ b/src/css_scanner.zig
@@ -806,7 +806,7 @@ pub const Scanner = struct {
inline fn nextCodepointSlice(it: *Scanner, comptime advance: bool) []const u8 {
@setRuntimeSafety(false);
if (comptime Environment.allow_assert) {
- std.debug.assert(it.source.contents.len > 0);
+ bun.assert(it.source.contents.len > 0);
}
const cp_len = strings.utf8ByteSequenceLength(it.source.contents[it.current]);
@@ -900,7 +900,7 @@ pub fn NewWriter(
log: *logger.Log,
allocator: std.mem.Allocator,
) anyerror!void {
- std.debug.assert(writer.source.contents.len > 0);
+ bun.assert(writer.source.contents.len > 0);
var scanner = Scanner.init(
log,
@@ -918,7 +918,7 @@ pub fn NewWriter(
log: *logger.Log,
allocator: std.mem.Allocator,
) !usize {
- std.debug.assert(writer.source.contents.len > 0);
+ bun.assert(writer.source.contents.len > 0);
var scanner = Scanner.init(
log,
@@ -938,7 +938,7 @@ pub fn NewWriter(
log: *logger.Log,
allocator: std.mem.Allocator,
) anyerror!void {
- std.debug.assert(writer.source.contents.len > 0);
+ bun.assert(writer.source.contents.len > 0);
var scanner = Scanner.init(
log,
diff --git a/src/deps/boringssl.translated.zig b/src/deps/boringssl.translated.zig
index 069676b37815a7..01a99ae4b14574 100644
--- a/src/deps/boringssl.translated.zig
+++ b/src/deps/boringssl.translated.zig
@@ -18782,7 +18782,7 @@ pub const struct_bio_st = extern struct {
pub fn slice(this: *struct_bio_st) []u8 {
var buf_mem: ?*BUF_MEM = null;
- std.debug.assert(BIO_get_mem_ptr(this, &buf_mem) > -1);
+ bun.assert(BIO_get_mem_ptr(this, &buf_mem) > -1);
if (buf_mem) |buf| {
if (buf.data == null) return &[_]u8{};
@@ -19033,12 +19033,12 @@ pub const SSL = opaque {
_ = SSL_clear_mode(ssl, mode);
const alpns = &[_]u8{ 8, 'h', 't', 't', 'p', '/', '1', '.', '1' };
- std.debug.assert(SSL_set_alpn_protos(ssl, alpns, alpns.len) == 0);
+ bun.assert(SSL_set_alpn_protos(ssl, alpns, alpns.len) == 0);
SSL_enable_signed_cert_timestamps(ssl);
SSL_enable_ocsp_stapling(ssl);
- // std.debug.assert(SSL_set_strict_cipher_list(ssl, SSL_DEFAULT_CIPHER_LIST) == 0);
+ // bun.assert(SSL_set_strict_cipher_list(ssl, SSL_DEFAULT_CIPHER_LIST) == 0);
SSL_set_enable_ech_grease(ssl, 1);
}
@@ -19205,22 +19205,22 @@ pub const BIOMethod = struct {
) *BIO_METHOD {
const method = BIO_meth_new(BIO_get_new_index() | BIO_TYPE_SOURCE_SINK, name);
if (comptime create__) |create_| {
- std.debug.assert(BIO_meth_set_create(method, create_) > 0);
+ bun.assert(BIO_meth_set_create(method, create_) > 0);
}
if (comptime destroy__) |destroy_| {
- std.debug.assert(BIO_meth_set_destroy(method, destroy_) > 0);
+ bun.assert(BIO_meth_set_destroy(method, destroy_) > 0);
}
if (comptime write__) |write_| {
- std.debug.assert(BIO_meth_set_write(method, write_) > 0);
+ bun.assert(BIO_meth_set_write(method, write_) > 0);
}
if (comptime read__) |read_| {
- std.debug.assert(BIO_meth_set_read(method, read_) > 0);
+ bun.assert(BIO_meth_set_read(method, read_) > 0);
}
if (comptime gets__) |gets_| {
- std.debug.assert(BIO_meth_set_gets(method, gets_) > 0);
+ bun.assert(BIO_meth_set_gets(method, gets_) > 0);
}
if (comptime ctrl__) |ctrl_| {
- std.debug.assert(BIO_meth_set_ctrl(method, ctrl_) > 0);
+ bun.assert(BIO_meth_set_ctrl(method, ctrl_) > 0);
}
return method;
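The asserted expressions in this hunk are not pure checks: each `BIO_meth_set_*` call registers a callback, and the assertion only validates its return code. Because the argument is evaluated before the inline `assert` body runs, the registration still happens in builds where `Environment.allow_assert` is off; only the `> 0` comparison disappears, which is the same evaluate-then-check behavior `std.debug.assert` had. A sketch with a hypothetical `registerCallback` standing in for the effectful C call:

```zig
const bun = @import("root").bun;

// Hypothetical stand-in for an effectful call such as BIO_meth_set_write:
// it mutates state and signals success through its return value.
fn registerCallback(registry: *u32, bit: u5) c_int {
    registry.* |= @as(u32, 1) << bit;
    return 1;
}

pub fn wire(registry: *u32) void {
    // The registration itself runs in every build; only the `> 0` check is
    // dropped when Environment.allow_assert is false.
    bun.assert(registerCallback(registry, 2) > 0);
}
```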
diff --git a/src/deps/libuv.zig b/src/deps/libuv.zig
index 44285763cd8bc4..b6c8a16bd94772 100644
--- a/src/deps/libuv.zig
+++ b/src/deps/libuv.zig
@@ -729,7 +729,7 @@ pub const uv_buf_t = extern struct {
base: [*]u8,
pub fn init(input: []const u8) uv_buf_t {
- std.debug.assert(input.len <= @as(usize, std.math.maxInt(ULONG)));
+ bun.assert(input.len <= @as(usize, std.math.maxInt(ULONG)));
return .{ .len = @truncate(input.len), .base = @constCast(input.ptr) };
}
@@ -1937,7 +1937,7 @@ pub const struct_uv_utsname_s = extern struct {
machine: [255:0]u8,
comptime {
- std.debug.assert(@sizeOf(struct_uv_utsname_s) == 256 * 4);
+ bun.assert(@sizeOf(struct_uv_utsname_s) == 256 * 4);
}
};
pub const uv_utsname_t = struct_uv_utsname_s;
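`uv_buf_t.init` above asserts that the length fits before `@truncate` narrows it to a `ULONG`; with assertions compiled out, the truncation proceeds silently, so the assert is what documents and, in asserting builds, enforces the range. A sketch of the same assert-then-truncate pattern with hypothetical names:

```zig
const std = @import("std");
const bun = @import("root").bun;

/// Narrow a usize length into the 32-bit field a C API expects. The assert
/// states the precondition; in builds without Environment.allow_assert the
/// @truncate runs unchecked.
fn toU32Len(len: usize) u32 {
    bun.assert(len <= std.math.maxInt(u32));
    return @truncate(len);
}
```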
diff --git a/src/deps/picohttp.zig b/src/deps/picohttp.zig
index 2153abf23f6f4d..e46ded225d0fa4 100644
--- a/src/deps/picohttp.zig
+++ b/src/deps/picohttp.zig
@@ -9,7 +9,7 @@ const StringBuilder = bun.StringBuilder;
const fmt = std.fmt;
-const assert = std.debug.assert;
+const assert = bun.assert;
pub const Header = struct {
name: []const u8,
diff --git a/src/deps/uws.zig b/src/deps/uws.zig
index f38d9ee11e2faf..386934c50d2dd8 100644
--- a/src/deps/uws.zig
+++ b/src/deps/uws.zig
@@ -1556,7 +1556,7 @@ pub const Request = opaque {
return ptr[0..req.uws_req_get_method(&ptr)];
}
pub fn header(req: *Request, name: []const u8) ?[]const u8 {
- std.debug.assert(std.ascii.isLower(name[0]));
+ bun.assert(std.ascii.isLower(name[0]));
var ptr: [*]const u8 = undefined;
const len = req.uws_req_get_header(name.ptr, name.len, &ptr);
diff --git a/src/deps/zig b/src/deps/zig
index 0da041f84ea019..7fe33d94eaeb1a 160000
--- a/src/deps/zig
+++ b/src/deps/zig
@@ -1 +1 @@
-Subproject commit 0da041f84ea019cbdde7a3bc4af69f0d3258f50c
+Subproject commit 7fe33d94eaeb1af7705e9c5f43a3b243aa895436
diff --git a/src/enums.zig b/src/enums.zig
index f5aaf71edc5823..bb0c8d8090d031 100644
--- a/src/enums.zig
+++ b/src/enums.zig
@@ -2,7 +2,7 @@
// This exists mostly as a workaround for https://github.com/ziglang/zig/issues/16980
const std = @import("std");
-const assert = std.debug.assert;
+const assert = @import("root").bun.assert;
const testing = std.testing;
const EnumField = std.builtin.Type.EnumField;
diff --git a/src/env_loader.zig b/src/env_loader.zig
index ac3d7049211d71..f98bd7652517e9 100644
--- a/src/env_loader.zig
+++ b/src/env_loader.zig
@@ -300,14 +300,14 @@ pub const Loader = struct {
if (behavior != .disable and behavior != .load_all_without_inlining) {
if (behavior == .prefix) {
- std.debug.assert(prefix.len > 0);
+ bun.assert(prefix.len > 0);
while (iter.next()) |entry| {
if (strings.startsWith(entry.key_ptr.*, prefix)) {
key_buf_len += entry.key_ptr.len;
key_count += 1;
e_strings_to_allocate += 1;
- std.debug.assert(entry.key_ptr.len > 0);
+ bun.assert(entry.key_ptr.len > 0);
}
}
} else {
@@ -317,7 +317,7 @@ pub const Loader = struct {
key_count += 1;
e_strings_to_allocate += 1;
- std.debug.assert(entry.key_ptr.len > 0);
+ bun.assert(entry.key_ptr.len > 0);
}
}
}
@@ -360,7 +360,7 @@ pub const Loader = struct {
} else {
const hash = bun.hash(entry.key_ptr.*);
- std.debug.assert(hash != invalid_hash);
+ bun.assert(hash != invalid_hash);
if (std.mem.indexOfScalar(u64, string_map_hashes, hash)) |key_i| {
e_strings[0] = js_ast.E.String{
@@ -862,7 +862,7 @@ const Parser = struct {
}
fn parseQuoted(this: *Parser, comptime quote: u8) ?string {
- if (comptime Environment.allow_assert) std.debug.assert(this.src[this.pos] == quote);
+ if (comptime Environment.allow_assert) bun.assert(this.src[this.pos] == quote);
const start = this.pos;
const max_len = value_buffer.len;
var end = start + 1;
@@ -883,7 +883,7 @@ const Parser = struct {
while (i < end and ptr < max_len) {
switch (this.src[i]) {
'\\' => if (comptime quote == '"') {
- if (comptime Environment.allow_assert) std.debug.assert(i + 1 < end);
+ if (comptime Environment.allow_assert) bun.assert(i + 1 < end);
switch (this.src[i + 1]) {
'n' => {
value_buffer[ptr] = '\n';
@@ -1097,7 +1097,7 @@ pub const Map = struct {
bun.copy(u8, env_buf[pair.key_ptr.len + 1 ..], pair.value_ptr.value);
envp_buf[i] = env_buf.ptr;
}
- if (comptime Environment.allow_assert) std.debug.assert(i == envp_count);
+ if (comptime Environment.allow_assert) bun.assert(i == envp_count);
}
return envp_buf;
}
@@ -1166,7 +1166,7 @@ pub const Map = struct {
pub inline fn put(this: *Map, key: string, value: string) !void {
if (Environment.isWindows and Environment.allow_assert) {
- std.debug.assert(bun.strings.indexOfChar(key, '\x00') == null);
+ bun.assert(bun.strings.indexOfChar(key, '\x00') == null);
}
try this.map.put(key, .{
.value = value,
diff --git a/src/fd.zig b/src/fd.zig
index 4873be9f0dd3db..b126c1b7e85ed0 100644
--- a/src/fd.zig
+++ b/src/fd.zig
@@ -24,7 +24,7 @@ fn handleToNumber(handle: FDImpl.System) FDImpl.SystemAsInt {
fn numberToHandle(handle: FDImpl.SystemAsInt) FDImpl.System {
if (env.os == .windows) {
if (!@inComptime()) {
- std.debug.assert(handle != FDImpl.invalid_value);
+ bun.assert(handle != FDImpl.invalid_value);
}
return @ptrFromInt(handle);
} else {
@@ -39,7 +39,7 @@ pub fn uv_get_osfhandle(in: c_int) libuv.uv_os_fd_t {
pub fn uv_open_osfhandle(in: libuv.uv_os_fd_t) error{SystemFdQuotaExceeded}!c_int {
const out = libuv.uv_open_osfhandle(in);
- std.debug.assert(out >= -1);
+ bun.assert(out >= -1);
if (out == -1) return error.SystemFdQuotaExceeded;
return out;
}
@@ -91,11 +91,11 @@ pub const FDImpl = packed struct {
enum(u0) { system };
comptime {
- std.debug.assert(@sizeOf(FDImpl) == @sizeOf(System));
+ bun.assert(@sizeOf(FDImpl) == @sizeOf(System));
if (env.os == .windows) {
// we want the conversion from FD to fd_t to be an integer truncate
- std.debug.assert(@as(FDImpl, @bitCast(@as(u64, 512))).value.as_system == 512);
+ bun.assert(@as(FDImpl, @bitCast(@as(u64, 512))).value.as_system == 512);
}
}
@@ -110,7 +110,7 @@ pub const FDImpl = packed struct {
if (env.os == .windows) {
// the current process fd is max usize
// https://learn.microsoft.com/en-us/windows/win32/api/processthreadsapi/nf-processthreadsapi-getcurrentprocess
- std.debug.assert(@intFromPtr(system_fd) <= std.math.maxInt(SystemAsInt));
+ bun.assert(@intFromPtr(system_fd) <= std.math.maxInt(SystemAsInt));
}
return fromSystemWithoutAssertion(system_fd);
@@ -215,7 +215,7 @@ pub const FDImpl = packed struct {
pub fn closeAllowingStdoutAndStderr(this: FDImpl) ?bun.sys.Error {
if (allow_assert) {
- std.debug.assert(this.value.as_system != invalid_value); // probably a UAF
+ bun.assert(this.value.as_system != invalid_value); // probably a UAF
}
// Format the file descriptor for logging BEFORE closing it.
@@ -226,8 +226,8 @@ pub const FDImpl = packed struct {
const result: ?bun.sys.Error = switch (env.os) {
.linux => result: {
const fd = this.encode();
- std.debug.assert(fd != bun.invalid_fd);
- std.debug.assert(fd.cast() > -1);
+ bun.assert(fd != bun.invalid_fd);
+ bun.assert(fd.cast() > -1);
break :result switch (linux.getErrno(linux.close(fd.cast()))) {
.BADF => bun.sys.Error{ .errno = @intFromEnum(os.E.BADF), .syscall = .close, .fd = fd },
else => null,
@@ -235,8 +235,8 @@ pub const FDImpl = packed struct {
},
.mac => result: {
const fd = this.encode();
- std.debug.assert(fd != bun.invalid_fd);
- std.debug.assert(fd.cast() > -1);
+ bun.assert(fd != bun.invalid_fd);
+ bun.assert(fd.cast() > -1);
break :result switch (bun.sys.system.getErrno(bun.sys.system.@"close$NOCANCEL"(fd.cast()))) {
.BADF => bun.sys.Error{ .errno = @intFromEnum(os.E.BADF), .syscall = .close, .fd = fd },
else => null,
@@ -254,7 +254,7 @@ pub const FDImpl = packed struct {
null;
},
.system => {
- std.debug.assert(this.value.as_system != 0);
+ bun.assert(this.value.as_system != 0);
const handle: System = @ptrFromInt(@as(u64, this.value.as_system));
break :result switch (bun.windows.NtClose(handle)) {
.SUCCESS => null,
@@ -399,6 +399,6 @@ pub const FDImpl = packed struct {
}
pub fn assertValid(this: FDImpl) void {
- std.debug.assert(this.isValid());
+ bun.assert(this.isValid());
}
};
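The `comptime { ... }` blocks in this file turn the layout checks into build-time checks: the operands are comptime-known, so with `allow_assert` enabled a violated condition fails compilation rather than panicking at runtime, and with it disabled the early return in `bun.assert` leaves the block empty. A sketch with a hypothetical packed struct:

```zig
const bun = @import("root").bun;

const PackedHandle = packed struct {
    value: u62,
    kind: u2,

    comptime {
        // Comptime-known condition: if allow_assert is on, a layout change
        // that breaks this fails the build; if it is off, the block is a no-op.
        bun.assert(@bitSizeOf(PackedHandle) == 64);
    }
};
```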
diff --git a/src/fs.zig b/src/fs.zig
index 2270d9bfe9c204..cf32e22b9b4b04 100644
--- a/src/fs.zig
+++ b/src/fs.zig
@@ -671,8 +671,8 @@ pub const FileSystem = struct {
}
pub fn promoteToCWD(this: *TmpfilePosix, from_name: [*:0]const u8, name: [*:0]const u8) !void {
- std.debug.assert(this.fd != bun.invalid_fd);
- std.debug.assert(this.dir_fd != bun.invalid_fd);
+ bun.assert(this.fd != bun.invalid_fd);
+ bun.assert(this.dir_fd != bun.invalid_fd);
try C.moveFileZWithHandle(this.fd, this.dir_fd, bun.sliceTo(from_name, 0), bun.toFD(std.fs.cwd().fd), bun.sliceTo(name, 0));
this.close();
@@ -872,7 +872,7 @@ pub const FileSystem = struct {
hash_bytes_remain = hash_bytes_remain[@sizeOf(@TypeOf(this.size))..];
std.mem.writeInt(@TypeOf(this.mtime), hash_bytes_remain[0..@sizeOf(@TypeOf(this.mtime))], this.mtime, .little);
hash_bytes_remain = hash_bytes_remain[@sizeOf(@TypeOf(this.mtime))..];
- std.debug.assert(hash_bytes_remain.len == 8);
+ bun.assert(hash_bytes_remain.len == 8);
hash_bytes_remain[0..8].* = @as([8]u8, @bitCast(@as(u64, 0)));
return bun.hash(&hash_bytes);
}
@@ -1287,7 +1287,7 @@ pub const FileSystem = struct {
_kind = _stat.kind;
}
- std.debug.assert(_kind != .SymLink);
+ bun.assert(_kind != .SymLink);
if (_kind == .Directory) {
cache.kind = .dir;
@@ -1364,7 +1364,7 @@ pub const FileSystem = struct {
_kind = _stat.kind;
}
- std.debug.assert(_kind != .sym_link);
+ bun.assert(_kind != .sym_link);
if (_kind == .directory) {
cache.kind = .dir;
@@ -1486,7 +1486,7 @@ pub const PathName = struct {
}
if (comptime Environment.allow_assert) {
- std.debug.assert(!strings.includes(self.base, "/"));
+ bun.assert(!strings.includes(self.base, "/"));
}
// /bar/foo.js -> foo
@@ -1537,8 +1537,8 @@ pub const PathName = struct {
if (comptime Environment.isWindows and Environment.isDebug) {
// This path is likely incorrect. I think it may be *possible*
// but it is almost entirely certainly a bug.
- std.debug.assert(!strings.startsWith(_path, "/:/"));
- std.debug.assert(!strings.startsWith(_path, "\\:\\"));
+ bun.assert(!strings.startsWith(_path, "/:/"));
+ bun.assert(!strings.startsWith(_path, "\\:\\"));
}
var path = _path;
diff --git a/src/futex.zig b/src/futex.zig
index 2fc5194b3c83bd..d8d9a8a303f3a2 100644
--- a/src/futex.zig
+++ b/src/futex.zig
@@ -12,7 +12,7 @@ const Futex = @This();
const target = builtin.target;
const single_threaded = builtin.single_threaded;
-const assert = std.debug.assert;
+const assert = @import("root").bun.assert;
const testing = std.testing;
const Atomic = std.atomic.Value;
diff --git a/src/glob.zig b/src/glob.zig
index df0d4c82891400..524b40c15e45c8 100644
--- a/src/glob.zig
+++ b/src/glob.zig
@@ -244,7 +244,7 @@ pub fn GlobWalker_(
}
if (bun.Environment.allow_assert) {
- std.debug.assert(this.fds_open == 0);
+ bun.assert(this.fds_open == 0);
}
}
@@ -264,7 +264,7 @@ pub fn GlobWalker_(
if (bun.Environment.allow_assert) {
this.fds_open += 1;
// If this is over 2 then this means that there is a bug in the iterator code
- std.debug.assert(this.fds_open <= 2);
+ bun.assert(this.fds_open <= 2);
}
}
diff --git a/src/hive_array.zig b/src/hive_array.zig
index 22593f064e683e..05df5f19557fef 100644
--- a/src/hive_array.zig
+++ b/src/hive_array.zig
@@ -1,5 +1,5 @@
const std = @import("std");
-const assert = std.debug.assert;
+const assert = @import("root").bun.assert;
const mem = std.mem;
const testing = std.testing;
diff --git a/src/http.zig b/src/http.zig
index b913778656cb91..0fa3b00bb7e756 100644
--- a/src/http.zig
+++ b/src/http.zig
@@ -388,12 +388,12 @@ fn NewHTTPContext(comptime ssl: bool) type {
log("releaseSocket(0x{})", .{bun.fmt.hexIntUpper(@intFromPtr(socket.socket))});
if (comptime Environment.allow_assert) {
- std.debug.assert(!socket.isClosed());
- std.debug.assert(!socket.isShutdown());
- std.debug.assert(socket.isEstablished());
+ assert(!socket.isClosed());
+ assert(!socket.isShutdown());
+ assert(socket.isEstablished());
}
- std.debug.assert(hostname.len > 0);
- std.debug.assert(port > 0);
+ assert(hostname.len > 0);
+ assert(port > 0);
if (hostname.len <= MAX_KEEPALIVE_HOSTNAME and !socket.isClosedOrHasError() and socket.isEstablished()) {
if (this.pending_sockets.get()) |pending| {
@@ -427,13 +427,13 @@ fn NewHTTPContext(comptime ssl: bool) type {
}
if (active.get(PooledSocket)) |pooled| {
- std.debug.assert(context().pending_sockets.put(pooled));
+ assert(context().pending_sockets.put(pooled));
}
socket.ext(**anyopaque).?.* = bun.cast(**anyopaque, ActiveSocket.init(&dead_socket).ptr());
socket.close(0, null);
if (comptime Environment.allow_assert) {
- std.debug.assert(false);
+ assert(false);
}
}
pub fn onHandshake(
@@ -474,7 +474,7 @@ fn NewHTTPContext(comptime ssl: bool) type {
}
if (active.get(PooledSocket)) |pooled| {
- std.debug.assert(context().pending_sockets.put(pooled));
+ assert(context().pending_sockets.put(pooled));
}
// we can reach here if we are aborted
@@ -497,7 +497,7 @@ fn NewHTTPContext(comptime ssl: bool) type {
}
if (tagged.get(PooledSocket)) |pooled| {
- std.debug.assert(context().pending_sockets.put(pooled));
+ assert(context().pending_sockets.put(pooled));
}
return;
@@ -553,7 +553,7 @@ fn NewHTTPContext(comptime ssl: bool) type {
socket,
);
} else if (tagged.get(PooledSocket)) |pooled| {
- std.debug.assert(context().pending_sockets.put(pooled));
+ assert(context().pending_sockets.put(pooled));
return;
}
}
@@ -569,7 +569,7 @@ fn NewHTTPContext(comptime ssl: bool) type {
socket,
);
} else if (tagged.get(PooledSocket)) |pooled| {
- std.debug.assert(context().pending_sockets.put(pooled));
+ assert(context().pending_sockets.put(pooled));
return;
}
@@ -591,7 +591,7 @@ fn NewHTTPContext(comptime ssl: bool) type {
socket,
);
} else if (tagged.get(PooledSocket)) |pooled| {
- std.debug.assert(context().pending_sockets.put(pooled));
+ assert(context().pending_sockets.put(pooled));
return;
}
@@ -614,7 +614,7 @@ fn NewHTTPContext(comptime ssl: bool) type {
if (strings.eqlLong(socket.hostname_buf[0..socket.hostname_len], hostname, true)) {
const http_socket = socket.http_socket;
- std.debug.assert(context().pending_sockets.put(socket));
+ assert(context().pending_sockets.put(socket));
if (http_socket.isClosed()) {
http_socket.ext(**anyopaque).?.* = bun.cast(**anyopaque, ActiveSocket.init(&dead_socket).ptr());
@@ -902,7 +902,7 @@ pub fn checkServerIdentity(
const cert = bun.default_allocator.alloc(u8, @intCast(cert_size)) catch @panic("OOM");
var cert_ptr = cert.ptr;
const result_size = BoringSSL.i2d_X509(x509, &cert_ptr);
- std.debug.assert(result_size == cert_size);
+ assert(result_size == cert_size);
var hostname = client.hostname orelse client.url.hostname;
if (client.http_proxy) |proxy| {
@@ -953,9 +953,9 @@ pub fn onOpen(
) void {
if (comptime Environment.allow_assert) {
if (client.http_proxy) |proxy| {
- std.debug.assert(is_ssl == proxy.isHTTPS());
+ assert(is_ssl == proxy.isHTTPS());
} else {
- std.debug.assert(is_ssl == client.url.isHTTPS());
+ assert(is_ssl == client.url.isHTTPS());
}
}
if (client.signals.aborted != null) {
@@ -1292,7 +1292,7 @@ const Decompressor = union(enum) {
switch (this.*) {
.zlib => |reader| {
- std.debug.assert(reader.zlib.avail_in == 0);
+ assert(reader.zlib.avail_in == 0);
reader.zlib.next_in = buffer.ptr;
reader.zlib.avail_in = @as(u32, @truncate(buffer.len));
@@ -1812,7 +1812,7 @@ pub const AsyncHTTP = struct {
.redirect_type = redirect_type,
};
if (options.unix_socket_path) |val| {
- std.debug.assert(this.client.unix_socket_path.length() == 0);
+ assert(this.client.unix_socket_path.length() == 0);
this.client.unix_socket_path = val;
}
if (options.disable_timeout) |val| {
@@ -1991,7 +1991,7 @@ pub const AsyncHTTP = struct {
while (true) {
const result: HTTPClientResult = ctx.channel.readItem() catch unreachable;
if (result.fail) |e| return e;
- std.debug.assert(result.metadata != null);
+ assert(result.metadata != null);
return result.metadata.?.response;
}
@@ -1999,7 +1999,7 @@ pub const AsyncHTTP = struct {
}
pub fn onAsyncHTTPCallback(this: *AsyncHTTP, result: HTTPClientResult) void {
- std.debug.assert(this.real != null);
+ assert(this.real != null);
var callback = this.result_callback;
this.elapsed = http_thread.timer.read() -| this.elapsed;
@@ -2032,7 +2032,7 @@ pub const AsyncHTTP = struct {
}
const active_requests = AsyncHTTP.active_requests_count.fetchSub(1, .Monotonic);
- std.debug.assert(active_requests > 0);
+ assert(active_requests > 0);
if (active_requests >= AsyncHTTP.max_simultaneous_requests.load(.Monotonic)) {
http_thread.drainEvents();
@@ -2181,7 +2181,7 @@ pub fn doRedirect(this: *HTTPClient, comptime is_ssl: bool, ctx: *NewHTTPContext
NewHTTPContext(is_ssl).ActiveSocket.init(&dead_socket).ptr(),
);
if (this.isKeepAlivePossible()) {
- std.debug.assert(this.connected_url.hostname.len > 0);
+ assert(this.connected_url.hostname.len > 0);
ctx.releaseSocket(
socket,
this.connected_url.hostname,
@@ -2194,7 +2194,7 @@ pub fn doRedirect(this: *HTTPClient, comptime is_ssl: bool, ctx: *NewHTTPContext
this.connected_url = URL{};
const body_out_str = this.state.body_out_str.?;
this.remaining_redirect_count -|= 1;
- std.debug.assert(this.redirect_type == FetchRedirect.follow);
+ assert(this.redirect_type == FetchRedirect.follow);
// TODO: should this check be before decrementing the redirect count?
// the current logic will allow one less redirect than requested
@@ -2230,7 +2230,7 @@ pub fn isHTTPS(this: *HTTPClient) bool {
pub fn start(this: *HTTPClient, body: HTTPRequestBody, body_out_str: *MutableString) void {
body_out_str.reset();
- std.debug.assert(this.state.response_message_buffer.list.capacity == 0);
+ assert(this.state.response_message_buffer.list.capacity == 0);
this.state = InternalState.init(body, body_out_str);
if (this.isHTTPS()) {
@@ -2265,7 +2265,7 @@ fn start_(this: *HTTPClient, comptime is_ssl: bool) void {
if (socket.isClosed() and (this.state.response_stage != .done and this.state.response_stage != .fail)) {
this.fail(error.ConnectionClosed);
- std.debug.assert(this.state.fail != null);
+ assert(this.state.fail != null);
return;
}
}
@@ -2350,19 +2350,19 @@ pub fn onWritable(this: *HTTPClient, comptime is_first_call: bool, comptime is_s
}
const headers_len = list.items.len;
- std.debug.assert(list.items.len == writer.context.items.len);
+ assert(list.items.len == writer.context.items.len);
if (this.state.request_body.len > 0 and list.capacity - list.items.len > 0 and !this.proxy_tunneling) {
var remain = list.items.ptr[list.items.len..list.capacity];
const wrote = @min(remain.len, this.state.request_body.len);
- std.debug.assert(wrote > 0);
+ assert(wrote > 0);
@memcpy(remain[0..wrote], this.state.request_body[0..wrote]);
list.items.len += wrote;
}
const to_send = list.items[this.state.request_sent_len..];
if (comptime Environment.allow_assert) {
- std.debug.assert(!socket.isShutdown());
- std.debug.assert(!socket.isClosed());
+ assert(!socket.isShutdown());
+ assert(!socket.isClosed());
}
const amount = socket.write(
to_send,
@@ -2407,7 +2407,7 @@ pub fn onWritable(this: *HTTPClient, comptime is_first_call: bool, comptime is_s
} else {
this.state.request_stage = .body;
}
- std.debug.assert(
+ assert(
// we should have leftover data OR we use sendfile()
(this.state.original_request_body == .bytes and this.state.request_body.len > 0) or
this.state.original_request_body == .sendfile,
@@ -2508,19 +2508,19 @@ pub fn onWritable(this: *HTTPClient, comptime is_first_call: bool, comptime is_s
};
const headers_len = list.items.len;
- std.debug.assert(list.items.len == writer.context.items.len);
+ assert(list.items.len == writer.context.items.len);
if (this.state.request_body.len > 0 and list.capacity - list.items.len > 0) {
var remain = list.items.ptr[list.items.len..list.capacity];
const wrote = @min(remain.len, this.state.request_body.len);
- std.debug.assert(wrote > 0);
+ assert(wrote > 0);
@memcpy(remain[0..wrote], this.state.request_body[0..wrote]);
list.items.len += wrote;
}
const to_send = list.items[this.state.request_sent_len..];
if (comptime Environment.allow_assert) {
- std.debug.assert(!socket.isShutdown());
- std.debug.assert(!socket.isClosed());
+ assert(!socket.isShutdown());
+ assert(!socket.isClosed());
}
const amount = proxy.ssl.write(to_send) catch |err| {
@@ -2554,7 +2554,7 @@ pub fn onWritable(this: *HTTPClient, comptime is_first_call: bool, comptime is_s
if (has_sent_headers) {
this.state.request_stage = .proxy_body;
- std.debug.assert(this.state.request_body.len > 0);
+ assert(this.state.request_body.len > 0);
// we sent everything, but there's some body leftover
if (amount == @as(c_int, @intCast(to_send.len))) {
@@ -2937,7 +2937,7 @@ fn fail(this: *HTTPClient, err: anyerror) void {
// We have to clone metadata immediately after use
fn cloneMetadata(this: *HTTPClient) void {
- std.debug.assert(this.state.pending_response != null);
+ assert(this.state.pending_response != null);
if (this.state.pending_response) |response| {
if (this.state.cloned_metadata != null) {
this.state.cloned_metadata.?.deinit(this.allocator);
@@ -2963,7 +2963,7 @@ fn cloneMetadata(this: *HTTPClient) void {
};
} else {
// we should never clone metadata that doesn't exist
- // we added a empty metadata just in case but will hit the std.debug.assert
+ // we added an empty metadata just in case, but it will hit the assert
this.state.cloned_metadata = .{};
}
}
@@ -3141,7 +3141,7 @@ pub fn toResult(this: *HTTPClient) HTTPClientResult {
const preallocate_max = 1024 * 1024 * 256;
pub fn handleResponseBody(this: *HTTPClient, incoming_data: []const u8, is_only_buffer: bool) !bool {
- std.debug.assert(this.state.transfer_encoding == .identity);
+ assert(this.state.transfer_encoding == .identity);
const content_length = this.state.content_length;
// is it exactly as much as we need?
if (is_only_buffer and content_length != null and incoming_data.len >= content_length.?) {
@@ -3173,7 +3173,7 @@ fn handleResponseBodyFromSinglePacket(this: *HTTPClient, incoming_data: []const
try body_buffer.growBy(@max(@as(usize, @intFromFloat(min)), 32));
}
- // std.debug.assert(!body_buffer.owns(b));
+ // assert(!body_buffer.owns(b));
try this.state.decompressBytes(incoming_data, body_buffer);
} else {
try this.state.getBodyBuffer().appendSliceExact(incoming_data);
@@ -3183,7 +3183,7 @@ fn handleResponseBodyFromSinglePacket(this: *HTTPClient, incoming_data: []const
if (comptime Environment.allow_assert) {
// i'm not sure why this would happen and i haven't seen it happen
// but we should check
- std.debug.assert(this.state.getBodyBuffer().list.items.ptr != this.state.response_message_buffer.list.items.ptr);
+ assert(this.state.getBodyBuffer().list.items.ptr != this.state.response_message_buffer.list.items.ptr);
}
this.state.response_message_buffer.deinit();
@@ -3321,7 +3321,7 @@ fn handleResponseBodyChunkedEncodingFromSinglePacket(
incoming_data: []const u8,
) !bool {
var decoder = &this.state.chunked_decoder;
- std.debug.assert(incoming_data.len <= single_packet_small_buffer.len);
+ assert(incoming_data.len <= single_packet_small_buffer.len);
// set consume_trailer to 1 to discard the trailing header
// using content-encoding per chunk is not supported
@@ -3375,7 +3375,7 @@ fn handleResponseBodyChunkedEncodingFromSinglePacket(
this.state.received_last_chunk = true;
try this.handleResponseBodyFromSinglePacket(buffer);
- std.debug.assert(this.state.body_out_str.?.list.items.ptr != buffer.ptr);
+ assert(this.state.body_out_str.?.list.items.ptr != buffer.ptr);
if (this.progress_node) |progress| {
progress.activate();
progress.setCompletedItems(buffer.len);
@@ -3567,7 +3567,7 @@ pub fn handleResponseMetadata(
_ = string_builder.append(location);
if (comptime Environment.allow_assert)
- std.debug.assert(string_builder.cap == string_builder.len);
+ assert(string_builder.cap == string_builder.len);
const normalized_url = JSC.URL.hrefFromString(bun.String.fromBytes(string_builder.allocatedSlice()));
defer normalized_url.deref();
@@ -3611,7 +3611,7 @@ pub fn handleResponseMetadata(
_ = string_builder.append(location);
if (comptime Environment.allow_assert)
- std.debug.assert(string_builder.cap == string_builder.len);
+ assert(string_builder.cap == string_builder.len);
const normalized_url = JSC.URL.hrefFromString(bun.String.fromBytes(string_builder.allocatedSlice()));
defer normalized_url.deref();
@@ -3741,3 +3741,5 @@ pub fn handleResponseMetadata(
return ShouldContinue.finished;
}
}
+
+const assert = bun.assert;
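This file switches its call sites to a bare `assert(...)` and only introduces the alias on the last line. That works because Zig resolves container-level declarations independently of source order, so the binding can sit at the bottom of the file without a forward declaration. A minimal sketch:

```zig
const bun = @import("root").bun;

pub fn firstByte(items: []const u8) u8 {
    assert(items.len > 0); // resolves even though the alias is declared below
    return items[0];
}

const assert = bun.assert;
```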
diff --git a/src/http/websocket.zig b/src/http/websocket.zig
index 4a41e27c534822..00eb00a5f73437 100644
--- a/src/http/websocket.zig
+++ b/src/http/websocket.zig
@@ -54,12 +54,12 @@ pub const WebsocketHeader = packed struct {
stream.writer().writeInt(u16, @as(u16, @bitCast(header)), .big) catch unreachable;
stream.pos = 0;
const casted = stream.reader().readInt(u16, .big) catch unreachable;
- std.debug.assert(casted == @as(u16, @bitCast(header)));
- std.debug.assert(std.meta.eql(@as(WebsocketHeader, @bitCast(casted)), header));
+ bun.assert(casted == @as(u16, @bitCast(header)));
+ bun.assert(std.meta.eql(@as(WebsocketHeader, @bitCast(casted)), header));
}
try writer.writeInt(u16, @as(u16, @bitCast(header)), .big);
- std.debug.assert(header.len == packLength(n));
+ bun.assert(header.len == packLength(n));
}
pub fn packLength(length: usize) u7 {
diff --git a/src/http/websocket_http_client.zig b/src/http/websocket_http_client.zig
index 3e6ecddb03e081..1941bdf8d88dc2 100644
--- a/src/http/websocket_http_client.zig
+++ b/src/http/websocket_http_client.zig
@@ -270,7 +270,7 @@ pub fn NewHTTPUpgradeClient(comptime ssl: bool) type {
header_values: ?[*]const JSC.ZigString,
header_count: usize,
) callconv(.C) ?*HTTPClient {
- std.debug.assert(global.bunVM().event_loop_handle != null);
+ bun.assert(global.bunVM().event_loop_handle != null);
var client_protocol_hash: u64 = 0;
const body = buildRequestBody(
@@ -413,10 +413,10 @@ pub fn NewHTTPUpgradeClient(comptime ssl: bool) type {
pub fn handleOpen(this: *HTTPClient, socket: Socket) void {
log("onOpen", .{});
- std.debug.assert(socket.socket == this.tcp.?.socket);
+ bun.assert(socket.socket == this.tcp.?.socket);
- std.debug.assert(this.input_body_buf.len > 0);
- std.debug.assert(this.to_send.len == 0);
+ bun.assert(this.input_body_buf.len > 0);
+ bun.assert(this.to_send.len == 0);
if (comptime ssl) {
if (this.hostname.len > 0) {
@@ -443,10 +443,10 @@ pub fn NewHTTPUpgradeClient(comptime ssl: bool) type {
return;
}
- std.debug.assert(socket.socket == this.tcp.?.socket);
+ bun.assert(socket.socket == this.tcp.?.socket);
if (comptime Environment.allow_assert)
- std.debug.assert(!socket.isShutdown());
+ bun.assert(!socket.isShutdown());
var body = data;
if (this.body.items.len > 0) {
@@ -485,7 +485,7 @@ pub fn NewHTTPUpgradeClient(comptime ssl: bool) type {
pub fn handleEnd(this: *HTTPClient, socket: Socket) void {
log("onEnd", .{});
- std.debug.assert(socket.socket == this.tcp.?.socket);
+ bun.assert(socket.socket == this.tcp.?.socket);
this.terminate(ErrorCode.ended);
}
@@ -611,7 +611,7 @@ pub fn NewHTTPUpgradeClient(comptime ssl: bool) type {
this: *HTTPClient,
socket: Socket,
) void {
- std.debug.assert(socket.socket == this.tcp.?.socket);
+ bun.assert(socket.socket == this.tcp.?.socket);
if (this.to_send.len == 0)
return;
@@ -705,7 +705,7 @@ pub const Mask = struct {
}
// hint to the compiler not to vectorize the next loop
- std.debug.assert(input.len < strings.ascii_vector_size);
+ bun.assert(input.len < strings.ascii_vector_size);
}
if (comptime !skip_mask) {
@@ -851,8 +851,8 @@ const Copy = union(enum) {
pub fn copy(this: @This(), globalThis: *JSC.JSGlobalObject, buf: []u8, content_byte_len: usize, opcode: Opcode) void {
if (this == .raw) {
- std.debug.assert(buf.len >= this.raw.len);
- std.debug.assert(buf.ptr != this.raw.ptr);
+ bun.assert(buf.len >= this.raw.len);
+ bun.assert(buf.ptr != this.raw.ptr);
@memcpy(buf[0..this.raw.len], this.raw);
return;
}
@@ -882,14 +882,14 @@ const Copy = union(enum) {
header.final = true;
header.opcode = opcode;
- std.debug.assert(WebsocketHeader.frameSizeIncludingMask(content_byte_len) == buf.len);
+ bun.assert(WebsocketHeader.frameSizeIncludingMask(content_byte_len) == buf.len);
switch (this) {
.utf16 => |utf16| {
header.len = WebsocketHeader.packLength(content_byte_len);
const encode_into_result = strings.copyUTF16IntoUTF8(to_mask, []const u16, utf16, true);
- std.debug.assert(@as(usize, encode_into_result.written) == content_byte_len);
- std.debug.assert(@as(usize, encode_into_result.read) == utf16.len);
+ bun.assert(@as(usize, encode_into_result.written) == content_byte_len);
+ bun.assert(@as(usize, encode_into_result.read) == utf16.len);
header.len = WebsocketHeader.packLength(encode_into_result.written);
var fib = std.io.fixedBufferStream(buf);
header.writeHeader(fib.writer(), encode_into_result.written) catch unreachable;
@@ -898,10 +898,10 @@ const Copy = union(enum) {
},
.latin1 => |latin1| {
const encode_into_result = strings.copyLatin1IntoUTF8(to_mask, []const u8, latin1);
- std.debug.assert(@as(usize, encode_into_result.written) == content_byte_len);
+ bun.assert(@as(usize, encode_into_result.written) == content_byte_len);
// latin1 can contain non-ascii
- std.debug.assert(@as(usize, encode_into_result.read) == latin1.len);
+ bun.assert(@as(usize, encode_into_result.read) == latin1.len);
header.len = WebsocketHeader.packLength(encode_into_result.written);
var fib = std.io.fixedBufferStream(buf);
@@ -969,7 +969,7 @@ pub fn NewWebSocketClient(comptime ssl: bool) type {
if (comptime Environment.isPosix) {
if (vm.event_loop_handle) |other| {
- std.debug.assert(other == loop);
+ bun.assert(other == loop);
}
}
@@ -1130,7 +1130,7 @@ pub fn NewWebSocketClient(comptime ssl: bool) type {
}
pub fn consume(this: *WebSocket, data_: []const u8, left_in_fragment: usize, kind: Opcode, is_final: bool) usize {
- std.debug.assert(data_.len <= left_in_fragment);
+ bun.assert(data_.len <= left_in_fragment);
// did all the data fit in the buffer?
// we can avoid copying & allocating a temporary buffer
@@ -1181,7 +1181,7 @@ pub fn NewWebSocketClient(comptime ssl: bool) type {
// handleWithoutDeinit is supposed to clear the handler from WebSocket*
// to prevent an infinite loop
- std.debug.assert(this.initial_data_handler == null);
+ bun.assert(this.initial_data_handler == null);
// If we disconnected for any reason in the re-entrant case, we should just ignore the data
if (this.outgoing_websocket == null or this.tcp.isShutdown() or this.tcp.isClosed())
@@ -1238,7 +1238,7 @@ pub fn NewWebSocketClient(comptime ssl: bool) type {
// +---------------------------------------------------------------+
.need_header => {
if (data.len < 2) {
- std.debug.assert(data.len > 0);
+ bun.assert(data.len > 0);
if (this.header_fragment == null) {
this.header_fragment = data[0];
break;
@@ -1531,7 +1531,7 @@ pub fn NewWebSocketClient(comptime ssl: bool) type {
fn sendData(this: *WebSocket, bytes: Copy, do_write: bool, opcode: Opcode) bool {
var content_byte_len: usize = 0;
const write_len = bytes.len(&content_byte_len);
- std.debug.assert(write_len > 0);
+ bun.assert(write_len > 0);
var writable = this.send_buffer.writableWithSize(write_len) catch unreachable;
bytes.copy(this.globalThis, writable[0..write_len], content_byte_len, opcode);
@@ -1539,9 +1539,9 @@ pub fn NewWebSocketClient(comptime ssl: bool) type {
if (do_write) {
if (comptime Environment.allow_assert) {
- std.debug.assert(!this.tcp.isShutdown());
- std.debug.assert(!this.tcp.isClosed());
- std.debug.assert(this.tcp.isEstablished());
+ bun.assert(!this.tcp.isShutdown());
+ bun.assert(!this.tcp.isClosed());
+ bun.assert(this.tcp.isEstablished());
}
return this.sendBuffer(this.send_buffer.readableSlice(0));
}
@@ -1553,7 +1553,7 @@ pub fn NewWebSocketClient(comptime ssl: bool) type {
this: *WebSocket,
out_buf: []const u8,
) bool {
- std.debug.assert(out_buf.len > 0);
+ bun.assert(out_buf.len > 0);
// Do not set MSG_MORE, see https://github.com/oven-sh/bun/issues/4010
const wrote = this.tcp.write(out_buf, false);
if (wrote < 0) {
@@ -1646,7 +1646,7 @@ pub fn NewWebSocketClient(comptime ssl: bool) type {
}
pub fn handleEnd(this: *WebSocket, socket: Socket) void {
- std.debug.assert(socket.socket == this.tcp.socket);
+ bun.assert(socket.socket == this.tcp.socket);
this.terminate(ErrorCode.ended);
}
@@ -1655,7 +1655,7 @@ pub fn NewWebSocketClient(comptime ssl: bool) type {
socket: Socket,
) void {
if (this.close_received) return;
- std.debug.assert(socket.socket == this.tcp.socket);
+ bun.assert(socket.socket == this.tcp.socket);
const send_buf = this.send_buffer.readableSlice(0);
if (send_buf.len == 0)
return;
@@ -1733,7 +1733,7 @@ pub fn NewWebSocketClient(comptime ssl: bool) type {
const bytes = Copy{ .utf16 = str.utf16SliceAligned() };
var byte_len: usize = 0;
const frame_size = bytes.len(&byte_len);
- std.debug.assert(frame_size <= stack_frame_size);
+ bun.assert(frame_size <= stack_frame_size);
bytes.copy(this.globalThis, inline_buf[0..frame_size], byte_len, opcode);
_ = this.enqueueEncodedBytes(this.tcp, inline_buf[0..frame_size]);
return;
diff --git a/src/install/bin.zig b/src/install/bin.zig
index 47433c8324ed64..0a24b2016d6253 100644
--- a/src/install/bin.zig
+++ b/src/install/bin.zig
@@ -364,7 +364,7 @@ pub const Bin = extern struct {
var filename3_buf: bun.WPathBuffer = undefined;
if (comptime Environment.allow_assert) {
- std.debug.assert(strings.hasPrefixComptime(target_path, "..\\"));
+ bun.assert(strings.hasPrefixComptime(target_path, "..\\"));
}
const target_wpath = bun.strings.toWPathNormalized(&filename1_buf, target_path[3..]);
diff --git a/src/install/dependency.zig b/src/install/dependency.zig
index 98f84fee82081e..aeb48cbe040a73 100644
--- a/src/install/dependency.zig
+++ b/src/install/dependency.zig
@@ -289,7 +289,7 @@ pub const Version = struct {
}
pub fn isLessThan(string_buf: []const u8, lhs: Dependency.Version, rhs: Dependency.Version) bool {
- if (comptime Environment.allow_assert) std.debug.assert(lhs.tag == rhs.tag);
+ if (comptime Environment.allow_assert) bun.assert(lhs.tag == rhs.tag);
return strings.cmpStringsAsc({}, lhs.literal.slice(string_buf), rhs.literal.slice(string_buf));
}
@@ -832,7 +832,7 @@ pub fn parseWithTag(
alias;
// name should never be empty
- if (comptime Environment.allow_assert) std.debug.assert(!actual.isEmpty());
+ if (comptime Environment.allow_assert) bun.assert(!actual.isEmpty());
return .{
.literal = sliced.value(),
@@ -900,7 +900,7 @@ pub fn parseWithTag(
}
}
- if (comptime Environment.allow_assert) std.debug.assert(isGitHubRepoPath(input));
+ if (comptime Environment.allow_assert) bun.assert(isGitHubRepoPath(input));
var hash_index: usize = 0;
var slash_index: usize = 0;
@@ -1186,10 +1186,10 @@ pub const Behavior = packed struct(u8) {
}
comptime {
- std.debug.assert(@as(u8, @bitCast(Behavior.normal)) == (1 << 1));
- std.debug.assert(@as(u8, @bitCast(Behavior.optional)) == (1 << 2));
- std.debug.assert(@as(u8, @bitCast(Behavior.dev)) == (1 << 3));
- std.debug.assert(@as(u8, @bitCast(Behavior.peer)) == (1 << 4));
- std.debug.assert(@as(u8, @bitCast(Behavior.workspace)) == (1 << 5));
+ bun.assert(@as(u8, @bitCast(Behavior.normal)) == (1 << 1));
+ bun.assert(@as(u8, @bitCast(Behavior.optional)) == (1 << 2));
+ bun.assert(@as(u8, @bitCast(Behavior.dev)) == (1 << 3));
+ bun.assert(@as(u8, @bitCast(Behavior.peer)) == (1 << 4));
+ bun.assert(@as(u8, @bitCast(Behavior.workspace)) == (1 << 5));
}
};
diff --git a/src/install/extract_tarball.zig b/src/install/extract_tarball.zig
index 6c06083fbe711f..02a4e0e7d3bb67 100644
--- a/src/install/extract_tarball.zig
+++ b/src/install/extract_tarball.zig
@@ -176,7 +176,7 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !Install.ExtractD
}
if (comptime Environment.allow_assert) {
- std.debug.assert(tmp.len > 0);
+ bun.assert(tmp.len > 0);
}
break :brk tmp;
@@ -226,7 +226,7 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !Install.ExtractD
needs_first_dirname: bool = true,
outdirname: *[]const u8,
pub fn onFirstDirectoryName(dirname_reader: *@This(), first_dirname: []const u8) void {
- std.debug.assert(dirname_reader.needs_first_dirname);
+ bun.assert(dirname_reader.needs_first_dirname);
dirname_reader.needs_first_dirname = false;
dirname_reader.outdirname.* = FileSystem.DirnameStore.instance.append([]const u8, first_dirname) catch unreachable;
}
diff --git a/src/install/install.zig b/src/install/install.zig
index 3c91ed7748379f..d201c5aea94ae2 100644
--- a/src/install/install.zig
+++ b/src/install/install.zig
@@ -142,7 +142,7 @@ pub fn ExternalSliceAligned(comptime Type: type, comptime alignment_: ?u29) type
pub inline fn get(this: Slice, in: []const Type) []const Type {
if (comptime Environment.allow_assert) {
- std.debug.assert(this.off + this.len <= in.len);
+ bun.assert(this.off + this.len <= in.len);
}
// it should be impossible to address this out of bounds due to the minimum here
return in.ptr[this.off..@min(in.len, this.off + this.len)];
@@ -150,15 +150,15 @@ pub fn ExternalSliceAligned(comptime Type: type, comptime alignment_: ?u29) type
pub inline fn mut(this: Slice, in: []Type) []Type {
if (comptime Environment.allow_assert) {
- std.debug.assert(this.off + this.len <= in.len);
+ bun.assert(this.off + this.len <= in.len);
}
return in.ptr[this.off..@min(in.len, this.off + this.len)];
}
pub fn init(buf: []const Type, in: []const Type) Slice {
// if (comptime Environment.allow_assert) {
- // std.debug.assert(@intFromPtr(buf.ptr) <= @intFromPtr(in.ptr));
- // std.debug.assert((@intFromPtr(in.ptr) + in.len) <= (@intFromPtr(buf.ptr) + buf.len));
+ // bun.assert(@intFromPtr(buf.ptr) <= @intFromPtr(in.ptr));
+ // bun.assert((@intFromPtr(in.ptr) + in.len) <= (@intFromPtr(buf.ptr) + buf.len));
// }
return Slice{
@@ -2031,7 +2031,7 @@ pub const PackageInstall = struct {
defer _ = bun.sys.close(fd);
const size = fd.asFile().readAll(temp_buffer) catch return true;
const decoded = WinBinLinkingShim.looseDecode(temp_buffer[0..size]) orelse return true;
- std.debug.assert(decoded.flags.isValid()); // looseDecode ensures valid flags
+ bun.assert(decoded.flags.isValid()); // looseDecode ensures valid flags
break :bin_path decoded.bin_path;
};
@@ -2694,7 +2694,7 @@ pub const PackageManager = struct {
const index = this.lockfile.buffers.dependencies.items.len;
this.lockfile.buffers.dependencies.append(this.allocator, dep) catch unreachable;
this.lockfile.buffers.resolutions.append(this.allocator, invalid_package_id) catch unreachable;
- if (comptime Environment.allow_assert) std.debug.assert(this.lockfile.buffers.dependencies.items.len == this.lockfile.buffers.resolutions.items.len);
+ if (comptime Environment.allow_assert) bun.assert(this.lockfile.buffers.dependencies.items.len == this.lockfile.buffers.resolutions.items.len);
break :brk index;
}));
@@ -3544,7 +3544,7 @@ pub const PackageManager = struct {
Features.npm,
));
- if (comptime Environment.allow_assert) std.debug.assert(package.meta.id != invalid_package_id);
+ if (comptime Environment.allow_assert) bun.assert(package.meta.id != invalid_package_id);
defer successFn(this, dependency_id, package.meta.id);
return switch (this.determinePreinstallState(package, this.lockfile)) {
@@ -3635,9 +3635,9 @@ pub const PackageManager = struct {
fn assignResolution(this: *PackageManager, dependency_id: DependencyID, package_id: PackageID) void {
const buffers = &this.lockfile.buffers;
if (comptime Environment.allow_assert) {
- std.debug.assert(dependency_id < buffers.resolutions.items.len);
- std.debug.assert(package_id < this.lockfile.packages.len);
- // std.debug.assert(buffers.resolutions.items[dependency_id] == invalid_package_id);
+ bun.assert(dependency_id < buffers.resolutions.items.len);
+ bun.assert(package_id < this.lockfile.packages.len);
+ // bun.assert(buffers.resolutions.items[dependency_id] == invalid_package_id);
}
buffers.resolutions.items[dependency_id] = package_id;
const string_buf = buffers.string_bytes.items;
@@ -3651,9 +3651,9 @@ pub const PackageManager = struct {
fn assignRootResolution(this: *PackageManager, dependency_id: DependencyID, package_id: PackageID) void {
const buffers = &this.lockfile.buffers;
if (comptime Environment.allow_assert) {
- std.debug.assert(dependency_id < buffers.resolutions.items.len);
- std.debug.assert(package_id < this.lockfile.packages.len);
- std.debug.assert(buffers.resolutions.items[dependency_id] == invalid_package_id);
+ bun.assert(dependency_id < buffers.resolutions.items.len);
+ bun.assert(package_id < this.lockfile.packages.len);
+ bun.assert(buffers.resolutions.items[dependency_id] == invalid_package_id);
}
buffers.resolutions.items[dependency_id] = package_id;
const string_buf = buffers.string_bytes.items;
@@ -4327,7 +4327,7 @@ pub const PackageManager = struct {
const name_str = this.lockfile.str(&name);
const task_id = Task.Id.forManifest(name_str);
- if (comptime Environment.allow_assert) std.debug.assert(task_id != 0);
+ if (comptime Environment.allow_assert) bun.assert(task_id != 0);
if (comptime Environment.allow_assert)
debug(
@@ -4616,7 +4616,7 @@ pub const PackageManager = struct {
}
// should not trigger a network call
- if (comptime Environment.allow_assert) std.debug.assert(result.network_task == null);
+ if (comptime Environment.allow_assert) bun.assert(result.network_task == null);
if (comptime Environment.allow_assert)
debug(
@@ -5147,7 +5147,7 @@ pub const PackageManager = struct {
var builder = manager.lockfile.stringBuilder();
Lockfile.Package.Scripts.parseCount(manager.allocator, &builder, json);
builder.allocate() catch unreachable;
- if (comptime Environment.allow_assert) std.debug.assert(package_id.* != invalid_package_id);
+ if (comptime Environment.allow_assert) bun.assert(package_id.* != invalid_package_id);
var scripts = manager.lockfile.packages.items(.scripts)[package_id.*];
scripts.parseAlloc(manager.allocator, &builder, json);
scripts.filled = true;
@@ -5198,7 +5198,7 @@ pub const PackageManager = struct {
var network_tasks_batch = manager.async_network_task_queue.popBatch();
var network_tasks_iter = network_tasks_batch.iterator();
while (network_tasks_iter.next()) |task| {
- if (comptime Environment.allow_assert) std.debug.assert(manager.pendingTaskCount() > 0);
+ if (comptime Environment.allow_assert) bun.assert(manager.pendingTaskCount() > 0);
_ = manager.pending_tasks.fetchSub(1, .Monotonic);
// We cannot free the network task at the end of this scope.
// It may continue to be referenced in a future task.
@@ -5523,7 +5523,7 @@ pub const PackageManager = struct {
var resolve_tasks_batch = manager.resolve_tasks.popBatch();
var resolve_tasks_iter = resolve_tasks_batch.iterator();
while (resolve_tasks_iter.next()) |task| {
- if (comptime Environment.allow_assert) std.debug.assert(manager.pendingTaskCount() > 0);
+ if (comptime Environment.allow_assert) bun.assert(manager.pendingTaskCount() > 0);
defer manager.preallocated_resolve_tasks.put(task);
_ = manager.pending_tasks.fetchSub(1, .Monotonic);
@@ -6492,7 +6492,7 @@ pub const PackageManager = struct {
for (deps) |dep| {
if (dep.data == .e_missing) has_missing = true;
}
- std.debug.assert(has_missing);
+ bun.assert(has_missing);
}
var i = deps.len;
@@ -6512,7 +6512,7 @@ pub const PackageManager = struct {
}
if (comptime Environment.allow_assert) {
- for (deps) |dep| std.debug.assert(dep.data != .e_missing);
+ for (deps) |dep| bun.assert(dep.data != .e_missing);
}
break :brk deps;
@@ -6703,7 +6703,7 @@ pub const PackageManager = struct {
for (deps) |dep| {
if (dep.data == .e_missing) has_missing = true;
}
- std.debug.assert(has_missing);
+ bun.assert(has_missing);
}
var i = deps.len;
@@ -6723,7 +6723,7 @@ pub const PackageManager = struct {
}
if (comptime Environment.allow_assert) {
- for (deps) |dep| std.debug.assert(dep.data != .e_missing);
+ for (deps) |dep| bun.assert(dep.data != .e_missing);
}
break :brk deps;
@@ -6731,7 +6731,7 @@ pub const PackageManager = struct {
outer: for (updates) |*request| {
if (request.e_string != null) continue;
- defer if (comptime Environment.allow_assert) std.debug.assert(request.e_string != null);
+ defer if (comptime Environment.allow_assert) bun.assert(request.e_string != null);
var k: usize = 0;
while (k < new_dependencies.len) : (k += 1) {
@@ -8739,7 +8739,7 @@ pub const PackageManager = struct {
/// if the tree is finished.
pub fn incrementTreeInstallCount(this: *PackageInstaller, tree_id: Lockfile.Tree.Id, comptime log_level: Options.LogLevel) void {
if (comptime Environment.allow_assert) {
- std.debug.assert(tree_id != Lockfile.Tree.invalid_id);
+ bun.assert(tree_id != Lockfile.Tree.invalid_id);
}
const trees = this.lockfile.buffers.trees.items;
@@ -8958,8 +8958,8 @@ pub const PackageManager = struct {
comptime log_level: Options.LogLevel,
) usize {
if (comptime Environment.allow_assert) {
- std.debug.assert(resolution_tag != .root);
- std.debug.assert(package_id != 0);
+ bun.assert(resolution_tag != .root);
+ bun.assert(package_id != 0);
}
var count: usize = 0;
const scripts = brk: {
@@ -8995,7 +8995,7 @@ pub const PackageManager = struct {
};
if (comptime Environment.allow_assert) {
- std.debug.assert(scripts.filled);
+ bun.assert(scripts.filled);
}
switch (resolution_tag) {
@@ -9839,14 +9839,14 @@ pub const PackageManager = struct {
if (comptime Environment.allow_assert) {
if (trees.len > 0) {
// last tree should not depend on another except for itself
- std.debug.assert(tree_ids_to_trees_the_id_depends_on.at(trees.len - 1).count() == 1 and tree_ids_to_trees_the_id_depends_on.at(trees.len - 1).isSet(trees.len - 1));
+ bun.assert(tree_ids_to_trees_the_id_depends_on.at(trees.len - 1).count() == 1 and tree_ids_to_trees_the_id_depends_on.at(trees.len - 1).isSet(trees.len - 1));
// root tree should always depend on all trees
- std.debug.assert(tree_ids_to_trees_the_id_depends_on.at(0).count() == trees.len);
+ bun.assert(tree_ids_to_trees_the_id_depends_on.at(0).count() == trees.len);
}
// a tree should always depend on itself
for (0..trees.len) |j| {
- std.debug.assert(tree_ids_to_trees_the_id_depends_on.at(j).isSet(j));
+ bun.assert(tree_ids_to_trees_the_id_depends_on.at(j).isSet(j));
}
}
@@ -9946,7 +9946,7 @@ pub const PackageManager = struct {
}
if (comptime Environment.allow_assert) {
- std.debug.assert(node_modules.dependencies.len == this.lockfile.buffers.trees.items[installer.current_tree_id].dependencies.len);
+ bun.assert(node_modules.dependencies.len == this.lockfile.buffers.trees.items[installer.current_tree_id].dependencies.len);
}
// cache line is 64 bytes on ARM64 and x64
@@ -10570,7 +10570,7 @@ pub const PackageManager = struct {
);
if (comptime Environment.allow_assert) {
- std.debug.assert(first_index != -1);
+ bun.assert(first_index != -1);
}
if (first_index != -1) {
@@ -10592,7 +10592,7 @@ pub const PackageManager = struct {
);
if (comptime Environment.allow_assert) {
- std.debug.assert(first_index != -1);
+ bun.assert(first_index != -1);
}
inline for (entries, 0..) |maybe_entry, i| {
@@ -10738,7 +10738,7 @@ pub const PackageManager = struct {
if (manager.options.do.run_scripts) {
if (manager.root_lifecycle_scripts) |scripts| {
if (comptime Environment.allow_assert) {
- std.debug.assert(scripts.total > 0);
+ bun.assert(scripts.total > 0);
}
if (comptime log_level != .silent) {
@@ -10864,9 +10864,9 @@ pub const PackageManager = struct {
if (comptime Environment.allow_assert) {
// if packages_count is greater than 0, scripts_count must also be greater than 0.
- std.debug.assert(packages_count == 0 or scripts_count > 0);
+ bun.assert(packages_count == 0 or scripts_count > 0);
// if scripts_count is 1, it's only possible for packages_count to be 1.
- std.debug.assert(scripts_count != 1 or packages_count == 1);
+ bun.assert(scripts_count != 1 or packages_count == 1);
}
if (packages_count > 0) {
@@ -10905,7 +10905,7 @@ pub const PackageManager = struct {
var PATH = try std.ArrayList(u8).initCapacity(bun.default_allocator, original_path.len + 1 + "node_modules/.bin".len + cwd.len + 1);
var current_dir: ?*DirInfo = this_bundler.resolver.readDirInfo(cwd) catch null;
- std.debug.assert(current_dir != null);
+ bun.assert(current_dir != null);
while (current_dir) |dir| {
if (PATH.items.len > 0 and PATH.items[PATH.items.len - 1] != std.fs.path.delimiter) {
try PATH.append(std.fs.path.delimiter);
diff --git a/src/install/integrity.zig b/src/install/integrity.zig
index 6fdeb12375f119..726f824ee4a537 100644
--- a/src/install/integrity.zig
+++ b/src/install/integrity.zig
@@ -34,9 +34,9 @@ pub const Integrity = extern struct {
var i: usize = 0;
// initializer should zero it out
- if (comptime bun.Environment.allow_assert) {
+ if (comptime bun.Environment.isDebug) {
for (integrity.value) |c| {
- std.debug.assert(c == 0);
+ bun.assert(c == 0);
}
}
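A hedged sketch of why the integrity hunk above narrows its gate from `allow_assert` to `isDebug`: the whole-buffer scan is linear work, so it is kept to debug builds only. The build-mode mapping of the two flags here is an assumption for illustration:

const std = @import("std");
const builtin = @import("builtin");

// Assumed stand-ins: allow_assert covers more build modes than is_debug,
// so moving a check from one to the other narrows when it runs.
const allow_assert = builtin.mode == .Debug or builtin.mode == .ReleaseSafe;
const is_debug = builtin.mode == .Debug;

fn expectZeroed(buf: []const u8) void {
    // Whole-buffer scans like the integrity check above are O(n), so gating
    // them on is_debug keeps them out of the broader assert-enabled builds.
    if (comptime is_debug) {
        for (buf) |byte| std.debug.assert(byte == 0);
    }
}

test "a zero-initialized buffer passes the debug-only scan" {
    const buf = [_]u8{0} ** 16;
    expectZeroed(&buf);
}
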
diff --git a/src/install/lifecycle_script_runner.zig b/src/install/lifecycle_script_runner.zig
index ec060508e7c196..1043870f4359d4 100644
--- a/src/install/lifecycle_script_runner.zig
+++ b/src/install/lifecycle_script_runner.zig
@@ -52,19 +52,19 @@ pub const LifecycleScriptSubprocess = struct {
}
pub fn scriptName(this: *const LifecycleScriptSubprocess) []const u8 {
- std.debug.assert(this.current_script_index < Lockfile.Scripts.names.len);
+ bun.assert(this.current_script_index < Lockfile.Scripts.names.len);
return Lockfile.Scripts.names[this.current_script_index];
}
pub fn onReaderDone(this: *LifecycleScriptSubprocess) void {
- std.debug.assert(this.remaining_fds > 0);
+ bun.assert(this.remaining_fds > 0);
this.remaining_fds -= 1;
this.maybeFinished();
}
pub fn onReaderError(this: *LifecycleScriptSubprocess, err: bun.sys.Error) void {
- std.debug.assert(this.remaining_fds > 0);
+ bun.assert(this.remaining_fds > 0);
this.remaining_fds -= 1;
Output.prettyErrorln("error: Failed to read {s} script output from \"{s}\" due to error {d} {s}", .{
@@ -384,7 +384,7 @@ pub const LifecycleScriptSubprocess = struct {
pub fn resetPolls(this: *LifecycleScriptSubprocess) void {
if (comptime Environment.allow_assert) {
- std.debug.assert(this.remaining_fds == 0);
+ bun.assert(this.remaining_fds == 0);
}
if (this.process) |process| {
diff --git a/src/install/lockfile.zig b/src/install/lockfile.zig
index 77f8509b70c00e..e794f35d046854 100644
--- a/src/install/lockfile.zig
+++ b/src/install/lockfile.zig
@@ -192,7 +192,7 @@ pub const LoadFromDiskResult = union(enum) {
};
pub fn loadFromDisk(this: *Lockfile, allocator: Allocator, log: *logger.Log, filename: stringZ) LoadFromDiskResult {
- if (comptime Environment.allow_assert) std.debug.assert(FileSystem.instance_loaded);
+ if (comptime Environment.allow_assert) assert(FileSystem.instance_loaded);
const buf = (if (filename.len > 0)
File.readFrom(std.fs.cwd(), filename, allocator).unwrap()
@@ -532,7 +532,7 @@ pub const Tree = struct {
}
if (next.dependencies.len == 0) {
- if (comptime Environment.allow_assert) std.debug.assert(builder.list.len == next.id + 1);
+ if (comptime Environment.allow_assert) assert(builder.list.len == next.id + 1);
_ = builder.list.pop();
}
}
@@ -1512,7 +1512,7 @@ pub const Printer = struct {
behavior = dep.behavior;
// assert its sorted
- if (comptime Environment.allow_assert) std.debug.assert(dependency_behavior_change_count < 3);
+ if (comptime Environment.allow_assert) assert(dependency_behavior_change_count < 3);
}
try writer.writeAll(" ");
@@ -1539,19 +1539,19 @@ pub const Printer = struct {
};
pub fn verifyData(this: *const Lockfile) !void {
- std.debug.assert(this.format == Lockfile.FormatVersion.current);
+ assert(this.format == Lockfile.FormatVersion.current);
var i: usize = 0;
while (i < this.packages.len) : (i += 1) {
const package: Lockfile.Package = this.packages.get(i);
- std.debug.assert(this.str(&package.name).len == @as(usize, package.name.len()));
- std.debug.assert(String.Builder.stringHash(this.str(&package.name)) == @as(usize, package.name_hash));
- std.debug.assert(package.dependencies.get(this.buffers.dependencies.items).len == @as(usize, package.dependencies.len));
- std.debug.assert(package.resolutions.get(this.buffers.resolutions.items).len == @as(usize, package.resolutions.len));
- std.debug.assert(package.resolutions.get(this.buffers.resolutions.items).len == @as(usize, package.dependencies.len));
+ assert(this.str(&package.name).len == @as(usize, package.name.len()));
+ assert(String.Builder.stringHash(this.str(&package.name)) == @as(usize, package.name_hash));
+ assert(package.dependencies.get(this.buffers.dependencies.items).len == @as(usize, package.dependencies.len));
+ assert(package.resolutions.get(this.buffers.resolutions.items).len == @as(usize, package.resolutions.len));
+ assert(package.resolutions.get(this.buffers.resolutions.items).len == @as(usize, package.dependencies.len));
const dependencies = package.dependencies.get(this.buffers.dependencies.items);
for (dependencies) |dependency| {
- std.debug.assert(this.str(&dependency.name).len == @as(usize, dependency.name.len()));
- std.debug.assert(String.Builder.stringHash(this.str(&dependency.name)) == dependency.name_hash);
+ assert(this.str(&dependency.name).len == @as(usize, dependency.name.len()));
+ assert(String.Builder.stringHash(this.str(&dependency.name)) == dependency.name_hash);
}
}
}
@@ -1608,7 +1608,7 @@ pub fn saveToDisk(this: *Lockfile, filename: stringZ) void {
Output.prettyErrorln("error: failed to verify lockfile: {s}", .{@errorName(err)});
Global.crash();
};
- std.debug.assert(FileSystem.instance_loaded);
+ assert(FileSystem.instance_loaded);
}
var bytes = std.ArrayList(u8).init(bun.default_allocator);
@@ -1733,7 +1733,7 @@ pub fn getPackageID(
switch (entry) {
.PackageID => |id| {
- if (comptime Environment.allow_assert) std.debug.assert(id < resolutions.len);
+ if (comptime Environment.allow_assert) assert(id < resolutions.len);
if (resolutions[id].eql(resolution, buf, buf)) {
return id;
@@ -1745,7 +1745,7 @@ pub fn getPackageID(
},
.PackageIDMultiple => |ids| {
for (ids.items) |id| {
- if (comptime Environment.allow_assert) std.debug.assert(id < resolutions.len);
+ if (comptime Environment.allow_assert) assert(id < resolutions.len);
if (resolutions[id].eql(resolution, buf, buf)) {
return id;
@@ -1812,7 +1812,7 @@ pub fn appendPackage(this: *Lockfile, package_: Lockfile.Package) !Lockfile.Pack
fn appendPackageWithID(this: *Lockfile, package_: Lockfile.Package, id: PackageID) !Lockfile.Package {
defer {
if (comptime Environment.allow_assert) {
- std.debug.assert(this.getPackageID(package_.name_hash, null, &package_.resolution) != null);
+ assert(this.getPackageID(package_.name_hash, null, &package_.resolution) != null);
}
}
var package = package_;
@@ -1874,7 +1874,7 @@ pub const StringBuilder = struct {
}
pub fn clamp(this: *StringBuilder) void {
- if (comptime Environment.allow_assert) std.debug.assert(this.cap >= this.len);
+ if (comptime Environment.allow_assert) assert(this.cap >= this.len);
const excess = this.cap - this.len;
@@ -1906,15 +1906,15 @@ pub const StringBuilder = struct {
};
}
if (comptime Environment.allow_assert) {
- std.debug.assert(this.len <= this.cap); // didn't count everything
- std.debug.assert(this.ptr != null); // must call allocate first
+ assert(this.len <= this.cap); // didn't count everything
+ assert(this.ptr != null); // must call allocate first
}
bun.copy(u8, this.ptr.?[this.len..this.cap], slice);
const final_slice = this.ptr.?[this.len..this.cap][0..slice.len];
this.len += slice.len;
- if (comptime Environment.allow_assert) std.debug.assert(this.len <= this.cap);
+ if (comptime Environment.allow_assert) assert(this.len <= this.cap);
return switch (Type) {
String => String.init(this.lockfile.buffers.string_bytes.items, final_slice),
@@ -1933,8 +1933,8 @@ pub const StringBuilder = struct {
}
if (comptime Environment.allow_assert) {
- std.debug.assert(this.len <= this.cap); // didn't count everything
- std.debug.assert(this.ptr != null); // must call allocate first
+ assert(this.len <= this.cap); // didn't count everything
+ assert(this.ptr != null); // must call allocate first
}
const string_entry = this.lockfile.string_pool.getOrPut(hash) catch unreachable;
@@ -1946,7 +1946,7 @@ pub const StringBuilder = struct {
string_entry.value_ptr.* = String.init(this.lockfile.buffers.string_bytes.items, final_slice);
}
- if (comptime Environment.allow_assert) std.debug.assert(this.len <= this.cap);
+ if (comptime Environment.allow_assert) assert(this.len <= this.cap);
return switch (Type) {
String => string_entry.value_ptr.*,
@@ -2070,7 +2070,7 @@ pub const OverrideMap = struct {
builder: *Lockfile.StringBuilder,
) !void {
if (Environment.allow_assert) {
- std.debug.assert(this.map.entries.len == 0); // only call parse once
+ assert(this.map.entries.len == 0); // only call parse once
}
if (expr.asProperty("overrides")) |overrides| {
try this.parseFromOverrides(lockfile, root_package, json_source, log, overrides.expr, builder);
@@ -2397,7 +2397,7 @@ pub const Package = extern struct {
pub fn first(this: Package.Scripts.List) Lockfile.Scripts.Entry {
if (comptime Environment.allow_assert) {
- std.debug.assert(this.items[this.first_index] != null);
+ assert(this.items[this.first_index] != null);
}
return this.items[this.first_index].?;
}
@@ -2925,7 +2925,7 @@ pub const Package = extern struct {
};
const total_len = dependencies_list.items.len + total_dependencies_count;
- if (comptime Environment.allow_assert) std.debug.assert(dependencies_list.items.len == resolutions_list.items.len);
+ if (comptime Environment.allow_assert) assert(dependencies_list.items.len == resolutions_list.items.len);
var dependencies: []Dependency = dependencies_list.items.ptr[dependencies_list.items.len..total_len];
@memset(dependencies, Dependency{});
@@ -3025,7 +3025,7 @@ pub const Package = extern struct {
const version_strings = map.value.get(manifest.external_strings_for_versions);
total_dependencies_count += map.value.len;
- if (comptime Environment.isDebug) std.debug.assert(keys.len == version_strings.len);
+ if (comptime Environment.isDebug) assert(keys.len == version_strings.len);
for (keys, version_strings) |key, ver| {
string_builder.count(key.slice(string_buf));
@@ -3069,7 +3069,7 @@ pub const Package = extern struct {
};
const total_len = dependencies_list.items.len + total_dependencies_count;
- if (comptime Environment.allow_assert) std.debug.assert(dependencies_list.items.len == resolutions_list.items.len);
+ if (comptime Environment.allow_assert) assert(dependencies_list.items.len == resolutions_list.items.len);
var dependencies = dependencies_list.items.ptr[dependencies_list.items.len..total_len];
@memset(dependencies, .{});
@@ -3080,7 +3080,7 @@ pub const Package = extern struct {
const keys = map.name.get(manifest.external_strings);
const version_strings = map.value.get(manifest.external_strings_for_versions);
- if (comptime Environment.isDebug) std.debug.assert(keys.len == version_strings.len);
+ if (comptime Environment.isDebug) assert(keys.len == version_strings.len);
const is_peer = comptime strings.eqlComptime(group.field, "peer_dependencies");
list: for (keys, version_strings, 0..) |key, version_string_, i| {
@@ -3658,8 +3658,8 @@ pub const Package = extern struct {
);
});
if (comptime Environment.allow_assert) {
- std.debug.assert(path.len() > 0);
- std.debug.assert(!std.fs.path.isAbsolute(path.slice(buf)));
+ assert(path.len() > 0);
+ assert(!std.fs.path.isAbsolute(path.slice(buf)));
}
dependency_version.literal = path;
dependency_version.value.workspace = path;
@@ -3776,7 +3776,7 @@ pub const Package = extern struct {
pub fn insert(self: *WorkspaceMap, key: string, value: Entry) !void {
if (comptime Environment.allow_assert) {
- std.debug.assert(!strings.containsChar(key, std.fs.path.sep_windows));
+ assert(!strings.containsChar(key, std.fs.path.sep_windows));
}
if (comptime Environment.isDebug) {
@@ -4065,7 +4065,7 @@ pub const Package = extern struct {
}
const dir_fd = entry.cache.fd;
- std.debug.assert(dir_fd != bun.invalid_fd); // kind() should've opened
+ assert(dir_fd != bun.invalid_fd); // kind() should've opened
defer fallback.fixed_buffer_allocator.reset();
const workspace_entry = processWorkspaceName(
@@ -4428,7 +4428,7 @@ pub const Package = extern struct {
const off = lockfile.buffers.dependencies.items.len;
const total_len = off + total_dependencies_count;
- if (comptime Environment.allow_assert) std.debug.assert(lockfile.buffers.dependencies.items.len == lockfile.buffers.resolutions.items.len);
+ if (comptime Environment.allow_assert) assert(lockfile.buffers.dependencies.items.len == lockfile.buffers.resolutions.items.len);
const package_dependencies = lockfile.buffers.dependencies.items.ptr[off..total_len];
@@ -4493,7 +4493,7 @@ pub const Package = extern struct {
extern_strings[i] = string_builder.append(ExternalString, bin_prop.value.?.asString(allocator) orelse break :bin);
i += 1;
}
- if (comptime Environment.allow_assert) std.debug.assert(i == extern_strings.len);
+ if (comptime Environment.allow_assert) assert(i == extern_strings.len);
package.bin = .{
.tag = .map,
.value = .{ .map = ExternalStringList.init(lockfile.buffers.extern_strings.items, extern_strings) },
@@ -4882,7 +4882,7 @@ pub const Package = extern struct {
debug("save(\"{s}\") = {d} bytes", .{ field.name, std.mem.sliceAsBytes(value).len });
if (comptime strings.eqlComptime(field.name, "meta")) {
for (value) |meta| {
- std.debug.assert(meta.has_install_script != .old);
+ assert(meta.has_install_script != .old);
}
}
}
@@ -5294,7 +5294,7 @@ const Buffers = struct {
{
var external_deps = external_dependency_list.ptr;
const dependencies = this.dependencies.items;
- if (comptime Environment.allow_assert) std.debug.assert(external_dependency_list.len == dependencies.len);
+ if (comptime Environment.allow_assert) assert(external_dependency_list.len == dependencies.len);
for (dependencies) |*dep| {
dep.* = Dependency.toDependency(external_deps[0], extern_context);
external_deps += 1;
@@ -5370,16 +5370,16 @@ pub const Serializer = struct {
for (this.packages.items(.resolution)) |res| {
switch (res.tag) {
.folder => {
- std.debug.assert(!strings.containsChar(this.str(&res.value.folder), std.fs.path.sep_windows));
+ assert(!strings.containsChar(this.str(&res.value.folder), std.fs.path.sep_windows));
},
.symlink => {
- std.debug.assert(!strings.containsChar(this.str(&res.value.symlink), std.fs.path.sep_windows));
+ assert(!strings.containsChar(this.str(&res.value.symlink), std.fs.path.sep_windows));
},
.local_tarball => {
- std.debug.assert(!strings.containsChar(this.str(&res.value.local_tarball), std.fs.path.sep_windows));
+ assert(!strings.containsChar(this.str(&res.value.local_tarball), std.fs.path.sep_windows));
},
.workspace => {
- std.debug.assert(!strings.containsChar(this.str(&res.value.workspace), std.fs.path.sep_windows));
+ assert(!strings.containsChar(this.str(&res.value.workspace), std.fs.path.sep_windows));
},
else => {},
}
@@ -5688,7 +5688,7 @@ pub const Serializer = struct {
}
}
- if (comptime Environment.allow_assert) std.debug.assert(stream.pos == total_buffer_size);
+ if (comptime Environment.allow_assert) assert(stream.pos == total_buffer_size);
// const end = try reader.readInt(u64, .little);
return res;
@@ -5812,7 +5812,7 @@ pub fn resolve(this: *Lockfile, package_name: []const u8, version: Dependency.Ve
.PackageID => |id| {
const resolutions = this.packages.items(.resolution);
- if (comptime Environment.allow_assert) std.debug.assert(id < resolutions.len);
+ if (comptime Environment.allow_assert) assert(id < resolutions.len);
if (version.value.npm.version.satisfies(resolutions[id].value.npm.version, buf, buf)) {
return id;
}
@@ -5821,7 +5821,7 @@ pub fn resolve(this: *Lockfile, package_name: []const u8, version: Dependency.Ve
const resolutions = this.packages.items(.resolution);
for (ids.items) |id| {
- if (comptime Environment.allow_assert) std.debug.assert(id < resolutions.len);
+ if (comptime Environment.allow_assert) assert(id < resolutions.len);
if (version.value.npm.version.satisfies(resolutions[id].value.npm.version, buf, buf)) {
return id;
}
@@ -6191,3 +6191,5 @@ pub fn jsonStringify(this: *const Lockfile, w: anytype) !void {
}
}
}
+
+const assert = bun.assert;
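The lockfile.zig hunks above call a bare `assert(...)` and rely on the `const assert = bun.assert;` line added at the bottom of the file. A small self-contained sketch of that pattern, with `std.debug.assert` standing in for `bun.assert`:

const std = @import("std");

fn verifyLengths(len: usize, cap: usize) void {
    assert(len <= cap); // bare call, same shape as before the rename
}

// Zig container-level declarations are order-independent, so the alias can sit
// at the end of the file, as the hunks above do with `const assert = bun.assert;`.
const assert = std.debug.assert;

test "alias resolves even though it is declared last" {
    verifyLengths(3, 8);
}
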
diff --git a/src/install/migration.zig b/src/install/migration.zig
index 5d314a4280f466..ca206ad1210971 100644
--- a/src/install/migration.zig
+++ b/src/install/migration.zig
@@ -390,7 +390,7 @@ pub fn migrateNPMLockfile(this: *Lockfile, allocator: Allocator, log: *logger.Lo
const name_hash = stringHash(v.name);
if (comptime Environment.allow_assert) {
- std.debug.assert(!strings.containsChar(k, '\\'));
+ bun.assert(!strings.containsChar(k, '\\'));
}
this.workspace_paths.putAssumeCapacity(name_hash, builder.append(String, k));
@@ -432,7 +432,7 @@ pub fn migrateNPMLockfile(this: *Lockfile, allocator: Allocator, log: *logger.Lo
if (Environment.allow_assert) {
// If this is false, then it means we wrote wrong resolved ids
// During counting phase we assign all the packages an id.
- std.debug.assert(package_id == id_map.get(pkg_path).?.new_package_id);
+ bun.assert(package_id == id_map.get(pkg_path).?.new_package_id);
}
// Instead of calling this.appendPackage, manually append
@@ -498,8 +498,8 @@ pub fn migrateNPMLockfile(this: *Lockfile, allocator: Allocator, log: *logger.Lo
},
.bin = if (pkg.get("bin")) |bin| bin: {
// we already check these conditions during counting
- std.debug.assert(bin.data == .e_object);
- std.debug.assert(bin.data.e_object.properties.len > 0);
+ bun.assert(bin.data == .e_object);
+ bun.assert(bin.data.e_object.properties.len > 0);
// in npm lockfile, the bin is always an object, even if it is only a single one
// we need to detect if it's a single entry and lower it to a file.
@@ -541,8 +541,8 @@ pub fn migrateNPMLockfile(this: *Lockfile, allocator: Allocator, log: *logger.Lo
}
if (Environment.allow_assert) {
- std.debug.assert(this.buffers.extern_strings.items.len == view.off + view.len);
- std.debug.assert(this.buffers.extern_strings.items.len <= this.buffers.extern_strings.capacity);
+ bun.assert(this.buffers.extern_strings.items.len == view.off + view.len);
+ bun.assert(this.buffers.extern_strings.items.len <= this.buffers.extern_strings.capacity);
}
break :bin .{
@@ -557,7 +557,7 @@ pub fn migrateNPMLockfile(this: *Lockfile, allocator: Allocator, log: *logger.Lo
});
if (is_workspace) {
- std.debug.assert(package_id != 0); // root package should not be in it's own workspace
+ bun.assert(package_id != 0); // root package should not be in it's own workspace
// we defer doing getOrPutID for non-workspace packages because it depends on the resolution being set.
try this.getOrPutID(package_id, name_hash);
@@ -565,7 +565,7 @@ pub fn migrateNPMLockfile(this: *Lockfile, allocator: Allocator, log: *logger.Lo
}
if (Environment.allow_assert) {
- std.debug.assert(this.packages.len == package_idx);
+ bun.assert(this.packages.len == package_idx);
}
// ignoring length check because we pre-allocated it. the length may shrink later
@@ -586,7 +586,7 @@ pub fn migrateNPMLockfile(this: *Lockfile, allocator: Allocator, log: *logger.Lo
if (Environment.allow_assert) {
for (resolutions) |r| {
- std.debug.assert(r.tag == .uninitialized or r.tag == .workspace);
+ bun.assert(r.tag == .uninitialized or r.tag == .workspace);
}
}
@@ -622,8 +622,8 @@ pub fn migrateNPMLockfile(this: *Lockfile, allocator: Allocator, log: *logger.Lo
// Calculate the offset + length by pointer arithmetic
const len: u32 = @truncate((@intFromPtr(resolutions_buf.ptr) - @intFromPtr(resolutions_start)) / @sizeOf(Install.PackageID));
if (Environment.allow_assert) {
- std.debug.assert(len > 0);
- std.debug.assert(len == ((@intFromPtr(dependencies_buf.ptr) - @intFromPtr(dependencies_start)) / @sizeOf(Dependency)));
+ bun.assert(len > 0);
+ bun.assert(len == ((@intFromPtr(dependencies_buf.ptr) - @intFromPtr(dependencies_start)) / @sizeOf(Dependency)));
}
dependencies_list[package_idx] = .{
.off = @truncate((@intFromPtr(dependencies_start) - @intFromPtr(this.buffers.dependencies.items.ptr)) / @sizeOf(Dependency)),
@@ -724,7 +724,7 @@ pub fn migrateNPMLockfile(this: *Lockfile, allocator: Allocator, log: *logger.Lo
debug("-> {s}, {}\n", .{ @tagName(version.tag), version.value });
if (Environment.allow_assert) {
- std.debug.assert(version.tag != .uninitialized);
+ bun.assert(version.tag != .uninitialized);
}
const str_node_modules = if (pkg_path.len == 0) "node_modules/" else "/node_modules/";
@@ -956,8 +956,8 @@ pub fn migrateNPMLockfile(this: *Lockfile, allocator: Allocator, log: *logger.Lo
// In allow_assert, we prefill this buffer with uninitialized values that we can detect later
// It is our fault if we hit an error here, making it safe to disable in release.
if (Environment.allow_assert) {
- std.debug.assert(this.buffers.dependencies.items.len == (@intFromPtr(dependencies_buf.ptr) - @intFromPtr(this.buffers.dependencies.items.ptr)) / @sizeOf(Dependency));
- std.debug.assert(this.buffers.dependencies.items.len <= num_deps);
+ bun.assert(this.buffers.dependencies.items.len == (@intFromPtr(dependencies_buf.ptr) - @intFromPtr(this.buffers.dependencies.items.ptr)) / @sizeOf(Dependency));
+ bun.assert(this.buffers.dependencies.items.len <= num_deps);
var crash = false;
for (this.buffers.dependencies.items, 0..) |r, i| {
// 'if behavior is uninitialized'
@@ -989,7 +989,7 @@ pub fn migrateNPMLockfile(this: *Lockfile, allocator: Allocator, log: *logger.Lo
// but after we write all the data, there is no excuse for this to fail.
//
// If this is hit, it means getOrPutID was not called on this package id. Look for where 'resolution[i]' is set
- std.debug.assert(this.getPackageID(this.packages.items(.name_hash)[i], null, &r) != null);
+ bun.assert(this.getPackageID(this.packages.items(.name_hash)[i], null, &r) != null);
}
}
if (is_missing_resolutions) {
diff --git a/src/install/npm.zig b/src/install/npm.zig
index 91147553805e72..8df30abf62f6c9 100644
--- a/src/install/npm.zig
+++ b/src/install/npm.zig
@@ -960,7 +960,7 @@ pub const PackageManifest = struct {
const sliced_version = SlicedString.init(version_name, version_name);
const parsed_version = Semver.Version.parse(sliced_version);
- if (Environment.allow_assert) std.debug.assert(parsed_version.valid);
+ if (Environment.allow_assert) assert(parsed_version.valid);
if (!parsed_version.valid) {
log.addErrorFmt(&source, prop.value.?.loc, allocator, "Failed to parse dependency {s}", .{version_name}) catch unreachable;
continue;
@@ -1149,15 +1149,15 @@ pub const PackageManifest = struct {
var sliced_version = SlicedString.init(version_name, version_name);
var parsed_version = Semver.Version.parse(sliced_version);
- if (Environment.allow_assert) std.debug.assert(parsed_version.valid);
+ if (Environment.allow_assert) assert(parsed_version.valid);
// We only need to copy the version tags if it contains pre and/or build
if (parsed_version.version.tag.hasBuild() or parsed_version.version.tag.hasPre()) {
const version_string = string_builder.append(String, version_name);
sliced_version = version_string.sliced(string_buf);
parsed_version = Semver.Version.parse(sliced_version);
if (Environment.allow_assert) {
- std.debug.assert(parsed_version.valid);
- std.debug.assert(parsed_version.version.tag.hasBuild() or parsed_version.version.tag.hasPre());
+ assert(parsed_version.valid);
+ assert(parsed_version.version.tag.hasBuild() or parsed_version.version.tag.hasPre());
}
}
if (!parsed_version.valid) continue;
@@ -1522,13 +1522,13 @@ pub const PackageManifest = struct {
if (comptime Environment.allow_assert) {
const dependencies_list = @field(package_version, pair.field);
- std.debug.assert(dependencies_list.name.off < all_extern_strings.len);
- std.debug.assert(dependencies_list.value.off < all_extern_strings.len);
- std.debug.assert(dependencies_list.name.off + dependencies_list.name.len < all_extern_strings.len);
- std.debug.assert(dependencies_list.value.off + dependencies_list.value.len < all_extern_strings.len);
+ assert(dependencies_list.name.off < all_extern_strings.len);
+ assert(dependencies_list.value.off < all_extern_strings.len);
+ assert(dependencies_list.name.off + dependencies_list.name.len < all_extern_strings.len);
+ assert(dependencies_list.value.off + dependencies_list.value.len < all_extern_strings.len);
- std.debug.assert(std.meta.eql(dependencies_list.name.get(all_extern_strings), this_names));
- std.debug.assert(std.meta.eql(dependencies_list.value.get(version_extern_strings), this_versions));
+ assert(std.meta.eql(dependencies_list.name.get(all_extern_strings), this_names));
+ assert(std.meta.eql(dependencies_list.value.get(version_extern_strings), this_versions));
var j: usize = 0;
const name_dependencies = dependencies_list.name.get(all_extern_strings);
@@ -1536,31 +1536,31 @@ pub const PackageManifest = struct {
if (optional_peer_dep_names.items.len == 0) {
while (j < name_dependencies.len) : (j += 1) {
const dep_name = name_dependencies[j];
- std.debug.assert(std.mem.eql(u8, dep_name.slice(string_buf), this_names[j].slice(string_buf)));
- std.debug.assert(std.mem.eql(u8, dep_name.slice(string_buf), items[j].key.?.asString(allocator).?));
+ assert(std.mem.eql(u8, dep_name.slice(string_buf), this_names[j].slice(string_buf)));
+ assert(std.mem.eql(u8, dep_name.slice(string_buf), items[j].key.?.asString(allocator).?));
}
j = 0;
while (j < dependencies_list.value.len) : (j += 1) {
const dep_name = dependencies_list.value.get(version_extern_strings)[j];
- std.debug.assert(std.mem.eql(u8, dep_name.slice(string_buf), this_versions[j].slice(string_buf)));
- std.debug.assert(std.mem.eql(u8, dep_name.slice(string_buf), items[j].value.?.asString(allocator).?));
+ assert(std.mem.eql(u8, dep_name.slice(string_buf), this_versions[j].slice(string_buf)));
+ assert(std.mem.eql(u8, dep_name.slice(string_buf), items[j].value.?.asString(allocator).?));
}
}
} else {
while (j < name_dependencies.len) : (j += 1) {
const dep_name = name_dependencies[j];
- std.debug.assert(std.mem.eql(u8, dep_name.slice(string_buf), this_names[j].slice(string_buf)));
- std.debug.assert(std.mem.eql(u8, dep_name.slice(string_buf), items[j].key.?.asString(allocator).?));
+ assert(std.mem.eql(u8, dep_name.slice(string_buf), this_names[j].slice(string_buf)));
+ assert(std.mem.eql(u8, dep_name.slice(string_buf), items[j].key.?.asString(allocator).?));
}
j = 0;
while (j < dependencies_list.value.len) : (j += 1) {
const dep_name = dependencies_list.value.get(version_extern_strings)[j];
- std.debug.assert(std.mem.eql(u8, dep_name.slice(string_buf), this_versions[j].slice(string_buf)));
- std.debug.assert(std.mem.eql(u8, dep_name.slice(string_buf), items[j].value.?.asString(allocator).?));
+ assert(std.mem.eql(u8, dep_name.slice(string_buf), this_versions[j].slice(string_buf)));
+ assert(std.mem.eql(u8, dep_name.slice(string_buf), items[j].value.?.asString(allocator).?));
}
}
}
@@ -1613,8 +1613,8 @@ pub const PackageManifest = struct {
};
if (comptime Environment.allow_assert) {
- std.debug.assert(std.meta.eql(result.pkg.dist_tags.versions.get(all_semver_versions), dist_tag_versions[0..dist_tag_i]));
- std.debug.assert(std.meta.eql(result.pkg.dist_tags.tags.get(all_extern_strings), extern_strings_slice[0..dist_tag_i]));
+ assert(std.meta.eql(result.pkg.dist_tags.versions.get(all_semver_versions), dist_tag_versions[0..dist_tag_i]));
+ assert(std.meta.eql(result.pkg.dist_tags.tags.get(all_extern_strings), extern_strings_slice[0..dist_tag_i]));
}
extern_strings = extern_strings[dist_tag_i..];
@@ -1723,13 +1723,13 @@ pub const PackageManifest = struct {
const first = semver_versions_[0];
const second = semver_versions_[1];
const order = second.order(first, string_buf, string_buf);
- std.debug.assert(order == .gt);
+ assert(order == .gt);
}
}
}
},
else => {
- std.debug.assert(max_versions_count == 0);
+ assert(max_versions_count == 0);
},
}
@@ -1737,7 +1737,7 @@ pub const PackageManifest = struct {
const src = std.mem.sliceAsBytes(all_tarball_url_strings[0 .. all_tarball_url_strings.len - tarball_url_strings.len]);
if (src.len > 0) {
var dst = std.mem.sliceAsBytes(all_extern_strings[all_extern_strings.len - extern_strings.len ..]);
- std.debug.assert(dst.len >= src.len);
+ assert(dst.len >= src.len);
@memcpy(dst[0..src.len], src);
}
@@ -1769,3 +1769,5 @@ pub const PackageManifest = struct {
return result;
}
};
+
+const assert = bun.assert;
diff --git a/src/install/repository.zig b/src/install/repository.zig
index d05aad4f30168f..769036a124f760 100644
--- a/src/install/repository.zig
+++ b/src/install/repository.zig
@@ -78,7 +78,7 @@ pub const Repository = extern struct {
buf: []const u8,
repository: *const Repository,
pub fn format(formatter: Formatter, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void {
- if (comptime Environment.allow_assert) std.debug.assert(formatter.label.len > 0);
+ if (comptime Environment.allow_assert) bun.assert(formatter.label.len > 0);
try writer.writeAll(formatter.label);
const repo = formatter.repository.repo.slice(formatter.buf);
diff --git a/src/install/semver.zig b/src/install/semver.zig
index 320837ec65a591..84562dc2ea67aa 100644
--- a/src/install/semver.zig
+++ b/src/install/semver.zig
@@ -51,10 +51,10 @@ pub const String = extern struct {
buf: string,
in: string,
) String {
- if (comptime Environment.allow_assert) {
+ if (comptime Environment.isDebug) {
const out = realInit(buf, in);
if (!out.isInline()) {
- std.debug.assert(@as(u64, @bitCast(out.slice(buf)[0..8].*)) != undefined);
+ assert(@as(u64, @bitCast(out.slice(buf)[0..8].*)) != undefined);
}
return out;
@@ -254,7 +254,7 @@ pub const String = extern struct {
in: string,
) Pointer {
if (Environment.allow_assert) {
- std.debug.assert(bun.isSliceInBuffer(in, buf));
+ assert(bun.isSliceInBuffer(in, buf));
}
return Pointer{
@@ -350,15 +350,15 @@ pub const String = extern struct {
}
if (comptime Environment.allow_assert) {
- std.debug.assert(this.len <= this.cap); // didn't count everything
- std.debug.assert(this.ptr != null); // must call allocate first
+ assert(this.len <= this.cap); // didn't count everything
+ assert(this.ptr != null); // must call allocate first
}
bun.copy(u8, this.ptr.?[this.len..this.cap], slice_);
const final_slice = this.ptr.?[this.len..this.cap][0..slice_.len];
this.len += slice_.len;
- if (comptime Environment.allow_assert) std.debug.assert(this.len <= this.cap);
+ if (comptime Environment.allow_assert) assert(this.len <= this.cap);
switch (Type) {
String => {
@@ -385,15 +385,15 @@ pub const String = extern struct {
}
}
if (comptime Environment.allow_assert) {
- std.debug.assert(this.len <= this.cap); // didn't count everything
- std.debug.assert(this.ptr != null); // must call allocate first
+ assert(this.len <= this.cap); // didn't count everything
+ assert(this.ptr != null); // must call allocate first
}
bun.copy(u8, this.ptr.?[this.len..this.cap], slice_);
const final_slice = this.ptr.?[this.len..this.cap][0..slice_.len];
this.len += slice_.len;
- if (comptime Environment.allow_assert) std.debug.assert(this.len <= this.cap);
+ if (comptime Environment.allow_assert) assert(this.len <= this.cap);
switch (Type) {
String => {
@@ -420,8 +420,8 @@ pub const String = extern struct {
}
if (comptime Environment.allow_assert) {
- std.debug.assert(this.len <= this.cap); // didn't count everything
- std.debug.assert(this.ptr != null); // must call allocate first
+ assert(this.len <= this.cap); // didn't count everything
+ assert(this.ptr != null); // must call allocate first
}
const string_entry = this.string_pool.getOrPut(hash) catch unreachable;
@@ -433,7 +433,7 @@ pub const String = extern struct {
string_entry.value_ptr.* = String.init(this.allocatedSlice(), final_slice);
}
- if (comptime Environment.allow_assert) std.debug.assert(this.len <= this.cap);
+ if (comptime Environment.allow_assert) assert(this.len <= this.cap);
switch (Type) {
String => {
@@ -550,7 +550,7 @@ pub const BigExternalString = extern struct {
}
pub inline fn init(buf: string, in: string, hash: u64) BigExternalString {
- std.debug.assert(@intFromPtr(buf.ptr) <= @intFromPtr(in.ptr) and ((@intFromPtr(in.ptr) + in.len) <= (@intFromPtr(buf.ptr) + buf.len)));
+ assert(@intFromPtr(buf.ptr) <= @intFromPtr(in.ptr) and ((@intFromPtr(in.ptr) + in.len) <= (@intFromPtr(buf.ptr) + buf.len)));
return BigExternalString{
.off = @as(u32, @truncate(@intFromPtr(in.ptr) - @intFromPtr(buf.ptr))),
@@ -579,7 +579,7 @@ pub const SlicedString = struct {
pub inline fn external(this: SlicedString) ExternalString {
if (comptime Environment.allow_assert) {
- std.debug.assert(@intFromPtr(this.buf.ptr) <= @intFromPtr(this.slice.ptr) and ((@intFromPtr(this.slice.ptr) + this.slice.len) <= (@intFromPtr(this.buf.ptr) + this.buf.len)));
+ assert(@intFromPtr(this.buf.ptr) <= @intFromPtr(this.slice.ptr) and ((@intFromPtr(this.slice.ptr) + this.slice.len) <= (@intFromPtr(this.buf.ptr) + this.buf.len)));
}
return ExternalString.init(this.buf, this.slice, bun.Wyhash11.hash(0, this.slice));
@@ -587,7 +587,7 @@ pub const SlicedString = struct {
pub inline fn value(this: SlicedString) String {
if (comptime Environment.allow_assert) {
- std.debug.assert(@intFromPtr(this.buf.ptr) <= @intFromPtr(this.slice.ptr) and ((@intFromPtr(this.slice.ptr) + this.slice.len) <= (@intFromPtr(this.buf.ptr) + this.buf.len)));
+ assert(@intFromPtr(this.buf.ptr) <= @intFromPtr(this.slice.ptr) and ((@intFromPtr(this.slice.ptr) + this.slice.len) <= (@intFromPtr(this.buf.ptr) + this.buf.len)));
}
return String.init(this.buf, this.slice);
@@ -961,14 +961,14 @@ pub const Version = extern struct {
.pre => {
result.tag.pre = sliced_string.sub(input[start..i]).external();
if (comptime Environment.isDebug) {
- std.debug.assert(!strings.containsChar(result.tag.pre.slice(sliced_string.buf), '-'));
+ assert(!strings.containsChar(result.tag.pre.slice(sliced_string.buf), '-'));
}
state = State.none;
},
.build => {
result.tag.build = sliced_string.sub(input[start..i]).external();
if (comptime Environment.isDebug) {
- std.debug.assert(!strings.containsChar(result.tag.build.slice(sliced_string.buf), '-'));
+ assert(!strings.containsChar(result.tag.build.slice(sliced_string.buf), '-'));
}
state = State.none;
},
@@ -1244,7 +1244,7 @@ pub const Version = extern struct {
var bytes: [10]u8 = undefined;
var byte_i: u8 = 0;
- std.debug.assert(input[0] != '.');
+ assert(input[0] != '.');
for (input) |char| {
switch (char) {
@@ -1436,7 +1436,7 @@ pub const Range = struct {
pub fn satisfiesPre(range: Range, version: Version, range_buf: string, version_buf: string, pre_matched: *bool) bool {
if (comptime Environment.allow_assert) {
- std.debug.assert(version.tag.hasPre());
+ assert(version.tag.hasPre());
}
const has_left = range.hasLeft();
const has_right = range.hasRight();
@@ -1511,7 +1511,7 @@ pub const Query = struct {
pub fn satisfiesPre(list: *const List, version: Version, list_buf: string, version_buf: string) bool {
if (comptime Environment.allow_assert) {
- std.debug.assert(version.tag.hasPre());
+ assert(version.tag.hasPre());
}
// `version` has a prerelease tag:
@@ -1595,7 +1595,7 @@ pub const Query = struct {
!range.hasRight())
{
if (comptime Environment.allow_assert) {
- std.debug.assert(this.tail == null);
+ assert(this.tail == null);
}
return range.left.version;
}
@@ -1630,7 +1630,7 @@ pub const Query = struct {
}
pub fn toVersion(this: Group) Version {
- std.debug.assert(this.isExact() or this.head.head.range.left.op == .unset);
+ assert(this.isExact() or this.head.head.range.left.op == .unset);
return this.head.head.range.left.version;
}
@@ -1707,7 +1707,7 @@ pub const Query = struct {
pub fn satisfiesPre(query: *const Query, version: Version, query_buf: string, version_buf: string, pre_matched: *bool) bool {
if (comptime Environment.allow_assert) {
- std.debug.assert(version.tag.hasPre());
+ assert(version.tag.hasPre());
}
return query.range.satisfiesPre(
version,
@@ -2299,8 +2299,8 @@ const expect = if (Environment.isTest) struct {
pub var counter: usize = 0;
pub fn isRangeMatch(input: string, version_str: string) bool {
var parsed = Version.parse(SlicedString.init(version_str, version_str));
- std.debug.assert(parsed.valid);
- // std.debug.assert(strings.eql(parsed.version.raw.slice(version_str), version_str));
+ assert(parsed.valid);
+ // assert(strings.eql(parsed.version.raw.slice(version_str), version_str));
var list = Query.parse(
default_allocator,
@@ -2350,7 +2350,7 @@ const expect = if (Environment.isTest) struct {
Output.initTest();
defer counter += 1;
const result = Version.parse(SlicedString.init(input, input));
- std.debug.assert(result.valid);
+ assert(result.valid);
if (v[0] != result.version.major or v[1] != result.version.minor or v[2] != result.version.patch) {
Output.panic("Fail Expected version \"{s}\" to match \"{?d}.{?d}.{?d}\" but received \"{?d}.{?d}.{?d}\"\nAt: {s}:{d}:{d} in {s}", .{
@@ -2673,3 +2673,5 @@ test "Range parsing" {
expect.range("5.0 || 1.2 - 1.3", "5.0.2", @src());
expect.range("5.0 || 1.2 - 1.3 || >8", "9.0.2", @src());
}
+
+const assert = bun.assert;
diff --git a/src/io/PipeReader.zig b/src/io/PipeReader.zig
index 6c0a26b35027fd..5cf081f9471b93 100644
--- a/src/io/PipeReader.zig
+++ b/src/io/PipeReader.zig
@@ -421,7 +421,7 @@ pub fn WindowsPipeReader(
}
}
// ops we should not hit this lets fail with EPIPE
- std.debug.assert(false);
+ bun.assert(false);
return this.onRead(.{ .err = bun.sys.Error.fromCode(bun.C.E.PIPE, .read) }, "", .progress);
},
}
@@ -431,7 +431,7 @@ pub fn WindowsPipeReader(
if (this.flags.is_done or !this.flags.is_paused) return .{ .result = {} };
this.flags.is_paused = false;
const source: Source = this.source orelse return .{ .err = bun.sys.Error.fromCode(bun.C.E.BADF, .read) };
- std.debug.assert(!source.isClosed());
+ bun.assert(!source.isClosed());
switch (source) {
.file => |file| {
@@ -737,7 +737,7 @@ const PosixBufferedReader = struct {
fn closeWithoutReporting(this: *PosixBufferedReader) void {
if (this.getFd() != bun.invalid_fd) {
- std.debug.assert(!this.flags.closed_without_reporting);
+ bun.assert(!this.flags.closed_without_reporting);
this.flags.closed_without_reporting = true;
if (this.flags.close_handle) this.handle.close(this, {});
}
@@ -799,7 +799,7 @@ const PosixBufferedReader = struct {
return;
}
- std.debug.assert(!this.flags.is_done);
+ bun.assert(!this.flags.is_done);
this.flags.is_done = true;
}
@@ -960,7 +960,7 @@ pub const WindowsBufferedReader = struct {
}
pub fn from(to: *WindowsOutputReader, other: anytype, parent: anytype) void {
- std.debug.assert(other.source != null and to.source == null);
+ bun.assert(other.source != null and to.source == null);
to.* = .{
.vtable = to.vtable,
.flags = other.flags,
@@ -1055,7 +1055,7 @@ pub const WindowsBufferedReader = struct {
}
pub fn done(this: *WindowsOutputReader) void {
- if (this.source) |source| std.debug.assert(source.isClosed());
+ if (this.source) |source| bun.assert(source.isClosed());
this.finish();
@@ -1075,7 +1075,7 @@ pub const WindowsBufferedReader = struct {
}
pub fn startWithCurrentPipe(this: *WindowsOutputReader) bun.JSC.Maybe(void) {
- std.debug.assert(!this.source.?.isClosed());
+ bun.assert(!this.source.?.isClosed());
this.source.?.setData(this);
this.buffer().clearRetainingCapacity();
this.flags.is_done = false;
@@ -1088,7 +1088,7 @@ pub const WindowsBufferedReader = struct {
}
pub fn start(this: *WindowsOutputReader, fd: bun.FileDescriptor, _: bool) bun.JSC.Maybe(void) {
- std.debug.assert(this.source == null);
+ bun.assert(this.source == null);
const source = switch (Source.open(uv.Loop.get(), fd)) {
.err => |err| return .{ .err = err },
.result => |source| source,
diff --git a/src/io/PipeWriter.zig b/src/io/PipeWriter.zig
index ba5a502c3e206d..54f4f19e460e26 100644
--- a/src/io/PipeWriter.zig
+++ b/src/io/PipeWriter.zig
@@ -225,7 +225,7 @@ pub fn PosixBufferedWriter(
this: *PosixWriter,
err: bun.sys.Error,
) void {
- std.debug.assert(!err.isRetry());
+ bun.assert(!err.isRetry());
onError(this.parent, err);
@@ -306,7 +306,7 @@ pub fn PosixBufferedWriter(
fn closeWithoutReporting(this: *PosixWriter) void {
if (this.getFd() != bun.invalid_fd) {
- std.debug.assert(!this.closed_without_reporting);
+ bun.assert(!this.closed_without_reporting);
this.closed_without_reporting = true;
if (this.close_fd) this.handle.close(null, {});
}
@@ -350,7 +350,7 @@ pub fn PosixBufferedWriter(
pub fn start(this: *PosixWriter, fd: bun.FileDescriptor, pollable: bool) JSC.Maybe(void) {
this.pollable = pollable;
if (!pollable) {
- std.debug.assert(this.handle != .poll);
+ bun.assert(this.handle != .poll);
this.handle = .{ .fd = fd };
return JSC.Maybe(void){ .result = {} };
}
@@ -417,7 +417,7 @@ pub fn PosixStreamingWriter(
this: *PosixWriter,
err: bun.sys.Error,
) void {
- std.debug.assert(!err.isRetry());
+ bun.assert(!err.isRetry());
this.closeWithoutReporting();
this.is_done = true;
@@ -471,7 +471,7 @@ pub fn PosixStreamingWriter(
fn closeWithoutReporting(this: *PosixWriter) void {
if (this.getFd() != bun.invalid_fd) {
- std.debug.assert(!this.closed_without_reporting);
+ bun.assert(!this.closed_without_reporting);
this.closed_without_reporting = true;
this.handle.close(null, {});
}
@@ -553,7 +553,7 @@ pub fn PosixStreamingWriter(
}
fn _tryWriteNewlyBufferedData(this: *PosixWriter) WriteResult {
- std.debug.assert(!this.is_done);
+ bun.assert(!this.is_done);
switch (@This()._tryWrite(this, this.buffer.items)) {
.wrote => |amt| {
@@ -697,7 +697,7 @@ pub fn PosixStreamingWriter(
pub fn close(this: *PosixWriter) void {
if (this.closed_without_reporting) {
this.closed_without_reporting = false;
- std.debug.assert(this.getFd() == bun.invalid_fd);
+ bun.assert(this.getFd() == bun.invalid_fd);
onClose(@ptrCast(this.parent));
return;
}
@@ -830,14 +830,14 @@ fn BaseWindowsPipeWriter(
}
pub fn startWithPipe(this: *WindowsPipeWriter, pipe: *uv.Pipe) bun.JSC.Maybe(void) {
- std.debug.assert(this.source == null);
+ bun.assert(this.source == null);
this.source = .{ .pipe = pipe };
this.setParent(this.parent);
return this.startWithCurrentPipe();
}
pub fn startSync(this: *WindowsPipeWriter, fd: bun.FileDescriptor, _: bool) bun.JSC.Maybe(void) {
- std.debug.assert(this.source == null);
+ bun.assert(this.source == null);
const source = Source{
.sync_file = Source.openFile(fd),
};
@@ -848,7 +848,7 @@ fn BaseWindowsPipeWriter(
}
pub fn startWithFile(this: *WindowsPipeWriter, fd: bun.FileDescriptor) bun.JSC.Maybe(void) {
- std.debug.assert(this.source == null);
+ bun.assert(this.source == null);
const source: bun.io.Source = .{ .file = Source.openFile(fd) };
source.setData(this);
this.source = source;
@@ -857,7 +857,7 @@ fn BaseWindowsPipeWriter(
}
pub fn start(this: *WindowsPipeWriter, fd: bun.FileDescriptor, _: bool) bun.JSC.Maybe(void) {
- std.debug.assert(this.source == null);
+ bun.assert(this.source == null);
const source = switch (Source.open(uv.Loop.get(), fd)) {
.result => |source| source,
.err => |err| return .{ .err = err },
@@ -909,7 +909,7 @@ pub fn WindowsBufferedWriter(
}
pub fn startWithCurrentPipe(this: *WindowsWriter) bun.JSC.Maybe(void) {
- std.debug.assert(this.source != null);
+ bun.assert(this.source != null);
this.is_done = false;
this.write();
return .{ .result = {} };
@@ -1156,7 +1156,7 @@ pub fn WindowsStreamingWriter(
}
pub fn startWithCurrentPipe(this: *WindowsWriter) bun.JSC.Maybe(void) {
- std.debug.assert(this.source != null);
+ bun.assert(this.source != null);
this.is_done = false;
return .{ .result = {} };
}
@@ -1291,7 +1291,7 @@ pub fn WindowsStreamingWriter(
fn closeWithoutReporting(this: *WindowsWriter) void {
if (this.getFd() != bun.invalid_fd) {
- std.debug.assert(!this.closed_without_reporting);
+ bun.assert(!this.closed_without_reporting);
this.closed_without_reporting = true;
this.close();
}
diff --git a/src/io/fifo.zig b/src/io/fifo.zig
index b9b3bc961048da..64e47d8fc7b814 100644
--- a/src/io/fifo.zig
+++ b/src/io/fifo.zig
@@ -1,5 +1,5 @@
const std = @import("std");
-const assert = std.debug.assert;
+const assert = @import("root").bun.assert;
/// An intrusive first in/first out linked list.
/// The element type T must have a field called "next" of type ?*T
diff --git a/src/io/heap.zig b/src/io/heap.zig
index 37d8d4d4853ce7..95ccfdb6e5eed8 100644
--- a/src/io/heap.zig
+++ b/src/io/heap.zig
@@ -1,5 +1,5 @@
const std = @import("std");
-const assert = std.debug.assert;
+const assert = @import("root").bun.assert;
/// An intrusive heap implementation backed by a pairing heap[1] implementation.
///
diff --git a/src/io/io.zig b/src/io/io.zig
index 48bec26d2d91bd..43f2096e89fc4f 100644
--- a/src/io/io.zig
+++ b/src/io/io.zig
@@ -11,7 +11,7 @@ const log = bun.Output.scoped(.loop, false);
const TimerHeap = heap.Intrusive(Timer, void, Timer.less);
const os = std.os;
-const assert = std.debug.assert;
+const assert = bun.assert;
pub const Source = @import("./source.zig").Source;
@@ -71,7 +71,7 @@ pub const Loop = struct {
}
pub fn schedule(this: *Loop, request: *Request) void {
- std.debug.assert(!request.scheduled);
+ bun.assert(!request.scheduled);
request.scheduled = true;
this.pending.push(request);
this.waker.wake();
@@ -620,7 +620,7 @@ pub const Timer = struct {
}
if (batch.last) |last| {
- std.debug.assert(last.next == null);
+ bun.assert(last.next == null);
last.next = concurrent_task;
}
@@ -877,7 +877,7 @@ pub const Poll = struct {
log("register: {s} ({})", .{ @tagName(flag), fd });
- std.debug.assert(fd != bun.invalid_fd);
+ bun.assert(fd != bun.invalid_fd);
if (one_shot) {
this.flags.insert(.one_shot);
diff --git a/src/io/io_darwin.zig b/src/io/io_darwin.zig
index 41dc4465c428b1..577a96b6cd14fe 100644
--- a/src/io/io_darwin.zig
+++ b/src/io/io_darwin.zig
@@ -24,7 +24,7 @@ pub const system = darwin;
const fd_t = os.fd_t;
const mem = std.mem;
-const assert = std.debug.assert;
+const assert = bun.assert;
const c = std.c;
const bun = @import("root").bun;
pub const darwin = struct {
@@ -201,7 +201,7 @@ pub const Waker = struct {
// &timespec,
// );
-// std.debug.assert(errno == 0);
+// bun.assert(errno == 0);
// return UserFilterWaker{
// .kq = kq,
diff --git a/src/io/io_linux.zig b/src/io/io_linux.zig
index d786b7fbc07b56..60bfa1e08234b6 100644
--- a/src/io/io_linux.zig
+++ b/src/io/io_linux.zig
@@ -1,5 +1,5 @@
const std = @import("std");
-const assert = std.debug.assert;
+const assert = bun.assert;
const Platform = bun.analytics.GenerateHeader.GeneratePlatform;
const os = struct {
pub usingnamespace std.os;
diff --git a/src/io/source.zig b/src/io/source.zig
index 41789428b38f75..af73074b31f39a 100644
--- a/src/io/source.zig
+++ b/src/io/source.zig
@@ -138,7 +138,7 @@ pub const Source = union(enum) {
}
pub fn openFile(fd: bun.FileDescriptor) *Source.File {
- std.debug.assert(fd.isValid() and bun.uvfdcast(fd) != -1);
+ bun.assert(fd.isValid() and bun.uvfdcast(fd) != -1);
log("openFile (fd = {})", .{fd});
const file = bun.default_allocator.create(Source.File) catch bun.outOfMemory();
diff --git a/src/io/time.zig b/src/io/time.zig
index 1454d009bafcb3..c3e7dd3e6f1719 100644
--- a/src/io/time.zig
+++ b/src/io/time.zig
@@ -1,5 +1,5 @@
const std = @import("std");
-const assert = std.debug.assert;
+const assert = @import("root").bun.assert;
const is_darwin = @import("builtin").target.isDarwin();
pub const Time = struct {
diff --git a/src/js_ast.zig b/src/js_ast.zig
index 763964933c0ece..227278b62bbf89 100644
--- a/src/js_ast.zig
+++ b/src/js_ast.zig
@@ -65,7 +65,7 @@ pub fn NewBaseStore(comptime Union: anytype, comptime count: usize) type {
}
pub fn append(block: *Block, comptime ValueType: type, value: ValueType) *UnionValueType {
- if (comptime Environment.allow_assert) std.debug.assert(block.used < count);
+ if (comptime Environment.allow_assert) bun.assert(block.used < count);
const index = block.used;
block.items[index][0..value.len].* = value.*;
block.used +|= 1;
@@ -604,7 +604,7 @@ pub const CharFreq = struct {
var deltas: [255]i32 = [_]i32{0} ** 255;
var remain = text;
- std.debug.assert(remain.len >= scan_big_chunk_size);
+ bun.assert(remain.len >= scan_big_chunk_size);
const unrolled = remain.len - (remain.len % scan_big_chunk_size);
const remain_end = remain.ptr + unrolled;
@@ -1463,7 +1463,7 @@ pub const E = struct {
pub fn alphabetizeStrings(this: *Array) void {
if (comptime Environment.allow_assert) {
for (this.items.slice()) |item| {
- std.debug.assert(item.data == .e_string);
+ bun.assert(item.data == .e_string);
}
}
std.sort.pdq(Expr, this.items.slice(), {}, Sorter.isLessThan);
@@ -1849,7 +1849,7 @@ pub const E = struct {
// while (iter.next(&query_string_values_buf)) |entry| {
// str = ZigString.init(entry.name);
- // std.debug.assert(entry.values.len > 0);
+ // bun.assert(entry.values.len > 0);
// if (entry.values.len > 1) {
// var values = query_string_value_refs_buf[0..entry.values.len];
// for (entry.values) |value, i| {
@@ -2095,7 +2095,7 @@ pub const E = struct {
pub fn alphabetizeProperties(this: *Object) void {
if (comptime Environment.allow_assert) {
for (this.properties.slice()) |prop| {
- std.debug.assert(prop.key.?.data == .e_string);
+ bun.assert(prop.key.?.data == .e_string);
}
}
std.sort.pdq(G.Property, this.properties.slice(), {}, Sorter.isLessThan);
@@ -2191,8 +2191,8 @@ pub const E = struct {
pub var class = E.String{ .data = "class" };
pub fn push(this: *String, other: *String) void {
- std.debug.assert(this.isUTF8());
- std.debug.assert(other.isUTF8());
+ bun.assert(this.isUTF8());
+ bun.assert(other.isUTF8());
if (other.rope_len == 0) {
other.rope_len = @as(u32, @truncate(other.data.len));
@@ -2234,7 +2234,7 @@ pub const E = struct {
}
pub fn slice16(this: *const String) []const u16 {
- std.debug.assert(this.is_utf16);
+ bun.assert(this.is_utf16);
return @as([*]const u16, @ptrCast(@alignCast(this.data.ptr)))[0..this.data.len];
}
@@ -2494,7 +2494,7 @@ pub const E = struct {
};
}
- std.debug.assert(this.head == .cooked);
+ bun.assert(this.head == .cooked);
if (this.parts.len == 0) {
return Expr.init(E.String, this.head.cooked, loc);
@@ -2504,7 +2504,7 @@ pub const E = struct {
var head = Expr.init(E.String, this.head.cooked, loc);
for (this.parts) |part_| {
var part = part_;
- std.debug.assert(part.tail == .cooked);
+ bun.assert(part.tail == .cooked);
switch (part.value.data) {
.e_number => {
@@ -2540,7 +2540,7 @@ pub const E = struct {
continue;
} else {
var prev_part = &parts.items[parts.items.len - 1];
- std.debug.assert(prev_part.tail == .cooked);
+ bun.assert(prev_part.tail == .cooked);
if (prev_part.tail.cooked.isUTF8()) {
if (part.value.data.e_string.len() > 0) {
@@ -3359,7 +3359,7 @@ pub const Expr = struct {
}
pub fn joinAllWithComma(all: []Expr, allocator: std.mem.Allocator) Expr {
- std.debug.assert(all.len > 0);
+ bun.assert(all.len > 0);
switch (all.len) {
1 => {
return all[0];
@@ -3736,7 +3736,7 @@ pub const Expr = struct {
if (comptime Environment.isDebug) {
// Sanity check: assert string is not a null ptr
if (st.data.len > 0 and st.isUTF8()) {
- std.debug.assert(@intFromPtr(st.data.ptr) > 0);
+ bun.assert(@intFromPtr(st.data.ptr) > 0);
}
}
return Expr{
@@ -4109,7 +4109,7 @@ pub const Expr = struct {
if (comptime Environment.isDebug) {
// Sanity check: assert string is not a null ptr
if (st.data.len > 0 and st.isUTF8()) {
- std.debug.assert(@intFromPtr(st.data.ptr) > 0);
+ bun.assert(@intFromPtr(st.data.ptr) > 0);
}
}
return Expr{
@@ -6868,7 +6868,7 @@ pub const Macro = struct {
else
import_record_path;
- std.debug.assert(!isMacroPath(import_record_path_without_macro_prefix));
+ bun.assert(!isMacroPath(import_record_path_without_macro_prefix));
const input_specifier = brk: {
if (JSC.HardcodedModule.Aliases.get(import_record_path, .bun)) |replacement| {
@@ -7470,7 +7470,7 @@ pub const ASTMemoryAllocator = struct {
pub fn pop(this: *ASTMemoryAllocator) void {
const prev = this.previous;
- std.debug.assert(prev != this);
+ bun.assert(prev != this);
Stmt.Data.Store.memory_allocator = prev;
Expr.Data.Store.memory_allocator = prev;
this.previous = null;
diff --git a/src/js_lexer.zig b/src/js_lexer.zig
index 2b193a5f960833..47b8a8db3f888a 100644
--- a/src/js_lexer.zig
+++ b/src/js_lexer.zig
@@ -909,18 +909,18 @@ fn NewLexer_(
i: usize = 0,
pub fn append(fake: *FakeArrayList16, value: u16) !void {
- std.debug.assert(fake.items.len > fake.i);
+ bun.assert(fake.items.len > fake.i);
fake.items[fake.i] = value;
fake.i += 1;
}
pub fn appendAssumeCapacity(fake: *FakeArrayList16, value: u16) void {
- std.debug.assert(fake.items.len > fake.i);
+ bun.assert(fake.items.len > fake.i);
fake.items[fake.i] = value;
fake.i += 1;
}
pub fn ensureUnusedCapacity(fake: *FakeArrayList16, int: anytype) !void {
- std.debug.assert(fake.items.len > fake.i + int);
+ bun.assert(fake.items.len > fake.i + int);
}
};
threadlocal var large_escape_sequence_list: std.ArrayList(u16) = undefined;
@@ -1957,7 +1957,7 @@ fn NewLexer_(
if (@reduce(.Max, hashtag + at) == 1) {
rest.len = @intFromPtr(end) - @intFromPtr(rest.ptr);
if (comptime Environment.allow_assert) {
- std.debug.assert(
+ bun.assert(
strings.containsChar(&@as([strings.ascii_vector_size]u8, vec), '#') or
strings.containsChar(&@as([strings.ascii_vector_size]u8, vec), '@'),
);
@@ -2014,7 +2014,7 @@ fn NewLexer_(
}
if (comptime Environment.allow_assert)
- std.debug.assert(rest.len == 0 or bun.isSliceInBuffer(rest, text));
+ bun.assert(rest.len == 0 or bun.isSliceInBuffer(rest, text));
while (rest.len > 0) {
const c = rest[0];
@@ -3312,11 +3312,11 @@ fn latin1IdentifierContinueLength(name: []const u8) usize {
if (std.simd.firstIndexOfValue(@as(Vec, @bitCast(other)), 1)) |first| {
if (comptime Environment.allow_assert) {
for (vec[0..first]) |c| {
- std.debug.assert(isIdentifierContinue(c));
+ bun.assert(isIdentifierContinue(c));
}
if (vec[first] < 128)
- std.debug.assert(!isIdentifierContinue(vec[first]));
+ bun.assert(!isIdentifierContinue(vec[first]));
}
return @as(usize, first) +
@@ -3405,8 +3405,8 @@ fn skipToInterestingCharacterInMultilineComment(text_: []const u8) ?u32 {
const V1x16 = strings.AsciiVectorU1;
const text_end_len = text.len & ~(@as(usize, strings.ascii_vector_size) - 1);
- std.debug.assert(text_end_len % strings.ascii_vector_size == 0);
- std.debug.assert(text_end_len <= text.len);
+ bun.assert(text_end_len % strings.ascii_vector_size == 0);
+ bun.assert(text_end_len <= text.len);
const text_end_ptr = text.ptr + text_end_len;
@@ -3422,8 +3422,8 @@ fn skipToInterestingCharacterInMultilineComment(text_: []const u8) ?u32 {
if (@reduce(.Max, any_significant) > 0) {
const bitmask = @as(u16, @bitCast(any_significant));
const first = @ctz(bitmask);
- std.debug.assert(first < strings.ascii_vector_size);
- std.debug.assert(text.ptr[first] == '*' or text.ptr[first] == '\r' or text.ptr[first] == '\n' or text.ptr[first] > 127);
+ bun.assert(first < strings.ascii_vector_size);
+ bun.assert(text.ptr[first] == '*' or text.ptr[first] == '\r' or text.ptr[first] == '\n' or text.ptr[first] > 127);
return @as(u32, @truncate(first + (@intFromPtr(text.ptr) - @intFromPtr(text_.ptr))));
}
text.ptr += strings.ascii_vector_size;
@@ -3450,7 +3450,7 @@ fn indexOfInterestingCharacterInStringLiteral(text_: []const u8, quote: u8) ?usi
if (@reduce(.Max, any_significant) > 0) {
const bitmask = @as(u16, @bitCast(any_significant));
const first = @ctz(bitmask);
- std.debug.assert(first < strings.ascii_vector_size);
+ bun.assert(first < strings.ascii_vector_size);
return first + (@intFromPtr(text.ptr) - @intFromPtr(text_.ptr));
}
text = text[strings.ascii_vector_size..];
diff --git a/src/js_parser.zig b/src/js_parser.zig
index 9baf96527358d4..3ff5cb1992e237 100644
--- a/src/js_parser.zig
+++ b/src/js_parser.zig
@@ -51,7 +51,7 @@ fn _disabledAssert(_: bool) void {
unreachable;
}
-const assert = if (Environment.allow_assert) std.debug.assert else _disabledAssert;
+const assert = if (Environment.allow_assert) bun.assert else _disabledAssert;
const debug = Output.scoped(.JSParser, false);
const ExprListLoc = struct {
list: ExprNodeList,
@@ -137,13 +137,13 @@ const JSXImport = enum {
pub fn runtimeImportNames(this: *const Symbols, buf: *[3]string) []const string {
var i: usize = 0;
if (this.jsxDEV != null) {
- std.debug.assert(this.jsx == null); // we should never end up with this in the same file
+ bun.assert(this.jsx == null); // we should never end up with this in the same file
buf[0] = "jsxDEV";
i += 1;
}
if (this.jsx != null) {
- std.debug.assert(this.jsxDEV == null); // we should never end up with this in the same file
+ bun.assert(this.jsxDEV == null); // we should never end up with this in the same file
buf[0] = "jsx";
i += 1;
}
@@ -2839,7 +2839,7 @@ pub const Parser = struct {
transform_only: bool = false,
pub fn hashForRuntimeTranspiler(this: *const Options, hasher: *std.hash.Wyhash, did_use_jsx: bool) void {
- std.debug.assert(!this.bundle);
+ bun.assert(!this.bundle);
if (did_use_jsx) {
if (this.jsx.parse) {
@@ -3467,7 +3467,7 @@ pub const Parser = struct {
.can_be_removed_if_unused = p.stmtsCanBeRemovedIfUnused(stmts_),
});
}
- std.debug.assert(remaining_stmts.len == 0);
+ bun.assert(remaining_stmts.len == 0);
}
if (p.commonjs_named_exports.count() > 0) {
@@ -3880,7 +3880,7 @@ pub const Parser = struct {
// 3) we are not bundling.
//
if (exports_kind == .esm and (uses_dirname or uses_filename)) {
- std.debug.assert(!p.options.bundle);
+ bun.assert(!p.options.bundle);
const count = @as(usize, @intFromBool(uses_dirname)) + @as(usize, @intFromBool(uses_filename));
var declared_symbols = DeclaredSymbol.List.initCapacity(p.allocator, count) catch unreachable;
var decls = p.allocator.alloc(G.Decl, count) catch unreachable;
@@ -5003,7 +5003,7 @@ fn NewParser_(
}
pub inline fn transposeRequireResolveKnownString(p: *P, arg: Expr) Expr {
- std.debug.assert(arg.data == .e_string);
+ bun.assert(arg.data == .e_string);
// Ignore calls to import() if the control flow is provably dead here.
// We don't want to spend time scanning the required files if they will
@@ -15553,7 +15553,7 @@ fn NewParser_(
}
fn recordDeclaredSymbol(p: *P, ref: Ref) anyerror!void {
- std.debug.assert(ref.isSymbol());
+ bun.assert(ref.isSymbol());
try p.declared_symbols.append(p.allocator, DeclaredSymbol{
.ref = ref,
.is_top_level = p.current_scope == p.module_scope,
@@ -17119,7 +17119,7 @@ fn NewParser_(
fn recordUsageOfRuntimeRequire(p: *P) void {
// target bun does not have __require
if (!p.options.features.use_import_meta_require) {
- std.debug.assert(p.options.features.allow_runtime);
+ bun.assert(p.options.features.allow_runtime);
p.ensureRequireSymbol();
p.recordUsage(p.runtimeIdentifierRef(logger.Loc.Empty, "__require"));
@@ -17127,7 +17127,7 @@ fn NewParser_(
}
inline fn valueForRequire(p: *P, loc: logger.Loc) Expr {
- std.debug.assert(!p.isSourceRuntime());
+ bun.assert(!p.isSourceRuntime());
return Expr{
.data = .{
.e_require_call_target = {},
@@ -18577,7 +18577,7 @@ fn NewParser_(
// This is to handle TS decorators, mostly.
var class_stmts = p.lowerClass(.{ .stmt = s2 });
- std.debug.assert(class_stmts[0].data == .s_class);
+ bun.assert(class_stmts[0].data == .s_class);
if (class_stmts.len > 1) {
data.value.stmt = class_stmts[0];
@@ -20342,7 +20342,7 @@ fn NewParser_(
.m_dot => |_refs| {
var refs = _refs;
- std.debug.assert(refs.items.len >= 2);
+ bun.assert(refs.items.len >= 2);
defer refs.deinit(p.allocator);
var dots = p.newExpr(
diff --git a/src/js_printer.zig b/src/js_printer.zig
index 663abe811e7f48..70b40041bda722 100644
--- a/src/js_printer.zig
+++ b/src/js_printer.zig
@@ -52,7 +52,7 @@ const last_high_surrogate = 0xDBFF;
const first_low_surrogate = 0xDC00;
const last_low_surrogate = 0xDFFF;
const CodepointIterator = @import("./string_immutable.zig").UnsignedCodepointIterator;
-const assert = std.debug.assert;
+const assert = bun.assert;
threadlocal var imported_module_ids_list: std.ArrayList(u32) = undefined;
threadlocal var imported_module_ids_list_unset: bool = true;
@@ -75,7 +75,7 @@ fn formatUnsignedIntegerBetween(comptime len: u16, buf: *[len]u8, val: u64) void
}
pub fn writeModuleId(comptime Writer: type, writer: Writer, module_id: u32) void {
- std.debug.assert(module_id != 0); // either module_id is forgotten or it should be disabled
+ bun.assert(module_id != 0); // either module_id is forgotten or it should be disabled
_ = writer.writeAll("$") catch unreachable;
std.fmt.formatInt(module_id, 16, .lower, .{}, writer) catch unreachable;
}
@@ -1019,7 +1019,7 @@ fn NewPrinter(
}
fn printBunJestImportStatement(p: *Printer, import: S.Import) void {
- comptime std.debug.assert(is_bun_platform);
+ comptime bun.assert(is_bun_platform);
switch (p.options.module_type) {
.cjs => {
@@ -1379,13 +1379,13 @@ fn NewPrinter(
}
pub fn printSymbol(p: *Printer, ref: Ref) void {
- std.debug.assert(!ref.isNull());
+ bun.assert(!ref.isNull());
const name = p.renamer.nameForSymbol(ref);
p.printIdentifier(name);
}
pub fn printClauseAlias(p: *Printer, alias: string) void {
- std.debug.assert(alias.len > 0);
+ bun.assert(alias.len > 0);
if (!strings.containsNonBmpCodePointOrIsInvalidIdentifier(alias)) {
p.printSpaceBeforeIdentifier();
@@ -2250,7 +2250,7 @@ fn NewPrinter(
// This is currently only used in Bun's runtime for CommonJS modules
// referencing import.meta
if (comptime Environment.allow_assert)
- std.debug.assert(p.options.module_type == .cjs);
+ bun.assert(p.options.module_type == .cjs);
p.printSymbol(p.options.import_meta_ref);
}
@@ -3618,7 +3618,7 @@ fn NewPrinter(
}
if (comptime is_json) {
- std.debug.assert(item.initializer == null);
+ bun.assert(item.initializer == null);
}
if (item.initializer) |initial| {
@@ -4463,7 +4463,7 @@ fn NewPrinter(
p.needs_semicolon = false;
},
.s_import => |s| {
- std.debug.assert(s.import_record_index < p.import_records.len);
+ bun.assert(s.import_record_index < p.import_records.len);
const record: *const ImportRecord = p.importRecord(s.import_record_index);
@@ -4967,7 +4967,7 @@ fn NewPrinter(
}
inline fn printModuleId(p: *Printer, module_id: u32) void {
- std.debug.assert(module_id != 0); // either module_id is forgotten or it should be disabled
+ bun.assert(module_id != 0); // either module_id is forgotten or it should be disabled
p.printModuleIdAssumeEnabled(module_id);
}
@@ -5554,7 +5554,7 @@ const FileWriterInternal = struct {
return @as([*]u8, @ptrCast(&buffer.list.items.ptr[buffer.list.items.len]));
}
pub fn advanceBy(this: *FileWriterInternal, count: u32) void {
- if (comptime Environment.isDebug) std.debug.assert(buffer.list.items.len + count <= buffer.list.capacity);
+ if (comptime Environment.isDebug) bun.assert(buffer.list.items.len + count <= buffer.list.capacity);
buffer.list.items = buffer.list.items.ptr[0 .. buffer.list.items.len + count];
if (count >= 2) {
@@ -5673,7 +5673,7 @@ pub const BufferWriter = struct {
return @as([*]u8, @ptrCast(&ctx.buffer.list.items.ptr[ctx.buffer.list.items.len]));
}
pub fn advanceBy(ctx: *BufferWriter, count: u32) void {
- if (comptime Environment.isDebug) std.debug.assert(ctx.buffer.list.items.len + count <= ctx.buffer.list.capacity);
+ if (comptime Environment.isDebug) bun.assert(ctx.buffer.list.items.len + count <= ctx.buffer.list.capacity);
ctx.buffer.list.items = ctx.buffer.list.items.ptr[0 .. ctx.buffer.list.items.len + count];
diff --git a/src/json_parser.zig b/src/json_parser.zig
index a077132af2e9b3..4d5dcf770c711a 100644
--- a/src/json_parser.zig
+++ b/src/json_parser.zig
@@ -25,7 +25,7 @@ const ExprNodeIndex = js_ast.ExprNodeIndex;
const ExprNodeList = js_ast.ExprNodeList;
const StmtNodeList = js_ast.StmtNodeList;
const BindingNodeList = js_ast.BindingNodeList;
-const assert = std.debug.assert;
+const assert = bun.assert;
const LocRef = js_ast.LocRef;
const S = js_ast.S;
@@ -94,9 +94,9 @@ fn newExpr(t: anytype, loc: logger.Loc) Expr {
if (comptime Type == E.Object) {
for (t.properties.slice()) |prop| {
// json should never have an initializer set
- std.debug.assert(prop.initializer == null);
- std.debug.assert(prop.key != null);
- std.debug.assert(prop.value != null);
+ bun.assert(prop.initializer == null);
+ bun.assert(prop.key != null);
+ bun.assert(prop.value != null);
}
}
}
@@ -765,7 +765,7 @@ pub fn ParseJSONUTF8(
}
var parser = try JSONParser.init(allocator, source.*, log);
- std.debug.assert(parser.source().contents.len > 0);
+ bun.assert(parser.source().contents.len > 0);
return try parser.parseExpr(false, true);
}
@@ -799,7 +799,7 @@ pub fn ParseJSONUTF8AlwaysDecode(
.always_decode_escape_sequences = true,
}).init(allocator, source.*, log);
if (comptime Environment.allow_assert) {
- std.debug.assert(parser.source().contents.len > 0);
+ bun.assert(parser.source().contents.len > 0);
}
return try parser.parseExpr(false, true);
diff --git a/src/linear_fifo.zig b/src/linear_fifo.zig
index 424deb20480a9c..685d0c9aa3a5ce 100644
--- a/src/linear_fifo.zig
+++ b/src/linear_fifo.zig
@@ -274,7 +274,7 @@ pub fn LinearFifo(
slice = self.writableSlice(0);
}
- std.debug.assert(slice.len >= size);
+ bun.assert(slice.len >= size);
return slice[0..size];
}
diff --git a/src/linker.zig b/src/linker.zig
index 4b07c71de7b82f..074ab79dfbeb0e 100644
--- a/src/linker.zig
+++ b/src/linker.zig
@@ -714,7 +714,7 @@ pub const Linker = struct {
.absolute_url => {
if (strings.eqlComptime(namespace, "node")) {
- if (comptime Environment.isDebug) std.debug.assert(strings.eqlComptime(source_path[0..5], "node:"));
+ if (comptime Environment.isDebug) bun.assert(strings.eqlComptime(source_path[0..5], "node:"));
return Fs.Path.init(try std.fmt.allocPrint(
linker.allocator,
diff --git a/src/logger.zig b/src/logger.zig
index be31238bc84455..a90ad6410432ec 100644
--- a/src/logger.zig
+++ b/src/logger.zig
@@ -17,7 +17,7 @@ const fs = @import("fs.zig");
const unicode = std.unicode;
const Ref = @import("./ast/base.zig").Ref;
const expect = std.testing.expect;
-const assert = std.debug.assert;
+const assert = bun.assert;
const ArrayList = std.ArrayList;
const StringBuilder = @import("./string_builder.zig");
const Index = @import("./ast/base.zig").Index;
@@ -1293,7 +1293,7 @@ pub const Source = struct {
return this.identifier_name;
}
- std.debug.assert(this.path.text.len > 0);
+ bun.assert(this.path.text.len > 0);
const name = try this.path.name.nonUniqueNameString(allocator);
this.identifier_name = name;
return name;
@@ -1408,7 +1408,7 @@ pub const Source = struct {
}
pub fn initErrorPosition(self: *const Source, offset_loc: Loc) ErrorPosition {
- std.debug.assert(!offset_loc.isEmpty());
+ bun.assert(!offset_loc.isEmpty());
var prev_code_point: i32 = 0;
const offset: usize = @min(@as(usize, @intCast(offset_loc.start)), @max(self.contents.len, 1) - 1);
diff --git a/src/main.zig b/src/main.zig
index c2b6093580c64b..bd4023ee20f4ea 100644
--- a/src/main.zig
+++ b/src/main.zig
@@ -12,7 +12,7 @@ const MainPanicHandler = panic_handler.NewPanicHandler(std.builtin.default_panic
pub const io_mode = .blocking;
comptime {
- std.debug.assert(builtin.target.cpu.arch.endian() == .little);
+ bun.assert(builtin.target.cpu.arch.endian() == .little);
}
pub fn panic(msg: []const u8, trace: ?*std.builtin.StackTrace, addr: ?usize) noreturn {
diff --git a/src/memory_allocator.zig b/src/memory_allocator.zig
index 24ebf864230ab8..4505e8a208d6f7 100644
--- a/src/memory_allocator.zig
+++ b/src/memory_allocator.zig
@@ -51,7 +51,7 @@ const c = struct {
}.mi_posix_memalign;
};
const Allocator = mem.Allocator;
-const assert = std.debug.assert;
+const assert = @import("root").bun.assert;
const CAllocator = struct {
const malloc_size = c.malloc_size;
pub const supports_posix_memalign = true;
diff --git a/src/mimalloc_arena.zig b/src/mimalloc_arena.zig
index 030106a1825052..a32a0ce3d032be 100644
--- a/src/mimalloc_arena.zig
+++ b/src/mimalloc_arena.zig
@@ -6,7 +6,7 @@ const mimalloc = @import("./allocators/mimalloc.zig");
const Environment = @import("./env.zig");
const FeatureFlags = @import("./feature_flags.zig");
const Allocator = mem.Allocator;
-const assert = std.debug.assert;
+const assert = bun.assert;
const bun = @import("root").bun;
pub const GlobalArena = struct {
diff --git a/src/multi_array_list.zig b/src/multi_array_list.zig
index 70be24a1808734..f6482b7d114543 100644
--- a/src/multi_array_list.zig
+++ b/src/multi_array_list.zig
@@ -1,6 +1,6 @@
const std = @import("std");
const builtin = @import("builtin");
-const assert = std.debug.assert;
+const assert = @import("root").bun.assert;
const meta = std.meta;
const mem = std.mem;
const Allocator = mem.Allocator;
diff --git a/src/napi/napi.zig b/src/napi/napi.zig
index 0fcf760d6b2ee4..6f51c712a4b406 100644
--- a/src/napi/napi.zig
+++ b/src/napi/napi.zig
@@ -32,7 +32,7 @@ pub const Ref = opaque {
pub fn create(globalThis: *JSC.JSGlobalObject, value: JSValue) *Ref {
JSC.markBinding(@src());
var ref: *Ref = undefined;
- std.debug.assert(
+ bun.assert(
napi_create_reference(
globalThis,
value,
@@ -41,7 +41,7 @@ pub const Ref = opaque {
) == .ok,
);
if (comptime bun.Environment.isDebug) {
- std.debug.assert(ref.get() == value);
+ bun.assert(ref.get() == value);
}
return ref;
}
@@ -1184,19 +1184,19 @@ pub export fn napi_create_async_work(
}
pub export fn napi_delete_async_work(env: napi_env, work: *napi_async_work) napi_status {
log("napi_delete_async_work", .{});
- std.debug.assert(env == work.global);
+ bun.assert(env == work.global);
work.deinit();
return .ok;
}
pub export fn napi_queue_async_work(env: napi_env, work: *napi_async_work) napi_status {
log("napi_queue_async_work", .{});
- std.debug.assert(env == work.global);
+ bun.assert(env == work.global);
work.schedule();
return .ok;
}
pub export fn napi_cancel_async_work(env: napi_env, work: *napi_async_work) napi_status {
log("napi_cancel_async_work", .{});
- std.debug.assert(env == work.global);
+ bun.assert(env == work.global);
if (work.cancel()) {
return .ok;
}
@@ -1531,14 +1531,14 @@ pub export fn napi_release_threadsafe_function(func: napi_threadsafe_function, m
}
pub export fn napi_unref_threadsafe_function(env: napi_env, func: napi_threadsafe_function) napi_status {
log("napi_unref_threadsafe_function", .{});
- std.debug.assert(func.event_loop.global == env);
+ bun.assert(func.event_loop.global == env);
func.unref();
return .ok;
}
pub export fn napi_ref_threadsafe_function(env: napi_env, func: napi_threadsafe_function) napi_status {
log("napi_ref_threadsafe_function", .{});
- std.debug.assert(func.event_loop.global == env);
+ bun.assert(func.event_loop.global == env);
func.ref();
return .ok;
diff --git a/src/options.zig b/src/options.zig
index e0415a5c0381e1..f5663b8b331c0f 100644
--- a/src/options.zig
+++ b/src/options.zig
@@ -30,7 +30,7 @@ const MacroRemap = @import("./resolver/package_json.zig").MacroMap;
const DotEnv = @import("./env_loader.zig");
const ComptimeStringMap = @import("./comptime_string_map.zig").ComptimeStringMap;
-const assert = std.debug.assert;
+const assert = bun.assert;
pub const WriteDestination = enum {
stdout,
@@ -1188,7 +1188,7 @@ pub fn definesFromTransformOptions(
const framework = framework_env orelse break :load_env;
if (Environment.allow_assert) {
- std.debug.assert(framework.behavior != ._none);
+ bun.assert(framework.behavior != ._none);
}
behavior = framework.behavior;
@@ -2306,7 +2306,7 @@ pub const EntryPoint = struct {
}
fn normalizedPath(this: *const EntryPoint, allocator: std.mem.Allocator, toplevel_path: string) !string {
- std.debug.assert(std.fs.path.isAbsolute(this.path));
+ bun.assert(std.fs.path.isAbsolute(this.path));
var str = this.path;
if (strings.indexOf(str, toplevel_path)) |top| {
str = str[top + toplevel_path.len ..];
@@ -2645,7 +2645,7 @@ pub const PathTemplate = struct {
if (count == 0) {
end_len = @intFromPtr(c) - @intFromPtr(remain.ptr);
- std.debug.assert(end_len <= remain.len);
+ bun.assert(end_len <= remain.len);
break;
}
}
diff --git a/src/output.zig b/src/output.zig
index aeb45dd61e8ca7..bc4ed4f022f681 100644
--- a/src/output.zig
+++ b/src/output.zig
@@ -87,7 +87,7 @@ pub const Source = struct {
pub fn configureThread() void {
if (source_set) return;
- std.debug.assert(stdout_stream_set);
+ bun.debugAssert(stdout_stream_set);
source = Source.init(stdout_stream, stderr_stream);
}
@@ -360,17 +360,17 @@ pub noinline fn panic(comptime fmt: string, args: anytype) noreturn {
pub const WriterType: type = @TypeOf(Source.StreamType.quietWriter(undefined));
pub fn errorWriter() WriterType {
- std.debug.assert(source_set);
+ bun.debugAssert(source_set);
return source.error_stream.quietWriter();
}
pub fn errorStream() Source.StreamType {
- std.debug.assert(source_set);
+ bun.debugAssert(source_set);
return source.error_stream;
}
pub fn writer() WriterType {
- std.debug.assert(source_set);
+ bun.debugAssert(source_set);
return source.stream.quietWriter();
}
@@ -533,7 +533,7 @@ pub fn debug(comptime fmt: string, args: anytype) void {
}
pub inline fn _debug(comptime fmt: string, args: anytype) void {
- std.debug.assert(source_set);
+ bun.debugAssert(source_set);
println(fmt, args);
}
@@ -544,8 +544,7 @@ pub noinline fn print(comptime fmt: string, args: anytype) callconv(std.builtin.
std.fmt.format(source.stream.writer(), fmt, args) catch unreachable;
root.console_log(root.Uint8Array.fromSlice(source.stream.buffer[0..source.stream.pos]));
} else {
- if (comptime Environment.allow_assert)
- std.debug.assert(source_set);
+ bun.debugAssert(source_set);
// There's not much we can do if this errors. Especially if it's something like BrokenPipe.
if (enable_buffering) {
@@ -917,7 +916,7 @@ pub inline fn err(error_name: anytype, comptime fmt: []const u8, args: anytype)
// enum literals
if (info == .EnumLiteral) {
const tag = @tagName(info);
- comptime std.debug.assert(tag.len > 0); // how?
+ comptime bun.assert(tag.len > 0); // how?
if (tag[0] != 'E') break :display_name .{ "E" ++ tag, true };
break :display_name .{ tag, true };
}
@@ -954,7 +953,7 @@ pub fn enableScopedDebugWriter() void {
extern "c" fn getpid() c_int;
pub fn initScopedDebugWriterAtStartup() void {
- std.debug.assert(source_set);
+ bun.debugAssert(source_set);
if (bun.getenvZ("BUN_DEBUG")) |path| {
if (path.len > 0 and !strings.eql(path, "0") and !strings.eql(path, "false")) {
diff --git a/src/pool.zig b/src/pool.zig
index e4619b648a7d6b..75521a3c2732ef 100644
--- a/src/pool.zig
+++ b/src/pool.zig
@@ -157,7 +157,7 @@ pub fn ObjectPool(
pub fn push(allocator: std.mem.Allocator, pooled: Type) void {
if (comptime @import("./env.zig").allow_assert)
- std.debug.assert(!full());
+ @import("root").bun.assert(!full());
const new_node = allocator.create(LinkedList.Node) catch unreachable;
new_node.* = LinkedList.Node{
diff --git a/src/ref_count.zig b/src/ref_count.zig
index 2cc5c09fd300d3..996ea2f73dcfbb 100644
--- a/src/ref_count.zig
+++ b/src/ref_count.zig
@@ -1,4 +1,5 @@
const std = @import("std");
+const bun = @import("root").bun;
pub fn RefCount(comptime TypeName: type, comptime deinit_on_zero: bool) type {
return struct {
@@ -24,7 +25,7 @@ pub fn RefCount(comptime TypeName: type, comptime deinit_on_zero: bool) type {
/// Get the value & increment the reference count.
pub inline fn get(this: *@This()) *Type {
- std.debug.assert(this.count >= 0);
+ bun.assert(this.count >= 0);
this.count += 1;
return this.leak();
@@ -66,7 +67,7 @@ pub fn RefCount(comptime TypeName: type, comptime deinit_on_zero: bool) type {
pub inline fn deref(this: *@This()) void {
this.count -= 1;
- std.debug.assert(this.count >= 0);
+ bun.assert(this.count >= 0);
if (comptime deinit_on_zero) {
if (this.count <= 0) {
diff --git a/src/renamer.zig b/src/renamer.zig
index ea3196a802ad52..b12e944439f335 100644
--- a/src/renamer.zig
+++ b/src/renamer.zig
@@ -587,12 +587,12 @@ pub const NumberRenamer = struct {
var value_iter = scope.members.valueIterator();
while (value_iter.next()) |value_ref| {
if (comptime Environment.allow_assert)
- std.debug.assert(!value_ref.ref.isSourceContentsSlice());
+ bun.assert(!value_ref.ref.isSourceContentsSlice());
remaining[0] = value_ref.ref.innerIndex();
remaining = remaining[1..];
}
- std.debug.assert(remaining.len == 0);
+ bun.assert(remaining.len == 0);
std.sort.pdq(u32, sorted.items, {}, std.sort.asc(u32));
for (sorted.items) |inner_index| {
@@ -616,7 +616,7 @@ pub const NumberRenamer = struct {
// Ignore function argument scopes
if (scope.kind == .function_args and scope.children.len == 1) {
scope = scope.children.ptr[0];
- std.debug.assert(scope.kind == .function_body);
+ bun.assert(scope.kind == .function_body);
}
while (true) {
@@ -635,7 +635,7 @@ pub const NumberRenamer = struct {
scope = scope.children.ptr[0];
if (scope.kind == .function_args and scope.children.len == 1) {
scope = scope.children.ptr[0];
- std.debug.assert(scope.kind == .function_body);
+ bun.assert(scope.kind == .function_body);
}
} else {
break;
@@ -696,7 +696,7 @@ pub const NumberRenamer = struct {
pub fn find(this: *NumberScope, name: []const u8) NameUse {
// This version doesn't allocate
if (comptime Environment.allow_assert)
- std.debug.assert(JSLexer.isIdentifier(name));
+ bun.assert(JSLexer.isIdentifier(name));
// avoid rehashing the same string over for each scope
const ctx = bun.StringHashMapContext.pre(name);
diff --git a/src/resolver/data_url.zig b/src/resolver/data_url.zig
index 48cb582fdffbe7..002044ce699e60 100644
--- a/src/resolver/data_url.zig
+++ b/src/resolver/data_url.zig
@@ -31,7 +31,7 @@ pub const PercentEncoding = struct {
/// returns true if str starts with a valid path character or a percent encoded octet
pub fn isPchar(str: []const u8) bool {
- if (comptime Environment.allow_assert) std.debug.assert(str.len > 0);
+ if (comptime Environment.allow_assert) bun.assert(str.len > 0);
return switch (str[0]) {
'a'...'z', 'A'...'Z', '0'...'9', '-', '.', '_', '~', '!', '$', '&', '\'', '(', ')', '*', '+', ',', ';', '=', ':', '@' => true,
'%' => str.len >= 3 and isHex(str[1]) and isHex(str[2]),
diff --git a/src/resolver/resolve_path.zig b/src/resolver/resolve_path.zig
index e91bb39b87e385..18c3916005d650 100644
--- a/src/resolver/resolve_path.zig
+++ b/src/resolver/resolve_path.zig
@@ -709,7 +709,7 @@ pub fn normalizeStringGenericTZ(
//
// since it is theoretically possible to get here in release
// we will not do this check in release.
- std.debug.assert(!strings.hasPrefixComptimeType(T, path_, comptime strings.literal(T, ":\\")));
+ assert(!strings.hasPrefixComptimeType(T, path_, comptime strings.literal(T, ":\\")));
}
var buf_i: usize = 0;
@@ -875,7 +875,7 @@ pub fn normalizeStringGenericTZ(
const result = if (options.zero_terminate) buf[0..buf_i :0] else buf[0..buf_i];
if (bun.Environment.allow_assert and isWindows) {
- std.debug.assert(!strings.hasPrefixComptimeType(T, result, comptime strings.literal(T, "\\:\\")));
+ assert(!strings.hasPrefixComptimeType(T, result, comptime strings.literal(T, "\\:\\")));
}
return result;
@@ -1202,7 +1202,7 @@ pub fn joinZ(_parts: anytype, comptime _platform: Platform) [:0]const u8 {
pub fn joinZBuf(buf: []u8, _parts: anytype, comptime _platform: Platform) [:0]const u8 {
const joined = joinStringBuf(buf[0 .. buf.len - 1], _parts, _platform);
- std.debug.assert(bun.isSliceInBuffer(joined, buf));
+ assert(bun.isSliceInBuffer(joined, buf));
const start_offset = @intFromPtr(joined.ptr) - @intFromPtr(buf.ptr);
buf[joined.len + start_offset] = 0;
return buf[start_offset..][0..joined.len :0];
@@ -1394,7 +1394,7 @@ fn _joinAbsStringBufWindows(
buf: []u8,
parts: []const []const u8,
) ReturnType {
- std.debug.assert(std.fs.path.isAbsoluteWindows(cwd));
+ assert(std.fs.path.isAbsoluteWindows(cwd));
if (parts.len == 0) {
if (comptime is_sentinel) {
@@ -1451,7 +1451,7 @@ fn _joinAbsStringBufWindows(
};
if (set_cwd.len > 0)
- std.debug.assert(isSepAny(set_cwd[0]));
+ assert(isSepAny(set_cwd[0]));
var temp_buf: [bun.MAX_PATH_BYTES * 2]u8 = undefined;
@@ -2146,17 +2146,17 @@ pub const PosixToWinNormalizer = struct {
source_dir: []const u8,
maybe_posix_path: []const u8,
) []const u8 {
- std.debug.assert(std.fs.path.isAbsoluteWindows(maybe_posix_path));
+ assert(std.fs.path.isAbsoluteWindows(maybe_posix_path));
if (bun.Environment.isWindows) {
const root = windowsFilesystemRoot(maybe_posix_path);
if (root.len == 1) {
- std.debug.assert(isSepAny(root[0]));
+ assert(isSepAny(root[0]));
if (bun.strings.isWindowsAbsolutePathMissingDriveLetter(u8, maybe_posix_path)) {
const source_root = windowsFilesystemRoot(source_dir);
@memcpy(buf[0..source_root.len], source_root);
@memcpy(buf[source_root.len..][0 .. maybe_posix_path.len - 1], maybe_posix_path[1..]);
const res = buf[0 .. source_root.len + maybe_posix_path.len - 1];
- std.debug.assert(!bun.strings.isWindowsAbsolutePathMissingDriveLetter(u8, res));
+ assert(!bun.strings.isWindowsAbsolutePathMissingDriveLetter(u8, res));
return res;
}
}
@@ -2168,20 +2168,20 @@ pub const PosixToWinNormalizer = struct {
buf: *Buf,
maybe_posix_path: []const u8,
) ![]const u8 {
- std.debug.assert(std.fs.path.isAbsoluteWindows(maybe_posix_path));
+ assert(std.fs.path.isAbsoluteWindows(maybe_posix_path));
if (bun.Environment.isWindows) {
const root = windowsFilesystemRoot(maybe_posix_path);
if (root.len == 1) {
- std.debug.assert(isSepAny(root[0]));
+ assert(isSepAny(root[0]));
if (bun.strings.isWindowsAbsolutePathMissingDriveLetter(u8, maybe_posix_path)) {
const cwd = try std.os.getcwd(buf);
- std.debug.assert(cwd.ptr == buf.ptr);
+ assert(cwd.ptr == buf.ptr);
const source_root = windowsFilesystemRoot(cwd);
- std.debug.assert(source_root.ptr == source_root.ptr);
+ assert(source_root.ptr == source_root.ptr);
@memcpy(buf[source_root.len..][0 .. maybe_posix_path.len - 1], maybe_posix_path[1..]);
const res = buf[0 .. source_root.len + maybe_posix_path.len - 1];
- std.debug.assert(!bun.strings.isWindowsAbsolutePathMissingDriveLetter(u8, res));
+ assert(!bun.strings.isWindowsAbsolutePathMissingDriveLetter(u8, res));
return res;
}
}
@@ -2194,21 +2194,21 @@ pub const PosixToWinNormalizer = struct {
buf: *bun.PathBuffer,
maybe_posix_path: []const u8,
) ![:0]u8 {
- std.debug.assert(std.fs.path.isAbsoluteWindows(maybe_posix_path));
+ assert(std.fs.path.isAbsoluteWindows(maybe_posix_path));
if (bun.Environment.isWindows) {
const root = windowsFilesystemRoot(maybe_posix_path);
if (root.len == 1) {
- std.debug.assert(isSepAny(root[0]));
+ assert(isSepAny(root[0]));
if (bun.strings.isWindowsAbsolutePathMissingDriveLetter(u8, maybe_posix_path)) {
const cwd = try std.os.getcwd(buf);
- std.debug.assert(cwd.ptr == buf.ptr);
+ assert(cwd.ptr == buf.ptr);
const source_root = windowsFilesystemRoot(cwd);
- std.debug.assert(source_root.ptr == source_root.ptr);
+ assert(source_root.ptr == source_root.ptr);
@memcpy(buf[source_root.len..][0 .. maybe_posix_path.len - 1], maybe_posix_path[1..]);
buf[source_root.len + maybe_posix_path.len - 1] = 0;
const res = buf[0 .. source_root.len + maybe_posix_path.len - 1 :0];
- std.debug.assert(!bun.strings.isWindowsAbsolutePathMissingDriveLetter(u8, res));
+ assert(!bun.strings.isWindowsAbsolutePathMissingDriveLetter(u8, res));
return res;
}
}
@@ -2282,3 +2282,5 @@ pub fn posixToPlatformInPlace(comptime T: type, path_buffer: []T) void {
path_buffer[index] = std.fs.path.sep;
}
}
+
+const assert = bun.assert;
diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig
index d14542b152c29b..92afb977cec59a 100644
--- a/src/resolver/resolver.zig
+++ b/src/resolver/resolver.zig
@@ -55,8 +55,8 @@ pub fn isPackagePath(path: string) bool {
pub fn isPackagePathNotAbsolute(non_absolute_path: string) bool {
if (Environment.allow_assert) {
- std.debug.assert(!std.fs.path.isAbsolute(non_absolute_path));
- std.debug.assert(!strings.startsWith(non_absolute_path, "/"));
+ assert(!std.fs.path.isAbsolute(non_absolute_path));
+ assert(!strings.startsWith(non_absolute_path, "/"));
}
return !strings.startsWith(non_absolute_path, "./") and
@@ -1084,7 +1084,7 @@ pub const Resolver = struct {
bun.openFileForPath(span);
if (!store_fd) {
- std.debug.assert(bun.FDTag.get(file.handle) == .none);
+ assert(bun.FDTag.get(file.handle) == .none);
out = try bun.getFdPath(file.handle, &buf);
file.close();
query.entry.cache.fd = .zero;
@@ -1137,7 +1137,7 @@ pub const Resolver = struct {
kind: ast.ImportKind,
global_cache: GlobalCache,
) Result.Union {
- std.debug.assert(std.fs.path.isAbsolute(source_dir));
+ assert(std.fs.path.isAbsolute(source_dir));
var import_path = import_path_;
@@ -2096,7 +2096,7 @@ pub const Resolver = struct {
dir_path_maybe_trail_slash: string,
package_id: Install.PackageID,
) !?*DirInfo {
- std.debug.assert(r.package_manager != null);
+ assert(r.package_manager != null);
const dir_path = strings.pathWithoutTrailingSlashOne(dir_path_maybe_trail_slash);
@@ -2214,7 +2214,7 @@ pub const Resolver = struct {
var pm = r.getPackageManager();
if (comptime Environment.allow_assert) {
// we should never be trying to resolve a dependency that is already resolved
- std.debug.assert(pm.lockfile.resolve(esm.name, version) == null);
+ assert(pm.lockfile.resolve(esm.name, version) == null);
}
// Add the containing package to the lockfile
@@ -2578,7 +2578,7 @@ pub const Resolver = struct {
input_path = r.fs.normalizeBuf(&win32_normalized_dir_info_cache_buf, input_path);
}
- std.debug.assert(std.fs.path.isAbsolute(input_path));
+ assert(std.fs.path.isAbsolute(input_path));
const path_without_trailing_slash = strings.pathWithoutTrailingSlashOne(input_path);
assertValidCacheKey(path_without_trailing_slash);
@@ -2670,7 +2670,7 @@ pub const Resolver = struct {
}
var queue_slice: []DirEntryResolveQueueItem = bufs(.dir_entry_paths_to_resolve)[0..@as(usize, @intCast(i))];
- if (Environment.allow_assert) std.debug.assert(queue_slice.len > 0);
+ if (Environment.allow_assert) assert(queue_slice.len > 0);
var open_dir_count: usize = 0;
// When this function halts, any item not processed means it's not found.
@@ -3545,7 +3545,7 @@ pub const Resolver = struct {
}
if (Environment.allow_assert) {
- std.debug.assert(std.fs.path.isAbsolute(file.path));
+ assert(std.fs.path.isAbsolute(file.path));
}
return MatchResult{
@@ -4140,7 +4140,7 @@ pub const Dirname = struct {
const root = brk: {
if (Environment.isWindows) {
const root = ResolvePath.windowsFilesystemRoot(path);
- std.debug.assert(root.len > 0);
+ assert(root.len > 0);
break :brk root;
}
break :brk "/";
@@ -4228,3 +4228,5 @@ comptime {
_ = Resolver.Resolver__propForRequireMainPaths;
}
}
+
+const assert = bun.assert;
diff --git a/src/resolver/tsconfig_json.zig b/src/resolver/tsconfig_json.zig
index a7f4fe9db90b6b..4595b08ba6b12c 100644
--- a/src/resolver/tsconfig_json.zig
+++ b/src/resolver/tsconfig_json.zig
@@ -321,14 +321,14 @@ pub const TSConfigJSON = struct {
}
if (Environment.isDebug and has_base_url) {
- std.debug.assert(result.base_url.len > 0);
+ assert(result.base_url.len > 0);
}
const _result = allocator.create(TSConfigJSON) catch unreachable;
_result.* = result;
if (Environment.isDebug and has_base_url) {
- std.debug.assert(_result.base_url.len > 0);
+ assert(_result.base_url.len > 0);
}
return _result;
}
@@ -443,3 +443,5 @@ pub const TSConfigJSON = struct {
return false;
}
};
+
+const assert = bun.assert;
diff --git a/src/router.zig b/src/router.zig
index 2a11f738d50858..1d903befbdedf2 100644
--- a/src/router.zig
+++ b/src/router.zig
@@ -467,7 +467,7 @@ const RouteLoader = struct {
// length is extended by one
// entry.dir is a string with a trailing slash
if (comptime Environment.isDebug) {
- std.debug.assert(bun.path.isSepAny(entry.dir[base_dir.len - 1]));
+ bun.assert(bun.path.isSepAny(entry.dir[base_dir.len - 1]));
}
const public_dir = entry.dir.ptr[base_dir.len - 1 .. entry.dir.len];
@@ -529,7 +529,7 @@ pub const TinyPtr = packed struct {
const right = @intFromPtr(in.ptr) + in.len;
const end = @intFromPtr(parent.ptr) + parent.len;
if (comptime Environment.isDebug) {
- std.debug.assert(end < right);
+ bun.assert(end < right);
}
const length = @max(end, right) - right;
@@ -743,8 +743,8 @@ pub const Route = struct {
match_name = name[1..];
}
- if (Environment.allow_assert) std.debug.assert(match_name[0] != '/');
- if (Environment.allow_assert) std.debug.assert(name[0] == '/');
+ if (Environment.allow_assert) bun.assert(match_name[0] != '/');
+ if (Environment.allow_assert) bun.assert(name[0] == '/');
} else {
name = Route.index_route_name;
match_name = Route.index_route_name;
@@ -788,11 +788,11 @@ pub const Route = struct {
PathString.init(abs_path_str);
if (comptime Environment.allow_assert and Environment.isWindows) {
- std.debug.assert(!strings.containsChar(name, '\\'));
- std.debug.assert(!strings.containsChar(public_path, '\\'));
- std.debug.assert(!strings.containsChar(match_name, '\\'));
- std.debug.assert(!strings.containsChar(abs_path, '\\'));
- std.debug.assert(!strings.containsChar(entry.base(), '\\'));
+ bun.assert(!strings.containsChar(name, '\\'));
+ bun.assert(!strings.containsChar(public_path, '\\'));
+ bun.assert(!strings.containsChar(match_name, '\\'));
+ bun.assert(!strings.containsChar(abs_path, '\\'));
+ bun.assert(!strings.containsChar(entry.base(), '\\'));
}
return Route{
@@ -842,7 +842,7 @@ pub fn match(app: *Router, comptime Server: type, server: Server, comptime Reque
return;
}
- std.debug.assert(route.path.len > 0);
+ bun.assert(route.path.len > 0);
if (comptime @hasField(std.meta.Child(Server), "watcher")) {
if (server.watcher.watchloop_handle == null) {
@@ -940,7 +940,7 @@ pub const MockServer = struct {
fn makeTest(cwd_path: string, data: anytype) !void {
Output.initTest();
- std.debug.assert(cwd_path.len > 1 and !strings.eql(cwd_path, "/") and !strings.endsWith(cwd_path, "bun"));
+ bun.assert(cwd_path.len > 1 and !strings.eql(cwd_path, "/") and !strings.endsWith(cwd_path, "bun"));
const bun_tests_dir = try std.fs.cwd().makeOpenPath("bun-test-scratch", .{});
bun_tests_dir.deleteTree(cwd_path) catch {};
@@ -1217,7 +1217,7 @@ const Pattern = struct {
var count: u16 = 0;
var offset: RoutePathInt = 0;
- std.debug.assert(input.len > 0);
+ bun.assert(input.len > 0);
var kind: u4 = @intFromEnum(Tag.static);
const end = @as(u32, @truncate(input.len - 1));
while (offset < end) {
diff --git a/src/runtime.zig b/src/runtime.zig
index 36343893399497..6fa481be5a5a94 100644
--- a/src/runtime.zig
+++ b/src/runtime.zig
@@ -298,7 +298,7 @@ pub const Runtime = struct {
};
pub fn hashForRuntimeTranspiler(this: *const Features, hasher: *std.hash.Wyhash) void {
- std.debug.assert(this.runtime_transpiler_cache != null);
+ bun.assert(this.runtime_transpiler_cache != null);
var bools: [std.meta.fieldNames(@TypeOf(hash_fields_for_runtime_transpiler)).len]bool = undefined;
inline for (hash_fields_for_runtime_transpiler, 0..) |field, i| {
diff --git a/src/sha.zig b/src/sha.zig
index 9c6e3854a7f6df..d12bcfe86cde7b 100644
--- a/src/sha.zig
+++ b/src/sha.zig
@@ -15,7 +15,7 @@ fn NewHasher(comptime digest_size: comptime_int, comptime ContextType: type, com
.hasher = undefined,
};
- std.debug.assert(Init(&this.hasher) == 1);
+ bun.assert(Init(&this.hasher) == 1);
return this;
}
@@ -26,12 +26,12 @@ fn NewHasher(comptime digest_size: comptime_int, comptime ContextType: type, com
pub fn update(this: *@This(), data: []const u8) void {
@setRuntimeSafety(false);
- std.debug.assert(Update(&this.hasher, data.ptr, data.len) == 1);
+ bun.assert(Update(&this.hasher, data.ptr, data.len) == 1);
}
pub fn final(this: *@This(), out: *Digest) void {
@setRuntimeSafety(false);
- std.debug.assert(Final(out, &this.hasher) == 1);
+ bun.assert(Final(out, &this.hasher) == 1);
}
};
}
@@ -54,7 +54,7 @@ fn NewEVP(
BoringSSL.EVP_MD_CTX_init(&this.ctx);
- std.debug.assert(BoringSSL.EVP_DigestInit(&this.ctx, md) == 1);
+ bun.assert(BoringSSL.EVP_DigestInit(&this.ctx, md) == 1);
return this;
}
@@ -62,15 +62,15 @@ fn NewEVP(
pub fn hash(bytes: []const u8, out: *Digest, engine: *BoringSSL.ENGINE) void {
const md = @field(BoringSSL, MDName)();
- std.debug.assert(BoringSSL.EVP_Digest(bytes.ptr, bytes.len, out, null, md, engine) == 1);
+ bun.assert(BoringSSL.EVP_Digest(bytes.ptr, bytes.len, out, null, md, engine) == 1);
}
pub fn update(this: *@This(), data: []const u8) void {
- std.debug.assert(BoringSSL.EVP_DigestUpdate(&this.ctx, data.ptr, data.len) == 1);
+ bun.assert(BoringSSL.EVP_DigestUpdate(&this.ctx, data.ptr, data.len) == 1);
}
pub fn final(this: *@This(), out: *Digest) void {
- std.debug.assert(BoringSSL.EVP_DigestFinal(&this.ctx, out, null) == 1);
+ bun.assert(BoringSSL.EVP_DigestFinal(&this.ctx, out, null) == 1);
}
pub fn deinit(this: *@This()) void {
diff --git a/src/shell/braces.zig b/src/shell/braces.zig
index 6fa8583aa72015..e8da1aa7bd5eb9 100644
--- a/src/shell/braces.zig
+++ b/src/shell/braces.zig
@@ -280,7 +280,7 @@ fn expandFlat(
.open => |expansion_variants| {
depth += 1;
if (bun.Environment.allow_assert) {
- std.debug.assert(expansion_variants.end - expansion_variants.idx >= 1);
+ assert(expansion_variants.end - expansion_variants.idx >= 1);
}
var variants = expansion_table[expansion_variants.idx..expansion_variants.end];
@@ -418,7 +418,7 @@ pub const Parser = struct {
}
fn expect(self: *Parser, toktag: TokenTag) Token {
- std.debug.assert(toktag == @as(TokenTag, self.peek()));
+ assert(toktag == @as(TokenTag, self.peek()));
if (self.check(toktag)) {
return self.advance();
}
@@ -586,7 +586,7 @@ pub fn buildExpansionTable(
if (bun.Environment.allow_assert) {
for (table.items[0..], 0..) |variant, kdjsd| {
_ = kdjsd;
- std.debug.assert(variant.start != 0 and variant.end != 0);
+ assert(variant.start != 0 and variant.end != 0);
}
}
}
@@ -727,7 +727,7 @@ pub fn NewLexer(comptime encoding: Encoding) type {
fn rollbackBraces(self: *@This(), starting_idx: u32) !void {
if (bun.Environment.allow_assert) {
const first = &self.tokens.items[starting_idx];
- std.debug.assert(first.* == .open);
+ assert(first.* == .open);
}
var braces: u8 = 0;
@@ -804,3 +804,5 @@ pub fn NewLexer(comptime encoding: Encoding) type {
}
};
}
+
+const assert = bun.assert;
diff --git a/src/shell/interpreter.zig b/src/shell/interpreter.zig
index ebc73ddf530e4b..ef81a9d9866c6f 100644
--- a/src/shell/interpreter.zig
+++ b/src/shell/interpreter.zig
@@ -65,13 +65,7 @@ pub fn OOM(e: anyerror) noreturn {
const log = bun.Output.scoped(.SHELL, false);
-pub fn assert(cond: bool, comptime msg: []const u8) void {
- if (bun.Environment.allow_assert) {
- std.debug.assert(cond);
- } else {
- @panic("Assertion failed: " ++ msg);
- }
-}
+const assert = bun.assert;
pub const ExitCode = u16;
@@ -207,7 +201,7 @@ const CowFd = struct {
}
pub fn deinit(this: *CowFd) void {
- std.debug.assert(this.refcount == 0);
+ assert(this.refcount == 0);
_ = bun.sys.close(this.__fd);
bun.default_allocator.destroy(this);
}
@@ -320,7 +314,7 @@ pub const IO = struct {
comptime fmt_: []const u8,
args: anytype,
) void {
- if (bun.Environment.allow_assert) std.debug.assert(this.* == .fd);
+ if (bun.Environment.allow_assert) assert(this.* == .fd);
this.fd.writer.enqueueFmtBltn(ptr, this.fd.captured, kind, fmt_, args);
}
@@ -878,8 +872,8 @@ pub const Interpreter = struct {
this.__cwd.appendSlice(new_cwd[0 .. new_cwd.len + 1]) catch bun.outOfMemory();
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(this.__cwd.items[this.__cwd.items.len -| 1] == 0);
- std.debug.assert(this.__prev_cwd.items[this.__prev_cwd.items.len -| 1] == 0);
+ assert(this.__cwd.items[this.__cwd.items.len -| 1] == 0);
+ assert(this.__prev_cwd.items[this.__prev_cwd.items.len -| 1] == 0);
}
this.cwd_fd = new_cwd_fd;
@@ -995,7 +989,7 @@ pub const Interpreter = struct {
&lex_result,
) catch |err| {
if (err == shell.ParseError.Lex) {
- std.debug.assert(lex_result != null);
+ assert(lex_result != null);
const str = lex_result.?.combineErrors(arena.allocator());
globalThis.throwPretty("{s}", .{str});
return null;
@@ -1003,7 +997,7 @@ pub const Interpreter = struct {
if (parser) |*p| {
if (bun.Environment.allow_assert) {
- std.debug.assert(p.errors.items.len > 0);
+ assert(p.errors.items.len > 0);
}
const errstr = p.combineErrors();
globalThis.throwPretty("{s}", .{errstr});
@@ -1139,7 +1133,7 @@ pub const Interpreter = struct {
cwd_arr.appendSlice(cwd[0 .. cwd.len + 1]) catch bun.outOfMemory();
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(cwd_arr.items[cwd_arr.items.len -| 1] == 0);
+ assert(cwd_arr.items[cwd_arr.items.len -| 1] == 0);
}
log("Duping stdin", .{});
@@ -1210,7 +1204,7 @@ pub const Interpreter = struct {
&out_lex_result,
) catch |err| {
if (err == bun.shell.ParseError.Lex) {
- std.debug.assert(out_lex_result != null);
+ assert(out_lex_result != null);
const str = out_lex_result.?.combineErrors(arena.allocator());
bun.Output.prettyErrorln("error: Failed to run {s} due to error {s}", .{ std.fs.path.basename(path), str });
bun.Global.exit(1);
@@ -1273,7 +1267,7 @@ pub const Interpreter = struct {
var out_lex_result: ?bun.shell.LexResult = null;
const script = ThisInterpreter.parse(&arena, src, jsobjs, &[_]bun.String{}, &out_parser, &out_lex_result) catch |err| {
if (err == bun.shell.ParseError.Lex) {
- std.debug.assert(out_lex_result != null);
+ assert(out_lex_result != null);
const str = out_lex_result.?.combineErrors(arena.allocator());
bun.Output.prettyErrorln("error: Failed to run script {s} due to error {s}", .{ path_for_errors, str });
bun.Global.exit(1);
@@ -1727,7 +1721,7 @@ pub const Interpreter = struct {
pub fn pushResultSlice(this: *Result, buf: [:0]const u8) void {
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(buf[buf.len] == 0);
+ assert(buf[buf.len] == 0);
}
switch (this.*) {
@@ -1747,7 +1741,7 @@ pub const Interpreter = struct {
pub fn pushResult(this: *Result, buf: *std.ArrayList(u8)) void {
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(buf.items[buf.items.len - 1] == 0);
+ assert(buf.items[buf.items.len - 1] == 0);
}
switch (this.*) {
@@ -1807,8 +1801,8 @@ pub const Interpreter = struct {
pub fn start(this: *Expansion) void {
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(this.child_state == .idle);
- std.debug.assert(this.word_idx == 0);
+ assert(this.child_state == .idle);
+ assert(this.word_idx == 0);
}
this.state = .normal;
@@ -1853,7 +1847,7 @@ pub const Interpreter = struct {
}
// Shouldn't fall through to here
- std.debug.assert(this.word_idx >= this.node.atomsLen());
+ assert(this.word_idx >= this.node.atomsLen());
return;
},
.braces => {
@@ -1868,7 +1862,7 @@ pub const Interpreter = struct {
var expanded_strings = brk: {
const stack_max = comptime 16;
comptime {
- std.debug.assert(@sizeOf([]std.ArrayList(u8)) * stack_max <= 256);
+ assert(@sizeOf([]std.ArrayList(u8)) * stack_max <= 256);
}
var maybe_stack_alloc = std.heap.stackFallback(@sizeOf([]std.ArrayList(u8)) * stack_max, this.base.interpreter.allocator);
const expanded_strings = maybe_stack_alloc.get().alloc(std.ArrayList(u8), expansion_count) catch bun.outOfMemory();
@@ -2104,14 +2098,14 @@ pub const Interpreter = struct {
fn childDone(this: *Expansion, child: ChildPtr, exit_code: ExitCode) void {
_ = exit_code;
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(this.state != .done and this.state != .err);
- std.debug.assert(this.child_state != .idle);
+ assert(this.state != .done and this.state != .err);
+ assert(this.child_state != .idle);
}
// Command substitution
if (child.ptr.is(Script)) {
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(this.child_state == .cmd_subst);
+ assert(this.child_state == .cmd_subst);
}
const stdout = this.child_state.cmd_subst.cmd.base.shell.buffered_stdout().slice();
@@ -2135,7 +2129,7 @@ pub const Interpreter = struct {
fn onGlobWalkDone(this: *Expansion, task: *ShellGlobTask) void {
log("{} onGlobWalkDone", .{this});
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(this.child_state == .glob);
+ assert(this.child_state == .glob);
}
if (task.err) |*err| {
@@ -2801,9 +2795,9 @@ pub const Interpreter = struct {
pub fn start(this: *Stmt) void {
if (bun.Environment.allow_assert) {
- std.debug.assert(this.idx == 0);
- std.debug.assert(this.last_exit_code == null);
- std.debug.assert(this.currently_executing == null);
+ assert(this.idx == 0);
+ assert(this.last_exit_code == null);
+ assert(this.currently_executing == null);
}
this.next();
}
@@ -2933,9 +2927,9 @@ pub const Interpreter = struct {
fn start(this: *Binary) void {
log("binary start {x} ({s})", .{ @intFromPtr(this), @tagName(this.node.op) });
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(this.left == null);
- std.debug.assert(this.right == null);
- std.debug.assert(this.currently_executing == null);
+ assert(this.left == null);
+ assert(this.right == null);
+ assert(this.currently_executing == null);
}
this.currently_executing = this.makeChild(true);
@@ -2997,8 +2991,8 @@ pub const Interpreter = struct {
pub fn childDone(this: *Binary, child: ChildPtr, exit_code: ExitCode) void {
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(this.left == null or this.right == null);
- std.debug.assert(this.currently_executing != null);
+ assert(this.left == null or this.right == null);
+ assert(this.currently_executing != null);
}
log("binary child done {x} ({s}) {s}", .{ @intFromPtr(this), @tagName(this.node.op), if (this.left == null) "left" else "right" });
@@ -3198,7 +3192,7 @@ pub const Interpreter = struct {
};
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(this.exited_count == 0);
+ assert(this.exited_count == 0);
}
log("pipeline start {x} (count={d})", .{ @intFromPtr(this), this.node.items.len });
if (this.node.items.len == 0) {
@@ -3208,7 +3202,7 @@ pub const Interpreter = struct {
}
for (cmds) |*cmd_or_result| {
- std.debug.assert(cmd_or_result.* == .cmd);
+ assert(cmd_or_result.* == .cmd);
log("Pipeline start cmd", .{});
var cmd = cmd_or_result.cmd;
cmd.call("start", .{}, void);
@@ -3217,7 +3211,7 @@ pub const Interpreter = struct {
pub fn onIOWriterChunk(this: *Pipeline, _: usize, err: ?JSC.SystemError) void {
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(this.state == .waiting_write_err);
+ assert(this.state == .waiting_write_err);
}
if (err) |e| {
@@ -3231,7 +3225,7 @@ pub const Interpreter = struct {
pub fn childDone(this: *Pipeline, child: ChildPtr, exit_code: ExitCode) void {
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(this.cmds.?.len > 0);
+ assert(this.cmds.?.len > 0);
}
const idx = brk: {
@@ -3497,7 +3491,7 @@ pub const Interpreter = struct {
pub fn onIOWriterChunk(this: *Subshell, _: usize, err: ?JSC.SystemError) void {
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(this.state == .wait_write_err);
+ assert(this.state == .wait_write_err);
}
if (err) |e| {
@@ -3834,7 +3828,7 @@ pub const Interpreter = struct {
}
},
.waiting_write_err => return,
- .done => std.debug.assert(false),
+ .done => assert(false),
}
}
@@ -3912,7 +3906,7 @@ pub const Interpreter = struct {
}
pub fn onStatTaskComplete(this: *CondExpr, result: Maybe(bun.Stat)) void {
- if (bun.Environment.allow_assert) std.debug.assert(this.state == .waiting_stat);
+ if (bun.Environment.allow_assert) assert(this.state == .waiting_stat);
this.state = .{
.stat_complete = .{ .stat = result },
@@ -4492,7 +4486,7 @@ pub const Interpreter = struct {
this.base.throw(&bun.shell.ShellErr.newSys(err));
return;
}
- std.debug.assert(this.state == .waiting_write_err);
+ assert(this.state == .waiting_write_err);
this.parent.childDone(this, 1);
return;
}
@@ -4591,7 +4585,7 @@ pub const Interpreter = struct {
if (coro_result == .yield) return;
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(this.exec == .bltn);
+ assert(this.exec == .bltn);
}
log("Builtin name: {s}", .{@tagName(this.exec)});
@@ -4877,7 +4871,7 @@ pub const Interpreter = struct {
pub fn bufferedOutputCloseStdout(this: *Cmd, err: ?JSC.SystemError) void {
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(this.exec == .subproc);
+ assert(this.exec == .subproc);
}
log("cmd ({x}) close buffered stdout", .{@intFromPtr(this)});
if (err) |e| {
@@ -4894,7 +4888,7 @@ pub const Interpreter = struct {
pub fn bufferedOutputCloseStderr(this: *Cmd, err: ?JSC.SystemError) void {
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(this.exec == .subproc);
+ assert(this.exec == .subproc);
}
log("cmd ({x}) close buffered stderr", .{@intFromPtr(this)});
if (err) |e| {
@@ -5062,12 +5056,12 @@ pub const Interpreter = struct {
comptime fmt_: []const u8,
args: anytype,
) void {
- if (bun.Environment.allow_assert) std.debug.assert(this.* == .fd);
+ if (bun.Environment.allow_assert) assert(this.* == .fd);
this.fd.writer.enqueueFmtBltn(ptr, this.fd.captured, kind, fmt_, args);
}
pub fn enqueue(this: *@This(), ptr: anytype, buf: []const u8) void {
- if (bun.Environment.allow_assert) std.debug.assert(this.* == .fd);
+ if (bun.Environment.allow_assert) assert(this.* == .fd);
this.fd.writer.enqueue(ptr, this.fd.captured, buf);
}
};
@@ -6686,7 +6680,7 @@ pub const Interpreter = struct {
pub fn onIOWriterChunk(this: *Export, _: usize, e: ?JSC.SystemError) void {
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(this.printing);
+ assert(this.printing);
}
const exit_code: ExitCode = if (e != null) brk: {
@@ -6812,7 +6806,7 @@ pub const Interpreter = struct {
pub fn onIOWriterChunk(this: *Echo, _: usize, e: ?JSC.SystemError) void {
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(this.state == .waiting);
+ assert(this.state == .waiting);
}
if (e != null) {
@@ -6937,7 +6931,7 @@ pub const Interpreter = struct {
fn argComplete(this: *Which) void {
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(this.state == .multi_args and this.state.multi_args.state == .waiting_write);
+ assert(this.state == .multi_args and this.state.multi_args.state == .waiting_write);
}
this.state.multi_args.arg_idx += 1;
@@ -6947,7 +6941,7 @@ pub const Interpreter = struct {
pub fn onIOWriterChunk(this: *Which, _: usize, e: ?JSC.SystemError) void {
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(this.state == .one_arg or
+ assert(this.state == .one_arg or
(this.state == .multi_args and this.state.multi_args.state == .waiting_write));
}
@@ -7063,7 +7057,7 @@ pub const Interpreter = struct {
pub fn onIOWriterChunk(this: *Cd, _: usize, e: ?JSC.SystemError) void {
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(this.state == .waiting_write_stderr);
+ assert(this.state == .waiting_write_stderr);
}
if (e != null) {
@@ -7144,7 +7138,7 @@ pub const Interpreter = struct {
pub fn onIOWriterChunk(this: *Pwd, _: usize, e: ?JSC.SystemError) void {
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(this.state == .waiting_io);
+ assert(this.state == .waiting_io);
}
if (e != null) {
@@ -8170,7 +8164,7 @@ pub const Interpreter = struct {
const check_target = &this.state.check_target;
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(check_target.task.result != null);
+ assert(check_target.task.result != null);
}
const maybe_fd: ?bun.FileDescriptor = switch (check_target.task.result.?) {
@@ -8289,8 +8283,8 @@ pub const Interpreter = struct {
_ = task;
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(this.state == .check_target);
- std.debug.assert(this.state.check_target.task.result != null);
+ assert(this.state == .check_target);
+ assert(this.state.check_target.task.result != null);
}
this.state.check_target.state = .done;
@@ -8300,8 +8294,8 @@ pub const Interpreter = struct {
pub fn batchedMoveTaskDone(this: *Mv, task: *ShellMvBatchedTask) void {
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(this.state == .executing);
- std.debug.assert(this.state.executing.tasks_done < this.state.executing.task_count);
+ assert(this.state == .executing);
+ assert(this.state.executing.tasks_done < this.state.executing.task_count);
}
var exec = &this.state.executing;
@@ -8754,7 +8748,7 @@ pub const Interpreter = struct {
pub fn onIOWriterChunk(this: *Rm, _: usize, e: ?JSC.SystemError) void {
log("Rm(0x{x}).onIOWriterChunk()", .{@intFromPtr(this)});
if (comptime bun.Environment.allow_assert) {
- std.debug.assert((this.state == .parse_opts and this.state.parse_opts.state == .wait_write_err) or
+ assert((this.state == .parse_opts and this.state.parse_opts.state == .wait_write_err) or
(this.state == .exec and this.state.exec.state == .waiting and this.state.exec.output_count.load(.SeqCst) > 0));
}
@@ -9213,7 +9207,7 @@ pub const Interpreter = struct {
.deleted_entries = std.ArrayList(u8).init(bun.default_allocator),
.concurrent_task = JSC.EventLoopTask.fromEventLoop(this.event_loop),
};
- std.debug.assert(parent_task.subtask_count.fetchAdd(1, .Monotonic) > 0);
+ assert(parent_task.subtask_count.fetchAdd(1, .Monotonic) > 0);
JSC.WorkPool.schedule(&subtask.task);
}
@@ -9709,7 +9703,7 @@ pub const Interpreter = struct {
pub fn onIOWriterChunk(this: *Exit, _: usize, maybe_e: ?JSC.SystemError) void {
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(this.state == .waiting_io);
+ assert(this.state == .waiting_io);
}
if (maybe_e) |e| {
defer e.deref();
@@ -10717,7 +10711,7 @@ pub const Interpreter = struct {
log("IOWriter(0x{x}, fd={}) getBufferImpl writer_len={} writer_written={}", .{ @intFromPtr(this), this.fd, writer.len, writer.written });
const remaining = writer.len - writer.written;
if (bun.Environment.allow_assert) {
- std.debug.assert(!(writer.len == writer.written));
+ assert(!(writer.len == writer.written));
}
return this.buf.items[this.total_bytes_written .. this.total_bytes_written + remaining];
}
@@ -10737,7 +10731,7 @@ pub const Interpreter = struct {
this.skipDead();
} else {
if (bun.Environment.allow_assert) {
- if (!is_dead) std.debug.assert(current_writer.written == current_writer.len);
+ if (!is_dead) assert(current_writer.written == current_writer.len);
}
this.__idx += 1;
}
@@ -10768,7 +10762,7 @@ pub const Interpreter = struct {
if (bun.Environment.allow_assert) {
if (this.writers.len() > 0) {
const first = this.writers.getConst(this.__idx);
- std.debug.assert(this.buf.items.len >= first.len);
+ assert(this.buf.items.len >= first.len);
}
}
}
@@ -10833,7 +10827,7 @@ pub const Interpreter = struct {
pub fn __deinit(this: *This) void {
print("IOWriter(0x{x}, fd={}) deinit", .{ @intFromPtr(this), this.fd });
- if (bun.Environment.allow_assert) std.debug.assert(this.ref_count == 0);
+ if (bun.Environment.allow_assert) assert(this.ref_count == 0);
this.buf.deinit(bun.default_allocator);
if (comptime bun.Environment.isPosix) {
if (this.writer.handle == .poll and this.writer.handle.poll.isRegistered()) {
@@ -11317,7 +11311,7 @@ pub fn OutputTask(
},
pub fn deinit(this: *@This()) void {
- if (comptime bun.Environment.allow_assert) std.debug.assert(this.state == .done);
+ if (comptime bun.Environment.allow_assert) assert(this.state == .done);
vtable.onDone(this.parent);
this.output.deinit();
bun.destroy(this);
diff --git a/src/shell/shell.zig b/src/shell/shell.zig
index edd603f03a7f75..6b9f7a068e74f5 100644
--- a/src/shell/shell.zig
+++ b/src/shell/shell.zig
@@ -1148,7 +1148,7 @@ pub const Parser = struct {
fn expectIfClauseTextToken(self: *Parser, comptime if_clause_token: @TypeOf(.EnumLiteral)) Token {
const tagname = comptime extractIfClauseTextToken(if_clause_token);
- if (bun.Environment.allow_assert) std.debug.assert(@as(TokenTag, self.peek()) == .Text);
+ if (bun.Environment.allow_assert) assert(@as(TokenTag, self.peek()) == .Text);
if (self.peek() == .Text and
self.delimits(self.peek_n(1)) and
std.mem.eql(u8, self.text(self.peek().Text), tagname))
@@ -1739,7 +1739,7 @@ pub const Parser = struct {
return switch (atoms.items.len) {
0 => null,
1 => {
- if (bun.Environment.allow_assert) std.debug.assert(atoms.capacity == 1);
+ if (bun.Environment.allow_assert) assert(atoms.capacity == 1);
return AST.Atom.new_simple(atoms.items[0]);
},
else => .{ .compound = .{
@@ -1772,7 +1772,7 @@ pub const Parser = struct {
}
fn expect(self: *Parser, toktag: TokenTag) Token {
- if (bun.Environment.allow_assert) std.debug.assert(toktag == @as(TokenTag, self.peek()));
+ if (bun.Environment.allow_assert) assert(toktag == @as(TokenTag, self.peek()));
if (self.check(toktag)) {
return self.advance();
}
@@ -1793,7 +1793,7 @@ pub const Parser = struct {
}
fn expect_delimit(self: *Parser) Token {
- if (bun.Environment.allow_assert) std.debug.assert(self.delimits(self.peek()));
+ if (bun.Environment.allow_assert) assert(self.delimits(self.peek()));
if (self.check(.Delimit) or self.check(.Semicolon) or self.check(.Newline) or self.check(.Eof) or (self.inside_subshell != null and self.check(self.inside_subshell.?.closing_tok()))) {
return self.advance();
}
@@ -2057,7 +2057,7 @@ pub const Token = union(TokenTag) {
end: u32,
pub fn len(range: TextRange) u32 {
- if (bun.Environment.allow_assert) std.debug.assert(range.start <= range.end);
+ if (bun.Environment.allow_assert) assert(range.start <= range.end);
return range.end - range.start;
}
};
@@ -2632,7 +2632,7 @@ pub fn NewLexer(comptime encoding: StringEncoding) type {
// Treat newline preceded by backslash as whitespace
else if (char == '\n') {
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(input.escaped);
+ assert(input.escaped);
}
if (self.chars.state != .Double) {
try self.break_word_impl(true, true, false);
@@ -4134,7 +4134,7 @@ pub fn SmolList(comptime T: type, comptime INLINED_MAX: comptime_int) type {
}
pub fn initWithSlice(vals: []const T) @This() {
- if (bun.Environment.allow_assert) std.debug.assert(vals.len <= std.math.maxInt(u32));
+ if (bun.Environment.allow_assert) assert(vals.len <= std.math.maxInt(u32));
if (vals.len <= INLINED_MAX) {
var this: @This() = @This().zeroes;
@memcpy(this.inlined.items[0..vals.len], vals);
@@ -4467,7 +4467,7 @@ pub const TestingAPIs = struct {
const script_ast = Interpreter.parse(&arena, script.items[0..], jsobjs.items[0..], jsstrings.items[0..], &out_parser, &out_lex_result) catch |err| {
if (err == ParseError.Lex) {
- if (bun.Environment.allow_assert) std.debug.assert(out_lex_result != null);
+ if (bun.Environment.allow_assert) assert(out_lex_result != null);
const str = out_lex_result.?.combineErrors(arena.allocator());
globalThis.throwPretty("{s}", .{str});
return .undefined;
@@ -4493,3 +4493,5 @@ pub const TestingAPIs = struct {
return bun_str.toJS(globalThis);
}
};
+
+const assert = bun.assert;
diff --git a/src/shell/subproc.zig b/src/shell/subproc.zig
index cdec642d97b36d..94c80f5cdb41e5 100644
--- a/src/shell/subproc.zig
+++ b/src/shell/subproc.zig
@@ -226,7 +226,7 @@ pub const ShellSubprocess = struct {
};
},
.memfd => |memfd| {
- std.debug.assert(memfd != bun.invalid_fd);
+ assert(memfd != bun.invalid_fd);
return Writable{ .memfd = memfd };
},
.fd => {
@@ -1223,7 +1223,7 @@ pub const PipeReader = struct {
) void {
if (!this.isDone()) return;
log("signalDoneToCmd ({x}: {s}) isDone={any}", .{ @intFromPtr(this), @tagName(this.out_type), this.isDone() });
- if (bun.Environment.allow_assert) std.debug.assert(this.process != null);
+ if (bun.Environment.allow_assert) assert(this.process != null);
if (this.process) |proc| {
if (proc.cmd_parent) |cmd| {
if (this.captured_writer.err) |e| {
@@ -1357,11 +1357,11 @@ pub const PipeReader = struct {
pub fn deinit(this: *PipeReader) void {
log("PipeReader(0x{x}, {s}) deinit()", .{ @intFromPtr(this), @tagName(this.out_type) });
if (comptime Environment.isPosix) {
- std.debug.assert(this.reader.isDone() or this.state == .err);
+ assert(this.reader.isDone() or this.state == .err);
}
if (comptime Environment.isWindows) {
- std.debug.assert(this.reader.source == null or this.reader.source.?.isClosed());
+ assert(this.reader.source == null or this.reader.source.?.isClosed());
}
if (this.state == .done) {
@@ -1396,8 +1396,10 @@ pub inline fn assertStdioResult(result: StdioResult) void {
if (comptime Environment.allow_assert) {
if (Environment.isPosix) {
if (result) |fd| {
- std.debug.assert(fd != bun.invalid_fd);
+ assert(fd != bun.invalid_fd);
}
}
}
}
+
+const assert = bun.assert;
diff --git a/src/sourcemap/sourcemap.zig b/src/sourcemap/sourcemap.zig
index a486378f7acc38..85e8cf4a223c0f 100644
--- a/src/sourcemap/sourcemap.zig
+++ b/src/sourcemap/sourcemap.zig
@@ -443,8 +443,8 @@ pub const LineColumnOffset = struct {
defer this.columns = columns;
var offset: u32 = 0;
while (strings.indexOfNewlineOrNonASCII(input, offset)) |i| {
- std.debug.assert(i >= offset);
- std.debug.assert(i < input.len);
+ assert(i >= offset);
+ assert(i < input.len);
var iter = strings.CodepointIterator.initOffset(input, i);
var cursor = strings.CodepointIterator.Cursor{ .i = @as(u32, @truncate(iter.i)) };
@@ -581,7 +581,7 @@ pub const SourceMapPieces = struct {
j.push(mappings[start_of_run..potential_end_of_run]);
- std.debug.assert(shift.before.lines == shift.after.lines);
+ assert(shift.before.lines == shift.after.lines);
const shift_column_delta = shift.after.columns - shift.before.columns;
const encode = encodeVLQ(decode_result.value + shift_column_delta - prev_shift_column_delta);
@@ -839,7 +839,7 @@ pub const LineOffsetTable = struct {
pub const List = std.MultiArrayList(LineOffsetTable);
pub fn findLine(byte_offsets_to_start_of_line: []const u32, loc: Logger.Loc) i32 {
- std.debug.assert(loc.start > -1); // checked by caller
+ assert(loc.start > -1); // checked by caller
var original_line: usize = 0;
const loc_start = @as(usize, @intCast(loc.start));
@@ -862,7 +862,7 @@ pub const LineOffsetTable = struct {
}
pub fn findIndex(byte_offsets_to_start_of_line: []const u32, loc: Logger.Loc) ?usize {
- std.debug.assert(loc.start > -1); // checked by caller
+ assert(loc.start > -1); // checked by caller
var original_line: usize = 0;
const loc_start = @as(usize, @intCast(loc.start));
@@ -924,7 +924,7 @@ pub const LineOffsetTable = struct {
}
if (c > 0x7F and columns_for_non_ascii.items.len == 0) {
- std.debug.assert(@intFromPtr(
+ assert(@intFromPtr(
remaining.ptr,
) >= @intFromPtr(
contents.ptr,
@@ -1466,3 +1466,5 @@ pub const DebugIDFormatter = struct {
try writer.writeAll(&buf);
}
};
+
+const assert = bun.assert;
diff --git a/src/string.zig b/src/string.zig
index b242f067027c0f..17d8bd223fb5e9 100644
--- a/src/string.zig
+++ b/src/string.zig
@@ -72,19 +72,19 @@ pub const WTFStringImplStruct = extern struct {
}
pub inline fn utf16Slice(self: WTFStringImpl) []const u16 {
- std.debug.assert(!is8Bit(self));
+ bun.assert(!is8Bit(self));
return self.m_ptr.utf16[0..length(self)];
}
pub inline fn latin1Slice(self: WTFStringImpl) []const u8 {
- std.debug.assert(is8Bit(self));
+ bun.assert(is8Bit(self));
return self.m_ptr.latin1[0..length(self)];
}
/// Caller must ensure that the string is 8-bit and ASCII.
pub inline fn utf8Slice(self: WTFStringImpl) []const u8 {
if (comptime bun.Environment.allow_assert)
- std.debug.assert(canUseAsUTF8(self));
+ bun.assert(canUseAsUTF8(self));
return self.m_ptr.latin1[0..length(self)];
}
@@ -99,11 +99,11 @@ pub const WTFStringImplStruct = extern struct {
pub inline fn deref(self: WTFStringImpl) void {
JSC.markBinding(@src());
const current_count = self.refCount();
- std.debug.assert(current_count > 0);
+ bun.assert(current_count > 0);
Bun__WTFStringImpl__deref(self);
if (comptime bun.Environment.allow_assert) {
if (current_count > 1) {
- std.debug.assert(self.refCount() < current_count or self.isStatic());
+ bun.assert(self.refCount() < current_count or self.isStatic());
}
}
}
@@ -111,9 +111,9 @@ pub const WTFStringImplStruct = extern struct {
pub inline fn ref(self: WTFStringImpl) void {
JSC.markBinding(@src());
const current_count = self.refCount();
- std.debug.assert(current_count > 0);
+ bun.assert(current_count > 0);
Bun__WTFStringImpl__ref(self);
- std.debug.assert(self.refCount() > current_count or self.isStatic());
+ bun.assert(self.refCount() > current_count or self.isStatic());
}
pub fn toLatin1Slice(this: WTFStringImpl) ZigString.Slice {
@@ -240,8 +240,8 @@ pub const StringImplAllocator = struct {
_: usize,
) void {
var this = bun.cast(WTFStringImpl, ptr);
- std.debug.assert(this.latin1Slice().ptr == buf.ptr);
- std.debug.assert(this.latin1Slice().len == buf.len);
+ bun.assert(this.latin1Slice().ptr == buf.ptr);
+ bun.assert(this.latin1Slice().len == buf.len);
this.deref();
}
@@ -337,7 +337,7 @@ pub const String = extern struct {
}
fn createUninitializedLatin1(len: usize) struct { String, []u8 } {
- std.debug.assert(len > 0);
+ bun.assert(len > 0);
const string = BunString__fromLatin1Unitialized(len);
const wtf = string.value.WTFStringImpl;
return .{
@@ -347,7 +347,7 @@ pub const String = extern struct {
}
fn createUninitializedUTF16(len: usize) struct { String, []u16 } {
- std.debug.assert(len > 0);
+ bun.assert(len > 0);
const string = BunString__fromUTF16Unitialized(len);
const wtf = string.value.WTFStringImpl;
return .{
@@ -377,7 +377,7 @@ pub const String = extern struct {
comptime kind: WTFStringEncoding,
len: usize,
) struct { String, [](kind.Byte()) } {
- std.debug.assert(len > 0);
+ bun.assert(len > 0);
return switch (comptime kind) {
.latin1 => createUninitializedLatin1(len),
.utf16 => createUninitializedUTF16(len),
@@ -555,7 +555,7 @@ pub const String = extern struct {
pub fn createExternal(bytes: []const u8, isLatin1: bool, ctx: ?*anyopaque, callback: ?*const ExternalStringImplFreeFunction) String {
JSC.markBinding(@src());
- std.debug.assert(bytes.len > 0);
+ bun.assert(bytes.len > 0);
return BunString__createExternal(bytes.ptr, bytes.len, isLatin1, ctx, callback);
}
@@ -571,7 +571,7 @@ pub const String = extern struct {
pub fn createExternalGloballyAllocated(comptime kind: WTFStringEncoding, bytes: []kind.Byte()) String {
JSC.markBinding(@src());
- std.debug.assert(bytes.len > 0);
+ bun.assert(bytes.len > 0);
return switch (comptime kind) {
.latin1 => BunString__createExternalGloballyAllocatedLatin1(bytes.ptr, bytes.len),
@@ -775,8 +775,8 @@ pub const String = extern struct {
pub inline fn utf8(self: String) []const u8 {
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(self.tag == .ZigString or self.tag == .StaticZigString);
- std.debug.assert(self.canBeUTF8());
+ bun.assert(self.tag == .ZigString or self.tag == .StaticZigString);
+ bun.assert(self.canBeUTF8());
}
return self.value.ZigString.slice();
}
@@ -872,8 +872,8 @@ pub const String = extern struct {
}
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(!isWTFAllocator(slice.allocator.get().?)); // toUTF8WithoutRef() should never return a WTF allocator
- std.debug.assert(slice.allocator.get().?.vtable == allocator.vtable); // assert that the allocator is the same
+ bun.assert(!isWTFAllocator(slice.allocator.get().?)); // toUTF8WithoutRef() should never return a WTF allocator
+ bun.assert(slice.allocator.get().?.vtable == allocator.vtable); // assert that the allocator is the same
}
// We've already cloned the string, so let's just return the slice.
@@ -901,8 +901,8 @@ pub const String = extern struct {
}
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(!isWTFAllocator(slice.allocator.get().?)); // toUTF8WithoutRef() should never return a WTF allocator
- std.debug.assert(slice.allocator.get().?.vtable == allocator.vtable); // assert that the allocator is the same
+ bun.assert(!isWTFAllocator(slice.allocator.get().?)); // toUTF8WithoutRef() should never return a WTF allocator
+ bun.assert(slice.allocator.get().?.vtable == allocator.vtable); // assert that the allocator is the same
}
// We did have to clone the string. Let's avoid keeping the WTFStringImpl around
@@ -951,7 +951,7 @@ pub const String = extern struct {
pub fn charAt(this: String, index: usize) u16 {
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(index < this.length());
+ bun.assert(index < this.length());
}
return switch (this.tag) {
.WTFStringImpl => if (this.value.WTFStringImpl.is8Bit()) @intCast(this.value.WTFStringImpl.utf8Slice()[index]) else this.value.WTFStringImpl.utf16Slice()[index],
@@ -962,7 +962,7 @@ pub const String = extern struct {
pub fn charAtU8(this: String, index: usize) u8 {
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(index < this.length());
+ bun.assert(index < this.length());
}
return switch (this.tag) {
.WTFStringImpl => if (this.value.WTFStringImpl.is8Bit()) this.value.WTFStringImpl.utf8Slice()[index] else @truncate(this.value.WTFStringImpl.utf16Slice()[index]),
@@ -972,7 +972,7 @@ pub const String = extern struct {
}
pub fn indexOfAsciiChar(this: String, chr: u8) ?usize {
- std.debug.assert(chr < 128);
+ bun.assert(chr < 128);
return switch (this.isUTF16()) {
true => std.mem.indexOfScalar(u16, this.utf16(), @intCast(chr)),
false => bun.strings.indexOfCharUsize(this.byteSlice(), chr),
@@ -1026,7 +1026,7 @@ pub const String = extern struct {
const bytes = this.byteSlice();
inline for (0..values.len) |i| {
- std.debug.assert(bytes.len == values[i].len);
+ bun.assert(bytes.len == values[i].len);
if (bun.strings.eqlComptimeCheckLenWithType(u8, bytes, values[i], false)) {
return i;
}
@@ -1067,7 +1067,7 @@ pub const String = extern struct {
const bytes = this.byteSlice();
inline for (0..values.len) |i| {
- std.debug.assert(bytes.len == values[i].len);
+ bun.assert(bytes.len == values[i].len);
if (bun.strings.eqlCaseInsensitiveASCIIIgnoreLength(bytes, values[i])) {
return i;
}
@@ -1220,7 +1220,7 @@ pub const SliceWithUnderlyingString = struct {
pub inline fn reportExtraMemory(this: *SliceWithUnderlyingString, vm: *JSC.VM) void {
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(!this.did_report_extra_memory_debug);
+ bun.assert(!this.did_report_extra_memory_debug);
this.did_report_extra_memory_debug = true;
}
this.utf8.reportExtraMemory(vm);
@@ -1306,7 +1306,7 @@ pub const SliceWithUnderlyingString = struct {
if ((this.underlying.tag == .Dead or this.underlying.tag == .Empty) and this.utf8.length() > 0) {
if (comptime bun.Environment.allow_assert) {
if (this.utf8.allocator.get()) |allocator| {
- std.debug.assert(!String.isWTFAllocator(allocator)); // We should never enter this state.
+ bun.assert(!String.isWTFAllocator(allocator)); // We should never enter this state.
}
}
diff --git a/src/string_builder.zig b/src/string_builder.zig
index 18a356487426e2..8d948985f08c69 100644
--- a/src/string_builder.zig
+++ b/src/string_builder.zig
@@ -4,7 +4,7 @@ const bun = @import("root").bun;
const Environment = bun.Environment;
const string = @import("string_types.zig").string;
const StringBuilder = @This();
-const assert = std.debug.assert;
+const assert = bun.assert;
const DebugHashTable = if (Environment.allow_assert) std.AutoHashMapUnmanaged(u64, void) else void;
diff --git a/src/string_immutable.zig b/src/string_immutable.zig
index 8769058d50587c..4f5e9ed361a2ca 100644
--- a/src/string_immutable.zig
+++ b/src/string_immutable.zig
@@ -124,7 +124,7 @@ pub fn indexOfAny(slice: string, comptime str: anytype) ?OptionalUsize {
remaining = remaining[ascii_vector_size..];
}
- if (comptime Environment.allow_assert) std.debug.assert(remaining.len < ascii_vector_size);
+ if (comptime Environment.allow_assert) assert(remaining.len < ascii_vector_size);
}
for (remaining, 0..) |c, i| {
@@ -288,7 +288,7 @@ pub inline fn indexOf(self: string, str: string) ?usize {
const start = bun.C.memmem(self_ptr, self_len, str_ptr, str_len) orelse return null;
const i = @intFromPtr(start) - @intFromPtr(self_ptr);
- std.debug.assert(i < self_len);
+ bun.unsafeAssert(i < self_len);
return @as(usize, @intCast(i));
}
@@ -310,7 +310,7 @@ pub const SplitIterator = struct {
/// Returns a slice of the first field. This never fails.
/// Call this only to get the first field and then use `next` to get all subsequent fields.
pub fn first(self: *Self) []const u8 {
- std.debug.assert(self.index.? == 0);
+ bun.unsafeAssert(self.index.? == 0);
return self.next().?;
}
@@ -398,7 +398,7 @@ pub const StringOrTinyString = struct {
} = .{},
comptime {
- std.debug.assert(@sizeOf(@This()) == 32);
+ bun.unsafeAssert(@sizeOf(@This()) == 32);
}
pub inline fn slice(this: *const StringOrTinyString) []const u8 {
@@ -979,8 +979,8 @@ pub fn eqlCaseInsensitiveASCII(a: string, b: string, comptime check_len: bool) b
if (a.len == 0) return true;
}
- std.debug.assert(b.len > 0);
- std.debug.assert(a.len > 0);
+ bun.unsafeAssert(b.len > 0);
+ bun.unsafeAssert(a.len > 0);
return bun.C.strncasecmp(a.ptr, b.ptr, a.len) == 0;
}
@@ -997,7 +997,7 @@ pub fn eqlLong(a_str: string, b_str: string, comptime check_len: bool) bool {
return false;
}
} else {
- if (comptime Environment.allow_assert) std.debug.assert(b_str.len == a_str.len);
+ if (comptime Environment.allow_assert) assert(b_str.len == a_str.len);
}
const end = b_str.ptr + len;
@@ -1116,7 +1116,7 @@ pub inline fn appendUTF8MachineWordToUTF16MachineWord(output: *[@sizeOf(usize) /
pub inline fn copyU8IntoU16(output_: []u16, input_: []const u8) void {
const output = output_;
const input = input_;
- if (comptime Environment.allow_assert) std.debug.assert(input.len <= output.len);
+ if (comptime Environment.allow_assert) assert(input.len <= output.len);
// https://zig.godbolt.org/z/9rTn1orcY
@@ -1136,7 +1136,7 @@ pub fn copyU8IntoU16WithAlignment(comptime alignment: u21, output_: []align(alig
var output = output_;
var input = input_;
const word = @sizeOf(usize) / 2;
- if (comptime Environment.allow_assert) std.debug.assert(input.len <= output.len);
+ if (comptime Environment.allow_assert) assert(input.len <= output.len);
// un-aligned data access is slow
// so we attempt to align the data
@@ -1159,7 +1159,7 @@ pub fn copyU8IntoU16WithAlignment(comptime alignment: u21, output_: []align(alig
// pub inline fn copy(output_: []u8, input_: []const u8) void {
// var output = output_;
// var input = input_;
-// if (comptime Environment.allow_assert) std.debug.assert(input.len <= output.len);
+// if (comptime Environment.allow_assert) assert(input.len <= output.len);
// if (input.len > @sizeOf(usize) * 4) {
// comptime var i: usize = 0;
@@ -1182,10 +1182,10 @@ pub fn copyU8IntoU16WithAlignment(comptime alignment: u21, output_: []align(alig
// }
pub inline fn copyU16IntoU8(output_: []u8, comptime InputType: type, input_: InputType) void {
- if (comptime Environment.allow_assert) std.debug.assert(input_.len <= output_.len);
+ if (comptime Environment.allow_assert) assert(input_.len <= output_.len);
var output = output_;
var input = input_;
- if (comptime Environment.allow_assert) std.debug.assert(input.len <= output.len);
+ if (comptime Environment.allow_assert) assert(input.len <= output.len);
// https://zig.godbolt.org/z/9rTn1orcY
@@ -1445,7 +1445,7 @@ pub fn toUTF16Alloc(allocator: std.mem.Allocator, bytes: []const u8, comptime fa
const replacement = strings.convertUTF8BytesIntoUTF16(&sequence);
if (comptime fail_if_invalid) {
if (replacement.fail) {
- if (comptime Environment.allow_assert) std.debug.assert(replacement.code_point == unicode_replacement);
+ if (comptime Environment.allow_assert) assert(replacement.code_point == unicode_replacement);
return error.InvalidByteSequence;
}
}
@@ -1480,7 +1480,7 @@ pub fn toUTF16Alloc(allocator: std.mem.Allocator, bytes: []const u8, comptime fa
const replacement = strings.convertUTF8BytesIntoUTF16(&sequence);
if (comptime fail_if_invalid) {
if (replacement.fail) {
- if (comptime Environment.allow_assert) std.debug.assert(replacement.code_point == unicode_replacement);
+ if (comptime Environment.allow_assert) assert(replacement.code_point == unicode_replacement);
return error.InvalidByteSequence;
}
}
@@ -1579,7 +1579,7 @@ pub fn toUTF16AllocNoTrim(allocator: std.mem.Allocator, bytes: []const u8, compt
const replacement = strings.convertUTF8BytesIntoUTF16(&sequence);
if (comptime fail_if_invalid) {
if (replacement.fail) {
- if (comptime Environment.allow_assert) std.debug.assert(replacement.code_point == unicode_replacement);
+ if (comptime Environment.allow_assert) assert(replacement.code_point == unicode_replacement);
return error.InvalidByteSequence;
}
}
@@ -1614,7 +1614,7 @@ pub fn toUTF16AllocNoTrim(allocator: std.mem.Allocator, bytes: []const u8, compt
const replacement = strings.convertUTF8BytesIntoUTF16(&sequence);
if (comptime fail_if_invalid) {
if (replacement.fail) {
- if (comptime Environment.allow_assert) std.debug.assert(replacement.code_point == unicode_replacement);
+ if (comptime Environment.allow_assert) assert(replacement.code_point == unicode_replacement);
return error.InvalidByteSequence;
}
}
@@ -1710,13 +1710,13 @@ pub fn utf16Codepoint(comptime Type: type, input: Type) UTF16Replacement {
/// an assertion, and PosixToWinNormalizer can help make an absolute path
/// contain a drive letter.
pub fn isWindowsAbsolutePathMissingDriveLetter(comptime T: type, chars: []const T) bool {
- std.debug.assert(bun.path.Platform.windows.isAbsoluteT(T, chars));
- std.debug.assert(chars.len > 0);
+ bun.unsafeAssert(bun.path.Platform.windows.isAbsoluteT(T, chars));
+ bun.unsafeAssert(chars.len > 0);
// 'C:\hello' -> false
if (!(chars[0] == '/' or chars[0] == '\\')) {
- std.debug.assert(chars.len > 2);
- std.debug.assert(chars[1] == ':');
+ bun.unsafeAssert(chars.len > 2);
+ bun.unsafeAssert(chars[1] == ':');
return false;
}
@@ -1744,9 +1744,9 @@ pub fn isWindowsAbsolutePathMissingDriveLetter(comptime T: type, chars: []const
}
pub fn fromWPath(buf: []u8, utf16: []const u16) [:0]const u8 {
- std.debug.assert(buf.len > 0);
+ bun.unsafeAssert(buf.len > 0);
const encode_into_result = copyUTF16IntoUTF8(buf[0 .. buf.len - 1], []const u16, utf16, false);
- std.debug.assert(encode_into_result.written < buf.len);
+ bun.unsafeAssert(encode_into_result.written < buf.len);
buf[encode_into_result.written] = 0;
return buf[0..encode_into_result.written :0];
}
@@ -1780,7 +1780,7 @@ pub fn addNTPathPrefixIfNeeded(wbuf: []u16, utf16: []const u16) [:0]const u16 {
pub const toNTDir = toNTPath;
pub fn toExtendedPathNormalized(wbuf: []u16, utf8: []const u8) [:0]const u16 {
- std.debug.assert(wbuf.len > 4);
+ bun.unsafeAssert(wbuf.len > 4);
wbuf[0..4].* = bun.windows.nt_maxpath_prefix;
return wbuf[0 .. toWPathNormalized(wbuf[4..], utf8).len + 4 :0];
}
@@ -1807,7 +1807,7 @@ pub fn toWPathNormalized(wbuf: []u16, utf8: []const u8) [:0]const u16 {
}
pub fn normalizeSlashesOnly(buf: []u8, utf8: []const u8, comptime desired_slash: u8) []const u8 {
- comptime std.debug.assert(desired_slash == '/' or desired_slash == '\\');
+ comptime bun.unsafeAssert(desired_slash == '/' or desired_slash == '\\');
const undesired_slash = if (desired_slash == '/') '\\' else '/';
if (bun.strings.containsChar(utf8, undesired_slash)) {
@@ -1866,7 +1866,7 @@ pub fn assertIsValidWindowsPath(comptime T: type, path: []const T) void {
}
pub fn toWPathMaybeDir(wbuf: []u16, utf8: []const u8, comptime add_trailing_lash: bool) [:0]const u16 {
- std.debug.assert(wbuf.len > 0);
+ bun.unsafeAssert(wbuf.len > 0);
var result = bun.simdutf.convert.utf8.to.utf16.with_errors.le(
utf8,
@@ -1938,7 +1938,7 @@ pub fn toUTF8ListWithType(list_: std.ArrayList(u8), comptime Type: type, utf16:
// which uses 3 bytes for invalid surrogates, causing the length to not
// match from simdutf.
// if (Environment.allow_assert) {
- // std.debug.assert(buf.items.len == length);
+ // bun.unsafeAssert(buf.items.len == length);
// }
return buf;
@@ -2035,7 +2035,7 @@ pub fn allocateLatin1IntoUTF8WithList(list_: std.ArrayList(u8), offset_into_list
try list.ensureUnusedCapacity(latin1.len);
while (latin1.len > 0) {
- if (comptime Environment.allow_assert) std.debug.assert(i < list.capacity);
+ if (comptime Environment.allow_assert) assert(i < list.capacity);
var buf = list.items.ptr[i..list.capacity];
inner: {
@@ -2056,7 +2056,7 @@ pub fn allocateLatin1IntoUTF8WithList(list_: std.ArrayList(u8), offset_into_list
if (mask > 0) {
const first_set_byte = @ctz(mask) / 8;
- if (comptime Environment.allow_assert) std.debug.assert(latin1[first_set_byte] >= 127);
+ if (comptime Environment.allow_assert) assert(latin1[first_set_byte] >= 127);
buf[0..size].* = @as([size]u8, @bitCast(bytes));
buf = buf[first_set_byte..];
@@ -2076,7 +2076,7 @@ pub fn allocateLatin1IntoUTF8WithList(list_: std.ArrayList(u8), offset_into_list
if (mask > 0) {
const first_set_byte = @ctz(mask) / 8;
- if (comptime Environment.allow_assert) std.debug.assert(latin1[first_set_byte] >= 127);
+ if (comptime Environment.allow_assert) assert(latin1[first_set_byte] >= 127);
buf[0..size].* = @as([size]u8, @bitCast(bytes));
buf = buf[first_set_byte..];
@@ -2103,7 +2103,7 @@ pub fn allocateLatin1IntoUTF8WithList(list_: std.ArrayList(u8), offset_into_list
if (mask > 0) {
const first_set_byte = @ctz(mask) / 8;
- if (comptime Environment.allow_assert) std.debug.assert(latin1[first_set_byte] >= 127);
+ if (comptime Environment.allow_assert) assert(latin1[first_set_byte] >= 127);
buf[0..size].* = @as([size]u8, @bitCast(bytes));
latin1 = latin1[first_set_byte..];
@@ -2117,7 +2117,7 @@ pub fn allocateLatin1IntoUTF8WithList(list_: std.ArrayList(u8), offset_into_list
}
{
- if (comptime Environment.allow_assert) std.debug.assert(latin1.len < 8);
+ if (comptime Environment.allow_assert) assert(latin1.len < 8);
const end = latin1.ptr + latin1.len;
while (latin1.ptr != end and latin1[0] < 128) {
buf[0] = latin1[0];
@@ -2167,13 +2167,13 @@ pub const UTF16Replacement = struct {
// This variation matches WebKit behavior.
pub fn convertUTF8BytesIntoUTF16(sequence: *const [4]u8) UTF16Replacement {
- if (comptime Environment.allow_assert) std.debug.assert(sequence[0] > 127);
+ if (comptime Environment.allow_assert) assert(sequence[0] > 127);
const len = wtf8ByteSequenceLengthWithInvalid(sequence[0]);
switch (len) {
2 => {
if (comptime Environment.allow_assert) {
- std.debug.assert(sequence[0] >= 0xC0);
- std.debug.assert(sequence[0] <= 0xDF);
+ bun.assert(sequence[0] >= 0xC0);
+ bun.assert(sequence[0] <= 0xDF);
}
if (sequence[1] < 0x80 or sequence[1] > 0xBF) {
return .{ .len = 1, .fail = true };
@@ -2182,8 +2182,8 @@ pub fn convertUTF8BytesIntoUTF16(sequence: *const [4]u8) UTF16Replacement {
},
3 => {
if (comptime Environment.allow_assert) {
- std.debug.assert(sequence[0] >= 0xE0);
- std.debug.assert(sequence[0] <= 0xEF);
+ bun.assert(sequence[0] >= 0xE0);
+ bun.assert(sequence[0] <= 0xEF);
}
switch (sequence[0]) {
0xE0 => {
@@ -2295,7 +2295,7 @@ pub fn copyLatin1IntoUTF8StopOnNonASCII(buf_: []u8, comptime Type: type, latin1_
if (mask > 0) {
const first_set_byte = @ctz(mask) / 8;
- if (comptime Environment.allow_assert) std.debug.assert(latin1[first_set_byte] >= 127);
+ if (comptime Environment.allow_assert) assert(latin1[first_set_byte] >= 127);
buf = buf[first_set_byte..];
latin1 = latin1[first_set_byte..];
@@ -2313,9 +2313,9 @@ pub fn copyLatin1IntoUTF8StopOnNonASCII(buf_: []u8, comptime Type: type, latin1_
buf[0..size].* = @as([size]u8, @bitCast(bytes));
- if (comptime Environment.allow_assert) std.debug.assert(mask > 0);
+ if (comptime Environment.allow_assert) assert(mask > 0);
const first_set_byte = @ctz(mask) / 8;
- if (comptime Environment.allow_assert) std.debug.assert(latin1[first_set_byte] >= 127);
+ if (comptime Environment.allow_assert) assert(latin1[first_set_byte] >= 127);
buf = buf[first_set_byte..];
latin1 = latin1[first_set_byte..];
@@ -2344,7 +2344,7 @@ pub fn copyLatin1IntoUTF8StopOnNonASCII(buf_: []u8, comptime Type: type, latin1_
if (mask > 0) {
const first_set_byte = @ctz(mask) / 8;
if (comptime stop) return .{ .written = std.math.maxInt(u32), .read = std.math.maxInt(u32) };
- if (comptime Environment.allow_assert) std.debug.assert(latin1[first_set_byte] >= 127);
+ if (comptime Environment.allow_assert) assert(latin1[first_set_byte] >= 127);
buf = buf[first_set_byte..];
latin1 = latin1[first_set_byte..];
@@ -2359,7 +2359,7 @@ pub fn copyLatin1IntoUTF8StopOnNonASCII(buf_: []u8, comptime Type: type, latin1_
{
const end = latin1.ptr + @min(buf.len, latin1.len);
- if (comptime Environment.allow_assert) std.debug.assert(@intFromPtr(latin1.ptr + 8) > @intFromPtr(end));
+ if (comptime Environment.allow_assert) assert(@intFromPtr(latin1.ptr + 8) > @intFromPtr(end));
const start_ptr = @intFromPtr(buf.ptr);
const start_ptr_latin1 = @intFromPtr(latin1.ptr);
@@ -2657,7 +2657,7 @@ pub fn escapeHTMLForLatin1Input(allocator: std.mem.Allocator, latin1: []const u8
// pass #1: scan for any characters that need escaping
// assume most strings won't need any escaping, so don't actually allocate the buffer
scan_and_allocate_lazily: while (remaining.len >= ascii_vector_size) {
- if (comptime Environment.allow_assert) std.debug.assert(!any_needs_escape);
+ if (comptime Environment.allow_assert) assert(!any_needs_escape);
const vec: AsciiVector = remaining[0..ascii_vector_size].*;
if (@reduce(.Max, @as(AsciiVectorU1, @bitCast((vec == vecs[0]))) |
@as(AsciiVectorU1, @bitCast((vec == vecs[1]))) |
@@ -2665,7 +2665,7 @@ pub fn escapeHTMLForLatin1Input(allocator: std.mem.Allocator, latin1: []const u8
@as(AsciiVectorU1, @bitCast((vec == vecs[3]))) |
@as(AsciiVectorU1, @bitCast((vec == vecs[4])))) == 1)
{
- if (comptime Environment.allow_assert) std.debug.assert(buf.capacity == 0);
+ if (comptime Environment.allow_assert) assert(buf.capacity == 0);
buf = try std.ArrayList(u8).initCapacity(allocator, latin1.len + 6);
const copy_len = @intFromPtr(remaining.ptr) - @intFromPtr(latin1.ptr);
@@ -2776,7 +2776,7 @@ pub fn escapeHTMLForLatin1Input(allocator: std.mem.Allocator, latin1: []const u8
scan_and_allocate_lazily: while (ptr != end) : (ptr += 1) {
switch (ptr[0]) {
'"', '&', '\'', '<', '>' => |c| {
- if (comptime Environment.allow_assert) std.debug.assert(buf.capacity == 0);
+ if (comptime Environment.allow_assert) assert(buf.capacity == 0);
buf = try std.ArrayList(u8).initCapacity(allocator, latin1.len + @as(usize, Scalar.lengths[c]));
const copy_len = @intFromPtr(ptr) - @intFromPtr(latin1.ptr);
@@ -2814,7 +2814,7 @@ pub fn escapeHTMLForLatin1Input(allocator: std.mem.Allocator, latin1: []const u8
}
if (!any_needs_escape) {
- if (comptime Environment.allow_assert) std.debug.assert(buf.capacity == 0);
+ if (comptime Environment.allow_assert) assert(buf.capacity == 0);
return Escaped(u8){ .original = {} };
}
@@ -2908,7 +2908,7 @@ pub fn escapeHTMLForUTF16Input(allocator: std.mem.Allocator, utf16: []const u16)
// pass #1: scan for any characters that need escaping
// assume most strings won't need any escaping, so don't actually allocate the buffer
scan_and_allocate_lazily: while (remaining.len >= ascii_u16_vector_size) {
- if (comptime Environment.allow_assert) std.debug.assert(!any_needs_escape);
+ if (comptime Environment.allow_assert) assert(!any_needs_escape);
const vec: AsciiU16Vector = remaining[0..ascii_u16_vector_size].*;
if (@reduce(.Max, @as(AsciiVectorU16U1, @bitCast(vec > @as(AsciiU16Vector, @splat(@as(u16, 127))))) |
@as(AsciiVectorU16U1, @bitCast((vec == vecs[0]))) |
@@ -2941,7 +2941,7 @@ pub fn escapeHTMLForUTF16Input(allocator: std.mem.Allocator, utf16: []const u16)
continue :scan_and_allocate_lazily;
}
- if (comptime Environment.allow_assert) std.debug.assert(@intFromPtr(remaining.ptr + i) >= @intFromPtr(utf16.ptr));
+ if (comptime Environment.allow_assert) assert(@intFromPtr(remaining.ptr + i) >= @intFromPtr(utf16.ptr));
const to_copy = std.mem.sliceAsBytes(utf16)[0 .. @intFromPtr(remaining.ptr + i) - @intFromPtr(utf16.ptr)];
const to_copy_16 = std.mem.bytesAsSlice(u16, to_copy);
buf = try std.ArrayList(u16).initCapacity(allocator, utf16.len + 6);
@@ -3053,7 +3053,7 @@ pub fn escapeHTMLForUTF16Input(allocator: std.mem.Allocator, utf16: []const u16)
switch (ptr[0]) {
'"', '&', '\'', '<', '>' => |c| {
buf = try std.ArrayList(u16).initCapacity(allocator, utf16.len + @as(usize, Scalar.lengths[c]));
- if (comptime Environment.allow_assert) std.debug.assert(@intFromPtr(ptr) >= @intFromPtr(utf16.ptr));
+ if (comptime Environment.allow_assert) assert(@intFromPtr(ptr) >= @intFromPtr(utf16.ptr));
const to_copy = std.mem.sliceAsBytes(utf16)[0 .. @intFromPtr(ptr) - @intFromPtr(utf16.ptr)];
const to_copy_16 = std.mem.bytesAsSlice(u16, to_copy);
@@ -3486,7 +3486,7 @@ pub inline fn wtf8ByteSequenceLengthWithInvalid(first_byte: u8) u3 {
/// which was a clone of golang's "utf8.DecodeRune" that was modified to decode using WTF-8 instead.
/// Asserts a multi-byte codepoint
pub inline fn decodeWTF8RuneTMultibyte(p: *const [4]u8, len: u3, comptime T: type, comptime zero: T) T {
- if (comptime Environment.allow_assert) std.debug.assert(len > 1);
+ if (comptime Environment.allow_assert) assert(len > 1);
const s1 = p[1];
if ((s1 & 0xC0) != 0x80) return zero;
@@ -3680,10 +3680,10 @@ pub fn firstNonASCIIWithType(comptime Type: type, slice: Type) ?u32 {
if (mask > 0) {
const first_set_byte = @ctz(mask) / 8;
- if (comptime Environment.allow_assert) {
- std.debug.assert(remaining[first_set_byte] > 127);
+ if (comptime Environment.isDebug) {
+ bun.assert(remaining[first_set_byte] > 127);
for (0..first_set_byte) |j| {
- std.debug.assert(remaining[j] <= 127);
+ bun.assert(remaining[j] <= 127);
}
}
@@ -3697,10 +3697,10 @@ pub fn firstNonASCIIWithType(comptime Type: type, slice: Type) ?u32 {
if (mask > 0) {
const first_set_byte = @ctz(mask) / 8;
- if (comptime Environment.allow_assert) {
- std.debug.assert(remaining[first_set_byte] > 127);
+ if (comptime Environment.isDebug) {
+ bun.assert(remaining[first_set_byte] > 127);
for (0..first_set_byte) |j| {
- std.debug.assert(remaining[j] <= 127);
+ bun.assert(remaining[j] <= 127);
}
}
@@ -3728,8 +3728,8 @@ pub fn firstNonASCIIWithType(comptime Type: type, slice: Type) ?u32 {
if (comptime Environment.enableSIMD) {
// these assertions exist more so for LLVM
- std.debug.assert(remaining.len < ascii_vector_size);
- std.debug.assert(@intFromPtr(remaining.ptr + ascii_vector_size) > @intFromPtr(remaining_end));
+ bun.unsafeAssert(remaining.len < ascii_vector_size);
+ bun.unsafeAssert(@intFromPtr(remaining.ptr + ascii_vector_size) > @intFromPtr(remaining_end));
}
if (remaining.len >= size) {
@@ -3741,10 +3741,10 @@ pub fn firstNonASCIIWithType(comptime Type: type, slice: Type) ?u32 {
if (mask > 0) {
remaining.len -= @intFromPtr(remaining.ptr) - @intFromPtr(remaining_start);
const first_set_byte = @ctz(mask) / 8;
- if (comptime Environment.allow_assert) {
- std.debug.assert(remaining[first_set_byte] > 127);
+ if (comptime Environment.isDebug) {
+ bun.unsafeAssert(remaining[first_set_byte] > 127);
for (0..first_set_byte) |j| {
- std.debug.assert(remaining[j] <= 127);
+ bun.unsafeAssert(remaining[j] <= 127);
}
}
@@ -3757,7 +3757,7 @@ pub fn firstNonASCIIWithType(comptime Type: type, slice: Type) ?u32 {
}
}
- if (comptime Environment.allow_assert) std.debug.assert(remaining.len < 8);
+ if (comptime Environment.allow_assert) assert(remaining.len < 8);
for (remaining) |*char| {
if (char.* > 127) {
@@ -3794,7 +3794,7 @@ pub fn indexOfNewlineOrNonASCIIOrANSI(slice_: []const u8, offset: u32) ?u32 {
remaining = remaining[ascii_vector_size..];
}
- if (comptime Environment.allow_assert) std.debug.assert(remaining.len < ascii_vector_size);
+ if (comptime Environment.allow_assert) assert(remaining.len < ascii_vector_size);
}
for (remaining) |*char_| {
@@ -3842,7 +3842,7 @@ pub fn indexOfNewlineOrNonASCIICheckStart(slice_: []const u8, offset: u32, compt
remaining = remaining[ascii_vector_size..];
}
- if (comptime Environment.allow_assert) std.debug.assert(remaining.len < ascii_vector_size);
+ if (comptime Environment.allow_assert) assert(remaining.len < ascii_vector_size);
}
for (remaining) |*char_| {
@@ -3877,7 +3877,7 @@ pub fn containsNewlineOrNonASCIIOrQuote(slice_: []const u8) bool {
remaining = remaining[ascii_vector_size..];
}
- if (comptime Environment.allow_assert) std.debug.assert(remaining.len < ascii_vector_size);
+ if (comptime Environment.allow_assert) assert(remaining.len < ascii_vector_size);
}
for (remaining) |*char_| {
@@ -3939,8 +3939,8 @@ pub fn indexOfCharZ(sliceZ: [:0]const u8, char: u8) ?u63 {
const ptr = bun.C.strchr(sliceZ.ptr, char) orelse return null;
const pos = @intFromPtr(ptr) - @intFromPtr(sliceZ.ptr);
- if (comptime Environment.allow_assert)
- std.debug.assert(@intFromPtr(sliceZ.ptr) <= @intFromPtr(ptr) and
+ if (comptime Environment.isDebug)
+ bun.assert(@intFromPtr(sliceZ.ptr) <= @intFromPtr(ptr) and
@intFromPtr(ptr) < @intFromPtr(sliceZ.ptr + sliceZ.len) and
pos <= sliceZ.len);
@@ -3961,8 +3961,8 @@ pub fn indexOfCharUsize(slice: []const u8, char: u8) ?usize {
const ptr = bun.C.memchr(slice.ptr, char, slice.len) orelse return null;
const i = @intFromPtr(ptr) - @intFromPtr(slice.ptr);
- std.debug.assert(i < slice.len);
- std.debug.assert(slice[i] == char);
+ bun.assert(i < slice.len);
+ bun.assert(slice[i] == char);
return i;
}
@@ -4114,8 +4114,8 @@ fn byte2hex(char: u8) u8 {
pub fn encodeBytesToHex(destination: []u8, source: []const u8) usize {
if (comptime Environment.allow_assert) {
- std.debug.assert(destination.len > 0);
- std.debug.assert(source.len > 0);
+ bun.unsafeAssert(destination.len > 0);
+ bun.unsafeAssert(source.len > 0);
}
const to_write = if (destination.len < source.len * 2)
destination.len - destination.len % 2
@@ -4410,7 +4410,7 @@ pub fn firstNonASCII16(comptime Slice: type, slice: Slice) ?u32 {
remaining.len -= (@intFromPtr(remaining.ptr) - @intFromPtr(remaining_start)) / 2;
}
- std.debug.assert(remaining.len < ascii_u16_vector_size);
+ bun.unsafeAssert(remaining.len < ascii_u16_vector_size);
}
var i: usize = (@intFromPtr(remaining.ptr) - @intFromPtr(remaining_start)) / 2;
@@ -4472,7 +4472,7 @@ test "indexOfNotChar" {
for (0..yes.len) |i| {
@memset(yes, 'a');
yes[i] = 'b';
- if (comptime Environment.allow_assert) std.debug.assert(indexOfNotChar(&yes, 'a').? == i);
+ if (comptime Environment.allow_assert) assert(indexOfNotChar(&yes, 'a').? == i);
i += 1;
}
}
@@ -4731,7 +4731,7 @@ pub fn isASCIIHexDigit(c: u8) bool {
}
pub fn toASCIIHexValue(character: u8) u8 {
- if (comptime Environment.allow_assert) std.debug.assert(isASCIIHexDigit(character));
+ if (comptime Environment.isDebug) assert(isASCIIHexDigit(character));
return switch (character) {
0...('A' - 1) => character - '0',
else => (character - 'A' + 10) & 0xF,
@@ -5111,20 +5111,20 @@ pub fn moveAllSlices(comptime Type: type, container: *Type, from: string, to: st
pub fn moveSlice(slice: string, from: string, to: string) string {
if (comptime Environment.allow_assert) {
- std.debug.assert(from.len <= to.len and from.len >= slice.len);
+ bun.unsafeAssert(from.len <= to.len and from.len >= slice.len);
// assert we are in bounds
- std.debug.assert(
+ bun.unsafeAssert(
(@intFromPtr(from.ptr) + from.len) >=
@intFromPtr(slice.ptr) + slice.len and
(@intFromPtr(from.ptr) <= @intFromPtr(slice.ptr)),
);
- std.debug.assert(eqlLong(from, to[0..from.len], false)); // data should be identical
+ bun.unsafeAssert(eqlLong(from, to[0..from.len], false)); // data should be identical
}
const ptr_offset = @intFromPtr(slice.ptr) - @intFromPtr(from.ptr);
const result = to[ptr_offset..][0..slice.len];
- if (comptime Environment.allow_assert) std.debug.assert(eqlLong(slice, result, false)); // data should be identical
+ if (comptime Environment.allow_assert) assert(eqlLong(slice, result, false)); // data should be identical
return result;
}
@@ -5237,7 +5237,7 @@ pub fn cloneNormalizingSeparators(
const base = withoutTrailingSlash(input);
var tokenized = std.mem.tokenize(u8, base, std.fs.path.sep_str);
var buf = try allocator.alloc(u8, base.len + 2);
- if (comptime Environment.allow_assert) std.debug.assert(base.len > 0);
+ if (comptime Environment.allow_assert) assert(base.len > 0);
if (base[0] == std.fs.path.sep) {
buf[0] = std.fs.path.sep;
}
@@ -5298,7 +5298,7 @@ pub fn concatWithLength(
@memcpy(remain[0..arg.len], arg);
remain = remain[arg.len..];
}
- std.debug.assert(remain.len == 0); // all bytes should be used
+ bun.unsafeAssert(remain.len == 0); // all bytes should be used
return out;
}
@@ -5372,7 +5372,7 @@ pub fn concatIfNeeded(
remain = remain[arg.len..];
}
- std.debug.assert(remain.len == 0);
+ bun.unsafeAssert(remain.len == 0);
}
/// This will simply ignore invalid UTF-8 and just do it
@@ -6304,3 +6304,5 @@ pub fn withoutPrefixComptime(input: []const u8, comptime prefix: []const u8) []c
// extern "C" bool icu_hasBinaryProperty(UChar32 cp, unsigned int prop)
extern fn icu_hasBinaryProperty(c: u32, which: c_uint) bool;
+
+const assert = bun.assert;
diff --git a/src/string_mutable.zig b/src/string_mutable.zig
index 42bfdd4fa11538..40cd995c303f21 100644
--- a/src/string_mutable.zig
+++ b/src/string_mutable.zig
@@ -143,7 +143,7 @@ pub const MutableString = struct {
}
if (comptime bun.Environment.allow_assert) {
- std.debug.assert(js_lexer.isIdentifier(mutable.list.items));
+ bun.assert(js_lexer.isIdentifier(mutable.list.items));
}
return try mutable.list.toOwnedSlice(allocator);
@@ -193,7 +193,7 @@ pub const MutableString = struct {
self: *MutableString,
index: usize,
) void {
- std.debug.assert(index <= self.list.capacity);
+ bun.assert(index <= self.list.capacity);
self.list.items.len = index;
}
@@ -215,7 +215,7 @@ pub const MutableString = struct {
try self.list.ensureUnusedCapacity(self.allocator, count);
const old = self.list.items.len;
self.list.items.len += count;
- std.debug.assert(count == bun.fmt.formatIntBuf(self.list.items.ptr[old .. old + count], int, 10, .lower, .{}));
+ bun.assert(count == bun.fmt.formatIntBuf(self.list.items.ptr[old .. old + count], int, 10, .lower, .{}));
}
pub inline fn appendAssumeCapacity(self: *MutableString, char: []const u8) void {
diff --git a/src/string_types.zig b/src/string_types.zig
index 1858cae1e40ab6..7e6a8f97674b51 100644
--- a/src/string_types.zig
+++ b/src/string_types.zig
@@ -140,7 +140,7 @@ pub const SmolStr = packed struct {
};
comptime {
- std.debug.assert(@sizeOf(SmolStr) == @sizeOf(Inlined));
+ bun.assert(@sizeOf(SmolStr) == @sizeOf(Inlined));
}
pub fn empty() SmolStr {
diff --git a/src/sync.zig b/src/sync.zig
index f37bbed11ae9ed..c25928074a89d5 100644
--- a/src/sync.zig
+++ b/src/sync.zig
@@ -450,11 +450,11 @@ pub fn Channel(
}
pub fn writeAll(self: *Self, items: []const T) !void {
- std.debug.assert((try self.writeItems(items, true)) == items.len);
+ bun.assert((try self.writeItems(items, true)) == items.len);
}
pub fn readAll(self: *Self, items: []T) !void {
- std.debug.assert((try self.readItems(items, true)) == items.len);
+ bun.assert((try self.readItems(items, true)) == items.len);
}
fn writeItems(self: *Self, items: []const T, should_block: bool) !usize {
@@ -537,7 +537,7 @@ pub const RwLock = if (@import("builtin").os.tag != .windows and @import("builti
};
const rc = std.c.pthread_rwlock_destroy(&self.rwlock);
- std.debug.assert(rc == .SUCCESS or rc == safe_rc);
+ bun.assert(rc == .SUCCESS or rc == safe_rc);
self.* = undefined;
}
@@ -548,12 +548,12 @@ pub const RwLock = if (@import("builtin").os.tag != .windows and @import("builti
pub fn lock(self: *RwLock) void {
const rc = pthread_rwlock_wrlock(&self.rwlock);
- std.debug.assert(rc == .SUCCESS);
+ bun.assert(rc == .SUCCESS);
}
pub fn unlock(self: *RwLock) void {
const rc = pthread_rwlock_unlock(&self.rwlock);
- std.debug.assert(rc == .SUCCESS);
+ bun.assert(rc == .SUCCESS);
}
pub fn tryLockShared(self: *RwLock) bool {
@@ -562,12 +562,12 @@ pub const RwLock = if (@import("builtin").os.tag != .windows and @import("builti
pub fn lockShared(self: *RwLock) void {
const rc = pthread_rwlock_rdlock(&self.rwlock);
- std.debug.assert(rc == .SUCCESS);
+ bun.assert(rc == .SUCCESS);
}
pub fn unlockShared(self: *RwLock) void {
const rc = pthread_rwlock_unlock(&self.rwlock);
- std.debug.assert(rc == .SUCCESS);
+ bun.assert(rc == .SUCCESS);
}
const PTHREAD_RWLOCK_INITIALIZER = pthread_rwlock_t{};
@@ -886,7 +886,7 @@ else if (@import("builtin").link_libc)
};
const rc = std.c.pthread_mutex_destroy(&self.mutex);
- std.debug.assert(rc == .SUCCESS or rc == safe_rc);
+ bun.assert(rc == .SUCCESS or rc == safe_rc);
self.* = undefined;
}
@@ -897,12 +897,12 @@ else if (@import("builtin").link_libc)
pub fn lock(self: *Mutex) void {
const rc = std.c.pthread_mutex_lock(&self.mutex);
- std.debug.assert(rc == .SUCCESS);
+ bun.assert(rc == .SUCCESS);
}
pub fn unlock(self: *Mutex) void {
const rc = std.c.pthread_mutex_unlock(&self.mutex);
- std.debug.assert(rc == .SUCCESS);
+ bun.assert(rc == .SUCCESS);
}
extern "c" fn pthread_mutex_trylock(m: *std.c.pthread_mutex_t) callconv(.C) c_int;
@@ -1041,7 +1041,7 @@ pub const Condvar = if (@import("builtin").os.tag == .windows)
@as(system.ULONG, 0),
);
- std.debug.assert(rc != system.FALSE);
+ bun.assert(rc != system.FALSE);
}
pub fn signal(self: *Condvar) void {
@@ -1080,24 +1080,24 @@ else if (@import("builtin").link_libc)
};
const rc = std.c.pthread_cond_destroy(&self.cond);
- std.debug.assert(rc == .SUCCESS or rc == safe_rc);
+ bun.assert(rc == .SUCCESS or rc == safe_rc);
self.* = undefined;
}
pub fn wait(self: *Condvar, mutex: *Mutex) void {
const rc = std.c.pthread_cond_wait(&self.cond, &mutex.mutex);
- std.debug.assert(rc == .SUCCESS);
+ bun.assert(rc == .SUCCESS);
}
pub fn signal(self: *Condvar) void {
const rc = std.c.pthread_cond_signal(&self.cond);
- std.debug.assert(rc == .SUCCESS);
+ bun.assert(rc == .SUCCESS);
}
pub fn broadcast(self: *Condvar) void {
const rc = std.c.pthread_cond_broadcast(&self.cond);
- std.debug.assert(rc == .SUCCESS);
+ bun.assert(rc == .SUCCESS);
}
}
else
diff --git a/src/sys.zig b/src/sys.zig
index b04c31e5a36cd1..33c34857552a73 100644
--- a/src/sys.zig
+++ b/src/sys.zig
@@ -223,7 +223,7 @@ pub const Error = struct {
};
pub inline fn withFd(this: Error, fd: anytype) Error {
- if (Environment.allow_assert) std.debug.assert(fd != bun.invalid_fd);
+ if (Environment.allow_assert) bun.assert(fd != bun.invalid_fd);
return Error{
.errno = this.errno,
.syscall = this.syscall,
@@ -822,7 +822,7 @@ pub fn openFileAtWindowsNtPath(
// Another problem re: normalization is that you can use relative paths, but no leading '.\' or './''
// this path is probably already backslash normalized so we're only going to check for '.\'
// const path = if (bun.strings.hasPrefixComptimeUTF16(path_maybe_leading_dot, ".\\")) path_maybe_leading_dot[2..] else path_maybe_leading_dot;
- // std.debug.assert(!bun.strings.hasPrefixComptimeUTF16(path_maybe_leading_dot, "./"));
+ // bun.assert(!bun.strings.hasPrefixComptimeUTF16(path_maybe_leading_dot, "./"));
assertIsValidWindowsPath(u16, path);
var result: windows.HANDLE = undefined;
@@ -1186,7 +1186,7 @@ pub fn write(fd: bun.FileDescriptor, bytes: []const u8) Maybe(usize) {
.windows => {
// "WriteFile sets this value to zero before doing any work or error checking."
var bytes_written: u32 = undefined;
- std.debug.assert(bytes.len > 0);
+ bun.assert(bytes.len > 0);
const rc = kernel32.WriteFile(
fd.cast(),
bytes.ptr,
@@ -2018,7 +2018,7 @@ pub fn munmap(memory: []align(mem.page_size) const u8) Maybe(void) {
pub fn setPipeCapacityOnLinux(fd: bun.FileDescriptor, capacity: usize) Maybe(usize) {
if (comptime !Environment.isLinux) @compileError("Linux-only");
- std.debug.assert(capacity > 0);
+ bun.assert(capacity > 0);
// In Linux versions before 2.6.11, the capacity of a
// pipe was the same as the system page size (e.g., 4096
@@ -2356,7 +2356,7 @@ pub fn linkatTmpfile(tmpfd: bun.FileDescriptor, dirfd: bun.FileDescriptor, name:
}
if (comptime Environment.allow_assert)
- std.debug.assert(!std.fs.path.isAbsolute(name)); // absolute path will get ignored.
+ bun.assert(!std.fs.path.isAbsolute(name)); // absolute path will get ignored.
return Maybe(void).errnoSysP(
std.os.linux.linkat(
diff --git a/src/sys_uv.zig b/src/sys_uv.zig
index 4b50b41a33ec79..4561d71040fead 100644
--- a/src/sys_uv.zig
+++ b/src/sys_uv.zig
@@ -22,7 +22,7 @@ const Maybe = JSC.Maybe;
const SystemError = JSC.SystemError;
comptime {
- std.debug.assert(Environment.isWindows);
+ bun.assert(Environment.isWindows);
}
pub const log = bun.sys.syslog;
@@ -177,7 +177,7 @@ pub fn readlink(file_path: [:0]const u8, buf: []u8) Maybe(usize) {
return .{ .err = .{ .errno = errno, .syscall = .readlink } };
} else {
// Seems like `rc` does not contain the errno?
- std.debug.assert(rc.int() == 0);
+ bun.assert(rc.int() == 0);
const slice = bun.span(req.ptrAs([*:0]u8));
if (slice.len > buf.len) {
log("uv readlink({s}) = {d}, {s} TRUNCATED", .{ file_path, rc.int(), slice });
@@ -319,7 +319,7 @@ pub fn closeAllowingStdoutAndStderr(fd: FileDescriptor) ?bun.sys.Error {
pub fn preadv(fd: FileDescriptor, bufs: []const bun.PlatformIOVec, position: i64) Maybe(usize) {
const uv_fd = bun.uvfdcast(fd);
- comptime std.debug.assert(bun.PlatformIOVec == uv.uv_buf_t);
+ comptime bun.assert(bun.PlatformIOVec == uv.uv_buf_t);
const debug_timer = bun.Output.DebugTimer.start();
@@ -353,7 +353,7 @@ pub fn preadv(fd: FileDescriptor, bufs: []const bun.PlatformIOVec, position: i64
pub fn pwritev(fd: FileDescriptor, bufs: []const bun.PlatformIOVecConst, position: i64) Maybe(usize) {
const uv_fd = bun.uvfdcast(fd);
- comptime std.debug.assert(bun.PlatformIOVec == uv.uv_buf_t);
+ comptime bun.assert(bun.PlatformIOVec == uv.uv_buf_t);
const debug_timer = bun.Output.DebugTimer.start();
diff --git a/src/thread_pool.zig b/src/thread_pool.zig
index 0bfa281f04348d..afe0efce07a4a7 100644
--- a/src/thread_pool.zig
+++ b/src/thread_pool.zig
@@ -7,7 +7,7 @@ const ThreadPool = @This();
const Futex = @import("./futex.zig");
const Environment = bun.Environment;
-const assert = std.debug.assert;
+const assert = bun.assert;
const Atomic = std.atomic.Value;
pub const OnSpawnCallback = *const fn (ctx: ?*anyopaque) ?*anyopaque;
@@ -389,7 +389,7 @@ test "parallel for loop" {
pub fn run(ctx: *@This(), value: u32, _: usize) void {
std.time.sleep(value);
ctx.completed += 1;
- std.debug.assert(ctx.completed <= ctx.total);
+ bun.assert(ctx.completed <= ctx.total);
}
};
const runny = try std.heap.page_allocator.create(Runner);
diff --git a/src/toml/toml_parser.zig b/src/toml/toml_parser.zig
index c31c70dcff5354..25d760526af090 100644
--- a/src/toml/toml_parser.zig
+++ b/src/toml/toml_parser.zig
@@ -26,7 +26,7 @@ const ExprNodeIndex = js_ast.ExprNodeIndex;
const ExprNodeList = js_ast.ExprNodeList;
const StmtNodeList = js_ast.StmtNodeList;
const BindingNodeList = js_ast.BindingNodeList;
-const assert = std.debug.assert;
+const assert = bun.assert;
const LocRef = js_ast.LocRef;
const S = js_ast.S;
diff --git a/src/url.zig b/src/url.zig
index 21c938b2e2a221..ba1de42f1ade3a 100644
--- a/src/url.zig
+++ b/src/url.zig
@@ -373,7 +373,7 @@ pub const URL = struct {
'@' => {
// we found a password, everything before this point in the slice is a password
url.password = str[0..i];
- if (Environment.allow_assert) std.debug.assert(str[i..].len < 2 or std.mem.readInt(u16, str[i..][0..2], .little) != std.mem.readInt(u16, "//", .little));
+ if (Environment.allow_assert) bun.assert(str[i..].len < 2 or std.mem.readInt(u16, str[i..][0..2], .little) != std.mem.readInt(u16, "//", .little));
return @intCast(i + 1);
},
// if we reach a slash or "?", there's no password
@@ -506,7 +506,7 @@ pub const QueryStringMap = struct {
var slice = this.map.list.slice();
const hash = slice.items(.name_hash)[this.i];
const name_slice = slice.items(.name)[this.i];
- std.debug.assert(name_slice.length > 0);
+ bun.assert(name_slice.length > 0);
var result = Result{ .name = this.map.str(name_slice), .values = target[0..1] };
target[0] = this.map.str(slice.items(.value)[this.i]);
@@ -522,7 +522,7 @@ pub const QueryStringMap = struct {
while (std.mem.indexOfScalar(u64, remainder_hashes[current_i..], hash)) |next_index| {
const real_i = current_i + next_index + this.i;
if (comptime Environment.isDebug) {
- std.debug.assert(!this.visited.isSet(real_i));
+ bun.assert(!this.visited.isSet(real_i));
}
this.visited.set(real_i);
@@ -608,7 +608,7 @@ pub const QueryStringMap = struct {
}
if (Environment.allow_assert)
- std.debug.assert(count > 0); // We should not call initWithScanner when there are no path params
+ bun.assert(count > 0); // We should not call initWithScanner when there are no path params
while (scanner.query.next()) |result| {
if (result.name_needs_decoding or result.value_needs_decoding) {
@@ -723,8 +723,8 @@ pub const QueryStringMap = struct {
if (nothing_needs_decoding) {
scanner = Scanner.init(query_string);
while (scanner.next()) |result| {
- if (Environment.allow_assert) std.debug.assert(!result.name_needs_decoding);
- if (Environment.allow_assert) std.debug.assert(!result.value_needs_decoding);
+ if (Environment.allow_assert) bun.assert(!result.name_needs_decoding);
+ if (Environment.allow_assert) bun.assert(!result.value_needs_decoding);
const name = result.name;
const value = result.value;
@@ -1282,7 +1282,7 @@ fn stringPointerFromStrings(parent: string, in: string) Api.StringPointer {
} else {
if (strings.indexOf(parent, in)) |i| {
if (comptime Environment.allow_assert) {
- std.debug.assert(strings.eqlLong(parent[i..][0..in.len], in, false));
+ bun.assert(strings.eqlLong(parent[i..][0..in.len], in, false));
}
return Api.StringPointer{
diff --git a/src/util.zig b/src/util.zig
index d3a7d9580bb2fa..da52409d56e2bb 100644
--- a/src/util.zig
+++ b/src/util.zig
@@ -257,7 +257,7 @@ pub fn Batcher(comptime Type: type) type {
}
pub inline fn done(this: *@This()) void {
- std.debug.assert(this.head.len == 0);
+ bun.assert(this.head.len == 0);
}
pub inline fn eat(this: *@This(), value: Type) *Type {
diff --git a/src/watcher.zig b/src/watcher.zig
index cb320ff8ac4dc5..57c5032b2c45b3 100644
--- a/src/watcher.zig
+++ b/src/watcher.zig
@@ -38,7 +38,7 @@ const INotify = struct {
name_len: u32,
pub fn name(this: *const INotifyEvent) [:0]u8 {
- if (comptime Environment.allow_assert) std.debug.assert(this.name_len > 0);
+ if (comptime Environment.allow_assert) bun.assert(this.name_len > 0);
// the name_len field is wrong
// it includes alignment / padding
@@ -49,7 +49,7 @@ const INotify = struct {
};
pub fn watchPath(this: *INotify, pathname: [:0]const u8) !EventListIndex {
- std.debug.assert(this.loaded_inotify);
+ bun.assert(this.loaded_inotify);
const old_count = this.watch_count.fetchAdd(1, .Release);
defer if (old_count == 0) Futex.wake(&this.watch_count, 10);
const watch_file_mask = std.os.linux.IN.EXCL_UNLINK | std.os.linux.IN.MOVE_SELF | std.os.linux.IN.DELETE_SELF | std.os.linux.IN.MOVED_TO | std.os.linux.IN.MODIFY;
@@ -57,7 +57,7 @@ const INotify = struct {
}
pub fn watchDir(this: *INotify, pathname: [:0]const u8) !EventListIndex {
- std.debug.assert(this.loaded_inotify);
+ bun.assert(this.loaded_inotify);
const old_count = this.watch_count.fetchAdd(1, .Release);
defer if (old_count == 0) Futex.wake(&this.watch_count, 10);
const watch_dir_mask = std.os.linux.IN.EXCL_UNLINK | std.os.linux.IN.DELETE | std.os.linux.IN.DELETE_SELF | std.os.linux.IN.CREATE | std.os.linux.IN.MOVE_SELF | std.os.linux.IN.ONLYDIR | std.os.linux.IN.MOVED_TO;
@@ -65,13 +65,13 @@ const INotify = struct {
}
pub fn unwatch(this: *INotify, wd: EventListIndex) void {
- std.debug.assert(this.loaded_inotify);
+ bun.assert(this.loaded_inotify);
_ = this.watch_count.fetchSub(1, .Release);
std.os.inotify_rm_watch(this.inotify_fd, wd);
}
pub fn init(this: *INotify, _: []const u8) !void {
- std.debug.assert(!this.loaded_inotify);
+ bun.assert(!this.loaded_inotify);
this.loaded_inotify = true;
if (bun.getenvZ("BUN_INOTIFY_COALESCE_INTERVAL")) |env| {
@@ -82,7 +82,7 @@ const INotify = struct {
}
pub fn read(this: *INotify) ![]*const INotifyEvent {
- std.debug.assert(this.loaded_inotify);
+ bun.assert(this.loaded_inotify);
restart: while (true) {
Futex.wait(&this.watch_count, 0, null) catch unreachable;
@@ -524,7 +524,7 @@ pub fn NewWatcher(comptime ContextType: type) type {
}
pub fn start(this: *Watcher) !void {
- std.debug.assert(this.watchloop_handle == null);
+ bun.assert(this.watchloop_handle == null);
this.thread = try std.Thread.spawn(.{}, Watcher.watchLoop, .{this});
}
@@ -620,7 +620,7 @@ pub fn NewWatcher(comptime ContextType: type) type {
fn _watchLoop(this: *Watcher) !void {
if (Environment.isMac) {
- std.debug.assert(this.platform.fd > 0);
+ bun.assert(this.platform.fd > 0);
const KEvent = std.c.Kevent;
var changelist_array: [128]KEvent = std.mem.zeroes([128]KEvent);
@@ -1037,7 +1037,7 @@ pub fn NewWatcher(comptime ContextType: type) type {
) !void {
if (comptime lock) this.mutex.lock();
defer if (comptime lock) this.mutex.unlock();
- std.debug.assert(file_path.len > 1);
+ bun.assert(file_path.len > 1);
const pathname = bun.fs.PathName.init(file_path);
const parent_dir = pathname.dirWithTrailingSlash();
@@ -1169,7 +1169,7 @@ pub fn NewWatcher(comptime ContextType: type) type {
}
pub fn removeAtIndex(this: *Watcher, index: WatchItemIndex, hash: HashType, parents: []HashType, comptime kind: WatchItem.Kind) void {
- std.debug.assert(index != no_watch_item);
+ bun.assert(index != no_watch_item);
this.evict_list[this.evict_list_i] = index;
this.evict_list_i += 1;
diff --git a/src/which.zig b/src/which.zig
index 59ae97ac1e0d30..3295877f2fd7f9 100644
--- a/src/which.zig
+++ b/src/which.zig
@@ -22,7 +22,7 @@ pub fn which(buf: *bun.PathBuffer, path: []const u8, cwd: []const u8, bin: []con
const result = whichWin(&convert_buf, path, cwd, bin) orelse return null;
const result_converted = bun.strings.convertUTF16toUTF8InBuffer(buf, result) catch unreachable;
buf[result_converted.len] = 0;
- std.debug.assert(result_converted.ptr == buf.ptr);
+ bun.assert(result_converted.ptr == buf.ptr);
return buf[0..result_converted.len :0];
}
@@ -78,7 +78,7 @@ pub fn endsWithExtension(str: []const u8) bool {
if (str[str.len - 4] != '.') return false;
const file_ext = str[str.len - 3 ..];
inline for (win_extensions) |ext| {
- comptime std.debug.assert(ext.len == 3);
+ comptime bun.assert(ext.len == 3);
if (bun.strings.eqlComptimeCheckLenWithType(u8, file_ext, ext, false)) return true;
}
return false;
@@ -173,7 +173,7 @@ test "which" {
var buf: bun.fs.PathBuffer = undefined;
const realpath = bun.getenvZ("PATH") orelse unreachable;
const whichbin = which(&buf, realpath, try bun.getcwdAlloc(std.heap.c_allocator), "which");
- try std.testing.expectEqualStrings(whichbin orelse return std.debug.assert(false), "/usr/bin/which");
+ try std.testing.expectEqualStrings(whichbin orelse return bun.assert(false), "/usr/bin/which");
try std.testing.expect(null == which(&buf, realpath, try bun.getcwdAlloc(std.heap.c_allocator), "baconnnnnn"));
try std.testing.expect(null != which(&buf, realpath, try bun.getcwdAlloc(std.heap.c_allocator), "zig"));
try std.testing.expect(null == which(&buf, realpath, try bun.getcwdAlloc(std.heap.c_allocator), "bin"));
diff --git a/src/windows_c.zig b/src/windows_c.zig
index 64a710358a7059..3a0f15b3a8b546 100644
--- a/src/windows_c.zig
+++ b/src/windows_c.zig
@@ -1318,10 +1318,10 @@ pub fn moveOpenedFileAt(
// supported in order to avoid either (1) using a redundant call that we can know in advance will return
// STATUS_NOT_SUPPORTED or (2) only setting IGNORE_READONLY_ATTRIBUTE when >= rs5
// and therefore having different behavior when the Windows version is >= rs1 but < rs5.
- comptime std.debug.assert(builtin.target.os.version_range.windows.min.isAtLeast(.win10_rs5));
+ comptime bun.assert(builtin.target.os.version_range.windows.min.isAtLeast(.win10_rs5));
if (bun.Environment.allow_assert) {
- std.debug.assert(std.mem.indexOfScalar(u16, new_file_name, '/') == null); // Call moveOpenedFileAtLoose
+ bun.assert(std.mem.indexOfScalar(u16, new_file_name, '/') == null); // Call moveOpenedFileAtLoose
}
const struct_buf_len = @sizeOf(w.FILE_RENAME_INFORMATION_EX) + (bun.MAX_PATH_BYTES - 1);
@@ -1372,7 +1372,7 @@ pub fn moveOpenedFileAtLoose(
new_path: []const u16,
replace_if_exists: bool,
) Maybe(void) {
- std.debug.assert(std.mem.indexOfScalar(u16, new_path, '/') == null); // Call bun.strings.toWPathNormalized first
+ bun.assert(std.mem.indexOfScalar(u16, new_path, '/') == null); // Call bun.strings.toWPathNormalized first
const without_leading_dot_slash = if (new_path.len >= 2 and new_path[0] == '.' and new_path[1] == '\\')
new_path[2..]
@@ -1406,7 +1406,7 @@ const FILE_DISPOSITION_IGNORE_READONLY_ATTRIBUTE: w.ULONG = 0x00000010;
///
/// NOTE: THE FILE MUST BE OPENED WITH ACCESS_MASK "DELETE" OR THIS WILL FAIL
pub fn deleteOpenedFile(fd: bun.FileDescriptor) Maybe(void) {
- comptime std.debug.assert(builtin.target.os.version_range.windows.min.isAtLeast(.win10_rs5));
+ comptime bun.assert(builtin.target.os.version_range.windows.min.isAtLeast(.win10_rs5));
var info = w.FILE_DISPOSITION_INFORMATION_EX{
.Flags = FILE_DISPOSITION_DELETE |
FILE_DISPOSITION_POSIX_SEMANTICS |
diff --git a/src/wyhash.zig b/src/wyhash.zig
index 34a69f89cd81e9..1d3a1a0ad8f80c 100644
--- a/src/wyhash.zig
+++ b/src/wyhash.zig
@@ -1,6 +1,7 @@
//
// this file is a copy of Wyhash from the zig standard library, version v0.11.0-dev.2609+5e19250a1
//
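+// Prefer bun.assert when this file is compiled as part of Bun (the root module exposes a "bun" decl); otherwise fall back to std.debug.assert.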
+const assert = if (@hasDecl(@import("root"), "bun")) @import("root").bun.assert else @import("std").debug.assert;
const std = @import("std");
const mem = std.mem;
@@ -51,7 +52,7 @@ const WyhashStateless = struct {
}
inline fn round(self: *WyhashStateless, b: []const u8) void {
- std.debug.assert(b.len == 32);
+ assert(b.len == 32);
self.seed = mix0(
read_bytes(8, b[0..]),
@@ -65,7 +66,7 @@ const WyhashStateless = struct {
}
pub inline fn update(self: *WyhashStateless, b: []const u8) void {
- std.debug.assert(b.len % 32 == 0);
+ assert(b.len % 32 == 0);
var off: usize = 0;
while (off < b.len) : (off += 32) {
@@ -77,7 +78,7 @@ const WyhashStateless = struct {
}
pub inline fn final(self: *WyhashStateless, b: []const u8) u64 {
- std.debug.assert(b.len < 32);
+ assert(b.len < 32);
const seed = self.seed;
const rem_len = @as(u5, @intCast(b.len));