diff --git a/docs/bundler/loaders.md b/docs/bundler/loaders.md
index 57073f19d73062..b1941e121c6114 100644
--- a/docs/bundler/loaders.md
+++ b/docs/bundler/loaders.md
@@ -80,6 +80,9 @@ TOML files can be directly imported. Bun will parse them with its fast native TO
```ts
import config from "./bunfig.toml";
config.logLevel; // => "debug"
+
+// via import attribute:
+// import myCustomTOML from './my.config' with { type: "toml" };
```
During bundling, the parsed TOML is inlined into the bundle as a JavaScript object.
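+
+The `type` attribute also works with dynamic `import()`. A minimal sketch, assuming a hypothetical `./my.config` file that contains TOML:
+
+```ts
+// The attribute forces Bun's TOML loader even though the
+// extension is not ".toml".
+const { default: myCustomTOML } = await import("./my.config", {
+  with: { type: "toml" },
+});
+console.log(myCustomTOML.logLevel);
+```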
@@ -122,6 +125,10 @@ Text files can be directly imported. The file is read and returned as a string.
```ts
import contents from "./file.txt";
console.log(contents); // => "Hello, world!"
+
+// To import an HTML file as text,
+// the "type" attribute can be used to override the default loader:
+import html from "./index.html" with { type: "text" };
```
When referenced during a build, the contents are inlined into the bundle as a string.
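+
+The same override works with dynamic `import()`; a short sketch (the `./index.html` path is illustrative):
+
+```ts
+// The "type" attribute takes higher precedence than the ".html"
+// extension, so the module resolves to the raw file contents.
+const { default: html } = await import("./index.html", {
+  with: { type: "text" },
+});
+console.log(typeof html); // => "string"
+```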
diff --git a/docs/guides/runtime/import-html.md b/docs/guides/runtime/import-html.md
new file mode 100644
index 00000000000000..a8ee057669181f
--- /dev/null
+++ b/docs/guides/runtime/import-html.md
@@ -0,0 +1,15 @@
+---
+name: Import HTML file as text
+---
+
+To import a `.html` file in Bun as text, use the `type: "text"` import attribute.
+
+```ts
+import html from "./file.html" with { type: "text" };
+
+console.log(html); // ...
+```
+
+This also works with hot module reloading and watch mode: Bun will reload whenever `./file.html` changes.
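+
+The dynamic `import()` form accepts the same attribute. A minimal sketch, assuming the same `./file.html`:
+
+```ts
+const { default: html } = await import("./file.html", {
+  with: { type: "text" },
+});
+console.log(html); // the raw HTML source, as a string
+```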
+
+This feature was added in Bun v1.1.5.
diff --git a/src/bun.js/bindings/ZigGlobalObject.cpp b/src/bun.js/bindings/ZigGlobalObject.cpp
index 073f7387a9d3ac..70eda447447f86 100644
--- a/src/bun.js/bindings/ZigGlobalObject.cpp
+++ b/src/bun.js/bindings/ZigGlobalObject.cpp
@@ -3866,8 +3866,25 @@ JSC::JSInternalPromise* GlobalObject::moduleLoaderImportModule(JSGlobalObject* j
resolvedIdentifier = JSC::Identifier::fromString(vm, makeString(resolved.result.value.toWTFString(BunString::ZeroCopy), Zig::toString(queryString)));
}
+ // This gets passed through the "parameters" argument to moduleLoaderFetch.
+ // Therefore, we modify it in place.
+ if (parameters && parameters.isObject()) {
+ auto* object = parameters.toObject(globalObject);
+ if (auto withObject = object->getIfPropertyExists(globalObject, vm.propertyNames->withKeyword)) {
+ if (withObject.isObject()) {
+ auto* with = jsCast<JSC::JSObject*>(withObject);
+ if (auto type = with->getIfPropertyExists(globalObject, vm.propertyNames->type)) {
+ if (type.isString()) {
+ const auto typeString = type.toWTFString(globalObject);
+ parameters = JSC::JSScriptFetchParameters::create(vm, ScriptFetchParameters::create(typeString));
+ }
+ }
+ }
+ }
+ }
+
auto result = JSC::importModule(globalObject, resolvedIdentifier,
- JSC::jsUndefined(), parameters, JSC::jsUndefined());
+ JSC::jsUndefined(), parameters, jsUndefined());
RETURN_IF_EXCEPTION(scope, promise->rejectWithCaughtException(globalObject, scope));
return result;
diff --git a/src/bun.js/module_loader.zig b/src/bun.js/module_loader.zig
index b10065c91272aa..6173bf4f3557ab 100644
--- a/src/bun.js/module_loader.zig
+++ b/src/bun.js/module_loader.zig
@@ -2254,6 +2254,14 @@ pub const ModuleLoader = struct {
if (type_attribute) |attribute| {
if (attribute.eqlComptime("sqlite")) {
loader = .sqlite;
+ } else if (attribute.eqlComptime("text")) {
+ loader = .text;
+ } else if (attribute.eqlComptime("json")) {
+ loader = .json;
+ } else if (attribute.eqlComptime("toml")) {
+ loader = .toml;
+ } else if (attribute.eqlComptime("file")) {
+ loader = .file;
}
}
diff --git a/src/bundler.zig b/src/bundler.zig
index 9f466107cd7926..b82421b9839e7d 100644
--- a/src/bundler.zig
+++ b/src/bundler.zig
@@ -1597,7 +1597,7 @@ pub const Bundler = struct {
},
// TODO: use lazy export AST
.text => {
- const expr = js_ast.Expr.init(js_ast.E.String, js_ast.E.String{
+ const expr = js_ast.Expr.init(js_ast.E.UTF8String, js_ast.E.UTF8String{
.data = source.contents,
}, logger.Loc.Empty);
const stmt = js_ast.Stmt.alloc(js_ast.S.ExportDefault, js_ast.S.ExportDefault{
diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig
index 8e16b8981ed438..94f9966b7726b4 100644
--- a/src/bundler/bundle_v2.zig
+++ b/src/bundler/bundle_v2.zig
@@ -610,7 +610,10 @@ pub const BundleV2 = struct {
.index = source_index,
},
.loader = loader,
- .side_effects = _resolver.SideEffects.has_side_effects,
+ .side_effects = switch (loader) {
+ .text, .json, .toml, .file => _resolver.SideEffects.no_side_effects__pure_data,
+ else => _resolver.SideEffects.has_side_effects,
+ },
}) catch @panic("Ran out of memory");
var task = this.graph.allocator.create(ParseTask) catch @panic("Ran out of memory");
task.* = ParseTask.init(&resolve_result, source_index, this);
@@ -2587,9 +2590,8 @@ pub const ParseTask = struct {
return JSAst.init((try js_parser.newLazyExportAST(allocator, bundler.options.define, opts, log, root, &source, "")).?);
},
.text => {
- const root = Expr.init(E.String, E.String{
+ const root = Expr.init(E.UTF8String, E.UTF8String{
.data = source.contents,
- .prefer_template = true,
}, Logger.Loc{ .start = 0 });
return JSAst.init((try js_parser.newLazyExportAST(allocator, bundler.options.define, opts, log, root, &source, "")).?);
},
diff --git a/src/import_record.zig b/src/import_record.zig
index 560d0f6a1650dc..93e18c124d6a30 100644
--- a/src/import_record.zig
+++ b/src/import_record.zig
@@ -214,15 +214,37 @@ pub const ImportRecord = struct {
with_type_sqlite,
with_type_sqlite_embedded,
+ with_type_text,
+ with_type_json,
+ with_type_toml,
+ with_type_file,
pub fn loader(this: Tag) ?bun.options.Loader {
return switch (this) {
.with_type_sqlite => .sqlite,
.with_type_sqlite_embedded => .sqlite_embedded,
+ .with_type_text => .text,
+ .with_type_json => .json,
+ .with_type_toml => .toml,
+ .with_type_file => .file,
else => null,
};
}
+ pub fn onlySupportsDefaultImports(this: Tag) bool {
+ return switch (this) {
+ .with_type_file, .with_type_text => true,
+ else => false,
+ };
+ }
+
+ pub fn isSQLite(this: Tag) bool {
+ return switch (this) {
+ .with_type_sqlite, .with_type_sqlite_embedded => true,
+ else => false,
+ };
+ }
+
pub fn isReactReference(this: Tag) bool {
return switch (this) {
.react_client_component, .react_server_component => true,
diff --git a/src/js_ast.zig b/src/js_ast.zig
index 118d6c808b7279..2f87da87f56584 100644
--- a/src/js_ast.zig
+++ b/src/js_ast.zig
@@ -2665,6 +2665,8 @@ pub const E = struct {
pub const Import = struct {
expr: ExprNodeIndex,
import_record_index: u32,
+ // This will be dynamic at some point.
+ type_attribute: TypeAttribute = .none,
/// Comments inside "import()" expressions have special meaning for Webpack.
/// Preserving comments inside these expressions makes it possible to use
@@ -2679,6 +2681,24 @@ pub const E = struct {
pub fn isImportRecordNull(this: *const Import) bool {
return this.import_record_index == std.math.maxInt(u32);
}
+
+ pub const TypeAttribute = enum {
+ none,
+ json,
+ toml,
+ text,
+ file,
+
+ pub fn tag(this: TypeAttribute) ImportRecord.Tag {
+ return switch (this) {
+ .none => .none,
+ .json => .with_type_json,
+ .toml => .with_type_toml,
+ .text => .with_type_text,
+ .file => .with_type_file,
+ };
+ }
+ };
};
};
diff --git a/src/js_parser.zig b/src/js_parser.zig
index af154af1d32af9..3d9ab3b880b9fa 100644
--- a/src/js_parser.zig
+++ b/src/js_parser.zig
@@ -328,6 +328,7 @@ const TransposeState = struct {
is_then_catch_target: bool = false,
is_require_immediately_assigned_to_decl: bool = false,
loc: logger.Loc = logger.Loc.Empty,
+ type_attribute: E.Import.TypeAttribute = .none,
};
var true_args = &[_]Expr{
@@ -4960,11 +4961,16 @@ fn NewParser_(
}
const import_record_index = p.addImportRecord(.dynamic, arg.loc, arg.data.e_string.slice(p.allocator));
+
+ if (state.type_attribute.tag() != .none) {
+ p.import_records.items[import_record_index].tag = state.type_attribute.tag();
+ }
p.import_records.items[import_record_index].handles_import_errors = (state.is_await_target and p.fn_or_arrow_data_visit.try_body_count != 0) or state.is_then_catch_target;
p.import_records_for_current_part.append(p.allocator, import_record_index) catch unreachable;
return p.newExpr(E.Import{
.expr = arg,
.import_record_index = Ref.toInt(import_record_index),
+ .type_attribute = state.type_attribute,
// .leading_interior_comments = arg.getString().
}, state.loc);
}
@@ -4978,6 +4984,7 @@ fn NewParser_(
return p.newExpr(E.Import{
.expr = arg,
.import_record_index = std.math.maxInt(u32),
+ .type_attribute = state.type_attribute,
}, state.loc);
}
@@ -8841,7 +8848,7 @@ fn NewParser_(
}
if (path.import_tag != .none) {
- try p.validateSQLiteImportType(path.import_tag, &stmt);
+ try p.validateImportType(path.import_tag, &stmt);
}
// Track the items for this namespace
@@ -8849,20 +8856,33 @@ fn NewParser_(
return p.s(stmt, loc);
}
- fn validateSQLiteImportType(p: *P, import_tag: ImportRecord.Tag, stmt: *S.Import) !void {
+ fn validateImportType(p: *P, import_tag: ImportRecord.Tag, stmt: *S.Import) !void {
@setCold(true);
- if (import_tag == .with_type_sqlite or import_tag == .with_type_sqlite_embedded) {
+ if (import_tag.loader() != null) {
p.import_records.items[stmt.import_record_index].tag = import_tag;
- for (stmt.items) |*item| {
- if (!(strings.eqlComptime(item.alias, "default") or strings.eqlComptime(item.alias, "db"))) {
- try p.log.addError(
- p.source,
- item.name.loc,
- "sqlite imports only support the \"default\" or \"db\" imports",
- );
- break;
+ if (import_tag.isSQLite()) {
+ for (stmt.items) |*item| {
+ if (!(strings.eqlComptime(item.alias, "default") or strings.eqlComptime(item.alias, "db"))) {
+ try p.log.addError(
+ p.source,
+ item.name.loc,
+ "sqlite imports only support the \"default\" or \"db\" imports",
+ );
+ break;
+ }
+ }
+ } else if (import_tag.onlySupportsDefaultImports()) {
+ for (stmt.items) |*item| {
+ if (!(strings.eqlComptime(item.alias, "default"))) {
+ try p.log.addError(
+ p.source,
+ item.name.loc,
+ "This loader type only supports the \"default\" import",
+ );
+ break;
+ }
}
}
}
@@ -11696,7 +11716,6 @@ fn NewParser_(
try p.lexer.expect(.t_string_literal);
}
- // For now, we silently strip import assertions
if (!p.lexer.has_newline_before and (
// Import Assertions are deprecated.
// Import Attributes are the new way to do this.
@@ -11751,13 +11770,22 @@ fn NewParser_(
if (supported_attribute) |attr| {
switch (attr) {
.type => {
- if (strings.eqlComptime(p.lexer.string_literal_slice, "macro")) {
+ const type_attr = p.lexer.string_literal_slice;
+ if (strings.eqlComptime(type_attr, "macro")) {
path.is_macro = true;
- } else if (strings.eqlComptime(p.lexer.string_literal_slice, "sqlite")) {
+ } else if (strings.eqlComptime(type_attr, "sqlite")) {
path.import_tag = .with_type_sqlite;
if (has_seen_embed_true) {
path.import_tag = .with_type_sqlite_embedded;
}
+ } else if (strings.eqlComptime(type_attr, "json")) {
+ path.import_tag = .with_type_json;
+ } else if (strings.eqlComptime(type_attr, "toml")) {
+ path.import_tag = .with_type_toml;
+ } else if (strings.eqlComptime(type_attr, "text")) {
+ path.import_tag = .with_type_text;
+ } else if (strings.eqlComptime(type_attr, "file")) {
+ path.import_tag = .with_type_file;
}
},
.embed => {
@@ -14970,6 +14998,8 @@ fn NewParser_(
const value = try p.parseExpr(.comma);
+ var type_attribute = E.Import.TypeAttribute.none;
+
if (p.lexer.token == .t_comma) {
// "import('./foo.json', )"
try p.lexer.next();
@@ -14977,7 +15007,28 @@ fn NewParser_(
if (p.lexer.token != .t_close_paren) {
// for now, we silently strip import assertions
// "import('./foo.json', { assert: { type: 'json' } })"
- _ = try p.parseExpr(.comma);
+ const import_expr = try p.parseExpr(.comma);
+ if (import_expr.data == .e_object) {
+ if (import_expr.data.e_object.get("with") orelse import_expr.data.e_object.get("assert")) |with| {
+ if (with.data == .e_object) {
+ const with_object = with.data.e_object;
+ if (with_object.get("type")) |field| {
+ if (field.data == .e_string) {
+ const str = field.data.e_string;
+ if (str.eqlComptime("json")) {
+ type_attribute = .json;
+ } else if (str.eqlComptime("toml")) {
+ type_attribute = .toml;
+ } else if (str.eqlComptime("text")) {
+ type_attribute = .text;
+ } else if (str.eqlComptime("file")) {
+ type_attribute = .file;
+ }
+ }
+ }
+ }
+ }
+ }
if (p.lexer.token == .t_comma) {
// "import('./foo.json', { assert: { type: 'json' } }, , )"
@@ -14998,11 +15049,17 @@ fn NewParser_(
.expr = value,
.leading_interior_comments = comments,
.import_record_index = import_record_index,
+ .type_attribute = type_attribute,
}, loc);
}
}
- return p.newExpr(E.Import{ .expr = value, .leading_interior_comments = comments, .import_record_index = std.math.maxInt(u32) }, loc);
+ return p.newExpr(E.Import{
+ .expr = value,
+ .type_attribute = type_attribute,
+ .leading_interior_comments = comments,
+ .import_record_index = std.math.maxInt(u32),
+ }, loc);
}
fn parseJSXPropValueIdentifier(p: *P, previous_string_with_backslash_loc: *logger.Loc) !Expr {
@@ -16837,6 +16894,7 @@ fn NewParser_(
.is_await_target = if (p.await_target != null) p.await_target.? == .e_import and p.await_target.?.e_import == e_ else false,
.is_then_catch_target = p.then_catch_chain.has_catch and std.meta.activeTag(p.then_catch_chain.next_target) == .e_import and expr.data.e_import == p.then_catch_chain.next_target.e_import,
.loc = e_.expr.loc,
+ .type_attribute = e_.type_attribute,
};
e_.expr = p.visitExpr(e_.expr);
diff --git a/src/js_printer.zig b/src/js_printer.zig
index 642c5940a1c609..4ba133c1a89182 100644
--- a/src/js_printer.zig
+++ b/src/js_printer.zig
@@ -171,15 +171,27 @@ fn ws(comptime str: []const u8) Whitespacer {
pub fn estimateLengthForJSON(input: []const u8, comptime ascii_only: bool) usize {
var remaining = input;
- var len: u32 = 2; // for quotes
+ var len: usize = 2; // for quotes
while (strings.indexOfNeedsEscape(remaining)) |i| {
len += i;
remaining = remaining[i..];
const char_len = strings.wtf8ByteSequenceLengthWithInvalid(remaining[0]);
- const c = strings.decodeWTF8RuneT(remaining.ptr[0..4], char_len, i32, 0);
+ const c = strings.decodeWTF8RuneT(
+ &switch (char_len) {
+ // 0 is not returned by `wtf8ByteSequenceLengthWithInvalid`
+ 1 => .{ remaining[0], 0, 0, 0 },
+ 2 => remaining[0..2].* ++ .{ 0, 0 },
+ 3 => remaining[0..3].* ++ .{0},
+ 4 => remaining[0..4].*,
+ else => unreachable,
+ },
+ char_len,
+ i32,
+ 0,
+ );
if (canPrintWithoutEscape(i32, c, ascii_only)) {
- len += @as(u32, char_len);
+ len += @as(usize, char_len);
} else if (c <= 0xFFFF) {
len += 6;
} else {
@@ -187,7 +199,7 @@ pub fn estimateLengthForJSON(input: []const u8, comptime ascii_only: bool) usize
}
remaining = remaining[char_len..];
} else {
- return @as(u32, @truncate(remaining.len)) + 2;
+ return remaining.len + 2;
}
return len;
@@ -206,12 +218,28 @@ pub fn quoteForJSONBuffer(text: []const u8, bytes: *MutableString, comptime asci
const n: usize = text.len;
while (i < n) {
const width = strings.wtf8ByteSequenceLengthWithInvalid(text[i]);
- const c = strings.decodeWTF8RuneT(text.ptr[i .. i + 4][0..4], width, i32, 0);
+ const clamped_width = @min(@as(usize, width), n -| i);
+ const c = strings.decodeWTF8RuneT(
+ &switch (clamped_width) {
+ // 0 is not returned by `wtf8ByteSequenceLengthWithInvalid`
+ 1 => .{ text[i], 0, 0, 0 },
+ 2 => text[i..][0..2].* ++ .{ 0, 0 },
+ 3 => text[i..][0..3].* ++ .{0},
+ 4 => text[i..][0..4].*,
+ else => unreachable,
+ },
+ width,
+ i32,
+ 0,
+ );
if (canPrintWithoutEscape(i32, c, ascii_only)) {
- const remain = text[i + @as(usize, width) ..];
+ const remain = text[i + clamped_width ..];
if (strings.indexOfNeedsEscape(remain)) |j| {
- try bytes.appendSlice(text[i .. i + j + @as(usize, width)]);
- i += j + @as(usize, width);
+ const text_chunk = text[i .. i + clamped_width];
+ try bytes.appendSlice(text_chunk);
+ i += clamped_width;
+ try bytes.appendSlice(remain[0..j]);
+ i += j;
continue;
} else {
try bytes.appendSlice(text[i..]);
@@ -2049,6 +2077,37 @@ fn NewPrinter(
// Allow it to fail at runtime, if it should
p.print("import(");
p.printImportRecordPath(record);
+
+ switch (record.tag) {
+ .with_type_sqlite, .with_type_sqlite_embedded => {
+ // we do not preserve "embed": "true" since it is not necessary
+ p.printWhitespacer(ws(", { with: { type: \"sqlite\" } }"));
+ },
+ .with_type_text => {
+ if (comptime is_bun_platform) {
+ p.printWhitespacer(ws(", { with: { type: \"text\" } }"));
+ }
+ },
+ .with_type_json => {
+ // backwards compatibility: previously, we always stripped type json
+ if (comptime is_bun_platform) {
+ p.printWhitespacer(ws(", { with: { type: \"json\" } }"));
+ }
+ },
+ .with_type_toml => {
+ // backwards compatibility: previously, we always stripped type
+ if (comptime is_bun_platform) {
+ p.printWhitespacer(ws(", { with: { type: \"toml\" } }"));
+ }
+ },
+ .with_type_file => {
+ // backwards compatibility: previously, we always stripped type
+ if (comptime is_bun_platform) {
+ p.printWhitespacer(ws(", { with: { type: \"file\" } }"));
+ }
+ },
+ else => {},
+ }
p.print(")");
if (leading_interior_comments.len > 0) {
@@ -2448,6 +2507,26 @@ fn NewPrinter(
}
p.printExpr(e.expr, .comma, ExprFlag.None());
+ if (comptime is_bun_platform) {
+ // since we previously stripped type, it is a breaking change to
+ // enable this for non-bun platforms
+ switch (e.type_attribute) {
+ .none => {},
+ .text => {
+ p.printWhitespacer(ws(", { with: { type: \"text\" } }"));
+ },
+ .json => {
+ p.printWhitespacer(ws(", { with: { type: \"json\" } }"));
+ },
+ .toml => {
+ p.printWhitespacer(ws(", { with: { type: \"toml\" } }"));
+ },
+ .file => {
+ p.printWhitespacer(ws(", { with: { type: \"file\" } }"));
+ },
+ }
+ }
+
if (e.leading_interior_comments.len > 0) {
p.printNewline();
p.options.unindent();
@@ -4711,9 +4790,35 @@ fn NewPrinter(
p.printImportRecordPath(record);
- if ((record.tag.loader() orelse options.Loader.file).isSQLite()) {
- // we do not preserve "embed": "true" since it is not necessary
- p.printWhitespacer(ws(" with { type: \"sqlite\" }"));
+ switch (record.tag) {
+ .with_type_sqlite, .with_type_sqlite_embedded => {
+ // we do not preserve "embed": "true" since it is not necessary
+ p.printWhitespacer(ws(" with { type: \"sqlite\" }"));
+ },
+ .with_type_text => {
+ if (comptime is_bun_platform) {
+ p.printWhitespacer(ws(" with { type: \"text\" }"));
+ }
+ },
+ .with_type_json => {
+ // backwards compatibility: previously, we always stripped type json
+ if (comptime is_bun_platform) {
+ p.printWhitespacer(ws(" with { type: \"json\" }"));
+ }
+ },
+ .with_type_toml => {
+ // backwards compatibility: previously, we always stripped type
+ if (comptime is_bun_platform) {
+ p.printWhitespacer(ws(" with { type: \"toml\" }"));
+ }
+ },
+ .with_type_file => {
+ // backwards compatibility: previously, we always stripped type
+ if (comptime is_bun_platform) {
+ p.printWhitespacer(ws(" with { type: \"file\" }"));
+ }
+ },
+ else => {},
}
p.printSemicolonAfterStatement();
},
diff --git a/test/bundler/bundler_loader.test.ts b/test/bundler/bundler_loader.test.ts
new file mode 100644
index 00000000000000..26c1c5bd511dc7
--- /dev/null
+++ b/test/bundler/bundler_loader.test.ts
@@ -0,0 +1,97 @@
+import { fileURLToPath, pathToFileURL } from "bun";
+import { itBundled, testForFile } from "./expectBundled";
+var { describe, test, expect } = testForFile(import.meta.path);
+
+describe("bundler", async () => {
+ for (let target of ["bun", "node"] as const) {
+ describe(`${target} loader`, async () => {
+ itBundled("bun/loader-text-file", {
+ target,
+ outfile: "",
+ outdir: "/out",
+
+ files: {
+ "/entry.ts": /* js */ `
+ import hello from './hello.foo' with {type: "text"};
+ console.log(hello);
+ `,
+ "/hello.foo": "Hello, world!",
+ },
+ run: { stdout: "Hello, world!" },
+ });
+ itBundled("bun/loader-json-file", {
+ target,
+ files: {
+ "/entry.ts": /* js */ `
+ import hello from './hello.notjson' with {type: "json"};
+ console.write(JSON.stringify(hello));
+ `,
+ "/hello.notjson": JSON.stringify({ hello: "world" }),
+ },
+ run: { stdout: '{"hello":"world"}' },
+ });
+ itBundled("bun/loader-toml-file", {
+ target,
+ files: {
+ "/entry.ts": /* js */ `
+ import hello from './hello.nottoml' with {type: "toml"};
+ console.write(JSON.stringify(hello));
+ `,
+ "/hello.nottoml": `hello = "world"`,
+ },
+ run: { stdout: '{"hello":"world"}' },
+ });
+ itBundled("bun/loader-text-file", {
+ target,
+ files: {
+ "/entry.ts": /* js */ `
+ import hello from './hello.json' with {type: "text"};
+ console.write(hello);
+ `,
+ "/hello.json": JSON.stringify({ hello: "world" }),
+ },
+ run: { stdout: '{"hello":"world"}' },
+ });
+ });
+ }
+ itBundled("bun/loader-text-file", {
+ target: "bun",
+ outfile: "",
+ outdir: "/out",
+
+ files: {
+ "/entry.ts": /* js */ `
+ import first from './1.boo' with {type: "text"};
+ import second from './2.boo' with {type: "text"};
+ console.write(first + second);
+ `,
+ "/1.boo": "'`Hello, \nworld!`",
+ "/2.boo": "`${Hello}\n, world!`'",
+ },
+ run: {
+ stdout: "'`Hello, \nworld!``${Hello}\n, world!`'",
+ },
+ });
+
+ const moon = await Bun.file(
+ fileURLToPath(import.meta.resolve("../js/bun/util/text-loader-fixture-text-file.backslashes.txt")),
+ ).text();
+
+ // https://github.com/oven-sh/bun/issues/3449
+ itBundled("bun/loader-text-file-#3449", {
+ target: "bun",
+ outfile: "",
+ outdir: "/out",
+
+ files: {
+ "/entry.ts": /* js */ `
+ import first from './1.boo' with {type: "text"};
+ console.write(first);
+ `,
+ "/1.boo": moon,
+ },
+ run: {
+ stdout: moon,
+ },
+ });
+});
diff --git a/test/js/bun/resolve/toml/toml-fixture.toml.txt b/test/js/bun/resolve/toml/toml-fixture.toml.txt
new file mode 100644
index 00000000000000..5b7df33af2b5af
--- /dev/null
+++ b/test/js/bun/resolve/toml/toml-fixture.toml.txt
@@ -0,0 +1,42 @@
+
+framework = "next"
+origin = "http://localhost:5000"
+inline.array = [1234, 4, 5, 6]
+
+
+[macros]
+react-relay = { "graphql" = "node_modules/bun-macro-relay/bun-macro-relay.tsx" }
+
+[install.scopes]
+"@mybigcompany2" = { "token" = "123456", "url" = "https://registry.mybigcompany.com" }
+"@mybigcompany3" = { "token" = "123456", "url" = "https://registry.mybigcompany.com", "three" = 4 }
+
+
+[install.scopes."@mybigcompany"]
+token = "123456"
+url = "https://registry.mybigcompany.com"
+
+[bundle.packages]
+"@emotion/react" = true
+
+[install.cache]
+dir = "C:\\Windows\\System32"
+dir2 = "C:\\Windows\\System32\\🏳️🌈"
+
+[dev]
+foo = 123
+"foo.bar" = "baz"
+"abba.baba" = "baba"
+dabba = -123
+doo = 123.456
+one.two.three = 4
+
+[[array]]
+entry_one = "one"
+entry_two = "two"
+
+[[array]]
+entry_one = "three"
+
+[[array.nested]]
+entry_one = "four"
diff --git a/test/js/bun/resolve/toml/toml.test.js b/test/js/bun/resolve/toml/toml.test.js
index 652d7e6d44c2de..ce5cb2923b351a 100644
--- a/test/js/bun/resolve/toml/toml.test.js
+++ b/test/js/bun/resolve/toml/toml.test.js
@@ -1,12 +1,7 @@
-import { describe, it, expect } from "bun:test";
-import { gc } from "harness";
-
-it("syntax", async () => {
- gc();
-
- const toml = (await import("./toml-fixture.toml")).default;
- gc();
+import { expect, it } from "bun:test";
+import tomlFromCustomTypeAttribute from "./toml-fixture.toml.txt" with { type: "toml" };
+function checkToml(toml) {
expect(toml.framework).toBe("next");
expect(toml.bundle.packages["@emotion/react"]).toBe(true);
expect(toml.array[0].entry_one).toBe("one");
@@ -24,6 +19,19 @@ it("syntax", async () => {
expect(toml.install.scopes["@mybigcompany3"].three).toBe(4);
expect(toml.install.cache.dir).toBe("C:\\Windows\\System32");
expect(toml.install.cache.dir2).toBe("C:\\Windows\\System32\\🏳️🌈");
+}
+
+it("via dynamic import", async () => {
+ const toml = (await import("./toml-fixture.toml")).default;
+ checkToml(toml);
+});
+
+it("via import type toml", async () => {
+ checkToml(tomlFromCustomTypeAttribute);
+});
- gc();
+it("via dynamic import with type attribute", async () => {
+ delete require.cache[require.resolve("./toml-fixture.toml.txt")];
+ const toml = (await import("./toml-fixture.toml.txt", { with: { type: "toml" } })).default;
+ checkToml(toml);
});
diff --git a/test/js/bun/util/text-loader-fixture-import-backslashes.ts b/test/js/bun/util/text-loader-fixture-import-backslashes.ts
new file mode 100644
index 00000000000000..9c478569c13747
--- /dev/null
+++ b/test/js/bun/util/text-loader-fixture-import-backslashes.ts
@@ -0,0 +1,3 @@
+import text from "./text-loader-fixture-text-file.backslashes.txt";
+
+console.write(text);
diff --git a/test/js/bun/util/text-loader-fixture-import-latin1.ts b/test/js/bun/util/text-loader-fixture-import-latin1.ts
new file mode 100644
index 00000000000000..213158b78bdb45
--- /dev/null
+++ b/test/js/bun/util/text-loader-fixture-import-latin1.ts
@@ -0,0 +1,3 @@
+import text from "./text-loader-fixture-text-file.latin1.txt";
+
+console.write(text);
diff --git a/test/js/bun/util/text-loader-fixture-import-nonascii.ts b/test/js/bun/util/text-loader-fixture-import-nonascii.ts
new file mode 100644
index 00000000000000..be32cecef44a70
--- /dev/null
+++ b/test/js/bun/util/text-loader-fixture-import-nonascii.ts
@@ -0,0 +1,3 @@
+import text from "./text-loader-fixture-text-file.nonascii.txt";
+
+console.write(text);
diff --git a/test/js/bun/util/text-loader-fixture-import.ts b/test/js/bun/util/text-loader-fixture-import.ts
index ec72286aca8533..1cba9cff6584f4 100644
--- a/test/js/bun/util/text-loader-fixture-import.ts
+++ b/test/js/bun/util/text-loader-fixture-import.ts
@@ -1,3 +1,2 @@
import text from "./text-loader-fixture-text-file.txt";
-
console.write(text);
diff --git a/test/js/bun/util/text-loader-fixture-text-file.backslashes.txt b/test/js/bun/util/text-loader-fixture-text-file.backslashes.txt
new file mode 100644
index 00000000000000..be9c8a65500fce
--- /dev/null
+++ b/test/js/bun/util/text-loader-fixture-text-file.backslashes.txt
@@ -0,0 +1,15 @@
+ .---------.
+ .--' o . `--.
+ .'@ @@@@@@ . . `.
+ .' . @@@@@@@@ @@@@@@ `.
+ / @@o @@@@@@. @@@@ O @\
+ |@@@ @@@@@@ @@|
+/ @@@@ `.-. @@@@@@@@ @@\
+|@ @@ @@@@@@ @@ |
+\ @@ @ .() @@ @@@@ /
+ | @@@ @@@ @@ |
+ \ . @@ @\ . . @@ o /
+ `. @@@@ _\ / . o .'
+ `. / | o .'
+ `--./ . .--'
+ `---------'
\ No newline at end of file
diff --git a/test/js/bun/util/text-loader-fixture-text-file.latin1.txt b/test/js/bun/util/text-loader-fixture-text-file.latin1.txt
new file mode 100644
index 00000000000000..a52a8cec3997e6
--- /dev/null
+++ b/test/js/bun/util/text-loader-fixture-text-file.latin1.txt
@@ -0,0 +1,3 @@
+AAAAAAAA
+ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖ×ØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ
+AAAAAAAA
\ No newline at end of file
diff --git a/test/js/bun/util/text-loader-fixture-text-file.nonascii.txt b/test/js/bun/util/text-loader-fixture-text-file.nonascii.txt
new file mode 100644
index 00000000000000..f4ad559440dfae
--- /dev/null
+++ b/test/js/bun/util/text-loader-fixture-text-file.nonascii.txt
@@ -0,0 +1,10 @@
+Hello, world!
+This is a test file featuring non-ASCII characters:
+
+1. Café ☕
+2. Déjà vu 🌙
+3. Pokémon 🐱👤
+4. Übermensch 🦸♂️
+5. 日本語 🇯🇵
+
+Enjoy testing your text loader with this file!
\ No newline at end of file
diff --git a/test/js/bun/util/text-loader.test.ts b/test/js/bun/util/text-loader.test.ts
index 65e6f23ed471cf..604620e735cfae 100644
--- a/test/js/bun/util/text-loader.test.ts
+++ b/test/js/bun/util/text-loader.test.ts
@@ -2,6 +2,7 @@ import { spawnSync } from "bun";
import { describe, expect, it } from "bun:test";
import { bunEnv, bunExe } from "harness";
import { join } from "path";
+import { readFileSync } from "fs";
describe("text-loader", () => {
const fixtures = [
@@ -30,4 +31,40 @@ describe("text-loader", () => {
});
});
}
+
+ for (let [entry, path] of [
+ // https://github.com/oven-sh/bun/issues/10206
+ ["text-loader-fixture-import-nonascii.ts", "text-loader-fixture-text-file.nonascii.txt"],
+ ["text-loader-fixture-import-latin1.ts", "text-loader-fixture-text-file.latin1.txt"],
+ // https://github.com/oven-sh/bun/issues/3449
+ ["text-loader-fixture-import-backslashes.ts", "text-loader-fixture-text-file.backslashes.txt"],
+ ]) {
+ describe("should load non-ASCII text", () => {
+ it(`${entry}`, async () => {
+ const src = join(import.meta.dir, entry);
+ const result = spawnSync({
+ cmd: [bunExe(), src],
+ env: bunEnv,
+ stdout: "pipe",
+ stderr: "inherit",
+ stdin: "ignore",
+ });
+
+ if (result.exitCode !== 0) {
+ console.log({ result });
+ }
+
+ const absolute = join(import.meta.dir, path);
+
+ const expected = readFileSync(absolute, "utf8");
+ const source = readFileSync(src, "utf8");
+ expect(result.stdout.toString()).toBe(expected);
+
+ // Also test that `type: "text"` has higher precedence than the file extension.
+ expect((await import(src, { with: { type: "text" } })).default).toBe(source);
+
+ expect(result.exitCode).toBe(0);
+ });
+ });
+ }
});