Support type import attribute with "text", "json", "toml", and "file" (#10456)

* Fixes #3449

Fixes #10206
Fixes #5710

* Apply formatting changes

* Update loaders.md

* Update text-loader-fixture-import.ts

* Add guide

* Update bundler_loader.test.ts

* Address comment

---------

Co-authored-by: Jarred-Sumner <[email protected]>
Jarred-Sumner and Jarred-Sumner authored Apr 23, 2024
1 parent ff62414 commit 024c274
Showing 21 changed files with 516 additions and 42 deletions.
7 changes: 7 additions & 0 deletions docs/bundler/loaders.md
@@ -80,6 +80,9 @@ TOML files can be directly imported. Bun will parse them with its fast native TO
```ts
import config from "./bunfig.toml";
config.logLevel; // => "debug"

// via import attribute:
// import myCustomTOML from './my.config' with {type: "toml"};
```

During bundling, the parsed TOML is inlined into the bundle as a JavaScript object.
@@ -122,6 +125,10 @@ Text files can be directly imported. The file is read and returned as a string.
```ts
import contents from "./file.txt";
console.log(contents); // => "Hello, world!"

// To import an HTML file as text,
// the "type" attribute can be used to override the default loader.
import html from "./index.html" with { type: "text" };
```

When referenced during a build, the contents are inlined into the bundle as a string.
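
The same `type` attribute covers the other loaders added in this commit; a minimal sketch, assuming these illustrative file paths:

```ts
// Illustrative paths; the attribute overrides whatever the extension implies.
import pkg from "./package.json" with { type: "json" }; // parsed JSON object
import cfg from "./settings.txt" with { type: "toml" }; // parsed with the TOML loader
import asset from "./logo.svg" with { type: "file" };   // resolves to a path to the file
```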
15 changes: 15 additions & 0 deletions docs/guides/runtime/import-html.md
@@ -0,0 +1,15 @@
---
name: Import HTML file as text
---

To import a `.html` file in Bun as a text file, use the `type: "text"` attribute in the import statement.

```ts
import html from "./file.html" with { type: "text" };

console.log(html); // <!DOCTYPE html><html><head>...
```

This can also be used with hot module reloading and/or watch mode to force Bun to reload whenever the `./file.html` file changes.

This feature was added in Bun v1.1.5.
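
Dynamic `import()` accepts the same attribute through its options object, which this change wires into loader selection; a minimal sketch with an illustrative path:

```ts
// The attribute forces the text loader regardless of the .html extension (illustrative path).
const { default: html } = await import("./file.html", {
  with: { type: "text" },
});
console.log(typeof html); // "string"
```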
19 changes: 18 additions & 1 deletion src/bun.js/bindings/ZigGlobalObject.cpp
@@ -3866,8 +3866,25 @@ JSC::JSInternalPromise* GlobalObject::moduleLoaderImportModule(JSGlobalObject* j
resolvedIdentifier = JSC::Identifier::fromString(vm, makeString(resolved.result.value.toWTFString(BunString::ZeroCopy), Zig::toString(queryString)));
}

// This gets passed through the "parameters" argument to moduleLoaderFetch.
// Therefore, we modify it in place.
if (parameters && parameters.isObject()) {
auto* object = parameters.toObject(globalObject);
if (auto withObject = object->getIfPropertyExists(globalObject, vm.propertyNames->withKeyword)) {
if (withObject.isObject()) {
auto* with = jsCast<JSObject*>(withObject);
if (auto type = with->getIfPropertyExists(globalObject, vm.propertyNames->type)) {
if (type.isString()) {
const auto typeString = type.toWTFString(globalObject);
parameters = JSC::JSScriptFetchParameters::create(vm, ScriptFetchParameters::create(typeString));
}
}
}
}
}

auto result = JSC::importModule(globalObject, resolvedIdentifier,
JSC::jsUndefined(), parameters, JSC::jsUndefined());
JSC::jsUndefined(), parameters, jsUndefined());
RETURN_IF_EXCEPTION(scope, promise->rejectWithCaughtException(globalObject, scope));

return result;
8 changes: 8 additions & 0 deletions src/bun.js/module_loader.zig
@@ -2254,6 +2254,14 @@ pub const ModuleLoader = struct {
if (type_attribute) |attribute| {
if (attribute.eqlComptime("sqlite")) {
loader = .sqlite;
} else if (attribute.eqlComptime("text")) {
loader = .text;
} else if (attribute.eqlComptime("json")) {
loader = .json;
} else if (attribute.eqlComptime("toml")) {
loader = .toml;
} else if (attribute.eqlComptime("file")) {
loader = .file;
}
}
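
At runtime, the attribute string maps straight to one of these loaders; for example `"file"` yields a path rather than file contents. A hedged sketch (the asset path is illustrative):

```ts
// "file" loader: the default export is a path to the asset, not its contents (illustrative).
import logoPath from "./logo.png" with { type: "file" };
console.log(logoPath); // a path string such as "./logo.png"
```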

2 changes: 1 addition & 1 deletion src/bundler.zig
@@ -1597,7 +1597,7 @@ pub const Bundler = struct {
},
// TODO: use lazy export AST
.text => {
const expr = js_ast.Expr.init(js_ast.E.String, js_ast.E.String{
const expr = js_ast.Expr.init(js_ast.E.UTF8String, js_ast.E.UTF8String{
.data = source.contents,
}, logger.Loc.Empty);
const stmt = js_ast.Stmt.alloc(js_ast.S.ExportDefault, js_ast.S.ExportDefault{
8 changes: 5 additions & 3 deletions src/bundler/bundle_v2.zig
@@ -610,7 +610,10 @@ pub const BundleV2 = struct {
.index = source_index,
},
.loader = loader,
.side_effects = _resolver.SideEffects.has_side_effects,
.side_effects = switch (loader) {
.text, .json, .toml, .file => _resolver.SideEffects.no_side_effects__pure_data,
else => _resolver.SideEffects.has_side_effects,
},
}) catch @panic("Ran out of memory");
var task = this.graph.allocator.create(ParseTask) catch @panic("Ran out of memory");
task.* = ParseTask.init(&resolve_result, source_index, this);
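
Marking these loaders as pure data lets the bundler tree-shake imports whose bindings are never used; a hedged sketch of the intended effect:

```ts
// If `notes` is never referenced, a bundle built from this entry point can drop
// ./notes.txt entirely, since the text loader is now treated as side-effect-free (illustrative).
import notes from "./notes.txt" with { type: "text" };

export const answer = 42;
```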
@@ -2587,9 +2590,8 @@ pub const ParseTask = struct {
return JSAst.init((try js_parser.newLazyExportAST(allocator, bundler.options.define, opts, log, root, &source, "")).?);
},
.text => {
const root = Expr.init(E.String, E.String{
const root = Expr.init(E.UTF8String, E.UTF8String{
.data = source.contents,
.prefer_template = true,
}, Logger.Loc{ .start = 0 });
return JSAst.init((try js_parser.newLazyExportAST(allocator, bundler.options.define, opts, log, root, &source, "")).?);
},
22 changes: 22 additions & 0 deletions src/import_record.zig
@@ -214,15 +214,37 @@ pub const ImportRecord = struct {

with_type_sqlite,
with_type_sqlite_embedded,
with_type_text,
with_type_json,
with_type_toml,
with_type_file,

pub fn loader(this: Tag) ?bun.options.Loader {
return switch (this) {
.with_type_sqlite => .sqlite,
.with_type_sqlite_embedded => .sqlite_embedded,
.with_type_text => .text,
.with_type_json => .json,
.with_type_toml => .toml,
.with_type_file => .file,
else => null,
};
}

pub fn onlySupportsDefaultImports(this: Tag) bool {
return switch (this) {
.with_type_file, .with_type_text => true,
else => false,
};
}

pub fn isSQLite(this: Tag) bool {
return switch (this) {
.with_type_sqlite, .with_type_sqlite_embedded => true,
else => false,
};
}

pub fn isReactReference(this: Tag) bool {
return switch (this) {
.react_client_component, .react_server_component => true,
20 changes: 20 additions & 0 deletions src/js_ast.zig
@@ -2665,6 +2665,8 @@ pub const E = struct {
pub const Import = struct {
expr: ExprNodeIndex,
import_record_index: u32,
// This will be dynamic at some point.
type_attribute: TypeAttribute = .none,

/// Comments inside "import()" expressions have special meaning for Webpack.
/// Preserving comments inside these expressions makes it possible to use
@@ -2679,6 +2681,24 @@
pub fn isImportRecordNull(this: *const Import) bool {
return this.import_record_index == std.math.maxInt(u32);
}

pub const TypeAttribute = enum {
none,
json,
toml,
text,
file,

pub fn tag(this: TypeAttribute) ImportRecord.Tag {
return switch (this) {
.none => .none,
.json => .with_type_json,
.toml => .with_type_toml,
.text => .with_type_text,
.file => .with_type_file,
};
}
};
};
};

90 changes: 74 additions & 16 deletions src/js_parser.zig
@@ -328,6 +328,7 @@ const TransposeState = struct {
is_then_catch_target: bool = false,
is_require_immediately_assigned_to_decl: bool = false,
loc: logger.Loc = logger.Loc.Empty,
type_attribute: E.Import.TypeAttribute = .none,
};

var true_args = &[_]Expr{
@@ -4960,11 +4961,16 @@ fn NewParser_(
}

const import_record_index = p.addImportRecord(.dynamic, arg.loc, arg.data.e_string.slice(p.allocator));

if (state.type_attribute.tag() != .none) {
p.import_records.items[import_record_index].tag = state.type_attribute.tag();
}
p.import_records.items[import_record_index].handles_import_errors = (state.is_await_target and p.fn_or_arrow_data_visit.try_body_count != 0) or state.is_then_catch_target;
p.import_records_for_current_part.append(p.allocator, import_record_index) catch unreachable;
return p.newExpr(E.Import{
.expr = arg,
.import_record_index = Ref.toInt(import_record_index),
.type_attribute = state.type_attribute,
// .leading_interior_comments = arg.getString().
}, state.loc);
}
@@ -4978,6 +4984,7 @@
return p.newExpr(E.Import{
.expr = arg,
.import_record_index = std.math.maxInt(u32),
.type_attribute = state.type_attribute,
}, state.loc);
}

@@ -8841,28 +8848,41 @@
}

if (path.import_tag != .none) {
try p.validateSQLiteImportType(path.import_tag, &stmt);
try p.validateImportType(path.import_tag, &stmt);
}

// Track the items for this namespace
try p.import_items_for_namespace.put(p.allocator, stmt.namespace_ref, item_refs);
return p.s(stmt, loc);
}

fn validateSQLiteImportType(p: *P, import_tag: ImportRecord.Tag, stmt: *S.Import) !void {
fn validateImportType(p: *P, import_tag: ImportRecord.Tag, stmt: *S.Import) !void {
@setCold(true);

if (import_tag == .with_type_sqlite or import_tag == .with_type_sqlite_embedded) {
if (import_tag.loader() != null) {
p.import_records.items[stmt.import_record_index].tag = import_tag;

for (stmt.items) |*item| {
if (!(strings.eqlComptime(item.alias, "default") or strings.eqlComptime(item.alias, "db"))) {
try p.log.addError(
p.source,
item.name.loc,
"sqlite imports only support the \"default\" or \"db\" imports",
);
break;
if (import_tag.isSQLite()) {
for (stmt.items) |*item| {
if (!(strings.eqlComptime(item.alias, "default") or strings.eqlComptime(item.alias, "db"))) {
try p.log.addError(
p.source,
item.name.loc,
"sqlite imports only support the \"default\" or \"db\" imports",
);
break;
}
}
} else if (import_tag.onlySupportsDefaultImports()) {
for (stmt.items) |*item| {
if (!(strings.eqlComptime(item.alias, "default"))) {
try p.log.addError(
p.source,
item.name.loc,
"This loader type only supports the \"default\" import",
);
break;
}
}
}
}
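
In practice this means the `"text"` and `"file"` attributes reject named imports at parse time; a minimal sketch of what passes and what fails (paths illustrative):

```ts
// OK: only the default import is allowed for "text"/"file" type attributes.
import contents from "./notes.txt" with { type: "text" };

// Parse error (roughly: 'This loader type only supports the "default" import'):
// import { length } from "./notes.txt" with { type: "text" };
```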
@@ -11696,7 +11716,6 @@
try p.lexer.expect(.t_string_literal);
}

// For now, we silently strip import assertions
if (!p.lexer.has_newline_before and (
// Import Assertions are deprecated.
// Import Attributes are the new way to do this.
@@ -11751,13 +11770,22 @@
if (supported_attribute) |attr| {
switch (attr) {
.type => {
if (strings.eqlComptime(p.lexer.string_literal_slice, "macro")) {
const type_attr = p.lexer.string_literal_slice;
if (strings.eqlComptime(type_attr, "macro")) {
path.is_macro = true;
} else if (strings.eqlComptime(p.lexer.string_literal_slice, "sqlite")) {
} else if (strings.eqlComptime(type_attr, "sqlite")) {
path.import_tag = .with_type_sqlite;
if (has_seen_embed_true) {
path.import_tag = .with_type_sqlite_embedded;
}
} else if (strings.eqlComptime(type_attr, "json")) {
path.import_tag = .with_type_json;
} else if (strings.eqlComptime(type_attr, "toml")) {
path.import_tag = .with_type_toml;
} else if (strings.eqlComptime(type_attr, "text")) {
path.import_tag = .with_type_text;
} else if (strings.eqlComptime(type_attr, "file")) {
path.import_tag = .with_type_file;
}
},
.embed => {
@@ -14970,14 +14998,37 @@

const value = try p.parseExpr(.comma);

var type_attribute = E.Import.TypeAttribute.none;

if (p.lexer.token == .t_comma) {
// "import('./foo.json', )"
try p.lexer.next();

if (p.lexer.token != .t_close_paren) {
// for now, we silently strip import assertions
// "import('./foo.json', { assert: { type: 'json' } })"
_ = try p.parseExpr(.comma);
const import_expr = try p.parseExpr(.comma);
if (import_expr.data == .e_object) {
if (import_expr.data.e_object.get("with") orelse import_expr.data.e_object.get("assert")) |with| {
if (with.data == .e_object) {
const with_object = with.data.e_object;
if (with_object.get("type")) |field| {
if (field.data == .e_string) {
const str = field.data.e_string;
if (str.eqlComptime("json")) {
type_attribute = .json;
} else if (str.eqlComptime("toml")) {
type_attribute = .toml;
} else if (str.eqlComptime("text")) {
type_attribute = .text;
} else if (str.eqlComptime("file")) {
type_attribute = .file;
}
}
}
}
}
}

if (p.lexer.token == .t_comma) {
// "import('./foo.json', { assert: { type: 'json' } }, , )"
@@ -14998,11 +15049,17 @@
.expr = value,
.leading_interior_comments = comments,
.import_record_index = import_record_index,
.type_attribute = type_attribute,
}, loc);
}
}

return p.newExpr(E.Import{ .expr = value, .leading_interior_comments = comments, .import_record_index = std.math.maxInt(u32) }, loc);
return p.newExpr(E.Import{
.expr = value,
.type_attribute = type_attribute,
.leading_interior_comments = comments,
.import_record_index = std.math.maxInt(u32),
}, loc);
}

fn parseJSXPropValueIdentifier(p: *P, previous_string_with_backslash_loc: *logger.Loc) !Expr {
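
Because the options object is checked for a `with` key and then an `assert` key, the older assertion spelling selects the same loader for dynamic imports; a hedged sketch with an illustrative path:

```ts
// Both spellings map to the JSON loader for this dynamic import (illustrative path).
const viaWith = await import("./data.json", { with: { type: "json" } });
const viaAssert = await import("./data.json", { assert: { type: "json" } });
```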
@@ -16837,6 +16894,7 @@
.is_await_target = if (p.await_target != null) p.await_target.? == .e_import and p.await_target.?.e_import == e_ else false,
.is_then_catch_target = p.then_catch_chain.has_catch and std.meta.activeTag(p.then_catch_chain.next_target) == .e_import and expr.data.e_import == p.then_catch_chain.next_target.e_import,
.loc = e_.expr.loc,
.type_attribute = e_.type_attribute,
};

e_.expr = p.visitExpr(e_.expr);