diff --git a/.vscode/launch.json b/.vscode/launch.json
index 23205ab1f13f7..b005b8adf3aca 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -1192,4 +1192,4 @@
"description": "Usage: bun test [...]",
},
],
-}
+}
\ No newline at end of file
diff --git a/build.zig b/build.zig
index 776585f3e896d..bc92dc8ac7767 100644
--- a/build.zig
+++ b/build.zig
@@ -157,7 +157,7 @@ pub fn build(b: *Build) !void {
// TODO: Upgrade path for 0.14.0
// b.graph.zig_lib_directory = brk: {
- // const sub_path = "src/deps/zig/lib";
+ // const sub_path = "vendor/zig/lib";
// const dir = try b.build_root.handle.openDir(sub_path, .{});
// break :brk .{ .handle = dir, .path = try b.build_root.join(b.graph.arena, &.{sub_path}) };
// };
@@ -295,7 +295,7 @@ pub fn build(b: *Build) !void {
bun_check_obj.generated_bin = null;
step.dependOn(&bun_check_obj.step);
- // The default install step will run zig build check This is so ZLS
+ // The default install step will run zig build check. This is so ZLS
// identifies the codebase, as well as performs checking if build on
// save is enabled.
diff --git a/packages/bun-darwin-aarch64/.npmignore b/packages/bun-darwin-aarch64/.npmignore
deleted file mode 100644
index 08d23cb2a7b5a..0000000000000
--- a/packages/bun-darwin-aarch64/.npmignore
+++ /dev/null
@@ -1,4 +0,0 @@
-bin/bun-profile
-bin/*.o
-*.o
-*.a
\ No newline at end of file
diff --git a/packages/bun-linux-x64/.npmignore b/packages/bun-linux-x64/.npmignore
deleted file mode 100644
index 08d23cb2a7b5a..0000000000000
--- a/packages/bun-linux-x64/.npmignore
+++ /dev/null
@@ -1,4 +0,0 @@
-bin/bun-profile
-bin/*.o
-*.o
-*.a
\ No newline at end of file
diff --git a/packages/bun-plugin-css/README.md b/packages/bun-plugin-css/README.md
deleted file mode 100644
index 9abf5f6c0eb89..0000000000000
--- a/packages/bun-plugin-css/README.md
+++ /dev/null
@@ -1,3 +0,0 @@
-# `bun-plugin-css`
-
-Not implemented.
diff --git a/packages/bun-plugin-css/index.ts b/packages/bun-plugin-css/index.ts
deleted file mode 100644
index bf67f6e7e0e1c..0000000000000
--- a/packages/bun-plugin-css/index.ts
+++ /dev/null
@@ -1 +0,0 @@
-throw new Error("Not implemented.");
diff --git a/packages/bun-plugin-css/package.json b/packages/bun-plugin-css/package.json
deleted file mode 100644
index 68b4020d90a43..0000000000000
--- a/packages/bun-plugin-css/package.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- "name": "bun-plugin-css",
- "version": "0.0.1-alpha.0",
- "module": "index.ts",
- "type": "module",
- "files": [
- "index.ts",
- "package.json"
- ]
-}
diff --git a/packages/bun-plugin-lightningcss/README.md b/packages/bun-plugin-lightningcss/README.md
deleted file mode 100644
index 7cbe8c64b320c..0000000000000
--- a/packages/bun-plugin-lightningcss/README.md
+++ /dev/null
@@ -1,3 +0,0 @@
-# `bun-plugin-lightningcss`
-
-Not implemented.
diff --git a/packages/bun-plugin-lightningcss/index.ts b/packages/bun-plugin-lightningcss/index.ts
deleted file mode 100644
index bf67f6e7e0e1c..0000000000000
--- a/packages/bun-plugin-lightningcss/index.ts
+++ /dev/null
@@ -1 +0,0 @@
-throw new Error("Not implemented.");
diff --git a/packages/bun-plugin-lightningcss/package.json b/packages/bun-plugin-lightningcss/package.json
deleted file mode 100644
index 6aafdedd1bd26..0000000000000
--- a/packages/bun-plugin-lightningcss/package.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- "name": "bun-plugin-lightningcss",
- "version": "0.0.1-alpha.0",
- "module": "index.ts",
- "type": "module",
- "files": [
- "index.ts",
- "package.json"
- ]
-}
diff --git a/packages/bun-plugin-mdx/README.md b/packages/bun-plugin-mdx/README.md
deleted file mode 100644
index 65890d7c24e22..0000000000000
--- a/packages/bun-plugin-mdx/README.md
+++ /dev/null
@@ -1,3 +0,0 @@
-# `bun-plugin-mdx`
-
-Not implemented.
diff --git a/packages/bun-plugin-mdx/index.ts b/packages/bun-plugin-mdx/index.ts
deleted file mode 100644
index bf67f6e7e0e1c..0000000000000
--- a/packages/bun-plugin-mdx/index.ts
+++ /dev/null
@@ -1 +0,0 @@
-throw new Error("Not implemented.");
diff --git a/packages/bun-plugin-mdx/package.json b/packages/bun-plugin-mdx/package.json
deleted file mode 100644
index 98047872f2890..0000000000000
--- a/packages/bun-plugin-mdx/package.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- "name": "bun-plugin-mdx",
- "version": "0.0.1-alpha.0",
- "module": "index.ts",
- "type": "module",
- "files": [
- "index.ts",
- "package.json"
- ]
-}
diff --git a/packages/bun-plugin-server-components/.gitignore b/packages/bun-plugin-server-components/.gitignore
deleted file mode 100644
index f81d56eaa35f6..0000000000000
--- a/packages/bun-plugin-server-components/.gitignore
+++ /dev/null
@@ -1,169 +0,0 @@
-# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore
-
-# Logs
-
-logs
-_.log
-npm-debug.log_
-yarn-debug.log*
-yarn-error.log*
-lerna-debug.log*
-.pnpm-debug.log*
-
-# Diagnostic reports (https://nodejs.org/api/report.html)
-
-report.[0-9]_.[0-9]_.[0-9]_.[0-9]_.json
-
-# Runtime data
-
-pids
-_.pid
-_.seed
-\*.pid.lock
-
-# Directory for instrumented libs generated by jscoverage/JSCover
-
-lib-cov
-
-# Coverage directory used by tools like istanbul
-
-coverage
-\*.lcov
-
-# nyc test coverage
-
-.nyc_output
-
-# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
-
-.grunt
-
-# Bower dependency directory (https://bower.io/)
-
-bower_components
-
-# node-waf configuration
-
-.lock-wscript
-
-# Compiled binary addons (https://nodejs.org/api/addons.html)
-
-build/Release
-
-# Dependency directories
-
-node_modules/
-jspm_packages/
-
-# Snowpack dependency directory (https://snowpack.dev/)
-
-web_modules/
-
-# TypeScript cache
-
-\*.tsbuildinfo
-
-# Optional npm cache directory
-
-.npm
-
-# Optional eslint cache
-
-.eslintcache
-
-# Optional stylelint cache
-
-.stylelintcache
-
-# Microbundle cache
-
-.rpt2_cache/
-.rts2_cache_cjs/
-.rts2_cache_es/
-.rts2_cache_umd/
-
-# Optional REPL history
-
-.node_repl_history
-
-# Output of 'npm pack'
-
-\*.tgz
-
-# Yarn Integrity file
-
-.yarn-integrity
-
-# dotenv environment variable files
-
-.env
-.env.development.local
-.env.test.local
-.env.production.local
-.env.local
-
-# parcel-bundler cache (https://parceljs.org/)
-
-.cache
-.parcel-cache
-
-# Next.js build output
-
-.next
-out
-
-# Nuxt.js build / generate output
-
-.nuxt
-dist
-
-# Gatsby files
-
-.cache/
-
-# Comment in the public line in if your project uses Gatsby and not Next.js
-
-# https://nextjs.org/blog/next-9-1#public-directory-support
-
-# public
-
-# vuepress build output
-
-.vuepress/dist
-
-# vuepress v2.x temp and cache directory
-
-.temp
-.cache
-
-# Docusaurus cache and generated files
-
-.docusaurus
-
-# Serverless directories
-
-.serverless/
-
-# FuseBox cache
-
-.fusebox/
-
-# DynamoDB Local files
-
-.dynamodb/
-
-# TernJS port file
-
-.tern-port
-
-# Stores VSCode versions used for testing VSCode extensions
-
-.vscode-test
-
-# yarn v2
-
-.yarn/cache
-.yarn/unplugged
-.yarn/build-state.yml
-.yarn/install-state.gz
-.pnp.\*
diff --git a/packages/bun-plugin-server-components/README.md b/packages/bun-plugin-server-components/README.md
deleted file mode 100644
index 32ce31fede914..0000000000000
--- a/packages/bun-plugin-server-components/README.md
+++ /dev/null
@@ -1,149 +0,0 @@
-# `bun-plugin-server-components`
-
-The official Bun plugin for **server components**.
-
-## Installation
-
-```sh
-bun add bun-plugin-server-components -d
-```
-
-## Context
-
-Server components are a new abstraction for building web applications. They look similar to standard React/JSX components, but render exclusively on the server. They differ from classic "client components" in a few ways:
-
-1. They can be `async`.
-2. Their implementation can run privileged code like database queries. Normally this would be unsafe, because the source code of client components are typically bundled and sent to the client, where they can be inspected and reverse-engineered. Server components are never sent to the client, so they can run privileged code safely.
-3. They _cannot_ contain stateful hooks like `useState` or `useEffect`.
-
-Server components require a deep integration with the bundler to work. To understand why, we need a bit of background on how server components work.
-
-### How server components work
-
-Imagine you have a server component that looks like this:
-
-```tsx
-// index.tsx
-import { Component } from "./Component";
-export default async function HomePage() {
- return (
-
-
-
- );
-}
-```
-
-This file imports a client component called `Component`.
-
-```ts
-// ./Component.tsx
-"use client";
-
-export function Component() {
- return Hello world
;
-}
-```
-
-To run this component we need to generate two builds.
-
-> Here the term "build" refers to a typical bundling step—the act of converting a set of entrypoints into a set of bundles.
-
-1. The first is our "server component build". It contains all the code we need to render `HomePage` to a component tree. When an incoming `Request` comes in, we can use React's built-in tools to convert this tree into a "virtual DOM stream" that we can return as a `Response`.
-2. The second is our "client build". It contains the bundled versions of all client components that were referenced by our server components.
-
-The browser hits the server and gets back the "virtual DOM stream". The virtual DOM stream will contain references to client components, which will be loaded from the client bundle. React provides a built-in utility (`createFromFetch`)that accepts the VDOM stream, dynamically loads the necessary client components, and returns a renderable component.
-
-```ts
-import { createRoot } from "react-dom/client";
-import { createFromFetch } from "react-server-dom-webpack/client.browser";
-
-const stream = fetch("/", { headers: { Accept: "text/x-component" } });
-const data = createFromFetch(stream);
-
-const root = createRoot(document);
-root.render();
-```
-
-### Server-side rendering
-
-One potentially confusing aspect of server components is that they "return" virtual DOM. From the perspective of a server component, client components are black boxes.
-
-If we want to do server-side rendering, we need to render our server component to VDOM, _then_ render the VDOM to plain HTML. These are two distinct steps. The second step requires a _third build_, we we'll call the "SSR build". Like the "client build", this build will bundle all the client components. Unlike the "client build", those bundles will be intended for consumption on the server; in bundler terms, the build's `"target"` will be`"bun"` (or perhaps `"node"`).
-
-### Bundling server components
-
-That's a high-level overview of how server components work. The important takeaway is that we need to generate totally separate bundles for server and client components.
-
-But it's not just a simple matter of running two separate bundling scripts. The true "entrypoints" of our application are the server components. Over the course of bundling our server components, we will discover some files containing the `"use client"` directive; these files then become the entrypoints for our "client build", which will require a totally separate build configuration from the server build.
-
-The goal of this plugin is to hide the complexty of this multi-stage build from the user.
-
-## Usage
-
-To use this plugin:
-
-```ts
-import ServerComponentsPlugin from "bun-plugin-server-components";
-
-await Bun.build({
- entrypoints: ["./index.tsx"], // server component files
- plugins: [
- ServerComponentsPlugin({
- // plugin configuration
- }),
- ],
- // other configuration
-});
-```
-
-The `"entrypoints"` you pass into `Bun.build()` should be your _server components_. Bun's bundler will automatically detect any files containing the `"use client"` directive, and will use those files as entrypoints for the "client build" and "SSR build". The bundler configuration for these builds can be provided `client` and `ssr` keys respectively.
-
-```ts
-import ServerComponentsPlugin from "bun-plugin-server-components";
-
-await Bun.build({
- entrypoints: ["./index.tsx"], // server component files
- outdir: "./build",
- manifest: true,
- plugins: [ServerComponentsPlugin({
- client: {
- entrypoints: [], // optional - additional client entrypoints
- outdir: "./build/client", // default: inherits from the main build
- target: "browser",
- plugins: [/* */],
- }
- ssr: {
- entrypoints: [], // optional - additional SSR entrypoints
- outdir: "./build/client", // default: inherits from the main build
- target: "bun", // this is default
- plugins: [/* */],
- }
- })],
-});
-```
-
-The result of `Bun.build()` will contain additional manifests for the SSR and client builds.
-
-```ts
-const result = await Bun.build({
- // config
- plugins: [
- ServerComponentsPlugin({
- /* config */
- }),
- ],
-});
-
-// standard manifest
-// for the top-level (server components) build
-result.manifest;
-
-// manifest for client build
-result.clientManifest;
-
-// manifest for client build
-result.ssrManifest;
-```
-
-Once the build is complete, use the manifests to implement your RSC server.
diff --git a/packages/bun-plugin-server-components/bun.lockb b/packages/bun-plugin-server-components/bun.lockb
deleted file mode 100755
index 460954c699637..0000000000000
Binary files a/packages/bun-plugin-server-components/bun.lockb and /dev/null differ
diff --git a/packages/bun-plugin-server-components/index.ts b/packages/bun-plugin-server-components/index.ts
deleted file mode 100644
index 67d98eb769d2a..0000000000000
--- a/packages/bun-plugin-server-components/index.ts
+++ /dev/null
@@ -1,10 +0,0 @@
-import { BunPlugin, BuildConfig } from "bun";
-
-function Plugin(config: { client?: BuildConfig; ssr?: BuildConfig }): BunPlugin {
- return {
- name: "bun-plugin-server-components",
- SECRET_SERVER_COMPONENTS_INTERNALS: config,
- } as any;
-}
-
-export default Plugin;
diff --git a/packages/bun-plugin-server-components/package.json b/packages/bun-plugin-server-components/package.json
deleted file mode 100644
index 9d28f36fd74c7..0000000000000
--- a/packages/bun-plugin-server-components/package.json
+++ /dev/null
@@ -1,28 +0,0 @@
-{
- "name": "bun-plugin-server-components",
- "version": "0.0.1-alpha.0",
- "module": "index.ts",
- "type": "module",
- "types": "index.ts",
- "exports": {
- ".": {
- "import": "./index.ts",
- "require": "./index.ts",
- "default": "./index.js"
- },
- "./package.json": "./package.json"
- },
- "files": [
- "index.ts",
- "tsconfig.json",
- "package.json",
- "modules.d.ts"
- ],
- "devDependencies": {
- "@types/js-yaml": "^4.0.5"
- },
- "dependencies": {
- "bun-types": "canary",
- "js-yaml": "^4.1.0"
- }
-}
diff --git a/packages/bun-plugin-server-components/tsconfig.json b/packages/bun-plugin-server-components/tsconfig.json
deleted file mode 100644
index a03219b2ebf8f..0000000000000
--- a/packages/bun-plugin-server-components/tsconfig.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "compilerOptions": {
- "lib": ["ESNext"],
- "module": "esnext",
- "target": "esnext",
- "moduleResolution": "bundler",
- "moduleDetection": "force",
- "allowImportingTsExtensions": true,
- "noEmit": true,
- "composite": true,
- "strict": true,
- "downlevelIteration": true,
- "skipLibCheck": true,
- "jsx": "react-jsx",
- "allowSyntheticDefaultImports": true,
- "forceConsistentCasingInFileNames": true,
- "allowJs": true,
- "types": [
- "bun-types" // add Bun global
- ]
- },
- "include": ["**/*.ts", "modules.d.ts"]
-}
diff --git a/src/Global.zig b/src/Global.zig
index d3becfed78495..94b0bc70c3bdb 100644
--- a/src/Global.zig
+++ b/src/Global.zig
@@ -172,23 +172,16 @@ const string = bun.string;
pub const BunInfo = struct {
bun_version: string,
platform: Analytics.GenerateHeader.GeneratePlatform.Platform,
- framework: string = "",
- framework_version: string = "",
const Analytics = @import("./analytics/analytics_thread.zig");
const JSON = bun.JSON;
const JSAst = bun.JSAst;
- pub fn generate(comptime Bundler: type, bundler: Bundler, allocator: std.mem.Allocator) !JSAst.Expr {
- var info = BunInfo{
+ pub fn generate(comptime Bundler: type, _: Bundler, allocator: std.mem.Allocator) !JSAst.Expr {
+ const info = BunInfo{
.bun_version = Global.package_json_version,
.platform = Analytics.GenerateHeader.GeneratePlatform.forOS(),
};
- if (bundler.options.framework) |framework| {
- info.framework = framework.package;
- info.framework_version = framework.version;
- }
-
return try JSON.toAST(allocator, BunInfo, info);
}
};
diff --git a/src/analytics/analytics_thread.zig b/src/analytics/analytics_thread.zig
index 72bbadf6faf4b..5ea78ece20ebd 100644
--- a/src/analytics/analytics_thread.zig
+++ b/src/analytics/analytics_thread.zig
@@ -79,41 +79,44 @@ pub fn isCI() bool {
/// This answers, "What parts of bun are people actually using?"
pub const Features = struct {
- /// Set right before JSC::initialize is called
- pub var jsc: usize = 0;
+ pub var builtin_modules = std.enums.EnumSet(bun.JSC.HardcodedModule).initEmpty();
+
pub var @"Bun.stderr": usize = 0;
pub var @"Bun.stdin": usize = 0;
pub var @"Bun.stdout": usize = 0;
+ pub var WebSocket: usize = 0;
pub var abort_signal: usize = 0;
+ pub var binlinks: usize = 0;
pub var bunfig: usize = 0;
pub var define: usize = 0;
pub var dotenv: usize = 0;
pub var external: usize = 0;
pub var extracted_packages: usize = 0;
- /// Incremented for each call to `fetch`
pub var fetch: usize = 0;
- pub var filesystem_router: usize = 0;
pub var git_dependencies: usize = 0;
pub var html_rewriter: usize = 0;
pub var http_server: usize = 0;
pub var https_server: usize = 0;
+ /// Set right before JSC::initialize is called
+ pub var jsc: usize = 0;
+ /// Set when kit.DevServer is initialized
+ pub var kit_dev: usize = 0;
pub var lifecycle_scripts: usize = 0;
pub var loaders: usize = 0;
pub var lockfile_migration_from_package_lock: usize = 0;
pub var macros: usize = 0;
+ pub var no_avx2: usize = 0;
+ pub var no_avx: usize = 0;
pub var shell: usize = 0;
pub var spawn: usize = 0;
+ pub var standalone_executable: usize = 0;
pub var standalone_shell: usize = 0;
+ /// Set when invoking a todo panic
+ pub var todo_panic: usize = 0;
pub var transpiler_cache: usize = 0;
- pub var tsconfig_paths: usize = 0;
pub var tsconfig: usize = 0;
+ pub var tsconfig_paths: usize = 0;
pub var virtual_modules: usize = 0;
- pub var WebSocket: usize = 0;
- pub var no_avx: usize = 0;
- pub var no_avx2: usize = 0;
- pub var binlinks: usize = 0;
- pub var builtin_modules = std.enums.EnumSet(bun.JSC.HardcodedModule).initEmpty();
- pub var standalone_executable: usize = 0;
pub var workers_spawned: usize = 0;
pub var workers_terminated: usize = 0;
diff --git a/src/api/schema.zig b/src/api/schema.zig
index 10e25bb56169a..37fda386c517f 100644
--- a/src/api/schema.zig
+++ b/src/api/schema.zig
@@ -1671,12 +1671,6 @@ pub const Api = struct {
/// extension_order
extension_order: []const []const u8,
- /// framework
- framework: ?FrameworkConfig = null,
-
- /// router
- router: ?RouteConfig = null,
-
/// no_summary
no_summary: ?bool = null,
diff --git a/src/ast/base.zig b/src/ast/base.zig
index 26f3815e2455f..701557a426aaf 100644
--- a/src/ast/base.zig
+++ b/src/ast/base.zig
@@ -48,6 +48,9 @@ pub const Index = packed struct(u32) {
pub const invalid = Index{ .value = std.math.maxInt(Int) };
pub const runtime = Index{ .value = 0 };
+ pub const kit_server_data = Index{ .value = 1 };
+ pub const kit_client_data = Index{ .value = 2 };
+
pub const Int = u32;
pub inline fn source(num: anytype) Index {
@@ -229,6 +232,8 @@ pub const Ref = packed struct(u64) {
*const std.ArrayList(js_ast.Symbol) => symbol_table.items,
*std.ArrayList(js_ast.Symbol) => symbol_table.items,
[]js_ast.Symbol => symbol_table,
+ *js_ast.Symbol.Map => return symbol_table.get(ref) orelse
+ unreachable, // ref must exist within symbol table
else => |T| @compileError("Unsupported type to Ref.getSymbol: " ++ @typeName(T)),
};
return &resolved_symbol_table[ref.innerIndex()];
diff --git a/src/baby_list.zig b/src/baby_list.zig
index d304f7549684e..baa07cc2de1cb 100644
--- a/src/baby_list.zig
+++ b/src/baby_list.zig
@@ -45,7 +45,7 @@ pub fn BabyList(comptime Type: type) type {
return this.len > 0 and @intFromPtr(item.ptr) >= @intFromPtr(this.ptr) and @intFromPtr(item.ptr) < @intFromPtr(this.ptr) + this.len;
}
- pub inline fn initConst(items: []const Type) ListType {
+ pub fn initConst(items: []const Type) callconv(bun.callconv_inline) ListType {
@setRuntimeSafety(false);
return ListType{
// Remove the const qualifier from the items
@@ -204,24 +204,24 @@ pub fn BabyList(comptime Type: type) type {
};
}
- pub inline fn first(this: ListType) ?*Type {
+ pub fn first(this: ListType) callconv(bun.callconv_inline) ?*Type {
return if (this.len > 0) this.ptr[0] else @as(?*Type, null);
}
- pub inline fn last(this: ListType) ?*Type {
+ pub fn last(this: ListType) callconv(bun.callconv_inline) ?*Type {
return if (this.len > 0) &this.ptr[this.len - 1] else @as(?*Type, null);
}
- pub inline fn first_(this: ListType) Type {
+ pub fn first_(this: ListType) callconv(bun.callconv_inline) Type {
return this.ptr[0];
}
- pub inline fn at(this: ListType, index: usize) *const Type {
+ pub fn at(this: ListType, index: usize) callconv(bun.callconv_inline) *const Type {
bun.assert(index < this.len);
return &this.ptr[index];
}
- pub inline fn mut(this: ListType, index: usize) *Type {
+ pub fn mut(this: ListType, index: usize) callconv(bun.callconv_inline) *Type {
bun.assert(index < this.len);
return &this.ptr[index];
}
@@ -236,7 +236,7 @@ pub fn BabyList(comptime Type: type) type {
};
}
- pub inline fn @"[0]"(this: ListType) Type {
+ pub fn @"[0]"(this: ListType) callconv(bun.callconv_inline) Type {
return this.ptr[0];
}
const OOM = error{OutOfMemory};
@@ -259,7 +259,7 @@ pub fn BabyList(comptime Type: type) type {
this.update(list__);
}
- pub inline fn slice(this: ListType) []Type {
+ pub fn slice(this: ListType) callconv(bun.callconv_inline) []Type {
@setRuntimeSafety(false);
return this.ptr[0..this.len];
}
@@ -273,6 +273,7 @@ pub fn BabyList(comptime Type: type) type {
this.update(list_);
return this.len - initial;
}
+
pub fn writeLatin1(this: *@This(), allocator: std.mem.Allocator, str: []const u8) !u32 {
if (comptime Type != u8)
@compileError("Unsupported for type " ++ @typeName(Type));
@@ -282,6 +283,7 @@ pub fn BabyList(comptime Type: type) type {
this.update(new);
return this.len - initial;
}
+
pub fn writeUTF16(this: *@This(), allocator: std.mem.Allocator, str: []const u16) !u32 {
if (comptime Type != u8)
@compileError("Unsupported for type " ++ @typeName(Type));
diff --git a/src/bun.js/api/BunObject.zig b/src/bun.js/api/BunObject.zig
index c22f173445302..21e729c6382b9 100644
--- a/src/bun.js/api/BunObject.zig
+++ b/src/bun.js/api/BunObject.zig
@@ -61,7 +61,6 @@ pub const BunObject = struct {
pub const Glob = toJSGetter(Bun.getGlobConstructor);
pub const Transpiler = toJSGetter(Bun.getTranspilerConstructor);
pub const argv = toJSGetter(Bun.getArgv);
- pub const assetPrefix = toJSGetter(Bun.getAssetPrefix);
pub const cwd = toJSGetter(Bun.getCWD);
pub const enableANSIColors = toJSGetter(Bun.enableANSIColors);
pub const hash = toJSGetter(Bun.getHashObject);
@@ -121,7 +120,6 @@ pub const BunObject = struct {
@export(BunObject.Glob, .{ .name = getterName("Glob") });
@export(BunObject.Transpiler, .{ .name = getterName("Transpiler") });
@export(BunObject.argv, .{ .name = getterName("argv") });
- @export(BunObject.assetPrefix, .{ .name = getterName("assetPrefix") });
@export(BunObject.cwd, .{ .name = getterName("cwd") });
@export(BunObject.enableANSIColors, .{ .name = getterName("enableANSIColors") });
@export(BunObject.hash, .{ .name = getterName("hash") });
@@ -254,161 +252,13 @@ const zlib = @import("../../zlib.zig");
const Which = @import("../../which.zig");
const ErrorableString = JSC.ErrorableString;
const is_bindgen = JSC.is_bindgen;
-const max_addressible_memory = std.math.maxInt(u56);
+const max_addressable_memory = std.math.maxInt(u56);
const glob = @import("../../glob.zig");
const Async = bun.Async;
const SemverObject = @import("../../install/semver.zig").SemverObject;
const Braces = @import("../../shell/braces.zig");
const Shell = @import("../../shell/shell.zig");
-threadlocal var css_imports_list_strings: [512]ZigString = undefined;
-threadlocal var css_imports_list: [512]Api.StringPointer = undefined;
-threadlocal var css_imports_list_tail: u16 = 0;
-threadlocal var css_imports_buf: std.ArrayList(u8) = undefined;
-threadlocal var css_imports_buf_loaded: bool = false;
-
-threadlocal var routes_list_strings: [1024]ZigString = undefined;
-
-pub fn onImportCSS(
- resolve_result: *const Resolver.Result,
- import_record: *ImportRecord,
- origin: URL,
-) void {
- if (!css_imports_buf_loaded) {
- css_imports_buf = std.ArrayList(u8).initCapacity(
- VirtualMachine.get().allocator,
- import_record.path.text.len,
- ) catch unreachable;
- css_imports_buf_loaded = true;
- }
-
- const writer = css_imports_buf.writer();
- const offset = css_imports_buf.items.len;
- css_imports_list[css_imports_list_tail] = .{
- .offset = @as(u32, @truncate(offset)),
- .length = 0,
- };
- getPublicPath(resolve_result.path_pair.primary.text, origin, @TypeOf(writer), writer);
- const length = css_imports_buf.items.len - offset;
- css_imports_list[css_imports_list_tail].length = @as(u32, @truncate(length));
- css_imports_list_tail += 1;
-}
-
-pub fn flushCSSImports() void {
- if (css_imports_buf_loaded) {
- css_imports_buf.clearRetainingCapacity();
- css_imports_list_tail = 0;
- }
-}
-
-pub fn getCSSImports() []ZigString {
- const tail = css_imports_list_tail;
- for (0..tail) |i| {
- ZigString.fromStringPointer(css_imports_list[i], css_imports_buf.items, &css_imports_list_strings[i]);
- }
- return css_imports_list_strings[0..tail];
-}
-
-const ShellTask = struct {
- arena: std.heap.Arena,
- script: std.ArrayList(u8),
- interpreter: Shell.InterpreterSync,
-
- pub const AsyncShellTask = JSC.ConcurrentPromiseTask(ShellTask);
-};
-
-pub fn shell(
- globalThis: *JSC.JSGlobalObject,
- callframe: *JSC.CallFrame,
-) JSC.JSValue {
- const Interpreter = @import("../../shell/interpreter.zig").Interpreter;
-
- // var allocator = globalThis.bunVM().allocator;
- const allocator = getAllocator(globalThis);
- var arena = bun.ArenaAllocator.init(allocator);
-
- const arguments_ = callframe.arguments(8);
- var arguments = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice());
- const string_args = arguments.nextEat() orelse {
- globalThis.throw("shell: expected 2 arguments, got 0", .{});
- return .undefined;
- };
-
- const template_args_js = arguments.nextEat() orelse {
- globalThis.throw("shell: expected 2 arguments, got 0", .{});
- return .undefined;
- };
- var template_args = template_args_js.arrayIterator(globalThis);
- var jsobjs = std.ArrayList(JSValue).init(arena.allocator());
- var script = std.ArrayList(u8).init(arena.allocator());
-
- if (!(bun.shell.shellCmdFromJS(globalThis, string_args, &template_args, &jsobjs, &script) catch {
- if (!globalThis.hasException())
- globalThis.throwOutOfMemory();
- return JSValue.undefined;
- })) {
- return .undefined;
- }
-
- if (globalThis.hasException()) {
- arena.deinit();
- return .undefined;
- }
-
- const lex_result = brk: {
- if (bun.strings.isAllASCII(script.items[0..])) {
- var lexer = Shell.LexerAscii.new(arena.allocator(), script.items[0..]);
- lexer.lex() catch |err| {
- globalThis.throwError(err, "failed to lex shell");
- return JSValue.undefined;
- };
- break :brk lexer.get_result();
- }
- var lexer = Shell.LexerUnicode.new(arena.allocator(), script.items[0..]);
- lexer.lex() catch |err| {
- globalThis.throwError(err, "failed to lex shell");
- return JSValue.undefined;
- };
- break :brk lexer.get_result();
- };
-
- var parser = Shell.Parser.new(arena.allocator(), lex_result, jsobjs.items[0..]) catch |err| {
- globalThis.throwError(err, "failed to create shell parser");
- return JSValue.undefined;
- };
-
- const script_ast = parser.parse() catch |err| {
- globalThis.throwError(err, "failed to parse shell");
- return JSValue.undefined;
- };
-
- const script_heap = arena.allocator().create(Shell.AST.Script) catch {
- globalThis.throwOutOfMemory();
- return JSValue.undefined;
- };
-
- script_heap.* = script_ast;
-
- const interpreter = Interpreter.init(
- globalThis,
- allocator,
- &arena,
- script_heap,
- jsobjs.items[0..],
- ) catch {
- arena.deinit();
- return .false;
- };
- _ = interpreter; // autofix
-
- // return interpreter;
- return .undefined;
-
- // return interpreter.start(globalThis) catch {
- // return .false;
- // };
-}
-
pub fn shellEscape(
globalThis: *JSC.JSGlobalObject,
callframe: *JSC.CallFrame,
@@ -902,13 +752,6 @@ pub fn getMain(
return ZigString.init(vm.main).toJS(globalThis);
}
-pub fn getAssetPrefix(
- globalThis: *JSC.JSGlobalObject,
- _: *JSC.JSObject,
-) JSC.JSValue {
- return ZigString.init(VirtualMachine.get().bundler.options.routes.asset_prefix_path).toJS(globalThis);
-}
-
pub fn getArgv(
globalThis: *JSC.JSGlobalObject,
_: *JSC.JSObject,
@@ -994,7 +837,7 @@ pub fn getPublicPath(to: string, origin: URL, comptime Writer: type, writer: Wri
to,
VirtualMachine.get().bundler.fs.top_level_dir,
origin,
- VirtualMachine.get().bundler.options.routes.asset_prefix_path,
+ "",
comptime Writer,
writer,
.loose,
@@ -1286,22 +1129,6 @@ export fn Bun__resolveSyncWithSource(
};
}
-pub fn getPublicPathJS(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) JSC.JSValue {
- const arguments = callframe.arguments(1).slice();
- if (arguments.len < 1) {
- return bun.String.empty.toJS(globalObject);
- }
- var public_path_temp_str: bun.PathBuffer = undefined;
-
- const to = arguments[0].toSlice(globalObject, bun.default_allocator);
- defer to.deinit();
- var stream = std.io.fixedBufferStream(&public_path_temp_str);
- var writer = stream.writer();
- getPublicPath(to.slice(), VirtualMachine.get().origin, @TypeOf(&writer), &writer);
-
- return ZigString.init(stream.buffer[0..stream.pos]).toJS(globalObject);
-}
-
extern fn dump_zone_malloc_stats() void;
fn dump_mimalloc(globalObject: *JSC.JSGlobalObject, _: *JSC.CallFrame) JSC.JSValue {
@@ -4377,7 +4204,7 @@ pub const FFIObject = struct {
}
}
- if (addr > max_addressible_memory) {
+ if (addr > max_addressable_memory) {
return JSC.toInvalidArguments("Pointer is outside max addressible memory, which usually means a bug in your program.", .{}, globalThis);
}
@@ -4459,7 +4286,7 @@ pub const FFIObject = struct {
return .{ .err = JSC.toInvalidArguments("length must be > 0. This usually means a bug in your code.", .{}, globalThis) };
}
- if (length_i > max_addressible_memory) {
+ if (length_i > max_addressable_memory) {
return .{ .err = JSC.toInvalidArguments("length exceeds max addressable memory. This usually means a bug in your code.", .{}, globalThis) };
}
diff --git a/src/bun.js/api/JSBundler.zig b/src/bun.js/api/JSBundler.zig
index 7008c23069f8f..841ba7c48216c 100644
--- a/src/bun.js/api/JSBundler.zig
+++ b/src/bun.js/api/JSBundler.zig
@@ -100,59 +100,6 @@ pub const JSBundler = struct {
globalThis.throwInvalidArguments("Expected plugin to be an object", .{});
return error.JSError;
}
- if (try plugin.getOwnObject(globalThis, "SECRET_SERVER_COMPONENTS_INTERNALS")) |internals| {
- if (internals.getOwn(globalThis, "router")) |router_value| {
- if (router_value.as(JSC.API.FileSystemRouter) != null) {
- this.server_components.router.set(globalThis, router_value);
- } else {
- globalThis.throwInvalidArguments("Expected router to be a Bun.FileSystemRouter", .{});
- return error.JSError;
- }
- }
-
- const directive_object = (try internals.getOwnObject(globalThis, "directive")) orelse {
- globalThis.throwInvalidArguments("Expected directive to be an object", .{});
- return error.JSError;
- };
-
- if (try directive_object.getArray(globalThis, "client")) |client_names_array| {
- var array_iter = client_names_array.arrayIterator(globalThis);
- while (array_iter.next()) |client_name| {
- var slice = client_name.toSliceOrNull(globalThis) orelse {
- globalThis.throwInvalidArguments("Expected directive.client to be an array of strings", .{});
- return error.JSError;
- };
- defer slice.deinit();
- try this.server_components.client.append(allocator, try OwnedString.initCopy(allocator, slice.slice()));
- }
- } else {
- globalThis.throwInvalidArguments("Expected directive.client to be an array of strings", .{});
- return error.JSError;
- }
-
- if (try directive_object.getArray(globalThis, "server")) |server_names_array| {
- var array_iter = server_names_array.arrayIterator(globalThis);
- while (array_iter.next()) |server_name| {
- var slice = server_name.toSliceOrNull(globalThis) orelse {
- globalThis.throwInvalidArguments("Expected directive.server to be an array of strings", .{});
- return error.JSError;
- };
- defer slice.deinit();
- try this.server_components.server.append(allocator, try OwnedString.initCopy(allocator, slice.slice()));
- }
- } else {
- globalThis.throwInvalidArguments("Expected directive.server to be an array of strings", .{});
- return error.JSError;
- }
-
- continue;
- }
-
- // var decl = PluginDeclaration{
- // .name = OwnedString.initEmpty(allocator),
- // .setup = .{},
- // };
- // defer decl.deinit();
if (plugin.getOwnOptional(globalThis, "name", ZigString.Slice) catch null) |slice| {
defer slice.deinit();
diff --git a/src/bun.js/api/JSTranspiler.zig b/src/bun.js/api/JSTranspiler.zig
index 2182ed6ccc950..4acb5bbb94a53 100644
--- a/src/bun.js/api/JSTranspiler.zig
+++ b/src/bun.js/api/JSTranspiler.zig
@@ -827,8 +827,9 @@ pub fn constructor(
bundler.options.auto_import_jsx = transpiler_options.runtime.auto_import_jsx;
bundler.options.inlining = transpiler_options.runtime.inlining;
bundler.options.hot_module_reloading = transpiler_options.runtime.hot_module_reloading;
- bundler.options.jsx.supports_fast_refresh = bundler.options.hot_module_reloading and
- bundler.options.allow_runtime and transpiler_options.runtime.react_fast_refresh;
+ bundler.options.react_fast_refresh = bundler.options.hot_module_reloading and
+ bundler.options.allow_runtime and
+ transpiler_options.runtime.react_fast_refresh;
const transpiler = allocator.create(Transpiler) catch unreachable;
transpiler.* = Transpiler{
@@ -845,7 +846,7 @@ pub fn finalize(
this: *Transpiler,
) callconv(.C) void {
this.bundler.log.deinit();
- this.scan_pass_result.named_imports.deinit();
+ this.scan_pass_result.named_imports.deinit(this.scan_pass_result.import_records.allocator);
this.scan_pass_result.import_records.deinit();
this.scan_pass_result.used_symbols.deinit();
if (this.buffer_writer != null) {
@@ -881,19 +882,7 @@ fn getParseResult(this: *Transpiler, allocator: std.mem.Allocator, code: []const
// .allocator = this.
};
- var parse_result = this.bundler.parse(parse_options, null);
-
- // necessary because we don't run the linker
- if (parse_result) |*res| {
- for (res.ast.import_records.slice()) |*import| {
- if (import.kind.isCommonJS()) {
- import.do_commonjs_transform_in_printer = true;
- import.module_id = @as(u32, @truncate(bun.hash(import.path.pretty)));
- }
- }
- }
-
- return parse_result;
+ return this.bundler.parse(parse_options, null);
}
pub fn scan(
diff --git a/src/bun.js/bindings/bindings.cpp b/src/bun.js/bindings/bindings.cpp
index 2c69eb1a9fdc0..675e58e9c8d75 100644
--- a/src/bun.js/bindings/bindings.cpp
+++ b/src/bun.js/bindings/bindings.cpp
@@ -4394,9 +4394,7 @@ static void fromErrorInstance(ZigException* except, JSC::JSGlobalObject* global,
if (except->code == SYNTAX_ERROR_CODE) {
except->message = Bun::toStringRef(err->sanitizedMessageString(global));
} else if (JSC::JSValue message = obj->getIfPropertyExists(global, vm.propertyNames->message)) {
-
except->message = Bun::toStringRef(global, message);
-
} else {
except->message = Bun::toStringRef(err->sanitizedMessageString(global));
}
@@ -4787,7 +4785,7 @@ void JSC__JSValue__toZigException(JSC__JSValue jsException, JSC__JSGlobalObject*
if (JSC::Exception* jscException = JSC::jsDynamicCast(value)) {
if (JSC::ErrorInstance* error = JSC::jsDynamicCast(jscException->value())) {
- fromErrorInstance(exception, global, error, &jscException->stack(), value);
+ fromErrorInstance(exception, global, error, &jscException->stack(), jscException->value());
return;
}
}
diff --git a/src/bun.js/bindings/bindings.zig b/src/bun.js/bindings/bindings.zig
index 37b7f83500ad0..27d04a7f71afa 100644
--- a/src/bun.js/bindings/bindings.zig
+++ b/src/bun.js/bindings/bindings.zig
@@ -1764,6 +1764,10 @@ pub const JSString = extern struct {
pub const name = "JSC::JSString";
pub const namespace = "JSC";
+ pub fn toJS(str: *JSString) JSValue {
+ return JSValue.fromCell(str);
+ }
+
pub fn toObject(this: *JSString, global: *JSGlobalObject) ?*JSObject {
return shim.cppFn("toObject", .{ this, global });
}
@@ -6025,7 +6029,7 @@ pub const JSValue = enum(JSValueReprInt) {
/// For native C++ classes extending JSCell, this retrieves s_info's name
pub fn getClassInfoName(this: JSValue) ?bun.String {
- if (!this.isObject()) return null;
+ if (!this.isCell()) return null;
var out: bun.String = bun.String.empty;
if (!JSC__JSValue__getClassInfoName(this, &out)) return null;
return out;
diff --git a/src/bun.js/event_loop.zig b/src/bun.js/event_loop.zig
index f8ca831d13988..e0e976180856e 100644
--- a/src/bun.js/event_loop.zig
+++ b/src/bun.js/event_loop.zig
@@ -480,10 +480,7 @@ pub const Task = TaggedPointerUnion(.{
ShellAsyncSubprocessDone,
TimerObject,
bun.shell.Interpreter.Builtin.Yes.YesTask,
-
- bun.kit.DevServer.BundleTask,
- bun.kit.DevServer.HotReloadTask,
-
+ // bun.kit.DevServer.HotReloadTask,
ProcessWaiterThreadTask,
RuntimeTranspilerStore,
ServerAllConnectionsClosedTask,
@@ -1026,13 +1023,13 @@ pub const EventLoop = struct {
// special case: we return
return 0;
},
- @field(Task.Tag, @typeName(bun.kit.DevServer.HotReloadTask)) => {
- const transform_task = task.get(bun.kit.DevServer.HotReloadTask).?;
- transform_task.*.run();
- transform_task.deinit();
- // special case: we return
- return 0;
- },
+ // @field(Task.Tag, @typeName(bun.kit.DevServer.HotReloadTask)) => {
+ // const transform_task = task.get(bun.kit.DevServer.HotReloadTask).?;
+ // transform_task.*.run();
+ // transform_task.deinit();
+ // // special case: we return
+ // return 0;
+ // },
@field(Task.Tag, typeBaseName(@typeName(FSWatchTask))) => {
var transform_task: *FSWatchTask = task.get(FSWatchTask).?;
transform_task.*.run();
@@ -1245,15 +1242,9 @@ pub const EventLoop = struct {
var any: *ServerAllConnectionsClosedTask = task.get(ServerAllConnectionsClosedTask).?;
any.runFromJSThread(virtual_machine);
},
- @field(Task.Tag, typeBaseName(@typeName(bun.kit.DevServer.BundleTask))) => {
- task.get(bun.kit.DevServer.BundleTask).?.completeOnMainThread();
- },
- else => if (Environment.allow_assert) {
- bun.Output.prettyln("\nUnexpected tag: {s}\n", .{@tagName(task.tag())});
- } else {
- log("\nUnexpected tag: {s}\n", .{@tagName(task.tag())});
- unreachable;
+ else => {
+ bun.Output.panic("Unexpected tag: {s}", .{@tagName(task.tag())});
},
}
@@ -1702,8 +1693,7 @@ pub const MiniVM = struct {
}
pub inline fn incrementPendingUnrefCounter(this: @This()) void {
- _ = this; // autofix
-
+ _ = this;
@panic("FIXME TODO");
}
diff --git a/src/bun.js/javascript.zig b/src/bun.js/javascript.zig
index 18f8f73e0b60d..71794e686e015 100644
--- a/src/bun.js/javascript.zig
+++ b/src/bun.js/javascript.zig
@@ -1178,7 +1178,7 @@ pub const VirtualMachine = struct {
}
}
- pub fn reload(this: *VirtualMachine) void {
+ pub fn reload(this: *VirtualMachine, _: *HotReloader.HotReloadTask) void {
Output.debug("Reloading...", .{});
const should_clear_terminal = !this.bundler.env.hasSetNoClearTerminalOnReload(!Output.enable_ansi_colors);
if (this.hot_reload == .watch) {
@@ -1620,7 +1620,6 @@ pub const VirtualMachine = struct {
// Avoid reading from tsconfig.json & package.json when we're in standalone mode
vm.bundler.configureLinkerWithAutoJSX(false);
- try vm.bundler.configureFramework(false);
vm.bundler.macro_context = js_ast.Macro.MacroContext.init(&vm.bundler);
@@ -1725,14 +1724,9 @@ pub const VirtualMachine = struct {
};
vm.bundler.configureLinker();
- try vm.bundler.configureFramework(false);
vm.bundler.macro_context = js_ast.Macro.MacroContext.init(&vm.bundler);
- if (opts.args.serve orelse false) {
- vm.bundler.linker.onImportCSS = Bun.onImportCSS;
- }
-
vm.global = ZigGlobalObject.create(
vm.console,
-1,
@@ -1873,14 +1867,9 @@ pub const VirtualMachine = struct {
vm.bundler.configureLinkerWithAutoJSX(false);
}
- try vm.bundler.configureFramework(false);
vm.smol = opts.smol;
vm.bundler.macro_context = js_ast.Macro.MacroContext.init(&vm.bundler);
- if (opts.args.serve orelse false) {
- vm.bundler.linker.onImportCSS = Bun.onImportCSS;
- }
-
vm.global = ZigGlobalObject.create(
vm.console,
@as(i32, @intCast(worker.execution_context_id)),
@@ -1964,14 +1953,9 @@ pub const VirtualMachine = struct {
};
vm.bundler.configureLinker();
- try vm.bundler.configureFramework(false);
vm.bundler.macro_context = js_ast.Macro.MacroContext.init(&vm.bundler);
- if (opts.args.serve orelse false) {
- vm.bundler.linker.onImportCSS = Bun.onImportCSS;
- }
-
vm.regular_event_loop.virtual_machine = vm;
vm.smol = opts.smol;
@@ -2423,18 +2407,18 @@ pub const VirtualMachine = struct {
}
if (JSC.HardcodedModule.Aliases.getWithEql(specifier, bun.String.eqlComptime, jsc_vm.bundler.options.target)) |hardcoded| {
- if (hardcoded.tag == .none) {
- resolveMaybeNeedsTrailingSlash(
- res,
- global,
- bun.String.init(hardcoded.path),
- source,
- query_string,
- is_esm,
- is_a_file_path,
- );
- return;
- }
+ // if (hardcoded.tag == .none) {
+ // resolveMaybeNeedsTrailingSlash(
+ // res,
+ // global,
+ // bun.String.init(hardcoded.path),
+ // source,
+ // query_string,
+ // is_esm,
+ // is_a_file_path,
+ // );
+ // return;
+ // }
res.* = ErrorableString.ok(bun.String.init(hardcoded.path));
return;
@@ -2674,8 +2658,7 @@ pub const VirtualMachine = struct {
)) {
.success => |r| r,
.failure => |e| {
- {
- }
+ {}
this.log.addErrorFmt(
null,
logger.Loc.Empty,
@@ -4027,9 +4010,18 @@ pub const VirtualMachine = struct {
return instance;
}
+ /// To satisfy the interface from NewHotReloader()
+ pub fn getLoaders(vm: *VirtualMachine) *bun.options.Loader.HashTable {
+ return &vm.bundler.options.loaders;
+ }
+
+ /// To satisfy the interface from NewHotReloader()
+ pub fn bustDirCache(vm: *VirtualMachine, path: []const u8) bool {
+ return vm.bundler.resolver.bustDirCache(path);
+ }
+
comptime {
- if (!JSC.is_bindgen)
- _ = Bun__remapStackFramePositions;
+ _ = Bun__remapStackFramePositions;
}
};
@@ -4120,7 +4112,7 @@ pub fn NewHotReloader(comptime Ctx: type, comptime EventLoopType: type, comptime
// get another hot reload request while we're reloading, we'll
// still enqueue it.
while (this.reloader.pending_count.swap(0, .monotonic) > 0) {
- this.reloader.ctx.reload();
+ this.reloader.ctx.reload(this);
}
}
diff --git a/src/bun.js/module_loader.zig b/src/bun.js/module_loader.zig
index d6a5e0daeaa80..e2e79283cbfba 100644
--- a/src/bun.js/module_loader.zig
+++ b/src/bun.js/module_loader.zig
@@ -667,10 +667,10 @@ pub const RuntimeTranspilerStore = struct {
// In a benchmarking loading @babel/standalone 100 times:
//
// After ensureHash:
- // 354.00 ms 4.2% 354.00 ms WTF::StringImpl::hashSlowCase() const
+ // 354.00 ms 4.2% 354.00 ms WTF::StringImpl::hashSlowCase() const
//
// Before ensureHash:
- // 506.00 ms 6.1% 506.00 ms WTF::StringImpl::hashSlowCase() const
+ // 506.00 ms 6.1% 506.00 ms WTF::StringImpl::hashSlowCase() const
//
result.ensureHash();
@@ -2173,12 +2173,6 @@ pub const ModuleLoader = struct {
}
}
- if (jsc_vm.bundler.options.routes.asset_prefix_path.len > 0) {
- if (strings.hasPrefix(slice, jsc_vm.bundler.options.routes.asset_prefix_path)) {
- slice = slice[jsc_vm.bundler.options.routes.asset_prefix_path.len..];
- }
- }
-
string_to_use_for_source.* = slice;
if (strings.indexOfChar(slice, '?')) |i| {
@@ -2821,7 +2815,7 @@ pub const HardcodedModule = enum {
pub const Alias = struct {
path: string,
- tag: ImportRecord.Tag = ImportRecord.Tag.hardcoded,
+ tag: ImportRecord.Tag = .builtin,
};
pub const Aliases = struct {
diff --git a/src/bun.js/node/path_watcher.zig b/src/bun.js/node/path_watcher.zig
index 3203d7365d008..dbbc6b6798700 100644
--- a/src/bun.js/node/path_watcher.zig
+++ b/src/bun.js/node/path_watcher.zig
@@ -27,8 +27,8 @@ const StringOrBytesToDecode = FSWatcher.FSWatchTaskWindows.StringOrBytesToDecode
pub const PathWatcherManager = struct {
const options = @import("../../options.zig");
- pub const Watcher = GenericWatcher.NewWatcher(*PathWatcherManager);
const log = Output.scoped(.PathWatcherManager, false);
+ pub const Watcher = GenericWatcher.NewWatcher(*PathWatcherManager);
main_watcher: *Watcher,
watchers: bun.BabyList(?*PathWatcher) = .{},
@@ -148,6 +148,7 @@ pub const PathWatcherManager = struct {
.current_fd_task = bun.FDHashMap(*DirectoryRegisterTask).init(bun.default_allocator),
.watchers = watchers,
.main_watcher = try Watcher.init(
+ // PathWatcherManager,
this,
vm.bundler.fs,
bun.default_allocator,
diff --git a/src/bun.js/web_worker.zig b/src/bun.js/web_worker.zig
index 127c0751bc073..7f64f84842acf 100644
--- a/src/bun.js/web_worker.zig
+++ b/src/bun.js/web_worker.zig
@@ -249,11 +249,6 @@ pub const WebWorker = struct {
var b = &vm.bundler;
- b.configureRouter(false) catch {
- this.flushLogs();
- this.exitAndDeinit();
- return;
- };
b.configureDefines() catch {
this.flushLogs();
this.exitAndDeinit();
diff --git a/src/bun.js/webcore/encoding.zig b/src/bun.js/webcore/encoding.zig
index 6ac268370913c..cd8389eefd4d1 100644
--- a/src/bun.js/webcore/encoding.zig
+++ b/src/bun.js/webcore/encoding.zig
@@ -464,9 +464,9 @@ pub const TextEncoderStreamEncoder = struct {
};
// In a previous benchmark, counting the length took about as much time as allocating the buffer.
//
- // Benchmark Time % CPU (ns) Iterations Ratio
- // 288.00 ms 13.5% 288.00 ms simdutf::arm64::implementation::convert_latin1_to_utf8(char const*, unsigned long, char*) const
- // 278.00 ms 13.0% 278.00 ms simdutf::arm64::implementation::utf8_length_from_latin1(char const*, unsigned long) const
+ // Benchmark Time % CPU (ns) Iterations Ratio
+ // 288.00 ms 13.5% 288.00 ms simdutf::arm64::implementation::convert_latin1_to_utf8(char const*, unsigned long, char*) const
+ // 278.00 ms 13.0% 278.00 ms simdutf::arm64::implementation::utf8_length_from_latin1(char const*, unsigned long) const
//
//
var buffer = std.ArrayList(u8).initCapacity(bun.default_allocator, input.len + prepend_replacement_len) catch {
diff --git a/src/bun.zig b/src/bun.zig
index ca1f8415f46f1..db457eba7b050 100644
--- a/src/bun.zig
+++ b/src/bun.zig
@@ -34,8 +34,6 @@ pub const auto_allocator: std.mem.Allocator = if (!use_mimalloc)
else
@import("./memory_allocator.zig").auto_allocator;
-pub const huge_allocator_threshold: comptime_int = @import("./memory_allocator.zig").huge_threshold;
-
pub const callmod_inline: std.builtin.CallModifier = if (builtin.mode == .Debug) .auto else .always_inline;
pub const callconv_inline: std.builtin.CallingConvention = if (builtin.mode == .Debug) .Unspecified else .Inline;
@@ -1000,13 +998,14 @@ pub const StringArrayHashMapContext = struct {
pub const Prehashed = struct {
value: u32,
input: []const u8,
+
pub fn hash(this: @This(), s: []const u8) u32 {
if (s.ptr == this.input.ptr and s.len == this.input.len)
return this.value;
return @as(u32, @truncate(std.hash.Wyhash.hash(0, s)));
}
- pub fn eql(_: @This(), a: []const u8, b: []const u8) bool {
+ pub fn eql(_: @This(), a: []const u8, b: []const u8, _: usize) bool {
return strings.eqlLong(a, b, true);
}
};
@@ -2991,6 +2990,12 @@ pub noinline fn outOfMemory() noreturn {
crash_handler.crashHandler(.out_of_memory, null, @returnAddress());
}
+pub fn todoPanic(src: std.builtin.SourceLocation, comptime format: string, args: anytype) noreturn {
+ @setCold(true);
+ bun.Analytics.Features.todo_panic = 1;
+ Output.panic("TODO: " ++ format ++ " ({s}:{d})", args ++ .{ src.file, src.line });
+}
+
/// Wrapper around allocator.create(T) that safely initializes the pointer. Prefer this over
/// `std.mem.Allocator.create`, but prefer using `bun.new` over `create(default_allocator, T, t)`
pub fn create(allocator: std.mem.Allocator, comptime T: type, t: T) *T {
diff --git a/src/bun_js.zig b/src/bun_js.zig
index 7144c3ae49b7d..828a9b0b1de87 100644
--- a/src/bun_js.zig
+++ b/src/bun_js.zig
@@ -107,9 +107,6 @@ pub const Run = struct {
b.options.env.behavior = .load_all_without_inlining;
- b.configureRouter(false) catch {
- failWithBuildError(vm);
- };
b.configureDefines() catch {
failWithBuildError(vm);
};
@@ -252,9 +249,6 @@ pub const Run = struct {
.unspecified => {},
}
- b.configureRouter(false) catch {
- failWithBuildError(vm);
- };
b.configureDefines() catch {
failWithBuildError(vm);
};
diff --git a/src/bundler.zig b/src/bundler.zig
index 540f879bd79d5..56406a9be0584 100644
--- a/src/bundler.zig
+++ b/src/bundler.zig
@@ -322,6 +322,12 @@ pub const PluginRunner = struct {
}
};
+/// This structure was the JavaScript bundler before bundle_v2 was written. It now
+/// acts mostly as a configuration object, but it also contains stateful logic around
+/// logging errors (.log) and module resolution (.resolve_queue)
+///
+/// This object is not exclusive to bundle_v2/Bun.build; one of these is stored
+/// on every VM so that the options can be used for transpilation.
pub const Bundler = struct {
options: options.BundleOptions,
log: *logger.Log,
@@ -581,15 +587,7 @@ pub const Bundler = struct {
defer js_ast.Expr.Data.Store.reset();
defer js_ast.Stmt.Data.Store.reset();
- if (this.options.framework) |framework| {
- if (this.options.target.isClient()) {
- try this.options.loadDefines(this.allocator, this.env, &framework.client.env);
- } else {
- try this.options.loadDefines(this.allocator, this.env, &framework.server.env);
- }
- } else {
- try this.options.loadDefines(this.allocator, this.env, &this.options.env);
- }
+ try this.options.loadDefines(this.allocator, this.env, &this.options.env);
if (this.options.define.dots.get("NODE_ENV")) |NODE_ENV| {
if (NODE_ENV.len > 0 and NODE_ENV[0].data.value == .e_string and NODE_ENV[0].data.value.e_string.eqlComptime("production")) {
@@ -598,97 +596,6 @@ pub const Bundler = struct {
}
}
- pub fn configureFramework(
- this: *Bundler,
- comptime load_defines: bool,
- ) !void {
- if (this.options.framework) |*framework| {
- if (framework.needsResolveFromPackage()) {
- var route_config = this.options.routes;
- var pair = PackageJSON.FrameworkRouterPair{ .framework = framework, .router = &route_config };
-
- if (framework.development) {
- try this.resolver.resolveFramework(framework.package, &pair, .development, load_defines);
- } else {
- try this.resolver.resolveFramework(framework.package, &pair, .production, load_defines);
- }
-
- if (this.options.areDefinesUnset()) {
- if (this.options.target.isClient()) {
- this.options.env = framework.client.env;
- } else {
- this.options.env = framework.server.env;
- }
- }
-
- if (pair.loaded_routes) {
- this.options.routes = route_config;
- }
- framework.resolved = true;
- this.options.framework = framework.*;
- } else if (!framework.resolved) {
- Output.panic("directly passing framework path is not implemented yet!", .{});
- }
- }
- }
-
- pub fn configureFrameworkWithResolveResult(this: *Bundler, comptime client: bool) !?_resolver.Result {
- if (this.options.framework != null) {
- try this.configureFramework(true);
- if (comptime client) {
- if (this.options.framework.?.client.isEnabled()) {
- return try this.resolver.resolve(this.fs.top_level_dir, this.options.framework.?.client.path, .stmt);
- }
-
- if (this.options.framework.?.fallback.isEnabled()) {
- return try this.resolver.resolve(this.fs.top_level_dir, this.options.framework.?.fallback.path, .stmt);
- }
- } else {
- if (this.options.framework.?.server.isEnabled()) {
- return try this.resolver.resolve(this.fs.top_level_dir, this.options.framework.?.server, .stmt);
- }
- }
- }
-
- return null;
- }
-
- pub fn configureRouter(this: *Bundler, comptime load_defines: bool) !void {
- try this.configureFramework(load_defines);
- defer {
- if (load_defines) {
- this.configureDefines() catch {};
- }
- }
-
- if (this.options.routes.routes_enabled) {
- const dir_info_ = try this.resolver.readDirInfo(this.options.routes.dir);
- const dir_info = dir_info_ orelse return error.MissingRoutesDir;
-
- this.options.routes.dir = dir_info.abs_path;
-
- this.router = try Router.init(this.fs, this.allocator, this.options.routes);
- try this.router.?.loadRoutes(
- this.log,
- dir_info,
- Resolver,
- &this.resolver,
- this.fs.top_level_dir,
- );
- this.router.?.routes.client_framework_enabled = this.options.isFrontendFrameworkEnabled();
- return;
- }
-
- // If we get this far, it means they're trying to run the bundler without a preconfigured router
- if (this.options.entry_points.len > 0) {
- this.options.routes.routes_enabled = false;
- }
-
- if (this.router) |*router| {
- router.routes.client_framework_enabled = this.options.isFrontendFrameworkEnabled();
- }
- }
-
pub fn resetStore(_: *const Bundler) void {
js_ast.Expr.Data.Store.reset();
js_ast.Stmt.Data.Store.reset();
@@ -709,6 +616,7 @@ pub const Bundler = struct {
input_fd: ?StoredFileDescriptorType,
empty: bool = false,
};
+
pub fn buildWithResolveResult(
bundler: *Bundler,
resolve_result: _resolver.Result,
@@ -985,7 +893,7 @@ pub const Bundler = struct {
&writer,
.esm,
),
- .bun, .bun_macro => try bundler.print(
+ .bun, .bun_macro, .kit_server_components_ssr => try bundler.print(
result,
*js_printer.BufferPrinter,
&writer,
@@ -1157,8 +1065,7 @@ pub const Bundler = struct {
js_ast.Symbol.Map.initList(symbols),
source,
false,
- js_printer.Options{
- .externals = ast.externals,
+ .{
.runtime_imports = ast.runtime_imports,
.require_ref = ast.require_ref,
.css_import_behavior = bundler.options.cssImportBehavior(),
@@ -1180,8 +1087,7 @@ pub const Bundler = struct {
js_ast.Symbol.Map.initList(symbols),
source,
false,
- js_printer.Options{
- .externals = ast.externals,
+ .{
.runtime_imports = ast.runtime_imports,
.require_ref = ast.require_ref,
.source_map_handler = source_map_context,
@@ -1204,8 +1110,7 @@ pub const Bundler = struct {
js_ast.Symbol.Map.initList(symbols),
source,
is_bun,
- js_printer.Options{
- .externals = ast.externals,
+ .{
.runtime_imports = ast.runtime_imports,
.require_ref = ast.require_ref,
.css_import_behavior = bundler.options.cssImportBehavior(),
@@ -1444,10 +1349,10 @@ pub const Bundler = struct {
opts.features.react_fast_refresh = opts.features.hot_module_reloading and
jsx.parse and
- bundler.options.jsx.supports_fast_refresh;
+ bundler.options.react_fast_refresh;
opts.filepath_hash_for_hmr = file_hash orelse 0;
opts.features.auto_import_jsx = bundler.options.auto_import_jsx;
- opts.warn_about_unbundled_modules = target.isNotBun();
+ opts.warn_about_unbundled_modules = !target.isBun();
opts.features.inject_jest_globals = this_parse.inject_jest_globals;
opts.features.minify_syntax = bundler.options.minify_syntax;
diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig
index 877031490772f..580c97f04803a 100644
--- a/src/bundler/bundle_v2.zig
+++ b/src/bundler/bundle_v2.zig
@@ -43,7 +43,6 @@
//
const Bundler = bun.Bundler;
const bun = @import("root").bun;
-const from = bun.from;
const string = bun.string;
const Output = bun.Output;
const Global = bun.Global;
@@ -127,6 +126,8 @@ const debugTreeShake = Output.scoped(.TreeShake, true);
const BitSet = bun.bit_set.DynamicBitSetUnmanaged;
const Async = bun.Async;
+const kit = bun.kit;
+
const logPartDependencyTree = Output.scoped(.part_dep_tree, false);
fn tracer(comptime src: std.builtin.SourceLocation, comptime name: [:0]const u8) bun.tracy.Ctx {
@@ -326,131 +327,189 @@ const Watcher = bun.JSC.NewHotReloader(BundleV2, EventLoop, true);
pub const BundleV2 = struct {
bundler: *Bundler,
+ /// When Server Component is enabled, this is used for the client bundles
+ /// and `bundler` is used for the server bundles.
client_bundler: *Bundler,
- server_bundler: *Bundler,
- graph: Graph = Graph{},
- linker: LinkerContext = LinkerContext{ .loop = undefined },
- bun_watcher: ?*Watcher.Watcher = null,
- // kit_watcher: ?*bun.kit.DevServer.HotReloader.Watcher = null,
- plugins: ?*JSC.API.JSBundler.Plugin = null,
- completion: ?CompletionPtr = null,
- source_code_length: usize = 0,
-
- // There is a race condition where an onResolve plugin may schedule a task on the bundle thread before it's parsing task completes
+ /// See kit.Framework.ServerComponents.separate_ssr_graph
+ ssr_bundler: *Bundler,
+ /// When Bun Kit is used, the resolved framework is passed here
+ framework: ?kit.Framework,
+ graph: Graph,
+ linker: LinkerContext,
+ bun_watcher: ?*Watcher.Watcher,
+ plugins: ?*JSC.API.JSBundler.Plugin,
+ completion: ?*JSBundleCompletionTask,
+ source_code_length: usize,
+
+ /// There is a race condition where an onResolve plugin may schedule a task on the bundle thread before its parsing task completes
resolve_tasks_waiting_for_import_source_index: std.AutoArrayHashMapUnmanaged(Index.Int, BabyList(struct { to_source_index: Index, import_record_index: u32 })) = .{},
/// Allocations not tracked by a threadlocal heap
- free_list: std.ArrayList(string) = std.ArrayList(string).init(bun.default_allocator),
+ free_list: std.ArrayList([]const u8) = std.ArrayList([]const u8).init(bun.default_allocator),
unique_key: u64 = 0,
dynamic_import_entry_points: std.AutoArrayHashMap(Index.Int, void) = undefined,
- pub const CompletionPtr = union(enum) {
- js: *JSBundleCompletionTask,
- kit: *bun.kit.DevServer.BundleTask,
-
- pub fn log(ptr: CompletionPtr) *bun.logger.Log {
- return switch (ptr) {
- inline else => |inner| &inner.log,
- };
- }
+ const KitOptions = struct {
+ framework: kit.Framework,
+ client_bundler: *Bundler,
+ ssr_bundler: *Bundler,
};
+ const ResolvedFramework = struct {};
+
const debug = Output.scoped(.Bundle, false);
pub inline fn loop(this: *BundleV2) *EventLoop {
return &this.linker.loop;
}
- pub fn findReachableFiles(this: *BundleV2) ![]Index {
- const trace = tracer(@src(), "findReachableFiles");
- defer trace.end();
+ /// Most of the time, accessing .bundler directly is OK. This is only
+ /// needed when it is important to distinguish between client and server
+ ///
+ /// Note that .log, .allocator, and other things are shared
+ /// between the three bundler configurations
+ pub inline fn bundlerForTarget(this: *BundleV2, target: options.Target) *Bundler {
+ return if (!this.bundler.options.server_components)
+ this.bundler
+ else switch (target) {
+ else => this.bundler,
+ .browser => this.client_bundler,
+ .kit_server_components_ssr => this.ssr_bundler,
+ };
+ }
- const Visitor = struct {
- reachable: std.ArrayList(Index),
- visited: bun.bit_set.DynamicBitSet = undefined,
- all_import_records: []ImportRecord.List,
- redirects: []u32,
- redirect_map: PathToSourceIndexMap,
- dynamic_import_entry_points: *std.AutoArrayHashMap(Index.Int, void),
-
- const MAX_REDIRECTS: usize = 64;
-
- // Find all files reachable from all entry points. This order should be
- // deterministic given that the entry point order is deterministic, since the
- // returned order is the postorder of the graph traversal and import record
- // order within a given file is deterministic.
- pub fn visit(v: *@This(), source_index: Index, was_dynamic_import: bool, comptime check_dynamic_imports: bool) void {
- if (source_index.isInvalid()) return;
-
- if (v.visited.isSet(source_index.get())) {
- if (comptime check_dynamic_imports) {
- if (was_dynamic_import) {
- v.dynamic_import_entry_points.put(source_index.get(), {}) catch unreachable;
- }
+ /// Same semantics as bundlerForTarget for `path_to_source_index_map`
+ pub inline fn pathToSourceIndexMap(this: *BundleV2, target: options.Target) *PathToSourceIndexMap {
+ return if (!this.bundler.options.server_components)
+ &this.graph.path_to_source_index_map
+ else switch (target) {
+ else => &this.graph.path_to_source_index_map,
+ .browser => &this.graph.client_path_to_source_index_map,
+ .kit_server_components_ssr => &this.graph.ssr_path_to_source_index_map,
+ };
+ }
+
+ const ReachableFileVisitor = struct {
+ reachable: std.ArrayList(Index),
+ visited: bun.bit_set.DynamicBitSet,
+ all_import_records: []ImportRecord.List,
+ redirects: []u32,
+ redirect_map: PathToSourceIndexMap,
+ dynamic_import_entry_points: *std.AutoArrayHashMap(Index.Int, void),
+ /// Files which are Server Component Boundaries
+ scb_bitset: ?bun.bit_set.DynamicBitSetUnmanaged,
+ scb_list: ServerComponentBoundary.List.Slice,
+
+ const MAX_REDIRECTS: usize = 64;
+
+ // Find all files reachable from all entry points. This order should be
+ // deterministic given that the entry point order is deterministic, since the
+ // returned order is the postorder of the graph traversal and import record
+ // order within a given file is deterministic.
+ pub fn visit(v: *@This(), source_index: Index, was_dynamic_import: bool, comptime check_dynamic_imports: bool) void {
+ if (source_index.isInvalid()) return;
+
+ if (v.visited.isSet(source_index.get())) {
+ if (comptime check_dynamic_imports) {
+ if (was_dynamic_import) {
+ v.dynamic_import_entry_points.put(source_index.get(), {}) catch unreachable;
}
- return;
}
- v.visited.set(source_index.get());
-
- const import_record_list_id = source_index;
- // when there are no import records, v index will be invalid
- if (import_record_list_id.get() < v.all_import_records.len) {
- const import_records = v.all_import_records[import_record_list_id.get()].slice();
- for (import_records) |*import_record| {
- var other_source = import_record.source_index;
- if (other_source.isValid()) {
- var redirect_count: usize = 0;
- while (getRedirectId(v.redirects[other_source.get()])) |redirect_id| : (redirect_count += 1) {
- var other_import_records = v.all_import_records[other_source.get()].slice();
- const other_import_record = &other_import_records[redirect_id];
- import_record.source_index = other_import_record.source_index;
- import_record.path = other_import_record.path;
- other_source = other_import_record.source_index;
- if (redirect_count == MAX_REDIRECTS) {
- import_record.path.is_disabled = true;
- import_record.source_index = Index.invalid;
- break;
- }
-
- // Handle redirects to a builtin or external module
- // https://github.com/oven-sh/bun/issues/3764
- if (!other_source.isValid()) {
- break;
- }
+ return;
+ }
+ v.visited.set(source_index.get());
+
+ if (v.scb_bitset) |scb_bitset| {
+ if (scb_bitset.isSet(source_index.get())) {
+ const scb_index = v.scb_list.getIndex(source_index.get()) orelse unreachable;
+ v.visit(Index.init(v.scb_list.list.items(.reference_source_index)[scb_index]), false, check_dynamic_imports);
+ v.visit(Index.init(v.scb_list.list.items(.ssr_source_index)[scb_index]), false, check_dynamic_imports);
+ }
+ }
+
+ const import_record_list_id = source_index;
+ // when there are no import records, v index will be invalid
+ if (import_record_list_id.get() < v.all_import_records.len) {
+ const import_records = v.all_import_records[import_record_list_id.get()].slice();
+ for (import_records) |*import_record| {
+ var other_source = import_record.source_index;
+ if (other_source.isValid()) {
+ var redirect_count: usize = 0;
+ while (getRedirectId(v.redirects[other_source.get()])) |redirect_id| : (redirect_count += 1) {
+ var other_import_records = v.all_import_records[other_source.get()].slice();
+ const other_import_record = &other_import_records[redirect_id];
+ import_record.source_index = other_import_record.source_index;
+ import_record.path = other_import_record.path;
+ other_source = other_import_record.source_index;
+ if (redirect_count == MAX_REDIRECTS) {
+ import_record.path.is_disabled = true;
+ import_record.source_index = Index.invalid;
+ break;
}
- v.visit(import_record.source_index, check_dynamic_imports and import_record.kind == .dynamic, check_dynamic_imports);
+ // Handle redirects to a builtin or external module
+ // https://github.com/oven-sh/bun/issues/3764
+ if (!other_source.isValid()) {
+ break;
+ }
}
- }
- // Redirects replace the source file with another file
- if (getRedirectId(v.redirects[source_index.get()])) |redirect_id| {
- const redirect_source_index = v.all_import_records[source_index.get()].slice()[redirect_id].source_index.get();
- v.visit(Index.source(redirect_source_index), was_dynamic_import, check_dynamic_imports);
- return;
+ v.visit(import_record.source_index, check_dynamic_imports and import_record.kind == .dynamic, check_dynamic_imports);
}
}
- // Each file must come after its dependencies
- v.reachable.append(source_index) catch unreachable;
- if (comptime check_dynamic_imports) {
- if (was_dynamic_import) {
- v.dynamic_import_entry_points.put(source_index.get(), {}) catch unreachable;
- }
+ // Redirects replace the source file with another file
+ if (getRedirectId(v.redirects[source_index.get()])) |redirect_id| {
+ const redirect_source_index = v.all_import_records[source_index.get()].slice()[redirect_id].source_index.get();
+ v.visit(Index.source(redirect_source_index), was_dynamic_import, check_dynamic_imports);
+ return;
}
}
- };
+
+ // Each file must come after its dependencies
+ v.reachable.append(source_index) catch unreachable;
+ if (comptime check_dynamic_imports) {
+ if (was_dynamic_import) {
+ v.dynamic_import_entry_points.put(source_index.get(), {}) catch unreachable;
+ }
+ }
+ }
+ };
+
+ pub fn findReachableFiles(this: *BundleV2) ![]Index {
+ const trace = tracer(@src(), "findReachableFiles");
+ defer trace.end();
+
+ // Create a quick index for server-component boundaries.
+ // We need to mark the generated files as reachable, or else many files will appear missing.
+ var sfa = std.heap.stackFallback(4096, this.graph.allocator);
+ const stack_alloc = sfa.get();
+ var scb_bitset = if (this.graph.server_component_boundaries.list.len > 0) brk: {
+ var scb_bitset = try bun.bit_set.DynamicBitSetUnmanaged.initEmpty(stack_alloc, this.graph.input_files.len);
+ const scbs = this.graph.server_component_boundaries.list.slice();
+ for (scbs.items(.source_index)) |source_index| {
+ scb_bitset.set(source_index);
+ // TODO(review): should the matching reference/ssr source indexes be inserted here too? visit() walks both below
+ }
+ break :brk scb_bitset;
+ } else null;
+ defer if (scb_bitset) |*b| b.deinit(stack_alloc);
this.dynamic_import_entry_points = std.AutoArrayHashMap(Index.Int, void).init(this.graph.allocator);
- var visitor = Visitor{
+ var visitor = ReachableFileVisitor{
.reachable = try std.ArrayList(Index).initCapacity(this.graph.allocator, this.graph.entry_points.items.len + 1),
.visited = try bun.bit_set.DynamicBitSet.initEmpty(this.graph.allocator, this.graph.input_files.len),
.redirects = this.graph.ast.items(.redirect_import_record_index),
.all_import_records = this.graph.ast.items(.import_records),
.redirect_map = this.graph.path_to_source_index_map,
.dynamic_import_entry_points = &this.dynamic_import_entry_points,
+ .scb_bitset = scb_bitset,
+ .scb_list = if (scb_bitset != null)
+ this.graph.server_component_boundaries.slice()
+ else
+ undefined, // will never be read since the above bitset is `null`
};
defer visitor.visited.deinit();
@@ -489,14 +548,15 @@ pub const BundleV2 = struct {
import_record: bun.JSC.API.JSBundler.Resolve.MiniImportRecord,
target: options.Target,
) void {
- var resolve_result = this.bundler.resolver.resolve(
+ const bundler = this.bundlerForTarget(target);
+ var resolve_result = bundler.resolver.resolve(
Fs.PathName.init(import_record.source_file).dirWithTrailingSlash(),
import_record.specifier,
import_record.kind,
) catch |err| {
var handles_import_errors = false;
var source: ?*const Logger.Source = null;
- const log = this.completion.?.log();
+ const log = &this.completion.?.log;
if (import_record.importer_source_index) |importer| {
var record: *ImportRecord = &this.graph.ast.items(.import_records)[importer].slice()[import_record.import_record_index];
@@ -518,7 +578,7 @@ pub const BundleV2 = struct {
if (!handles_import_errors) {
if (isPackagePath(import_record.specifier)) {
- if (target.isWebLike() and options.ExternalModules.isNodeBuiltin(path_to_use)) {
+ if (target == .browser and options.ExternalModules.isNodeBuiltin(path_to_use)) {
addError(
log,
source,
@@ -582,7 +642,7 @@ pub const BundleV2 = struct {
if (path.pretty.ptr == path.text.ptr) {
// TODO: outbase
- const rel = bun.path.relativePlatform(this.bundler.fs.top_level_dir, path.text, .loose, false);
+ const rel = bun.path.relativePlatform(bundler.fs.top_level_dir, path.text, .loose, false);
path.pretty = this.graph.allocator.dupe(u8, rel) catch bun.outOfMemory();
}
path.assertPrettyIsValid();
@@ -597,15 +657,9 @@ pub const BundleV2 = struct {
}
}
- const entry = this.graph.path_to_source_index_map.getOrPut(this.graph.allocator, path.hashKey()) catch bun.outOfMemory();
+ const entry = this.pathToSourceIndexMap(target).getOrPut(this.graph.allocator, path.hashKey()) catch bun.outOfMemory();
if (!entry.found_existing) {
- path.* = path.dupeAllocFixPretty(this.graph.allocator) catch bun.outOfMemory();
-
- // We need to parse this
- const source_index = Index.init(@as(u32, @intCast(this.graph.ast.len)));
- entry.value_ptr.* = source_index.get();
- out_source_index = source_index;
- this.graph.ast.append(bun.default_allocator, JSAst.empty) catch unreachable;
+ path.* = this.pathWithPrettyInitialized(path.*, target) catch bun.outOfMemory();
const loader = brk: {
if (import_record.importer_source_index) |importer| {
var record: *ImportRecord = &this.graph.ast.items(.import_records)[importer].slice()[import_record.import_record_index];
@@ -614,43 +668,20 @@ pub const BundleV2 = struct {
}
}
- break :brk path.loader(&this.bundler.options.loaders) orelse options.Loader.file;
+ break :brk path.loader(&bundler.options.loaders) orelse options.Loader.file;
};
-
- this.graph.input_files.append(bun.default_allocator, .{
- .source = .{
+ const idx = this.enqueueParseTask(
+ &resolve_result,
+ .{
.path = path.*,
.key_path = path.*,
.contents = "",
- .index = source_index,
- },
- .loader = loader,
- .side_effects = switch (loader) {
- .text, .json, .toml, .file => _resolver.SideEffects.no_side_effects__pure_data,
- else => _resolver.SideEffects.has_side_effects,
},
- }) catch bun.outOfMemory();
- var task = this.graph.allocator.create(ParseTask) catch bun.outOfMemory();
- task.* = ParseTask.init(&resolve_result, source_index, this);
- task.loader = loader;
- task.jsx = this.bundler.options.jsx;
- task.task.node.next = null;
- task.tree_shaking = this.linker.options.tree_shaking;
- task.known_target = import_record.original_target;
-
- _ = @atomicRmw(usize, &this.graph.parse_pending, .Add, 1, .monotonic);
-
- // Handle onLoad plugins
- if (!this.enqueueOnLoadPluginIfNeeded(task)) {
- if (loader.shouldCopyForBundling()) {
- var additional_files: *BabyList(AdditionalFile) = &this.graph.input_files.items(.additional_files)[source_index.get()];
- additional_files.push(this.graph.allocator, .{ .source_index = task.source_index.get() }) catch unreachable;
- this.graph.input_files.items(.side_effects)[source_index.get()] = _resolver.SideEffects.no_side_effects__pure_data;
- this.graph.estimated_file_loader_count += 1;
- }
-
- this.graph.pool.pool.schedule(ThreadPoolLib.Batch.from(&task.task));
- }
+ loader,
+ import_record.original_target,
+ ) catch bun.outOfMemory();
+ entry.value_ptr.* = idx;
+ out_source_index = Index.init(idx);
} else {
out_source_index = Index.init(entry.value_ptr.*);
}
@@ -669,6 +700,7 @@ pub const BundleV2 = struct {
batch: *ThreadPoolLib.Batch,
resolve: _resolver.Result,
is_entry_point: bool,
+ target: options.Target,
) !?Index.Int {
var result = resolve;
var path = result.path() orelse return null;
@@ -681,12 +713,7 @@ pub const BundleV2 = struct {
const source_index = Index.source(this.graph.input_files.len);
const loader = this.bundler.options.loaders.get(path.name.ext) orelse .file;
- if (path.pretty.ptr == path.text.ptr) {
- // TODO: outbase
- const rel = bun.path.relativePlatform(this.bundler.fs.top_level_dir, path.text, .loose, false);
- path.pretty = this.graph.allocator.dupe(u8, rel) catch bun.outOfMemory();
- }
- path.* = try path.dupeAllocFixPretty(this.graph.allocator);
+ path.* = this.pathWithPrettyInitialized(path.*, target) catch bun.outOfMemory();
path.assertPrettyIsValid();
entry.value_ptr.* = source_index.get();
this.graph.ast.append(bun.default_allocator, JSAst.empty) catch unreachable;
@@ -707,6 +734,7 @@ pub const BundleV2 = struct {
task.task.node.next = null;
task.tree_shaking = this.linker.options.tree_shaking;
task.is_entry_point = is_entry_point;
+ task.known_target = target;
// Handle onLoad plugins as entry points
if (!this.enqueueOnLoadPluginIfNeeded(task)) {
@@ -725,6 +753,7 @@ pub const BundleV2 = struct {
pub fn init(
bundler: *ThisBundler,
+ kit_options: ?KitOptions,
allocator: std.mem.Allocator,
event_loop: EventLoop,
enable_reloading: bool,
@@ -740,20 +769,35 @@ pub const BundleV2 = struct {
this.* = .{
.bundler = bundler,
.client_bundler = bundler,
- .server_bundler = bundler,
+ .ssr_bundler = bundler,
+ .framework = null,
.graph = .{
.pool = undefined,
.heap = heap orelse try ThreadlocalArena.init(),
.allocator = undefined,
+ .kit_referenced_server_data = false,
+ .kit_referenced_client_data = false,
},
.linker = .{
.loop = event_loop,
.graph = .{
.allocator = undefined,
- .bundler_graph = undefined,
},
},
+ .bun_watcher = null,
+ .plugins = null,
+ .completion = null,
+ .source_code_length = 0,
};
+ if (kit_options) |ko| {
+ this.client_bundler = ko.client_bundler;
+ this.ssr_bundler = ko.ssr_bundler;
+ this.framework = ko.framework;
+ this.linker.framework = &this.framework.?;
+ bun.assert(bundler.options.server_components);
+ bun.assert(this.client_bundler.options.server_components);
+ bun.assert(this.ssr_bundler.options.server_components);
+ }
this.linker.graph.allocator = this.graph.heap.allocator();
this.graph.allocator = this.linker.graph.allocator;
this.bundler.allocator = this.graph.allocator;
@@ -763,8 +807,8 @@ pub const BundleV2 = struct {
this.bundler.log.clone_line_text = true;
// We don't expose an option to disable this. Kit requires tree-shaking
- // disabled since every export is always referenced in case a future
- // module depends on a previously unused export.
+ // disabled since every export must always exist in case a future
+ // module starts depending on it.
if (this.bundler.options.output_format == .internal_kit_dev) {
this.bundler.options.tree_shaking = false;
this.bundler.resolver.opts.tree_shaking = false;
@@ -773,7 +817,6 @@ pub const BundleV2 = struct {
this.bundler.resolver.opts.tree_shaking = true;
}
- this.linker.graph.bundler_graph = &this.graph;
this.linker.resolver = &this.bundler.resolver;
this.linker.graph.code_splitting = bundler.options.code_splitting;
this.graph.code_splitting = bundler.options.code_splitting;
@@ -789,6 +832,7 @@ pub const BundleV2 = struct {
this.linker.options.public_path = bundler.options.public_path;
this.linker.options.target = bundler.options.target;
this.linker.options.output_format = bundler.options.output_format;
+ this.linker.kit_dev_server = bundler.options.kit;
var pool = try this.graph.allocator.create(ThreadPool);
if (enable_reloading) {
@@ -804,17 +848,10 @@ pub const BundleV2 = struct {
thread_pool,
);
- // sanity checks for kit
- if (this.bundler.options.output_format == .internal_kit_dev) {
- if (this.bundler.options.compile) @panic("TODO: internal_kit_dev does not support compile");
- if (this.bundler.options.code_splitting) @panic("TODO: internal_kit_dev does not support code splitting");
- if (this.bundler.options.transform_only) @panic("TODO: internal_kit_dev does not support transform_only");
- }
-
return this;
}
- pub fn enqueueEntryPoints(this: *BundleV2, user_entry_points: []const string) !ThreadPoolLib.Batch {
+ pub fn enqueueEntryPoints(this: *BundleV2, user_entry_points: []const []const u8, client_entry_points: []const []const u8) !ThreadPoolLib.Batch {
var batch = ThreadPoolLib.Batch{};
{
@@ -841,22 +878,9 @@ pub const BundleV2 = struct {
batch.push(ThreadPoolLib.Batch.from(&runtime_parse_task.task));
}
- if (this.bundler.router) |router| {
- defer this.bundler.resetStore();
- Analytics.Features.filesystem_router += 1;
-
- const entry_points = try router.getEntryPoints();
- try this.graph.entry_points.ensureUnusedCapacity(this.graph.allocator, entry_points.len);
- try this.graph.input_files.ensureUnusedCapacity(this.graph.allocator, entry_points.len);
- try this.graph.path_to_source_index_map.ensureUnusedCapacity(this.graph.allocator, @as(u32, @truncate(entry_points.len)));
-
- for (entry_points) |entry_point| {
- const resolved = this.bundler.resolveEntryPoint(entry_point) catch continue;
- if (try this.enqueueItem(null, &batch, resolved, true)) |source_index| {
- this.graph.entry_points.append(this.graph.allocator, Index.source(source_index)) catch unreachable;
- } else {}
- }
- } else {}
+ // Kit has two source indexes which are computed at the end of the
+ // Scan+Parse phase, but reserved now so that resolution works.
+ try this.reserveSourceIndexesForKit();
{
// Setup entry points
@@ -866,7 +890,14 @@ pub const BundleV2 = struct {
for (user_entry_points) |entry_point| {
const resolved = this.bundler.resolveEntryPoint(entry_point) catch continue;
- if (try this.enqueueItem(null, &batch, resolved, true)) |source_index| {
+ if (try this.enqueueItem(null, &batch, resolved, true, this.bundler.options.target)) |source_index| {
+ this.graph.entry_points.append(this.graph.allocator, Index.source(source_index)) catch unreachable;
+ } else {}
+ }
+
+ for (client_entry_points) |entry_point| {
+ const resolved = this.bundler.resolveEntryPoint(entry_point) catch continue;
+ if (try this.enqueueItem(null, &batch, resolved, true, .browser)) |source_index| {
this.graph.entry_points.append(this.graph.allocator, Index.source(source_index)) catch unreachable;
} else {}
}
@@ -891,160 +922,256 @@ pub const BundleV2 = struct {
}
}
- pub fn enqueueShadowEntryPoints(this: *BundleV2) !void {
- const trace = tracer(@src(), "enqueueShadowEntryPoints");
- defer trace.end();
- const allocator = this.graph.allocator;
-
- // TODO: make this not slow
- {
- // process redirects
- const initial_reachable = try this.findReachableFiles();
- allocator.free(initial_reachable);
- this.dynamic_import_entry_points.deinit();
- }
-
- const bitset_length = this.graph.input_files.len;
- var react_client_component_boundary = bun.bit_set.DynamicBitSet.initEmpty(allocator, bitset_length) catch unreachable;
- defer react_client_component_boundary.deinit();
- var any_client = false;
+ /// This generates the two ASTs for 'bun:kit/client' and 'bun:kit/server'. Both are generated
+ /// at the same time in one pass over the SCB list.
+ pub fn processServerComponentManifestFiles(this: *BundleV2) OOM!void {
+ // If Kit is not being used, do nothing
+ const fw = this.framework orelse return;
+ const sc = fw.server_components orelse return;
+
+ if (this.graph.kit_referenced_client_data) bun.todoPanic(@src(), "implement generation for 'bun:kit/client'", .{});
+ if (!this.graph.kit_referenced_server_data) return;
+
+ const alloc = this.graph.allocator;
+
+ var server = try AstBuilder.init(this.graph.allocator, &kit.server_virtual_source, this.bundler.options.hot_module_reloading);
+ var client = try AstBuilder.init(this.graph.allocator, &kit.client_virtual_source, this.bundler.options.hot_module_reloading);
+
+ var server_manifest_props: std.ArrayListUnmanaged(G.Property) = .{};
+ var client_manifest_props: std.ArrayListUnmanaged(G.Property) = .{};
+
+ const scbs = this.graph.server_component_boundaries.list.slice();
+ const sources = this.graph.input_files.items(.source);
+ const named_exports_array = this.graph.ast.items(.named_exports);
+
+ const id_string = server.newExpr(E.String{ .data = "id" });
+ const name_string = server.newExpr(E.String{ .data = "name" });
+ const chunks_string = server.newExpr(E.String{ .data = "chunks" });
+ const specifier_string = server.newExpr(E.String{ .data = "specifier_string" });
+ const empty_array = server.newExpr(E.Array{});
+
+ for (
+ scbs.items(.use_directive),
+ scbs.items(.source_index),
+ ) |use, source_id| {
+ const source = sources[source_id];
+ if (use == .client) {
+ // TODO(@paperdave/kit): this file is being generated far too
+ // early. we don't know which exports are dead and which exports
+ // are live. Tree-shaking figures that out. However,
+ // tree-shaking happens after import binding, which would
+ // require this ast.
+ //
+ // The plan: change this to generate a stub ast which only has
+ // `export const serverManifest = undefined;`, and then
+ // re-generate this file later with the properly decided
+ // manifest. However, I will probably reconsider how this
+ // manifest is being generated when I write the whole
+ // "production build" part of Kit.
+
+ const keys = named_exports_array[source_id].keys();
+ const client_manifest_items = try alloc.alloc(G.Property, keys.len);
+
+ const client_path = server.newExpr(E.String{ .data = source.path.pretty });
+ const ssr_path = if (sc.separate_ssr_graph)
+ server.newExpr(E.String{ .data = try std.fmt.allocPrint(alloc, "ssr:{s}", .{source.path.pretty}) })
+ else
+ client_path;
+
+ for (keys, client_manifest_items) |export_name_string, *client_item| {
+ const server_key_string = try std.fmt.allocPrint(alloc, "{s}#{s}", .{ source.path.pretty, export_name_string });
+ const export_name = server.newExpr(E.String{ .data = export_name_string });
+
+ // write dependencies on the underlying module, not the proxy
+ try server_manifest_props.append(alloc, .{
+ .key = server.newExpr(E.String{ .data = server_key_string }),
+ .value = server.newExpr(E.Object{
+ .properties = try G.Property.List.fromSlice(alloc, &.{
+ .{ .key = id_string, .value = client_path },
+ .{ .key = name_string, .value = export_name },
+ .{ .key = chunks_string, .value = empty_array },
+ }),
+ }),
+ });
+ client_item.* = .{
+ .key = export_name,
+ .value = server.newExpr(E.Object{
+ .properties = try G.Property.List.fromSlice(alloc, &.{
+ .{ .key = name_string, .value = export_name },
+ .{ .key = specifier_string, .value = ssr_path },
+ }),
+ }),
+ };
+ }
- // Loop #1: populate the list of files that are react client components
- for (this.graph.use_directive_entry_points.items(.use_directive), this.graph.use_directive_entry_points.items(.source_index)) |use, source_id| {
- if (use == .@"use client") {
- any_client = true;
- react_client_component_boundary.set(source_id);
+ try client_manifest_props.append(alloc, .{
+ .key = client_path,
+ .value = server.newExpr(E.Object{
+ .properties = G.Property.List.init(client_manifest_items),
+ }),
+ });
+ } else {
+ bun.todoPanic(@src(), "\"use server\"", .{});
}
}
- this.graph.shadow_entry_point_range.loc.start = -1;
-
- var visit_queue = std.fifo.LinearFifo(Index.Int, .Dynamic).init(allocator);
- visit_queue.ensureUnusedCapacity(64) catch unreachable;
- defer visit_queue.deinit();
- const original_file_count = this.graph.entry_points.items.len;
-
- for (0..original_file_count) |entry_point_id| {
- // we are modifying the array while iterating
- // so we should be careful
- const entry_point_source_index = this.graph.entry_points.items[entry_point_id];
+ try server.appendStmt(S.Local{
+ .kind = .k_const,
+ .decls = try G.Decl.List.fromSlice(alloc, &.{.{
+ .binding = Binding.alloc(alloc, B.Identifier{
+ .ref = try server.newSymbol(.other, "serverManifest"),
+ }, Logger.Loc.Empty),
+ .value = server.newExpr(E.Object{
+ .properties = G.Property.List.fromList(server_manifest_props),
+ }),
+ }}),
+ .is_export = true,
+ });
+ try server.appendStmt(S.Local{
+ .kind = .k_const,
+ .decls = try G.Decl.List.fromSlice(alloc, &.{.{
+ .binding = Binding.alloc(alloc, B.Identifier{
+ .ref = try server.newSymbol(.other, "clientManifest"),
+ }, Logger.Loc.Empty),
+ .value = server.newExpr(E.Object{
+ .properties = G.Property.List.fromList(client_manifest_props),
+ }),
+ }}),
+ .is_export = true,
+ });
- var all_imported_files = try bun.bit_set.DynamicBitSet.initEmpty(allocator, bitset_length);
- defer all_imported_files.deinit();
- visit_queue.head = 0;
- visit_queue.count = 0;
- const input_path = this.graph.input_files.items(.source)[entry_point_source_index.get()].path;
+ this.graph.ast.set(Index.kit_server_data.get(), try server.toBundledAst());
+ this.graph.ast.set(Index.kit_client_data.get(), try client.toBundledAst());
+ }
- {
- const import_records = this.graph.ast.items(.import_records)[entry_point_source_index.get()];
- for (import_records.slice()) |import_record| {
- if (!import_record.source_index.isValid()) {
- continue;
- }
+ pub fn enqueueParseTask(
+ this: *BundleV2,
+ resolve_result: *const _resolver.Result,
+ source: Logger.Source,
+ loader: Loader,
+ known_target: options.Target,
+ ) OOM!Index.Int {
+ const source_index = Index.init(@as(u32, @intCast(this.graph.ast.len)));
+ this.graph.ast.append(bun.default_allocator, JSAst.empty) catch unreachable;
- if (all_imported_files.isSet(import_record.source_index.get())) {
- continue;
- }
+ this.graph.input_files.append(bun.default_allocator, .{
+ .source = source,
+ .loader = loader,
+ .side_effects = switch (loader) {
+ .text, .json, .toml, .file => _resolver.SideEffects.no_side_effects__pure_data,
+ else => _resolver.SideEffects.has_side_effects,
+ },
+ }) catch bun.outOfMemory();
+ var task = this.graph.allocator.create(ParseTask) catch bun.outOfMemory();
+ task.* = ParseTask.init(resolve_result, source_index, this);
+ task.loader = loader;
+ task.jsx = this.bundler.options.jsx;
+ task.task.node.next = null;
+ task.tree_shaking = this.linker.options.tree_shaking;
+ task.known_target = known_target;
- all_imported_files.set(import_record.source_index.get());
+ _ = @atomicRmw(usize, &this.graph.parse_pending, .Add, 1, .monotonic);
- try visit_queue.writeItem(import_record.source_index.get());
- }
+ // Handle onLoad plugins
+ if (!this.enqueueOnLoadPluginIfNeeded(task)) {
+ if (loader.shouldCopyForBundling()) {
+ var additional_files: *BabyList(AdditionalFile) = &this.graph.input_files.items(.additional_files)[source_index.get()];
+ additional_files.push(this.graph.allocator, .{ .source_index = task.source_index.get() }) catch unreachable;
+ this.graph.input_files.items(.side_effects)[source_index.get()] = _resolver.SideEffects.no_side_effects__pure_data;
+ this.graph.estimated_file_loader_count += 1;
}
- while (visit_queue.readItem()) |target_source_index| {
- const import_records = this.graph.ast.items(.import_records)[target_source_index];
- for (import_records.slice()) |import_record| {
- if (!import_record.source_index.isValid()) {
- continue;
- }
+ this.graph.pool.pool.schedule(ThreadPoolLib.Batch.from(&task.task));
+ }
- if (all_imported_files.isSet(import_record.source_index.get())) continue;
- all_imported_files.set(import_record.source_index.get());
+ return source_index.get();
+ }
- try visit_queue.writeItem(import_record.source_index.get());
- }
- }
+ pub fn enqueueParseTask2(
+ this: *BundleV2,
+ source: Logger.Source,
+ loader: Loader,
+ known_target: options.Target,
+ ) OOM!Index.Int {
+ const source_index = Index.init(@as(u32, @intCast(this.graph.ast.len)));
+ this.graph.ast.append(bun.default_allocator, JSAst.empty) catch unreachable;
- all_imported_files.setIntersection(react_client_component_boundary);
- if (all_imported_files.findFirstSet() == null) continue;
- const source_index = Index.init(@as(u32, @intCast(this.graph.ast.len)));
+ this.graph.input_files.append(bun.default_allocator, .{
+ .source = source,
+ .loader = loader,
+ .side_effects = switch (loader) {
+ .text, .json, .toml, .file => _resolver.SideEffects.no_side_effects__pure_data,
+ else => _resolver.SideEffects.has_side_effects,
+ },
+ }) catch bun.outOfMemory();
+ var task = this.graph.allocator.create(ParseTask) catch bun.outOfMemory();
+ task.* = .{
+ .ctx = this,
+ .path = source.path,
+ .contents_or_fd = .{
+ .contents = source.contents,
+ },
+ .side_effects = .has_side_effects,
+ .jsx = this.bundler.options.jsx,
+ .source_index = source_index,
+ .module_type = .unknown,
+ .emit_decorator_metadata = false, // TODO
+ .package_version = "",
+ .loader = loader,
+ .tree_shaking = this.linker.options.tree_shaking,
+ .known_target = known_target,
+ };
+ task.task.node.next = null;
- var shadow = ShadowEntryPoint{
- .from_source_index = entry_point_source_index.get(),
- .to_source_index = source_index.get(),
- };
- var builder = ShadowEntryPoint.Builder{
- .ctx = this,
- .source_code_buffer = MutableString.initEmpty(allocator),
- .resolved_source_indices = std.ArrayList(Index.Int).init(allocator),
- .shadow = &shadow,
- };
+ _ = @atomicRmw(usize, &this.graph.parse_pending, .Add, 1, .monotonic);
- var iter = all_imported_files.iterator(.{});
- while (iter.next()) |index| {
- builder.addClientComponent(index);
+ // Handle onLoad plugins
+ if (!this.enqueueOnLoadPluginIfNeeded(task)) {
+ if (loader.shouldCopyForBundling()) {
+ var additional_files: *BabyList(AdditionalFile) = &this.graph.input_files.items(.additional_files)[source_index.get()];
+ additional_files.push(this.graph.allocator, .{ .source_index = task.source_index.get() }) catch unreachable;
+ this.graph.input_files.items(.side_effects)[source_index.get()] = _resolver.SideEffects.no_side_effects__pure_data;
+ this.graph.estimated_file_loader_count += 1;
}
- bun.assert(builder.resolved_source_indices.items.len > 0);
- const path = Fs.Path.initWithNamespace(
- std.fmt.allocPrint(
- allocator,
- "{s}/{s}.client.js",
- .{ input_path.name.dirOrDot(), input_path.name.base },
- ) catch unreachable,
- "client-component",
- );
+ this.graph.pool.pool.schedule(ThreadPoolLib.Batch.from(&task.task));
+ }
+ return source_index.get();
+ }
- if (this.graph.shadow_entry_point_range.loc.start < 0) {
- this.graph.shadow_entry_point_range.loc.start = @as(i32, @intCast(source_index.get()));
- }
+ /// Enqueue a ServerComponentParseTask.
+ /// `source_without_index` is copied and assigned a new source index. That index is returned.
+ pub fn enqueueServerComponentGeneratedFile(
+ this: *BundleV2,
+ data: ServerComponentParseTask.Data,
+ source_without_index: Logger.Source,
+ ) OOM!Index.Int {
+ var new_source: Logger.Source = source_without_index;
+ const source_index = this.graph.input_files.len;
+ new_source.index = Index.init(source_index);
+ try this.graph.input_files.append(default_allocator, .{
+ .source = new_source,
+ .loader = .js,
+ .side_effects = .has_side_effects,
+ });
+ try this.graph.ast.append(default_allocator, JSAst.empty);
- this.graph.ast.append(bun.default_allocator, JSAst.empty) catch unreachable;
- this.graph.shadow_entry_points.append(allocator, shadow) catch unreachable;
- this.graph.input_files.append(bun.default_allocator, .{
- .source = .{
- .path = path,
- .key_path = path,
- .contents = builder.source_code_buffer.toOwnedSliceLeaky(),
- .index = source_index,
- },
- .loader = options.Loader.js,
- .side_effects = _resolver.SideEffects.has_side_effects,
- }) catch unreachable;
+ const task = bun.new(ServerComponentParseTask, .{
+ .data = data,
+ .ctx = this,
+ .source = new_source,
+ });
- var task = bun.default_allocator.create(ParseTask) catch unreachable;
- task.* = ParseTask{
- .ctx = this,
- .path = path,
- // unknown at this point:
- .contents_or_fd = .{
- .contents = builder.source_code_buffer.toOwnedSliceLeaky(),
- },
- .side_effects = _resolver.SideEffects.has_side_effects,
- .jsx = this.bundler.options.jsx,
- .source_index = source_index,
- .module_type = .unknown,
- .loader = options.Loader.js,
- .tree_shaking = this.linker.options.tree_shaking,
- .known_target = options.Target.browser,
- .presolved_source_indices = builder.resolved_source_indices.items,
- };
- task.task.node.next = null;
- try this.graph.use_directive_entry_points.append(this.graph.allocator, js_ast.UseDirective.EntryPoint{
- .source_index = source_index.get(),
- .use_directive = .@"use client",
- });
+ _ = @atomicRmw(usize, &this.graph.parse_pending, .Add, 1, .monotonic);
- _ = @atomicRmw(usize, &this.graph.parse_pending, .Add, 1, .monotonic);
- this.graph.entry_points.append(allocator, source_index) catch unreachable;
- this.graph.pool.pool.schedule(ThreadPoolLib.Batch.from(&task.task));
- this.graph.shadow_entry_point_range.len += 1;
- }
+ this.graph.pool.pool.schedule(ThreadPoolLib.Batch.from(&task.task));
+
+ return @intCast(source_index);
}
pub fn generateFromCLI(
bundler: *ThisBundler,
+ kit_options: ?KitOptions,
allocator: std.mem.Allocator,
event_loop: EventLoop,
unique_key: u64,
@@ -1053,14 +1180,14 @@ pub const BundleV2 = struct {
minify_duration: *u64,
source_code_size: *u64,
) !std.ArrayList(options.OutputFile) {
- var this = try BundleV2.init(bundler, allocator, event_loop, enable_reloading, null, null);
+ var this = try BundleV2.init(bundler, kit_options, allocator, event_loop, enable_reloading, null, null);
this.unique_key = unique_key;
if (this.bundler.log.hasErrors()) {
return error.BuildFailed;
}
- this.graph.pool.pool.schedule(try this.enqueueEntryPoints(this.bundler.options.entry_points));
+ this.graph.pool.pool.schedule(try this.enqueueEntryPoints(this.bundler.options.entry_points, &.{}));
if (this.bundler.log.hasErrors()) {
return error.BuildFailed;
@@ -1071,19 +1198,12 @@ pub const BundleV2 = struct {
minify_duration.* = @as(u64, @intCast(@divTrunc(@as(i64, @truncate(std.time.nanoTimestamp())) - @as(i64, @truncate(bun.CLI.start_time)), @as(i64, std.time.ns_per_ms))));
source_code_size.* = this.source_code_length;
- if (this.graph.use_directive_entry_points.len > 0) {
- if (this.bundler.log.hasErrors()) {
- return error.BuildFailed;
- }
-
- try this.enqueueShadowEntryPoints();
- this.waitForParse();
- }
-
if (this.bundler.log.hasErrors()) {
return error.BuildFailed;
}
+ try this.processServerComponentManifestFiles();
+
const reachable_files = try this.findReachableFiles();
reachable_files_count.* = reachable_files.len -| 1; // - 1 for the runtime
@@ -1094,7 +1214,7 @@ pub const BundleV2 = struct {
const chunks = try this.linker.link(
this,
this.graph.entry_points.items,
- this.graph.use_directive_entry_points,
+ this.graph.server_component_boundaries,
reachable_files,
unique_key,
);
@@ -1264,7 +1384,7 @@ pub const BundleV2 = struct {
bundler.options.entry_points = config.entry_points.keys();
bundler.options.jsx = config.jsx;
bundler.options.no_macros = config.no_macros;
- bundler.options.react_server_components = config.server_components.client.items.len > 0 or config.server_components.server.items.len > 0;
+ bundler.options.server_components = config.server_components.client.items.len > 0 or config.server_components.server.items.len > 0;
bundler.options.loaders = try options.loadersFromTransformOptions(allocator, config.loaders, config.target);
bundler.options.entry_naming = config.names.entry_point.data;
bundler.options.chunk_naming = config.names.chunk.data;
@@ -1660,7 +1780,11 @@ pub const BundleV2 = struct {
this.free_list.clearAndFree();
}
- pub fn runFromJSInNewThread(this: *BundleV2, entry_points: []const []const u8) !std.ArrayList(options.OutputFile) {
+ pub fn runFromJSInNewThread(
+ this: *BundleV2,
+ entry_points: []const []const u8,
+ client_entry_points: []const []const u8,
+ ) !std.ArrayList(options.OutputFile) {
this.unique_key = std.crypto.random.int(u64);
if (this.bundler.log.errors > 0) {
@@ -1672,7 +1796,7 @@ pub const BundleV2 = struct {
bun.Mimalloc.mi_collect(true);
}
- this.graph.pool.pool.schedule(try this.enqueueEntryPoints(entry_points));
+ this.graph.pool.pool.schedule(try this.enqueueEntryPoints(entry_points, client_entry_points));
// We must wait for all the parse tasks to complete, even if there are errors.
this.waitForParse();
@@ -1686,6 +1810,8 @@ pub const BundleV2 = struct {
return error.BuildFailed;
}
+ try this.processServerComponentManifestFiles();
+
try this.cloneAST();
if (comptime FeatureFlags.help_catch_memory_issues) {
@@ -1700,7 +1826,7 @@ pub const BundleV2 = struct {
const chunks = try this.linker.link(
this,
this.graph.entry_points.items,
- this.graph.use_directive_entry_points,
+ this.graph.server_component_boundaries,
reachable_files,
this.unique_key,
);
@@ -1718,7 +1844,7 @@ pub const BundleV2 = struct {
import_record: *const ImportRecord,
source_file: []const u8,
import_record_index: u32,
- original_target: ?options.Target,
+ original_target: options.Target,
) bool {
if (this.plugins) |plugins| {
if (plugins.hasAnyMatches(&import_record.path, false)) {
@@ -1737,10 +1863,10 @@ pub const BundleV2 = struct {
.source_file = source_file,
.import_record_index = import_record_index,
.importer_source_index = source_index,
- .original_target = original_target orelse this.bundler.options.target,
+ .original_target = original_target,
},
},
- this.completion.?.js,
+ this.completion.?,
);
resolve.dispatch();
return true;
@@ -1760,7 +1886,7 @@ pub const BundleV2 = struct {
});
var load = bun.default_allocator.create(JSC.API.JSBundler.Load) catch unreachable;
load.* = JSC.API.JSBundler.Load.create(
- this.completion.?.js,
+ this.completion.?,
parse.source_index,
parse.path.loader(&this.bundler.options.loaders) orelse options.Loader.js,
parse.path,
@@ -1774,6 +1900,60 @@ pub const BundleV2 = struct {
return false;
}
+ fn pathWithPrettyInitialized(this: *BundleV2, path: Fs.Path, target: options.Target) !Fs.Path {
+ if (path.pretty.ptr != path.text.ptr) {
+ // TODO(@paperdave): there is a high chance this dupe is no longer required
+ return path.dupeAlloc(this.graph.allocator);
+ }
+
+ // TODO: outbase
+ var buf: bun.PathBuffer = undefined;
+ const rel = bun.path.relativePlatform(this.bundler.fs.top_level_dir, path.text, .loose, false);
+ var path_clone = path;
+ // stack-allocated temporary is not leaked because dupeAllocFixPretty on the
+ // path will move .pretty into the heap; that function also fixes some slash issues.
+ if (target == .kit_server_components_ssr) {
+ // the SSR graph needs different pretty names or else HMR mode will
+ // confuse the two modules.
+ path_clone.pretty = std.fmt.bufPrint(&buf, "ssr:{s}", .{rel}) catch buf[0..];
+ } else {
+ path_clone.pretty = rel;
+ }
+ return path_clone.dupeAllocFixPretty(this.graph.allocator);
+ }
+
+ fn reserveSourceIndexesForKit(this: *BundleV2) !void {
+ const fw = this.framework orelse return;
+ _ = fw.server_components orelse return;
+
+ // Call this after the runtime file is added, but before any entry points are enqueued.
+ bun.assert(this.graph.input_files.len == 1);
+ bun.assert(this.graph.ast.len == 1);
+
+ try this.graph.ast.ensureUnusedCapacity(this.graph.allocator, 2);
+ try this.graph.input_files.ensureUnusedCapacity(this.graph.allocator, 2);
+
+ const server_source = kit.server_virtual_source;
+ const client_source = kit.client_virtual_source;
+
+ this.graph.input_files.appendAssumeCapacity(.{
+ .source = server_source,
+ .loader = .js,
+ .side_effects = .no_side_effects__pure_data,
+ });
+ this.graph.input_files.appendAssumeCapacity(.{
+ .source = client_source,
+ .loader = .js,
+ .side_effects = .no_side_effects__pure_data,
+ });
+
+ bun.assert(this.graph.input_files.items(.source)[Index.kit_server_data.get()].index.get() == Index.kit_server_data.get());
+ bun.assert(this.graph.input_files.items(.source)[Index.kit_client_data.get()].index.get() == Index.kit_client_data.get());
+
+ this.graph.ast.appendAssumeCapacity(JSAst.empty);
+ this.graph.ast.appendAssumeCapacity(JSAst.empty);
+ }
+
// TODO: remove ResolveQueue
//
// Moving this to the Bundle thread was a significant perf improvement on Linux for first builds
@@ -1816,6 +1996,24 @@ pub const BundleV2 = struct {
continue;
}
+ if (this.framework) |fw| if (fw.server_components != null) {
+ switch (ast.target.isServerSide()) {
+ inline else => |is_server| {
+ const src = if (is_server) kit.server_virtual_source else kit.client_virtual_source;
+ if (strings.eqlComptime(import_record.path.text, src.path.pretty)) {
+ if (is_server) {
+ this.graph.kit_referenced_server_data = true;
+ } else {
+ this.graph.kit_referenced_client_data = true;
+ }
+ import_record.path.namespace = "bun";
+ import_record.source_index = src.index;
+ continue;
+ }
+ },
+ }
+ };
+
if (ast.target.isBun()) {
if (JSC.HardcodedModule.Aliases.get(import_record.path.text, options.Target.bun)) |replacement| {
import_record.path.text = replacement.path;
@@ -1870,7 +2068,47 @@ pub const BundleV2 = struct {
continue;
}
- var resolve_result = this.bundler.resolver.resolve(source_dir, import_record.path.text, import_record.kind) catch |err| {
+ const bundler, const renderer: kit.Renderer, const target =
+ if (import_record.tag == .kit_resolve_to_ssr_graph)
+ brk: {
+ // TODO: consider moving this error into js_parser so it is caught more reliably
+ // Then we can assert(this.framework != null)
+ if (this.framework == null) {
+ this.bundler.log.addErrorFmt(
+ source,
+ import_record.range.loc,
+ this.graph.allocator,
+ "The 'bun_kit_graph' import attribute cannot be used outside of a Bun Kit bundle",
+ .{},
+ ) catch @panic("unexpected log error");
+ continue;
+ }
+
+ const is_supported = this.framework.?.server_components != null and
+ this.framework.?.server_components.?.separate_ssr_graph;
+ if (!is_supported) {
+ this.bundler.log.addErrorFmt(
+ source,
+ import_record.range.loc,
+ this.graph.allocator,
+ "Framework does not have a separate SSR graph to put this import into",
+ .{},
+ ) catch @panic("unexpected log error");
+ continue;
+ }
+
+ break :brk .{
+ this.ssr_bundler,
+ .ssr,
+ .kit_server_components_ssr,
+ };
+ } else .{
+ this.bundlerForTarget(ast.target),
+ ast.target.kitRenderer(),
+ ast.target,
+ };
+
+ var resolve_result = bundler.resolver.resolve(source_dir, import_record.path.text, import_record.kind) catch |err| {
// Disable failing packages from being printed.
// This may cause broken code to write.
// However, doing this means we tell them all the resolve errors
@@ -1884,7 +2122,7 @@ pub const BundleV2 = struct {
if (!import_record.handles_import_errors) {
last_error = err;
if (isPackagePath(import_record.path.text)) {
- if (ast.target.isWebLike() and options.ExternalModules.isNodeBuiltin(import_record.path.text)) {
+ if (ast.target == .browser and options.ExternalModules.isNodeBuiltin(import_record.path.text)) {
addError(
this.bundler.log,
source,
@@ -1930,7 +2168,7 @@ pub const BundleV2 = struct {
// if there were errors, lets go ahead and collect them all
if (last_error != null) continue;
- var path: *Fs.Path = resolve_result.path() orelse {
+ const path: *Fs.Path = resolve_result.path() orelse {
import_record.path.is_disabled = true;
import_record.source_index = Index.invalid;
@@ -1945,9 +2183,19 @@ pub const BundleV2 = struct {
continue;
}
+ if (this.bundler.options.kit) |dev_server| {
+ if (!dev_server.isFileStale(path.text, renderer)) {
+ import_record.source_index = Index.invalid;
+ // TODO(paperdave/kit): this relative path can be computed without a clone in most cases
+ const rel = bun.path.relativePlatform(this.bundler.fs.top_level_dir, path.text, .loose, false);
+ import_record.path.pretty = this.graph.allocator.dupe(u8, rel) catch bun.outOfMemory();
+ continue;
+ }
+ }
+
const hash_key = path.hashKey();
- if (this.graph.path_to_source_index_map.get(hash_key)) |id| {
+ if (this.pathToSourceIndexMap(target).get(hash_key)) |id| {
import_record.source_index = Index.init(id);
continue;
}
@@ -1955,16 +2203,10 @@ pub const BundleV2 = struct {
const resolve_entry = resolve_queue.getOrPut(hash_key) catch bun.outOfMemory();
if (resolve_entry.found_existing) {
import_record.path = resolve_entry.value_ptr.*.path;
-
continue;
}
- if (path.pretty.ptr == path.text.ptr) {
- // TODO: outbase
- const rel = bun.path.relativePlatform(this.bundler.fs.top_level_dir, path.text, .loose, false);
- path.pretty = this.graph.allocator.dupe(u8, rel) catch bun.outOfMemory();
- }
- path.* = path.dupeAllocFixPretty(this.graph.allocator) catch bun.outOfMemory();
+ path.* = this.pathWithPrettyInitialized(path.*, target) catch bun.outOfMemory();
var secondary_path_to_copy: ?Fs.Path = null;
if (resolve_result.path_pair.secondary) |*secondary| {
@@ -1979,17 +2221,10 @@ pub const BundleV2 = struct {
import_record.path = path.*;
debug("created ParseTask: {s}", .{path.text});
- var resolve_task = bun.default_allocator.create(ParseTask) catch bun.outOfMemory();
- resolve_task.* = ParseTask.init(&resolve_result, null, this);
-
+ const resolve_task = bun.default_allocator.create(ParseTask) catch bun.outOfMemory();
+ resolve_task.* = ParseTask.init(&resolve_result, Index.invalid, this);
resolve_task.secondary_path_for_commonjs_interop = secondary_path_to_copy;
-
- if (parse_result.value.success.use_directive != .none) {
- resolve_task.known_target = ast.target;
- } else {
- resolve_task.known_target = ast.target;
- }
-
+ resolve_task.known_target = target;
resolve_task.jsx.development = resolve_result.jsx.development;
if (import_record.tag.loader()) |loader| {
@@ -2072,51 +2307,23 @@ pub const BundleV2 = struct {
);
}
}
- // else if (this.kit_watcher) |watcher| {
- // if (empty_result.watcher_data.fd != .zero and empty_result.watcher_data.fd != bun.invalid_fd) {
- // _ = watcher.addFile(
- // empty_result.watcher_data.fd,
- // input_files.items(.source)[empty_result.source_index.get()].path.text,
- // bun.hash32(input_files.items(.source)[empty_result.source_index.get()].path.text),
- // graph.input_files.items(.loader)[empty_result.source_index.get()],
- // empty_result.watcher_data.dir_fd,
- // null,
- // false,
- // );
- // }
- // }
},
.success => |*result| {
result.log.cloneToWithRecycled(this.bundler.log, true) catch unreachable;
- {
- // to minimize contention, we add watcher here
- if (this.bun_watcher) |watcher| {
- if (result.watcher_data.fd != .zero and result.watcher_data.fd != bun.invalid_fd) {
- _ = watcher.addFile(
- result.watcher_data.fd,
- result.source.path.text,
- bun.hash32(result.source.path.text),
- result.source.path.loader(&this.bundler.options.loaders) orelse options.Loader.file,
- result.watcher_data.dir_fd,
- result.watcher_data.package_json,
- false,
- );
- }
+ // to minimize contention, we add the watcher on the bundling thread instead of the parsing thread.
+ if (this.bun_watcher) |watcher| {
+ if (result.watcher_data.fd != .zero and result.watcher_data.fd != bun.invalid_fd) {
+ _ = watcher.addFile(
+ result.watcher_data.fd,
+ result.source.path.text,
+ bun.hash32(result.source.path.text),
+ result.source.path.loader(&this.bundler.options.loaders) orelse options.Loader.file,
+ result.watcher_data.dir_fd,
+ result.watcher_data.package_json,
+ false,
+ );
}
- // else if (this.kit_watcher) |watcher| {
- // if (result.watcher_data.fd != .zero and result.watcher_data.fd != bun.invalid_fd) {
- // _ = watcher.addFile(
- // result.watcher_data.fd,
- // result.source.path.text,
- // bun.hash32(result.source.path.text),
- // result.source.path.loader(&this.bundler.options.loaders) orelse options.Loader.file,
- // result.watcher_data.dir_fd,
- // result.watcher_data.package_json,
- // false,
- // );
- // }
- // }
}
// Warning: this array may resize in this function call
@@ -2138,17 +2345,18 @@ pub const BundleV2 = struct {
var iter = resolve_queue.iterator();
+ const path_to_source_index_map = this.pathToSourceIndexMap(result.ast.target);
while (iter.next()) |entry| {
const hash = entry.key_ptr.*;
const value = entry.value_ptr.*;
- var existing = graph.path_to_source_index_map.getOrPut(graph.allocator, hash) catch unreachable;
+ var existing = path_to_source_index_map.getOrPut(graph.allocator, hash) catch unreachable;
// If the same file is imported and required, and those point to different files
// Automatically rewrite it to the secondary one
if (value.secondary_path_for_commonjs_interop) |secondary_path| {
const secondary_hash = secondary_path.hashKey();
- if (graph.path_to_source_index_map.get(secondary_hash)) |secondary| {
+ if (path_to_source_index_map.get(secondary_hash)) |secondary| {
existing.found_existing = true;
existing.value_ptr.* = secondary;
}
@@ -2214,12 +2422,12 @@ pub const BundleV2 = struct {
}
for (import_records.slice(), 0..) |*record, i| {
- if (graph.path_to_source_index_map.get(record.path.hashKey())) |source_index| {
+ if (path_to_source_index_map.get(record.path.hashKey())) |source_index| {
record.source_index.value = source_index;
if (getRedirectId(result.ast.redirect_import_record_index)) |compare| {
if (compare == @as(u32, @truncate(i))) {
- graph.path_to_source_index_map.put(
+ path_to_source_index_map.put(
graph.allocator,
result.source.path.hashKey(),
source_index,
@@ -2231,14 +2439,46 @@ pub const BundleV2 = struct {
result.ast.import_records = import_records;
graph.ast.set(result.source.index.get(), result.ast);
- if (result.use_directive != .none) {
- graph.use_directive_entry_points.append(
+
+ // For files with use directives, index and prepare the other side.
+ if (result.use_directive != .none and
+ ((result.use_directive == .client) == (result.ast.target == .browser)))
+ {
+ if (result.use_directive == .server)
+ bun.todoPanic(@src(), "\"use server\"", .{});
+ if (!this.framework.?.server_components.?.separate_ssr_graph)
+ bun.todoPanic(@src(), "implement 'separate_ssr_graph = false'", .{});
+
+ const reference_source_index = this.enqueueServerComponentGeneratedFile(
+ .{ .client_reference_proxy = .{
+ .other_source = result.source,
+ .named_exports = result.ast.named_exports,
+ } },
+ result.source,
+ ) catch bun.outOfMemory();
+
+ this.graph.path_to_source_index_map.put(
graph.allocator,
- .{
- .source_index = result.source.index.get(),
- .use_directive = result.use_directive,
- },
- ) catch unreachable;
+ result.source.path.hashKey(),
+ reference_source_index,
+ ) catch bun.outOfMemory();
+
+ var ssr_source = result.source;
+ ssr_source.path.pretty = ssr_source.path.text;
+ ssr_source.path = this.pathWithPrettyInitialized(ssr_source.path, .kit_server_components_ssr) catch bun.outOfMemory();
+ const ssr_index = this.enqueueParseTask2(
+ ssr_source,
+ .tsx,
+ .kit_server_components_ssr,
+ ) catch bun.outOfMemory();
+
+ graph.server_component_boundaries.put(
+ graph.allocator,
+ result.source.index.get(),
+ result.use_directive,
+ reference_source_index,
+ ssr_index,
+ ) catch bun.outOfMemory();
}
},
.err => |*err| {
@@ -2262,18 +2502,29 @@ pub const BundleV2 = struct {
},
}
}
+
+ /// To satisfy the interface from NewHotReloader()
+ pub fn getLoaders(vm: *BundleV2) *bun.options.Loader.HashTable {
+ return &vm.bundler.options.loaders;
+ }
+
+ /// To satisfy the interface from NewHotReloader()
+ pub fn bustDirCache(vm: *BundleV2, path: []const u8) bool {
+ return vm.bundler.resolver.bustDirCache(path);
+ }
};
/// Used to keep the bundle thread from spinning on Windows
pub fn timerCallback(_: *bun.windows.libuv.Timer) callconv(.C) void {}
-/// Used for Bun.build and Kit, as they asynchronously schedule multiple
-/// bundles. To account for their respective differences, the scheduling code
-/// is generalized over the Task structure.
+/// Originally, kit.DevServer required a separate bundling thread, but that was
+/// later removed. The bundling thread's scheduling logic is generalized over
+/// the completion structure.
+///
+/// CompletionStruct's interface:
///
/// - `configureBundler` is used to configure `Bundler`.
/// - `completeOnBundleThread` is used to tell the task that it is done.
-///
pub fn BundleThread(CompletionStruct: type) type {
return struct {
const Self = @This();
@@ -2385,6 +2636,7 @@ pub fn BundleThread(CompletionStruct: type) type {
const this = try BundleV2.init(
bundler,
+ null, // TODO: Kit
allocator,
JSC.AnyEventLoop.init(allocator),
false,
@@ -2392,17 +2644,9 @@ pub fn BundleThread(CompletionStruct: type) type {
heap,
);
- // switch (CompletionStruct) {
- // bun.kit.DevServer.BundleTask => {
- // this.kit_watcher = completion.route.dev.bun_watcher;
- // },
- // else => {},
- // }
-
this.plugins = completion.plugins;
this.completion = switch (CompletionStruct) {
- BundleV2.JSBundleCompletionTask => .{ .js = completion },
- bun.kit.DevServer.BundleTask => .{ .kit = completion },
+ BundleV2.JSBundleCompletionTask => completion,
else => @compileError("Unknown completion struct: " ++ CompletionStruct),
};
completion.bundler = this;
@@ -2417,7 +2661,7 @@ pub fn BundleThread(CompletionStruct: type) type {
}
errdefer {
- // Wait for wait groups to finish. There still may be
+ // Wait for wait groups to finish. There still may be ongoing work.
this.linker.source_maps.line_offset_wait_group.wait();
this.linker.source_maps.quoted_contents_wait_group.wait();
@@ -2428,7 +2672,7 @@ pub fn BundleThread(CompletionStruct: type) type {
completion.result = .{
.value = .{
- .output_files = try this.runFromJSInNewThread(bundler.options.entry_points),
+ .output_files = try this.runFromJSInNewThread(bundler.options.entry_points, &.{}),
},
};
@@ -2441,6 +2685,7 @@ pub fn BundleThread(CompletionStruct: type) type {
}
const UseDirective = js_ast.UseDirective;
+const ServerComponentBoundary = js_ast.ServerComponentBoundary;
pub const ParseTask = struct {
path: Fs.Path,
@@ -2458,7 +2703,7 @@ pub const ParseTask = struct {
source_index: Index = Index.invalid,
task: ThreadPoolLib.Task = .{ .callback = &callback },
tree_shaking: bool = false,
- known_target: ?options.Target = null,
+ known_target: options.Target,
module_type: options.ModuleType = .unknown,
emit_decorator_metadata: bool = false,
ctx: *BundleV2,
@@ -2470,7 +2715,7 @@ pub const ParseTask = struct {
const debug = Output.scoped(.ParseTask, false);
- pub fn init(resolve_result: *const _resolver.Result, source_index: ?Index, ctx: *BundleV2) ParseTask {
+ pub fn init(resolve_result: *const _resolver.Result, source_index: Index, ctx: *BundleV2) ParseTask {
return .{
.ctx = ctx,
.path = resolve_result.path_pair.primary,
@@ -2482,10 +2727,11 @@ pub const ParseTask = struct {
},
.side_effects = resolve_result.primary_side_effects_data,
.jsx = resolve_result.jsx,
- .source_index = source_index orelse Index.invalid,
+ .source_index = source_index,
.module_type = resolve_result.module_type,
.emit_decorator_metadata = resolve_result.emit_decorator_metadata,
.package_version = if (resolve_result.package_json) |package_json| package_json.version else "",
+ .known_target = ctx.bundler.options.target,
};
}
@@ -2611,16 +2857,16 @@ pub const ParseTask = struct {
const parse_task = ParseTask{
.ctx = undefined,
.path = Fs.Path.initWithNamespace("runtime", "bun:runtime"),
- .side_effects = _resolver.SideEffects.no_side_effects__pure_data,
- .jsx = options.JSX.Pragma{
+ .side_effects = .no_side_effects__pure_data,
+ .jsx = .{
.parse = false,
- // .supports_react_refresh = false,
},
.contents_or_fd = .{
.contents = runtime_code,
},
.source_index = Index.runtime,
- .loader = Loader.js,
+ .loader = .js,
+ .known_target = target,
};
const source = Logger.Source{
.path = parse_task.path,
@@ -2630,6 +2876,7 @@ pub const ParseTask = struct {
};
return .{ .parse_task = parse_task, .source = source };
}
+
fn getRuntimeSource(target: options.Target) RuntimeSource {
return switch (target) {
inline else => |t| comptime getRuntimeSourceComptime(t),
@@ -2639,8 +2886,9 @@ pub const ParseTask = struct {
pub const Result = struct {
task: EventLoop.Task,
ctx: *BundleV2,
+ value: Value,
- value: union(Tag) {
+ pub const Value = union(Tag) {
success: Success,
err: Error,
empty: struct {
@@ -2648,7 +2896,7 @@ pub const ParseTask = struct {
watcher_data: WatcherData = .{},
},
- },
+ };
const WatcherData = struct {
fd: bun.StoredFileDescriptorType = .zero,
@@ -2893,37 +3141,6 @@ pub const ParseTask = struct {
.fd => brk: {
const trace = tracer(@src(), "readFile");
defer trace.end();
- if (bundler.options.framework) |framework| {
- if (framework.override_modules_hashes.len > 0) {
- const package_relative_path_hash = bun.hash(file_path.pretty);
- if (std.mem.indexOfScalar(
- u64,
- framework.override_modules_hashes,
- package_relative_path_hash,
- )) |index| {
- const relative_path = [_]string{
- framework.resolved_dir,
- framework.override_modules.values[index],
- };
- const override_path = bundler.fs.absBuf(
- &relative_path,
- &override_file_path_buf,
- );
- override_file_path_buf[override_path.len] = 0;
- const override_pathZ = override_file_path_buf[0..override_path.len :0];
- debug("{s} -> {s}", .{ file_path.text, override_path });
- break :brk try resolver.caches.fs.readFileWithAllocator(
- allocator,
- bundler.fs,
- override_pathZ,
- .zero,
- false,
- null,
- );
- }
- }
- }
-
if (strings.eqlComptime(file_path.namespace, "node"))
break :brk CacheEntry{
.contents = NodeFallbackModules.contentsFromPath(file_path.text) orelse "",
@@ -2978,9 +3195,7 @@ pub const ParseTask = struct {
const will_close_file_descriptor = task.contents_or_fd == .fd and
!entry.fd.isStdio() and
- (this.ctx.bun_watcher == null
- // and this.ctx.kit_watcher == null
- );
+ (this.ctx.bun_watcher == null);
if (will_close_file_descriptor) {
_ = entry.closeFD();
}
@@ -2993,13 +3208,24 @@ pub const ParseTask = struct {
};
step.* = .parse;
- const is_empty = entry.contents.len == 0 or (entry.contents.len < 33 and strings.trim(entry.contents, " \n\r").len == 0);
+ const is_empty = strings.isAllWhitespace(entry.contents);
- const use_directive = if (!is_empty and bundler.options.react_server_components)
- UseDirective.parse(entry.contents)
+ const use_directive: UseDirective = if (!is_empty and bundler.options.server_components)
+ if (UseDirective.parse(entry.contents)) |use|
+ use
+ else
+ .none
else
.none;
+ if ((use_directive == .client and task.known_target != .kit_server_components_ssr) or
+ (bundler.options.server_components and task.known_target == .browser))
+ {
+ bundler = this.ctx.client_bundler;
+ resolver = &bundler.resolver;
+ bun.assert(bundler.options.target == .browser);
+ }
+
var source = Logger.Source{
.path = file_path,
.key_path = file_path,
@@ -3008,7 +3234,11 @@ pub const ParseTask = struct {
.contents_is_recycled = false,
};
- const target = targetFromHashbang(entry.contents) orelse use_directive.target(task.known_target orelse bundler.options.target);
+ const target = (if (task.source_index.get() == 1) targetFromHashbang(entry.contents) else null) orelse
+ if (task.known_target == .kit_server_components_ssr)
+ .kit_server_components_ssr
+ else
+ bundler.options.target;
var opts = js_parser.Parser.Options.init(task.jsx, loader);
opts.bundle = true;
@@ -3027,8 +3257,10 @@ pub const ParseTask = struct {
opts.features.emit_decorator_metadata = bundler.options.emit_decorator_metadata;
opts.features.unwrap_commonjs_packages = bundler.options.unwrap_commonjs_packages;
opts.features.hot_module_reloading = bundler.options.output_format == .internal_kit_dev and !source.index.isRuntime();
- opts.features.react_fast_refresh = (bundler.options.hot_module_reloading or bundler.options.react_fast_refresh) and
- loader.isJSX() and !source.path.isNodeModule();
+ opts.features.react_fast_refresh = target == .browser and
+ bundler.options.react_fast_refresh and
+ loader.isJSX() and
+ !source.path.isNodeModule();
opts.ignore_dce_annotations = bundler.options.ignore_dce_annotations and !source.index.isRuntime();
@@ -3047,7 +3279,6 @@ pub const ParseTask = struct {
task.jsx.parse = loader.isJSX();
var unique_key_for_additional_file: []const u8 = "";
-
var ast: JSAst = if (!is_empty)
try getAST(log, bundler, opts, allocator, resolver, source, loader, task.ctx.unique_key, &unique_key_for_additional_file)
else switch (opts.module_type == .esm) {
@@ -3062,6 +3293,7 @@ pub const ParseTask = struct {
};
ast.target = target;
+
if (ast.parts.len <= 1) {
task.side_effects = .no_side_effects__empty_ast;
}
@@ -3075,11 +3307,7 @@ pub const ParseTask = struct {
}
}
- // never a react client component if RSC is not enabled.
- bun.assert(use_directive == .none or bundler.options.react_server_components);
-
step.* = .resolve;
- ast.target = target;
return Result.Success{
.ast = ast,
@@ -3182,6 +3410,171 @@ pub const ParseTask = struct {
}
};
+/// Files for Server Components are generated using `AstBuilder`, instead of
+/// running through the js_parser. It emits a ParseTask.Result and joins
+/// with the same completion logic that ParseTask results run through.
+pub const ServerComponentParseTask = struct {
+ task: ThreadPoolLib.Task = .{ .callback = &taskCallbackWrap },
+ data: Data,
+ ctx: *BundleV2,
+ source: Logger.Source,
+
+ pub const Data = union(enum) {
+ /// Generate server-side code for a "use client" module. Given the
+ /// client ast, a "reference proxy" is created with identical exports.
+ client_reference_proxy: ReferenceProxy,
+
+ pub const ReferenceProxy = struct {
+ other_source: Logger.Source,
+ named_exports: JSAst.NamedExports,
+ };
+ };
+
+ fn taskCallbackWrap(thread_pool_task: *ThreadPoolLib.Task) void {
+ const task: *ServerComponentParseTask = @fieldParentPtr("task", thread_pool_task);
+ var worker = ThreadPool.Worker.get(task.ctx);
+ defer worker.unget();
+ var log = Logger.Log.init(worker.allocator);
+
+ const result = bun.default_allocator.create(ParseTask.Result) catch bun.outOfMemory();
+ result.* = .{
+ .ctx = task.ctx,
+ .task = undefined,
+
+ .value = if (taskCallback(
+ task,
+ &log,
+ worker.allocator,
+ )) |success|
+ .{ .success = success }
+ else |err| brk: {
+ break :brk .{ .err = .{
+ .err = err,
+ .step = .resolve,
+ .log = log,
+ } };
+ },
+ };
+
+ switch (worker.ctx.loop().*) {
+ .js => |jsc_event_loop| {
+ jsc_event_loop.enqueueTaskConcurrent(JSC.ConcurrentTask.fromCallback(result, ParseTask.onComplete));
+ },
+ .mini => |*mini| {
+ mini.enqueueTaskConcurrentWithExtraCtx(
+ ParseTask.Result,
+ BundleV2,
+ result,
+ BundleV2.onParseTaskComplete,
+ .task,
+ );
+ },
+ }
+ }
+
+ fn taskCallback(
+ task: *ServerComponentParseTask,
+ log: *Logger.Log,
+ allocator: std.mem.Allocator,
+ ) !ParseTask.Result.Success {
+ var ab = try AstBuilder.init(allocator, &task.source, task.ctx.bundler.options.hot_module_reloading);
+
+ try switch (task.data) {
+ .client_reference_proxy => |data| task.generateClientReferenceProxy(data, &ab),
+ };
+
+ var ast = try ab.toBundledAst();
+ ast.target = switch (task.data) {
+ // Server-side
+ .client_reference_proxy => task.ctx.bundler.options.target,
+ };
+
+ return .{
+ .ast = ast,
+ .source = task.source,
+ .log = log.*,
+ };
+ }
+
+ fn generateClientReferenceProxy(task: *ServerComponentParseTask, data: Data.ReferenceProxy, b: *AstBuilder) !void {
+ const server_components = task.ctx.framework.?.server_components orelse
+ unreachable; // config must be non-null to enter this function
+
+ const client_named_exports = data.named_exports;
+
+ const register_client_reference = (try b.addImportStmt(
+ server_components.server_runtime_import,
+ &.{server_components.server_register_client_reference},
+ ))[0];
+
+ const module_path = b.newExpr(E.String{ .data = data.other_source.path.pretty });
+
+ for (client_named_exports.keys()) |key| {
+ const export_ref = try b.newSymbol(.other, key);
+
+ const is_default = bun.strings.eqlComptime(key, "default");
+
+ // This error message is taken from
+ // https://github.com/facebook/react/blob/c5b9375767e2c4102d7e5559d383523736f1c902/packages/react-server-dom-webpack/src/ReactFlightWebpackNodeLoader.js#L323-L354
+ const err_msg_string = try if (is_default)
+ std.fmt.allocPrint(
+ b.allocator,
+ "Attempted to call the default export of {[module_path]s} from " ++
+ "the server, but it's on the client. It's not possible to invoke a " ++
+ "client function from the server, it can only be rendered as a " ++
+ "Component or passed to props of a Client Component.",
+ .{ .module_path = data.other_source.path.pretty },
+ )
+ else
+ std.fmt.allocPrint(
+ b.allocator,
+ "Attempted to call {[key]s}() from the server but {[key]s} " ++
+ "is on the client. It's not possible to invoke a client function from " ++
+ "the server, it can only be rendered as a Component or passed to " ++
+ "props of a Client Component.",
+ .{ .key = key },
+ );
+
+ // throw new Error(...)
+ const err_msg = b.newExpr(E.New{
+ .target = b.newExpr(E.Identifier{
+ .ref = try b.newExternalSymbol("Error"),
+ }),
+ .args = try BabyList(Expr).fromSlice(b.allocator, &.{
+ b.newExpr(E.String{ .data = err_msg_string }),
+ }),
+ .close_parens_loc = Logger.Loc.Empty,
+ });
+
+ // export const Comp = registerClientReference(
+ // () => { throw new Error(...) },
+ // "src/filepath.tsx",
+ // "Comp"
+ // );
+ try b.appendStmt(S.Local{
+ .decls = try G.Decl.List.fromSlice(b.allocator, &.{.{
+ .binding = Binding.alloc(b.allocator, B.Identifier{ .ref = export_ref }, Logger.Loc.Empty),
+ .value = b.newExpr(E.Call{
+ .target = register_client_reference,
+ .args = try js_ast.ExprNodeList.fromSlice(b.allocator, &.{
+ b.newExpr(E.Arrow{ .body = .{
+ .stmts = try b.allocator.dupe(Stmt, &.{
+ b.newStmt(S.Throw{ .value = err_msg }),
+ }),
+ .loc = Logger.Loc.Empty,
+ } }),
+ module_path,
+ b.newExpr(E.String{ .data = key }),
+ }),
+ }),
+ }}),
+ .is_export = true,
+ .kind = .k_const,
+ });
+ }
+ }
+};
+
const IdentityContext = @import("../identity_context.zig").IdentityContext;
const RefVoidMap = std.ArrayHashMapUnmanaged(Ref, void, Ref.ArrayHashCtx, false);
@@ -3191,9 +3584,9 @@ const ResolvedExports = bun.StringArrayHashMapUnmanaged(ExportData);
const TopLevelSymbolToParts = js_ast.Ast.TopLevelSymbolToParts;
pub const WrapKind = enum(u2) {
- none = 0,
- cjs = 1,
- esm = 2,
+ none,
+ cjs,
+ esm,
};
pub const ImportData = struct {
@@ -3355,33 +3748,68 @@ pub const JSMeta = struct {
};
pub const Graph = struct {
- entry_points: std.ArrayListUnmanaged(Index) = .{},
- ast: MultiArrayList(JSAst) = .{},
-
- input_files: InputFile.List = .{},
-
+ // TODO: consider removing references to this in favor of bundler.options.code_splitting
code_splitting: bool = false,
- pool: *ThreadPool = undefined,
-
- heap: ThreadlocalArena = ThreadlocalArena{},
- /// Main thread only!!
+ pool: *ThreadPool,
+ heap: ThreadlocalArena = .{},
+ /// This allocator is thread-local to the Bundler thread
allocator: std.mem.Allocator = undefined,
+ /// Mapping user-specified entry points to their Source Index
+ entry_points: std.ArrayListUnmanaged(Index) = .{},
+ /// Every source index has an associated InputFile
+ input_files: MultiArrayList(InputFile) = .{},
+ /// Every source index has an associated Ast
+ /// When a parse is in progress / queued, it is `Ast.empty`
+ ast: MultiArrayList(JSAst) = .{},
+
+ // During the scan + parse phase, these atomics keep track
+ // of the remaining tasks. Once it hits zero, linking begins.
+ //
+ // TODO: these use atomicRmw across the codebase, but it seems at a glance
+ // that each usage is on the main thread. if that is not true, convert this
+ // to use std.atomic.Value instead. also consider merging the two, and also
+ // using u32, since Ref does not support addressing sources above maxInt(u31)
parse_pending: usize = 0,
resolve_pending: usize = 0,
- /// Stable source index mapping
- source_index_map: std.AutoArrayHashMapUnmanaged(Index.Int, Ref.Int) = .{},
+ /// Maps a hashed path string to a source index, if it exists in the compilation.
+ /// Instead of accessing this directly, consider using BundleV2.pathToSourceIndexMap
path_to_source_index_map: PathToSourceIndexMap = .{},
+ /// When using server components, a completely separate file listing is
+ /// required to avoid incorrect inlining of defines and dependencies on
+ /// other files. This is relevant for files shared between server and client
+ /// and have no "use " directive, and must be duplicated.
+ ///
+ /// To make linking easier, this second graph contains indices into the
+ /// same `.ast` and `.input_files` arrays.
+ client_path_to_source_index_map: PathToSourceIndexMap = .{},
+ /// When using server components with React, there is an additional module
+ /// graph which is used to contain SSR-versions of all client components;
+ /// the SSR graph. The difference between the SSR graph and the server
+ /// graph is that this one does not apply '--conditions react-server'
+ ///
+ /// In Bun's React Framework, it includes SSR versions of 'react' and
+ /// 'react-dom' (an export condition is used to provide a different
+ /// implementation for RSC, which is potentially how they implement
+ /// server-only features such as async components).
+ ssr_path_to_source_index_map: PathToSourceIndexMap = .{},
+
+ /// When Server Components is enabled, this holds a list of all boundary
+ /// files. This happens for all files with a "use " directive.
+ server_component_boundaries: ServerComponentBoundary.List = .{},
- use_directive_entry_points: UseDirective.List = .{},
+ // TODO: this has no reason to be using logger.Range
+ shadow_entry_point_range: Logger.Range = Logger.Range.None,
+ // TODO: document what makes this estimate not perfect
estimated_file_loader_count: usize = 0,
additional_output_files: std.ArrayListUnmanaged(options.OutputFile) = .{},
- shadow_entry_point_range: Logger.Range = Logger.Range.None,
- shadow_entry_points: std.ArrayListUnmanaged(ShadowEntryPoint) = .{},
+
+ kit_referenced_server_data: bool,
+ kit_referenced_client_data: bool,
pub const InputFile = struct {
source: Logger.Source,
@@ -3390,8 +3818,6 @@ pub const Graph = struct {
additional_files: BabyList(AdditionalFile) = .{},
unique_key_for_additional_file: string = "",
content_hash_for_additional_file: u64 = 0,
-
- pub const List = MultiArrayList(InputFile);
};
};
@@ -3452,14 +3878,15 @@ const EntryPoint = struct {
return this == .user_specified or this == .react_server_component;
}
+ // TODO: Rename this to isServerComponentBoundary
pub fn isReactReference(this: Kind) bool {
return this == .react_client_component or this == .react_server_component;
}
pub fn useDirective(this: Kind) UseDirective {
return switch (this) {
- .react_client_component => .@"use client",
- .react_server_component => .@"use server",
+ .react_client_component => .client,
+ .react_server_component => .server,
else => .none,
};
}
@@ -3474,10 +3901,6 @@ const AstSourceIDMapping = struct {
const LinkerGraph = struct {
const debug = Output.scoped(.LinkerGraph, false);
- /// TODO(@paperdave): remove this. i added it before realizing this is available
- /// via LinkerContext.parse_graph. it may also be worth removing the other cloned data.
- bundler_graph: *const Graph,
-
files: File.List = .{},
files_live: BitSet = undefined,
entry_points: EntryPoint.List = .{},
@@ -3523,13 +3946,13 @@ const LinkerGraph = struct {
pub fn useDirectiveBoundary(this: *const LinkerGraph, source_index: Index.Int) UseDirective {
if (this.react_client_component_boundary.bit_length > 0) {
if (this.react_client_component_boundary.isSet(source_index)) {
- return .@"use client";
+ return .client;
}
}
if (this.react_server_component_boundary.bit_length > 0) {
if (this.react_server_component_boundary.isSet(source_index)) {
- return .@"use server";
+ return .server;
}
}
@@ -3646,6 +4069,7 @@ const LinkerGraph = struct {
return part_id;
}
+
pub fn generateSymbolImportAndUse(
g: *LinkerGraph,
source_index: u32,
@@ -3722,10 +4146,11 @@ const LinkerGraph = struct {
this: *LinkerGraph,
entry_points: []const Index,
sources: []const Logger.Source,
- use_directive_entry_points: UseDirective.List,
+ server_component_boundaries: ServerComponentBoundary.List,
dynamic_import_entry_points: []const Index.Int,
shadow_entry_point_range: Logger.Range,
) !void {
+ const scb = server_component_boundaries.slice();
try this.files.setCapacity(this.allocator, sources.len);
this.files.zero();
this.files_live = try BitSet.initEmpty(
@@ -3743,7 +4168,7 @@ const LinkerGraph = struct {
// Setup entry points
{
- try this.entry_points.setCapacity(this.allocator, entry_points.len + use_directive_entry_points.len + dynamic_import_entry_points.len);
+ try this.entry_points.setCapacity(this.allocator, entry_points.len + server_component_boundaries.list.len + dynamic_import_entry_points.len);
this.entry_points.len = entry_points.len;
const source_indices = this.entry_points.items(.source_index);
@@ -3786,20 +4211,24 @@ const LinkerGraph = struct {
this.meta.len = this.ast.len;
this.meta.zero();
- if (use_directive_entry_points.len > 0) {
+ if (server_component_boundaries.list.len > 0) {
this.react_client_component_boundary = BitSet.initEmpty(this.allocator, this.files.len) catch unreachable;
this.react_server_component_boundary = BitSet.initEmpty(this.allocator, this.files.len) catch unreachable;
var any_server = false;
var any_client = false;
// Loop #1: populate the list of files that are react client components
- for (use_directive_entry_points.items(.use_directive), use_directive_entry_points.items(.source_index)) |use, source_id| {
- if (use == .@"use client") {
- any_client = true;
- this.react_client_component_boundary.set(source_id);
- } else if (use == .@"use server") {
- any_server = true;
- this.react_server_component_boundary.set(source_id);
+ for (scb.list.items(.use_directive), scb.list.items(.source_index)) |use, source_id| {
+ switch (use) {
+ .none => {},
+ .client => {
+ any_client = true;
+ this.react_client_component_boundary.set(source_id);
+ },
+ .server => {
+ any_server = true;
+ this.react_server_component_boundary.set(source_id);
+ },
}
}
@@ -3808,6 +4237,8 @@ const LinkerGraph = struct {
for (this.reachable_files) |source_id| {
const use_directive = this.useDirectiveBoundary(source_id.get());
const source_i32 = @as(i32, @intCast(source_id.get()));
+
+ // TODO(paperdave/kit): i am not sure if this logic is correct
const is_shadow_entrypoint = shadow_entry_point_range.contains(source_i32);
// If the reachable file has a "use client"; at the top
@@ -3821,20 +4252,27 @@ const LinkerGraph = struct {
const other = this.useDirectiveBoundary(source_index);
if (use_directive.boundering(other)) |boundary| {
-
- // That import is a React Server Component reference.
+ // That import is a Server Component reference.
switch (boundary) {
- .@"use client" => {
+ .client => {
if (!is_shadow_entrypoint) {
- const pretty = sources[source_index].path.pretty;
- import_record.module_id = bun.hash32(pretty);
- import_record.tag = .react_client_component;
- import_record.path.namespace = "client";
- import_record.print_namespace_in_path = true;
- import_record.source_index = Index.invalid;
+ // const pretty = sources[source_index].path.pretty;
+ // import_record.module_id = bun.hash32(pretty);
+ // import_record.tag = .react_client_component;
+ // import_record.path.namespace = "client";
+ // import_record.print_namespace_in_path = true;
+ import_record.source_index = Index.init(
+ scb.getReferenceSourceIndex(
+ source_index,
+ ) orelse unreachable, // file didn't have a boundary
+ );
+ bun.assert(import_record.source_index.isValid()); // did not generate
}
},
- .@"use server" => {
+ .server => {
+ {
+ bun.todoPanic(@src(), "\"use server\"", .{});
+ }
import_record.module_id = bun.hash32(sources[source_index].path.pretty);
import_record.tag = .react_server_component;
import_record.path.namespace = "server";
@@ -3852,7 +4290,6 @@ const LinkerGraph = struct {
entry_point_kinds[source_index] = .react_server_component;
}
},
- else => unreachable,
}
}
}
@@ -3877,11 +4314,9 @@ const LinkerGraph = struct {
stable_source_indices[source_index.get()] = Index.source(i);
}
- const file = LinkerGraph.File{};
- // TODO: verify this outputs efficient code
@memset(
files.items(.distance_from_entry_point),
- file.distance_from_entry_point,
+ (LinkerGraph.File{}).distance_from_entry_point,
);
this.stable_source_indices = @as([]const u32, @ptrCast(stable_source_indices));
}
@@ -3937,16 +4372,11 @@ const LinkerGraph = struct {
var resolved = ResolvedExports{};
resolved.ensureTotalCapacity(this.allocator, src.count()) catch unreachable;
for (src.keys(), src.values()) |key, value| {
- resolved.putAssumeCapacityNoClobber(
- key,
- .{
- .data = .{
- .import_ref = value.ref,
- .name_loc = value.alias_loc,
- .source_index = Index.source(source_index),
- },
- },
- );
+ resolved.putAssumeCapacityNoClobber(key, .{ .data = .{
+ .import_ref = value.ref,
+ .name_loc = value.alias_loc,
+ .source_index = Index.source(source_index),
+ } });
}
dest.* = resolved;
}
@@ -4024,6 +4454,10 @@ pub const LinkerContext = struct {
/// to know whether or not we can free it safely.
pending_task_count: std.atomic.Value(u32) = std.atomic.Value(u32).init(0),
+ /// Used by Kit to extract []CompileResult before it is joined
+ kit_dev_server: ?*bun.kit.DevServer = null,
+ framework: ?*const kit.Framework = null,
+
pub const LinkerOptions = struct {
output_format: options.Format = .esm,
ignore_dce_annotations: bool = false,
@@ -4035,7 +4469,7 @@ pub const LinkerContext = struct {
source_maps: options.SourceMapOption = .none,
target: options.Target = .browser,
- mode: Mode = Mode.bundle,
+ mode: Mode = .bundle,
public_path: []const u8 = "",
@@ -4151,7 +4585,7 @@ pub const LinkerContext = struct {
this: *LinkerContext,
bundle: *BundleV2,
entry_points: []Index,
- use_directive_entry_points: UseDirective.List,
+ server_component_boundaries: ServerComponentBoundary.List,
reachable: []Index,
) !void {
const trace = tracer(@src(), "CloneLinkerGraph");
@@ -4168,7 +4602,7 @@ pub const LinkerContext = struct {
const sources: []const Logger.Source = this.parse_graph.input_files.items(.source);
- try this.graph.load(entry_points, sources, use_directive_entry_points, bundle.dynamic_import_entry_points.keys(), bundle.graph.shadow_entry_point_range);
+ try this.graph.load(entry_points, sources, server_component_boundaries, bundle.dynamic_import_entry_points.keys(), bundle.graph.shadow_entry_point_range);
bundle.dynamic_import_entry_points.deinit();
this.wait_group.init();
this.ambiguous_result_pool = std.ArrayList(MatchImport).init(this.allocator);
@@ -4226,14 +4660,14 @@ pub const LinkerContext = struct {
this: *LinkerContext,
bundle: *BundleV2,
entry_points: []Index,
- use_directive_entry_points: UseDirective.List,
+ server_component_boundaries: ServerComponentBoundary.List,
reachable: []Index,
unique_key: u64,
) ![]Chunk {
try this.load(
bundle,
entry_points,
- use_directive_entry_points,
+ server_component_boundaries,
reachable,
);
@@ -4518,7 +4952,7 @@ pub const LinkerContext = struct {
pub fn visit(
v: *@This(),
source_index: Index.Int,
- comptime with_react_server_components: UseDirective.Flags,
+ comptime with_server_components: UseDirective.Flags,
comptime with_code_splitting: bool,
) void {
if (source_index == Index.invalid.value) return;
@@ -4532,12 +4966,12 @@ pub const LinkerContext = struct {
// when NOT code splitting, include the file in the chunk if ANY of the entry points overlap
v.entry_bits.hasIntersection(&v.c.graph.files.items(.entry_bits)[source_index]);
- if (comptime with_react_server_components.is_client or with_react_server_components.is_server) {
+ if (with_server_components.has_any_client or with_server_components.has_any_server) {
if (is_file_in_chunk and
v.entry_point.is_entry_point and
v.entry_point.source_index != source_index)
{
- if (comptime with_react_server_components.is_client) {
+ if (with_server_components.has_any_client) {
if (v.c.graph.react_client_component_boundary.isSet(source_index)) {
if (!v.c.graph.react_client_component_boundary.isSet(v.entry_point.source_index)) {
return;
@@ -4545,7 +4979,7 @@ pub const LinkerContext = struct {
}
}
- if (comptime with_react_server_components.is_server) {
+ if (with_server_components.has_any_server) {
if (v.c.graph.react_server_component_boundary.isSet(source_index)) {
if (!v.c.graph.react_server_component_boundary.isSet(v.entry_point.source_index)) {
return;
@@ -4576,7 +5010,7 @@ pub const LinkerContext = struct {
continue;
}
- v.visit(record.source_index.get(), with_react_server_components, with_code_splitting);
+ v.visit(record.source_index.get(), with_server_components, with_code_splitting);
}
}
@@ -4643,8 +5077,8 @@ pub const LinkerContext = struct {
visitor.visit(
Index.runtime.value,
.{
- .is_server = with_server,
- .is_client = with_client,
+ .has_any_server = with_server,
+ .has_any_client = with_client,
},
with_code_splitting,
);
@@ -4652,8 +5086,8 @@ pub const LinkerContext = struct {
visitor.visit(
order.source_index,
.{
- .is_server = with_server,
- .is_client = with_client,
+ .has_any_server = with_server,
+ .has_any_client = with_client,
},
with_code_splitting,
);
@@ -6134,6 +6568,51 @@ pub const LinkerContext = struct {
entry_point_kinds,
);
}
+
+ // When using server components with a separated SSR graph, these
+ // components are not required to be referenced; the framework may
+ // use a dynamic import to get a handle to it.
+ if (c.framework) |fw| if (fw.server_components) |sc| {
+ if (sc.separate_ssr_graph) {
+ const slice = c.parse_graph.server_component_boundaries.list.slice();
+ for (slice.items(.use_directive), slice.items(.ssr_source_index)) |use, ssr_source_index| {
+ switch (use) {
+ .client => {
+ c.markFileLiveForTreeShaking(
+ ssr_source_index,
+ side_effects,
+ parts,
+ import_records,
+ entry_point_kinds,
+ );
+ },
+ .server => bun.todoPanic(@src(), "rewire hot-bundling code", .{}),
+ else => unreachable,
+ }
+ }
+ }
+
+ // TODO: this is a workaround for a missing tree-shaking
+ // annotated wrt these generated segments
+ if (c.parse_graph.kit_referenced_server_data) {
+ c.markFileLiveForTreeShaking(
+ Index.kit_server_data.get(),
+ side_effects,
+ parts,
+ import_records,
+ entry_point_kinds,
+ );
+ }
+ if (c.parse_graph.kit_referenced_client_data) {
+ c.markFileLiveForTreeShaking(
+ Index.kit_client_data.get(),
+ side_effects,
+ parts,
+ import_records,
+ entry_point_kinds,
+ );
+ }
+ };
}
{
@@ -6165,6 +6644,27 @@ pub const LinkerContext = struct {
import_records,
file_entry_bits,
);
+
+ if (c.framework) |fw| if (fw.server_components) |sc| if (sc.separate_ssr_graph) {
+ const slice = c.parse_graph.server_component_boundaries.list.slice();
+ for (slice.items(.use_directive), slice.items(.ssr_source_index)) |use, ssr_source_index| {
+ switch (use) {
+ .client => {
+ c.markFileReachableForCodeSplitting(
+ ssr_source_index,
+ i,
+ distances,
+ 0,
+ parts,
+ import_records,
+ file_entry_bits,
+ );
+ },
+ .server => bun.todoPanic(@src(), "rewire hot-bundling code", .{}),
+ else => unreachable,
+ }
+ }
+ };
}
}
}
@@ -6377,18 +6877,6 @@ pub const LinkerContext = struct {
if (other_chunk_index == chunk_index or other_chunk.content != .javascript) continue;
if (other_chunk.entry_bits.isSet(chunk.entry_point.entry_point_id)) {
- if (other_chunk.entry_point.is_entry_point) {
- if (c.graph.react_client_component_boundary.bit_length > 0 or c.graph.react_server_component_boundary.bit_length > 0) {
- const other_kind = c.graph.files.items(.entry_point_kind)[other_chunk.entry_point.source_index];
- const this_kind = c.graph.files.items(.entry_point_kind)[chunk.entry_point.source_index];
-
- if (this_kind != .react_client_component and
- other_kind.isReactReference())
- {
- continue;
- }
- }
- }
_ = js.imports_from_other_chunks.getOrPutValue(
c.allocator,
@as(u32, @truncate(other_chunk_index)),
@@ -6913,8 +7401,15 @@ pub const LinkerContext = struct {
const trace = tracer(@src(), "generateCodeForFileInChunkJS");
defer trace.end();
+ // Client bundles for Kit must be globally allocated,
+ // as it must outlive the bundle task.
+ const use_global_allocator = c.kit_dev_server != null and
+ c.parse_graph.ast.items(.target)[part_range.source_index.get()].kitRenderer() == .client;
+
var arena = &worker.temporary_arena;
- var buffer_writer = js_printer.BufferWriter.init(worker.allocator) catch unreachable;
+ var buffer_writer = js_printer.BufferWriter.init(
+ if (use_global_allocator) default_allocator else worker.allocator,
+ ) catch bun.outOfMemory();
defer _ = arena.reset(.retain_capacity);
worker.stmt_list.reset();
@@ -7251,10 +7746,9 @@ pub const LinkerContext = struct {
}
{
const input = c.parse_graph.input_files.items(.source)[chunk.entry_point.source_index].path;
- // var buf = MutableString.initEmpty(c.allocator);
- // js_printer.quoteForJSONBuffer(input.pretty, &buf, true) catch bun.outOfMemory();
- // const str = buf.toOwnedSliceLeaky(); // c.allocator is an arena
- const str = try std.fmt.allocPrint(c.allocator, "{d}", .{input.hashForKit()});
+ var buf = MutableString.initEmpty(worker.allocator);
+ js_printer.quoteForJSONBuffer(input.pretty, &buf, true) catch bun.outOfMemory();
+ const str = buf.toOwnedSliceLeaky(); // worker.allocator is an arena
j.pushStatic(str);
line_offset.advance(str);
}
@@ -8078,9 +8572,6 @@ pub const LinkerContext = struct {
ast: *const JSAst,
) !bool {
const record = ast.import_records.at(import_record_index);
- if (record.tag.isReactReference())
- return false;
-
// Is this an external import?
if (!record.source_index.isValid()) {
// Keep the "import" statement if import statements are supported
@@ -8731,7 +9222,7 @@ pub const LinkerContext = struct {
allocator: std.mem.Allocator,
ast: *const JSAst,
) !void {
- _ = source_index; // autofix
+ _ = source_index; // may be used
const receiver_args = try allocator.dupe(G.Arg, &.{
.{ .binding = Binding.alloc(allocator, B.Identifier{ .ref = ast.module_ref }, Logger.Loc.Empty) },
@@ -8745,10 +9236,8 @@ pub const LinkerContext = struct {
.s_local => |st| {
// TODO: check if this local is immediately assigned
// `require()` if so, we will instrument it with hot module
- // reloading. other cases of `require` won't receive receive
- // updates.
- _ = st; // autofix
-
+ // reloading. other cases of `require` won't receive updates.
+ _ = st;
try stmts.inside_wrapper_suffix.append(stmt);
},
.s_import => |st| {
@@ -8758,31 +9247,32 @@ pub const LinkerContext = struct {
// automatically, instead of with bundler-added
// annotations like '__commonJS'.
//
- // this is not done in the parse step because the final
+ // this cannot be done in the parse step because the final
// pretty path is not yet known. the other statement types
// are not handled here because some of those generate
// new local variables (it is too late to do that here).
const record = ast.import_records.at(st.import_record_index);
- const path = c.parse_graph.input_files.items(.source)[record.source_index.get()].path;
+ const path = if (record.source_index.isValid())
+ c.parse_graph.input_files.items(.source)[record.source_index.get()].path
+ else
+ record.path;
+ const is_builtin = record.tag == .builtin or record.tag == .bun_test or record.tag == .bun;
const is_bare_import = st.star_name_loc == null and st.items.len == 0 and st.default_name == null;
- const key_expr = Expr.init(E.InlinedEnum, .{
- .comment = path.pretty,
- .value = Expr.init(E.Number, .{
- .value = @floatFromInt(path.hashForKit()),
- }, stmt.loc),
+ const key_expr = Expr.init(E.String, .{
+ .data = path.pretty,
}, stmt.loc);
// module.importSync('path', (module) => ns = module)
const call = Expr.init(E.Call, .{
.target = Expr.init(E.Dot, .{
.target = module_id,
- .name = "importSync",
+ .name = if (is_builtin) "importBuiltin" else "importSync",
.name_loc = stmt.loc,
}, stmt.loc),
.args = js_ast.ExprNodeList.init(
- try allocator.dupe(Expr, if (is_bare_import)
+ try allocator.dupe(Expr, if (is_bare_import or is_builtin)
&.{key_expr}
else
&.{
@@ -9044,6 +9534,16 @@ pub const LinkerContext = struct {
// Turn each module into a function if this is Kit
var stmt_storage: Stmt = undefined;
if (c.options.output_format == .internal_kit_dev and !part_range.source_index.isRuntime()) {
+ if (stmts.all_stmts.items.len == 0) {
+ // TODO: these chunks should just not exist in the first place
+ // they seem to happen on the entry point? or JSX? not clear
+ // removing the chunk in the parser breaks the liveness analysis.
+ //
+ // The workaround is to end early on empty files, and filter out
+ // empty files later.
+ return .{ .result = .{ .code = "", .source_map = null } };
+ }
+
var clousure_args = std.BoundedArray(G.Arg, 2).fromSlice(&.{
.{ .binding = Binding.alloc(temp_allocator, B.Identifier{
.ref = ast.module_ref,
@@ -9533,8 +10033,8 @@ pub const LinkerContext = struct {
for (chunk.content.javascript.parts_in_chunk_in_order, 0..) |part_range, i| {
remaining_part_ranges[0] = .{
.part_range = part_range,
- .i = @as(u32, @truncate(i)),
- .task = ThreadPoolLib.Task{
+ .i = @truncate(i),
+ .task = .{
.callback = &generateCompileResultForJSChunk,
},
.ctx = chunk_ctx,
@@ -9557,6 +10057,36 @@ pub const LinkerContext = struct {
c.source_maps.quoted_contents_tasks.len = 0;
}
+ // When kit.DevServer is in use, we're going to take a different code path at the end.
+ // We want to extract the source code of each part instead of combining it into a single file.
+ // This is so that when hot-module updates happen, we can:
+ //
+ // - Reuse unchanged parts to assemble the full bundle if Cmd+R is used in the browser
+ // - Send only the newly changed code through a socket.
+ //
+ // When this isn't the initial bundle, the data we would get concatenating
+ // everything here would be useless.
+ if (c.kit_dev_server) |dev_server| {
+ const input_file_sources = c.parse_graph.input_files.items(.source);
+ const targets = c.parse_graph.ast.items(.target);
+ for (chunks) |chunk| {
+ for (
+ chunk.content.javascript.parts_in_chunk_in_order,
+ chunk.compile_results_for_chunk,
+ ) |part_range, compile_result| {
+ try dev_server.receiveChunk(
+ input_file_sources[part_range.source_index.get()].path.text,
+ targets[part_range.source_index.get()].kitRenderer(),
+ compile_result,
+ );
+ }
+ }
+
+ // kit.main_path = default_allocator.dupe(u8, c.parse_graph.input_files.items(.source)[chunks[0].entry_point.source_index].path.pretty) catch bun.outOfMemory();
+
+ return std.ArrayList(options.OutputFile).init(bun.default_allocator);
+ }
+
{
debug(" START {d} postprocess chunks", .{chunks.len});
defer debug(" DONE {d} postprocess chunks", .{chunks.len});
@@ -9666,152 +10196,10 @@ pub const LinkerContext = struct {
}
}
- const react_client_components_manifest: []u8 = if (c.resolver.opts.react_server_components) brk: {
- var bytes = std.ArrayList(u8).init(c.allocator);
- defer bytes.deinit();
- const all_sources = c.parse_graph.input_files.items(.source);
- var all_named_exports = c.graph.ast.items(.named_exports);
- var export_names = std.ArrayList(Api.StringPointer).init(c.allocator);
- defer export_names.deinit();
-
- var client_modules = std.ArrayList(Api.ClientServerModule).initCapacity(c.allocator, c.graph.react_client_component_boundary.count()) catch unreachable;
- defer client_modules.deinit();
- var server_modules = std.ArrayList(Api.ClientServerModule).initCapacity(c.allocator, c.graph.react_server_component_boundary.count()) catch unreachable;
- defer server_modules.deinit();
-
- var react_client_components_iterator = c.graph.react_client_component_boundary.iterator(.{});
- var react_server_components_iterator = c.graph.react_server_component_boundary.iterator(.{});
-
- var sorted_client_component_ids = std.ArrayList(u32).initCapacity(c.allocator, client_modules.capacity) catch unreachable;
- defer sorted_client_component_ids.deinit();
- while (react_client_components_iterator.next()) |source_index| {
- if (!c.graph.files_live.isSet(source_index)) continue;
- sorted_client_component_ids.appendAssumeCapacity(@as(u32, @intCast(source_index)));
- }
-
- var sorted_server_component_ids = std.ArrayList(u32).initCapacity(c.allocator, server_modules.capacity) catch unreachable;
- defer sorted_server_component_ids.deinit();
- while (react_server_components_iterator.next()) |source_index| {
- if (!c.graph.files_live.isSet(source_index)) continue;
- sorted_server_component_ids.appendAssumeCapacity(@as(u32, @intCast(source_index)));
- }
-
- const Sorter = struct {
- sources: []const Logger.Source,
- pub fn isLessThan(ctx: @This(), a_index: u32, b_index: u32) bool {
- const a = ctx.sources[a_index].path.text;
- const b = ctx.sources[b_index].path.text;
- return strings.order(a, b) == .lt;
- }
- };
- std.sort.pdq(u32, sorted_client_component_ids.items, Sorter{ .sources = all_sources }, Sorter.isLessThan);
- std.sort.pdq(u32, sorted_server_component_ids.items, Sorter{ .sources = all_sources }, Sorter.isLessThan);
-
- inline for (.{
- sorted_client_component_ids.items,
- sorted_server_component_ids.items,
- }, .{
- &client_modules,
- &server_modules,
- }) |sorted_component_ids, modules| {
- for (sorted_component_ids) |component_source_index| {
- var source_index_for_named_exports = component_source_index;
-
- const chunk: *Chunk = brk2: {
- for (chunks) |*chunk_| {
- if (!chunk_.entry_point.is_entry_point) continue;
- if (chunk_.entry_point.source_index == @as(u32, @intCast(component_source_index))) {
- break :brk2 chunk_;
- }
-
- if (chunk_.files_with_parts_in_chunk.contains(component_source_index)) {
- source_index_for_named_exports = chunk_.entry_point.source_index;
- break :brk2 chunk_;
- }
- }
-
- @panic("could not find chunk for component");
- };
-
- var grow_length: usize = 0;
-
- const named_exports = all_named_exports[source_index_for_named_exports].keys();
-
- try export_names.ensureUnusedCapacity(named_exports.len);
- const exports_len = @as(u32, @intCast(named_exports.len));
- const exports_start = @as(u32, @intCast(export_names.items.len));
-
- grow_length += chunk.final_rel_path.len;
-
- grow_length += all_sources[component_source_index].path.pretty.len;
-
- for (named_exports) |export_name| {
- try export_names.append(Api.StringPointer{
- .offset = @as(u32, @intCast(bytes.items.len + grow_length)),
- .length = @as(u32, @intCast(export_name.len)),
- });
- grow_length += export_name.len;
- }
-
- try bytes.ensureUnusedCapacity(grow_length);
-
- const input_name = Api.StringPointer{
- .offset = @as(u32, @intCast(bytes.items.len)),
- .length = @as(u32, @intCast(all_sources[component_source_index].path.pretty.len)),
- };
-
- bytes.appendSliceAssumeCapacity(all_sources[component_source_index].path.pretty);
-
- const asset_name = Api.StringPointer{
- .offset = @as(u32, @intCast(bytes.items.len)),
- .length = @as(u32, @intCast(chunk.final_rel_path.len)),
- };
-
- bytes.appendSliceAssumeCapacity(chunk.final_rel_path);
-
- for (named_exports) |export_name| {
- bytes.appendSliceAssumeCapacity(export_name);
- }
-
- modules.appendAssumeCapacity(.{
- .module_id = bun.hash32(all_sources[component_source_index].path.pretty),
- .asset_name = asset_name,
- .input_name = input_name,
- .export_names = .{
- .length = exports_len,
- .offset = exports_start,
- },
- });
- }
- }
-
- if (client_modules.items.len == 0 and server_modules.items.len == 0) break :brk &.{};
-
- var manifest = Api.ClientServerModuleManifest{
- .version = 2,
- .client_modules = client_modules.items,
-
- // TODO:
- .ssr_modules = client_modules.items,
-
- .server_modules = server_modules.items,
- .export_names = export_names.items,
- .contents = bytes.items,
- };
- var byte_buffer = std.ArrayList(u8).initCapacity(bun.default_allocator, bytes.items.len) catch unreachable;
- var byte_buffer_writer = byte_buffer.writer();
- const SchemaWriter = schema.Writer(@TypeOf(&byte_buffer_writer));
- var writer = SchemaWriter.init(&byte_buffer_writer);
- manifest.encode(&writer) catch unreachable;
- break :brk byte_buffer.items;
- } else &.{};
-
var output_files = std.ArrayList(options.OutputFile).initCapacity(
bun.default_allocator,
- (if (c.options.source_maps.hasExternalFiles()) chunks.len * 2 else chunks.len) + @as(
- usize,
- @intFromBool(react_client_components_manifest.len > 0) + c.parse_graph.additional_output_files.items.len,
- ),
+ (if (c.options.source_maps.hasExternalFiles()) chunks.len * 2 else chunks.len) +
+ @as(usize, c.parse_graph.additional_output_files.items.len),
) catch unreachable;
const root_path = c.resolver.opts.output_dir;
@@ -9822,7 +10210,7 @@ pub const LinkerContext = struct {
}
if (root_path.len > 0) {
- try c.writeOutputFilesToDisk(root_path, chunks, react_client_components_manifest, &output_files);
+ try c.writeOutputFilesToDisk(root_path, chunks, &output_files);
} else {
// In-memory build
@@ -9946,25 +10334,6 @@ pub const LinkerContext = struct {
}
}
- if (react_client_components_manifest.len > 0) {
- output_files.appendAssumeCapacity(options.OutputFile.init(
- .{
- .data = .{
- .buffer = .{
- .data = react_client_components_manifest,
- .allocator = bun.default_allocator,
- },
- },
-
- .input_path = try bun.default_allocator.dupe(u8, components_manifest_path),
- .output_path = try bun.default_allocator.dupe(u8, components_manifest_path),
- .loader = .file,
- .input_loader = .file,
- .output_kind = .@"component-manifest",
- },
- ));
- }
-
output_files.appendSliceAssumeCapacity(c.parse_graph.additional_output_files.items);
}
@@ -10006,11 +10375,11 @@ pub const LinkerContext = struct {
if (strings.eqlComptime(from_chunk_dir, "."))
from_chunk_dir = "";
- const additional_files: []AdditionalFile = c.graph.bundler_graph.input_files.items(.additional_files)[piece.index.index].slice();
+ const additional_files: []AdditionalFile = c.parse_graph.input_files.items(.additional_files)[piece.index.index].slice();
bun.assert(additional_files.len > 0);
switch (additional_files[0]) {
.output_file => |output_file_id| {
- const path = c.graph.bundler_graph.additional_output_files.items[output_file_id].dest_path;
+ const path = c.parse_graph.additional_output_files.items[output_file_id].dest_path;
hash.write(bun.path.relativePlatform(from_chunk_dir, path, .posix, false));
},
.source_index => {},
@@ -10028,7 +10397,6 @@ pub const LinkerContext = struct {
c: *LinkerContext,
root_path: string,
chunks: []Chunk,
- react_client_components_manifest: []const u8,
output_files: *std.ArrayList(options.OutputFile),
) !void {
const trace = tracer(@src(), "writeOutputFilesToDisk");
@@ -10269,57 +10637,57 @@ pub const LinkerContext = struct {
}
}
- if (react_client_components_manifest.len > 0) {
- switch (JSC.Node.NodeFS.writeFileWithPathBuffer(
- &pathbuf,
- JSC.Node.Arguments.WriteFile{
- .data = JSC.Node.StringOrBuffer{
- .buffer = JSC.Buffer{
- .buffer = .{
- .ptr = @constCast(react_client_components_manifest.ptr),
- // TODO: handle > 4 GB files
- .len = @as(u32, @truncate(react_client_components_manifest.len)),
- .byte_len = @as(u32, @truncate(react_client_components_manifest.len)),
- },
- },
- },
- .encoding = .buffer,
- .dirfd = bun.toFD(root_dir.fd),
- .file = .{
- .path = JSC.Node.PathLike{
- .string = JSC.PathString.init(components_manifest_path),
- },
- },
- },
- )) {
- .err => |err| {
- const utf8 = err.toSystemError().message.toUTF8(bun.default_allocator);
- defer utf8.deinit();
- c.log.addErrorFmt(null, Logger.Loc.Empty, bun.default_allocator, "{} writing chunk {}", .{
- bun.fmt.quote(utf8.slice()),
- bun.fmt.quote(components_manifest_path),
- }) catch unreachable;
- return error.WriteFailed;
- },
- .result => {},
- }
+ // if (react_client_components_manifest.len > 0) {
+ // switch (JSC.Node.NodeFS.writeFileWithPathBuffer(
+ // &pathbuf,
+ // JSC.Node.Arguments.WriteFile{
+ // .data = JSC.Node.StringOrBuffer{
+ // .buffer = JSC.Buffer{
+ // .buffer = .{
+ // .ptr = @constCast(react_client_components_manifest.ptr),
+ // // TODO: handle > 4 GB files
+ // .len = @as(u32, @truncate(react_client_components_manifest.len)),
+ // .byte_len = @as(u32, @truncate(react_client_components_manifest.len)),
+ // },
+ // },
+ // },
+ // .encoding = .buffer,
+ // .dirfd = bun.toFD(root_dir.fd),
+ // .file = .{
+ // .path = JSC.Node.PathLike{
+ // .string = JSC.PathString.init(components_manifest_path),
+ // },
+ // },
+ // },
+ // )) {
+ // .err => |err| {
+ // const utf8 = err.toSystemError().message.toUTF8(bun.default_allocator);
+ // defer utf8.deinit();
+ // c.log.addErrorFmt(null, Logger.Loc.Empty, bun.default_allocator, "{} writing chunk {}", .{
+ // bun.fmt.quote(utf8.slice()),
+ // bun.fmt.quote(components_manifest_path),
+ // }) catch unreachable;
+ // return error.WriteFailed;
+ // },
+ // .result => {},
+ // }
- output_files.appendAssumeCapacity(
- options.OutputFile.init(
- options.OutputFile.Options{
- .data = .{
- .saved = 0,
- },
- .loader = .file,
- .input_loader = .file,
- .output_kind = .@"component-manifest",
- .size = @as(u32, @truncate(react_client_components_manifest.len)),
- .input_path = bun.default_allocator.dupe(u8, components_manifest_path) catch unreachable,
- .output_path = bun.default_allocator.dupe(u8, components_manifest_path) catch unreachable,
- },
- ),
- );
- }
+ // output_files.appendAssumeCapacity(
+ // options.OutputFile.init(
+ // options.OutputFile.Options{
+ // .data = .{
+ // .saved = 0,
+ // },
+ // .loader = .file,
+ // .input_loader = .file,
+ // .output_kind = .@"component-manifest",
+ // .size = @as(u32, @truncate(react_client_components_manifest.len)),
+ // .input_path = bun.default_allocator.dupe(u8, components_manifest_path) catch unreachable,
+ // .output_path = bun.default_allocator.dupe(u8, components_manifest_path) catch unreachable,
+ // },
+ // ),
+ // );
+ // }
{
const offset = output_files.items.len;
@@ -10453,13 +10821,9 @@ pub const LinkerContext = struct {
);
// TODO: CSS AST
- var imports_a_boundary = false;
- const use_directive = c.graph.useDirectiveBoundary(source_index);
for (import_records[source_index].slice()) |*record| {
- const is_boundary = use_directive.isBoundary(record.tag.useDirective());
- imports_a_boundary = use_directive != .none and (imports_a_boundary or is_boundary);
- if (record.source_index.isValid() and !is_boundary and !c.isExternalDynamicImport(record, source_index)) {
+ if (record.source_index.isValid() and !c.isExternalDynamicImport(record, source_index)) {
c.markFileReachableForCodeSplitting(
record.source_index.get(),
entry_points_count,
@@ -10476,12 +10840,6 @@ pub const LinkerContext = struct {
for (parts_in_file) |part| {
for (part.dependencies.slice()) |dependency| {
if (dependency.source_index.get() != source_index) {
- if (imports_a_boundary and
- // "use client" -> "use server" imports don't
- use_directive.isBoundary(c.graph.files.items(.entry_point_kind)[dependency.source_index.get()]
- .useDirective()))
- continue;
-
c.markFileReachableForCodeSplitting(
dependency.source_index.get(),
entry_points_count,
@@ -10507,7 +10865,7 @@ pub const LinkerContext = struct {
if (comptime bun.Environment.allow_assert) {
debugTreeShake("markFileLiveForTreeShaking({d}, {s}) = {s}", .{
source_index,
- c.parse_graph.input_files.get(source_index).source.path.text,
+ c.parse_graph.input_files.get(source_index).source.path.pretty,
if (c.graph.files_live.isSet(source_index)) "seen" else "not seen",
});
}
@@ -10615,7 +10973,7 @@ pub const LinkerContext = struct {
if (comptime bun.Environment.isDebug) {
debugTreeShake("markPartLiveForTreeShaking({d}): {s}:{d} = {d}, {s}", .{
source_index,
- c.parse_graph.input_files.get(source_index).source.path.text,
+ c.parse_graph.input_files.get(source_index).source.path.pretty,
part_index,
if (part.stmts.len > 0) part.stmts[0].loc.start else Logger.Loc.Empty.start,
if (part.stmts.len > 0) @tagName(part.stmts[0].data) else @tagName(Stmt.empty().data),
@@ -11223,7 +11581,7 @@ pub const LinkerContext = struct {
imports_to_bind: *RefImportData,
source_index: Index.Int,
) void {
- var named_imports = named_imports_ptr.cloneWithAllocator(c.allocator) catch unreachable;
+ var named_imports = named_imports_ptr.clone(c.allocator) catch bun.outOfMemory();
defer named_imports_ptr.* = named_imports;
const Sorter = struct {
@@ -11248,13 +11606,10 @@ pub const LinkerContext = struct {
const import_ref = ref;
var re_exports = std.ArrayList(js_ast.Dependency).init(c.allocator);
- const result = c.matchImportWithExport(
- .{
- .source_index = Index.source(source_index),
- .import_ref = import_ref,
- },
- &re_exports,
- );
+ const result = c.matchImportWithExport(.{
+ .source_index = Index.source(source_index),
+ .import_ref = import_ref,
+ }, &re_exports);
switch (result.kind) {
.normal => {
@@ -12124,7 +12479,7 @@ pub const CrossChunkImport = struct {
}
};
-const CompileResult = union(enum) {
+pub const CompileResult = union(enum) {
javascript: struct {
source_index: Index.Int,
result: js_printer.PrintResult,
@@ -12240,113 +12595,6 @@ fn cheapPrefixNormalizer(prefix: []const u8, suffix: []const u8) [2]string {
};
}
-const components_manifest_path = "./components-manifest.blob";
-
-// For Server Components, we generate an entry point which re-exports all client components
-// This is a "shadow" of the server entry point.
-// The client is expected to import this shadow entry point
-const ShadowEntryPoint = struct {
- from_source_index: Index.Int,
- to_source_index: Index.Int,
-
- named_exports: bun.BabyList(NamedExport) = .{},
-
- pub const NamedExport = struct {
- // TODO: packed string
- from: string,
- to: string,
- source_index: Index.Int,
- };
-
- pub const Builder = struct {
- source_code_buffer: MutableString,
- ctx: *BundleV2,
- resolved_source_indices: std.ArrayList(Index.Int),
- shadow: *ShadowEntryPoint,
-
- pub fn addClientComponent(
- this: *ShadowEntryPoint.Builder,
- source_index: usize,
- ) void {
- var writer = this.source_code_buffer.writer();
- const path = this.ctx.graph.input_files.items(.source)[source_index].path;
- // TODO: tree-shaking to named imports only
- writer.print(
- \\// {s}
- \\import {} from '${d}';
- \\export {};
- \\
- ,
- .{
- path.pretty,
- ImportsFormatter{ .ctx = this.ctx, .source_index = @as(Index.Int, @intCast(source_index)), .pretty = path.pretty },
- bun.fmt.hexIntUpper(bun.hash(path.pretty)),
- ExportsFormatter{ .ctx = this.ctx, .source_index = @as(Index.Int, @intCast(source_index)), .pretty = path.pretty, .shadow = this.shadow },
- },
- ) catch unreachable;
- this.resolved_source_indices.append(@as(Index.Int, @truncate(source_index))) catch unreachable;
- }
- };
- const ImportsFormatter = struct {
- ctx: *BundleV2,
- pretty: string,
- source_index: Index.Int,
- pub fn format(self: ImportsFormatter, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void {
- var this = self.ctx;
- const named_exports: *js_ast.Ast.NamedExports = &this.graph.ast.items(.named_exports)[self.source_index];
- try writer.writeAll("{");
- for (named_exports.keys()) |*named| {
- named.* = try std.fmt.allocPrint(
- this.graph.allocator,
- "${}_{s}",
- .{
- bun.fmt.hexIntLower(bun.hash(self.pretty)),
- named.*,
- },
- );
- }
- try named_exports.reIndex();
-
- for (named_exports.keys(), 0..) |name, i| {
- try writer.writeAll(name);
- if (i < named_exports.count() - 1) {
- try writer.writeAll(" , ");
- }
- }
- try writer.writeAll("}");
- }
- };
-
- const ExportsFormatter = struct {
- ctx: *BundleV2,
- pretty: string,
- source_index: Index.Int,
- shadow: *ShadowEntryPoint,
- pub fn format(self: ExportsFormatter, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void {
- var this = self.ctx;
- const named_exports: js_ast.Ast.NamedExports = this.graph.ast.items(.named_exports)[self.source_index];
- try writer.writeAll("{");
- var shadow = self.shadow;
- try shadow.named_exports.ensureUnusedCapacity(this.graph.allocator, named_exports.count());
- const last = named_exports.count() - 1;
- for (named_exports.keys(), 0..) |name, i| {
- try shadow.named_exports.push(this.graph.allocator, .{
- .from = name,
- .to = name,
- .source_index = self.source_index,
- });
-
- try writer.writeAll(name);
-
- if (i < last) {
- try writer.writeAll(" , ");
- }
- }
- try writer.writeAll("}");
- }
- };
-};
-
fn getRedirectId(id: u32) ?u32 {
if (id == std.math.maxInt(u32)) {
return null;
@@ -12368,3 +12616,334 @@ fn targetFromHashbang(buffer: []const u8) ?options.Target {
return null;
}
+
+/// Utility to construct `Ast`s intended for generated code, such as the
+/// boundary modules when dealing with server components. This is a saner
+/// alternative to building a string, then sending it through `js_parser`
+///
+/// For in-depth details on the fields, most of these are documented
+/// inside of `js_parser`
+pub const AstBuilder = struct {
+ allocator: std.mem.Allocator,
+ source: *const Logger.Source,
+ source_index: u31,
+ stmts: std.ArrayListUnmanaged(Stmt),
+ scopes: std.ArrayListUnmanaged(*Scope),
+ symbols: std.ArrayListUnmanaged(Symbol),
+ import_records: std.ArrayListUnmanaged(ImportRecord),
+ named_imports: js_ast.Ast.NamedImports,
+ named_exports: js_ast.Ast.NamedExports,
+ import_records_for_current_part: std.ArrayListUnmanaged(u32),
+ export_star_import_records: std.ArrayListUnmanaged(u32),
+ current_scope: *Scope,
+ log: Logger.Log,
+ module_ref: Ref,
+ declared_symbols: js_ast.DeclaredSymbol.List,
+ /// When set, codegen is altered
+ hot_reloading: bool,
+
+ // stub fields for ImportScanner duck typing
+ comptime options: js_parser.Parser.Options = .{
+ .jsx = .{},
+ .bundle = true,
+ },
+ comptime import_items_for_namespace: struct {
+ pub fn get(_: @This(), _: Ref) ?js_parser.ImportItemForNamespaceMap {
+ return null;
+ }
+ } = .{},
+ pub const parser_features = struct {
+ pub const typescript = false;
+ };
+
+ pub fn init(allocator: std.mem.Allocator, source: *const Logger.Source, hot_reloading: bool) !AstBuilder {
+ const scope = try allocator.create(Scope);
+ scope.* = .{
+ .kind = .entry,
+ .label_ref = null,
+ .parent = null,
+ .generated = .{},
+ };
+ var ab: AstBuilder = .{
+ .allocator = allocator,
+ .current_scope = scope,
+ .source = source,
+ .source_index = @intCast(source.index.get()),
+ .stmts = .{},
+ .scopes = .{},
+ .symbols = .{},
+ .import_records = .{},
+ .import_records_for_current_part = .{},
+ .named_imports = .{},
+ .named_exports = .{},
+ .log = Logger.Log.init(allocator),
+ .export_star_import_records = .{},
+ .module_ref = Ref.None,
+ .declared_symbols = .{},
+ .hot_reloading = hot_reloading,
+ };
+ ab.module_ref = try ab.newSymbol(.other, "module");
+ return ab;
+ }
+
+ pub fn pushScope(p: *AstBuilder, kind: Scope.Kind) *js_ast.Scope {
+ try p.scopes.ensureUnusedCapacity(p.allocator, 1);
+ try p.current_scope.children.ensureUnusedCapacity(p.allocator, 1);
+ const scope = try p.allocator.create(Scope);
+ scope.* = .{
+ .kind = kind,
+ .label_ref = null,
+ .parent = p.current_scope,
+ .generated = .{},
+ };
+ p.current_scope.children.appendAssumeCapacity(scope);
+ p.scopes.appendAssumeCapacity(p.current_scope);
+ p.current_scope = scope;
+ return scope;
+ }
+
+ pub fn popScope(p: *AstBuilder) void {
+ p.current_scope = p.scopes.pop();
+ }
+
+ pub fn newSymbol(p: *AstBuilder, kind: Symbol.Kind, identifier: []const u8) !Ref {
+ const inner_index: Ref.Int = @intCast(p.symbols.items.len);
+ try p.symbols.append(p.allocator, .{
+ .kind = kind,
+ .original_name = identifier,
+ .debug_mode_source_index = if (Environment.allow_assert) @intCast(p.source_index) else 0,
+ });
+ const ref: Ref = .{
+ .inner_index = inner_index,
+ .source_index = p.source_index,
+ .tag = .symbol,
+ };
+ try p.current_scope.generated.push(p.allocator, ref);
+ return ref;
+ }
+
+ pub fn getSymbol(p: *AstBuilder, ref: Ref) *Symbol {
+ bun.assert(ref.source_index == p.source.index.get());
+ return &p.symbols.items[ref.inner_index];
+ }
+
+ pub fn addImportRecord(p: *AstBuilder, path: []const u8, kind: ImportKind) !u32 {
+ const index = p.import_records.items.len;
+ try p.import_records.append(p.allocator, .{
+ .path = bun.fs.Path.init(path),
+ .kind = kind,
+ .range = .{},
+ });
+ return @intCast(index);
+ }
+
+ pub fn addImportStmt(
+ p: *AstBuilder,
+ path: []const u8,
+ identifiers_to_import: anytype,
+ ) ![identifiers_to_import.len]Expr {
+ var out: [identifiers_to_import.len]Expr = undefined;
+
+ const record = try p.addImportRecord(path, .stmt);
+
+ var path_name = bun.fs.PathName.init(path);
+ const name = try strings.append(p.allocator, "import_", try path_name.nonUniqueNameString(p.allocator));
+ const namespace_ref = try p.newSymbol(.other, name);
+
+ const clauses = try p.allocator.alloc(js_ast.ClauseItem, identifiers_to_import.len);
+
+ inline for (identifiers_to_import, &out, clauses) |import_id_untyped, *out_ref, *clause| {
+ const import_id: []const u8 = import_id_untyped; // must be given '[N][]const u8'
+ const ref = try p.newSymbol(.import, import_id);
+ if (p.hot_reloading) {
+ p.getSymbol(ref).namespace_alias = .{
+ .namespace_ref = namespace_ref,
+ .alias = import_id,
+ .import_record_index = record,
+ };
+ }
+ out_ref.* = p.newExpr(E.ImportIdentifier{ .ref = ref });
+ clause.* = .{
+ .name = .{ .loc = Logger.Loc.Empty, .ref = ref },
+ .original_name = import_id,
+ .alias = import_id,
+ };
+ }
+
+ try p.appendStmt(S.Import{
+ .namespace_ref = namespace_ref,
+ .import_record_index = record,
+ .items = clauses,
+ .is_single_line = identifiers_to_import.len < 1,
+ });
+
+ return out;
+ }
+
+ pub fn appendStmt(p: *AstBuilder, data: anytype) !void {
+ try p.stmts.ensureUnusedCapacity(p.allocator, 1);
+ p.stmts.appendAssumeCapacity(p.newStmt(data));
+ }
+
+ pub fn newStmt(p: *AstBuilder, data: anytype) Stmt {
+ _ = p;
+ return Stmt.alloc(@TypeOf(data), data, Logger.Loc.Empty);
+ }
+
+ pub fn newExpr(p: *AstBuilder, data: anytype) Expr {
+ _ = p;
+ return Expr.init(@TypeOf(data), data, Logger.Loc.Empty);
+ }
+
+ pub fn newExternalSymbol(p: *AstBuilder, name: []const u8) !Ref {
+ const ref = try p.newSymbol(.other, name);
+ const sym = p.getSymbol(ref);
+ sym.must_not_be_renamed = true;
+ return ref;
+ }
+
+ pub fn toBundledAst(p: *AstBuilder) !js_ast.BundledAst {
+ // TODO: missing import scanner
+ bun.assert(p.scopes.items.len == 0);
+ const module_scope = p.current_scope;
+
+ var parts = try js_ast.Part.List.initCapacity(p.allocator, 2);
+ parts.len = 2;
+ parts.mut(0).* = .{};
+ parts.mut(1).* = .{
+ .stmts = p.stmts.items,
+ .can_be_removed_if_unused = false,
+
+ // pretend that every symbol was used
+ .symbol_uses = uses: {
+ var map: js_ast.Part.SymbolUseMap = .{};
+ try map.ensureTotalCapacity(p.allocator, p.symbols.items.len);
+ for (0..p.symbols.items.len) |i| {
+ map.putAssumeCapacity(Ref{
+ .tag = .symbol,
+ .source_index = p.source_index,
+ .inner_index = @intCast(i),
+ }, .{ .count_estimate = 1 });
+ }
+ break :uses map;
+ },
+ };
+
+ const single_u32 = try BabyList(u32).fromSlice(p.allocator, &.{1});
+
+ var top_level_symbols_to_parts = js_ast.Ast.TopLevelSymbolToParts{};
+ try top_level_symbols_to_parts.entries.setCapacity(p.allocator, module_scope.generated.len);
+ top_level_symbols_to_parts.entries.len = module_scope.generated.len;
+ const slice = top_level_symbols_to_parts.entries.slice();
+ for (
+ slice.items(.key),
+ slice.items(.value),
+ module_scope.generated.slice(),
+ ) |*k, *v, ref| {
+ k.* = ref;
+ v.* = single_u32;
+ }
+ try top_level_symbols_to_parts.reIndex(p.allocator);
+
+ // For more details on this section, look at js_parser.toAST
+ // This is mimicking how it calls ImportScanner
+ if (p.hot_reloading) {
+ var hmr_transform_ctx = js_parser.ConvertESMExportsForHmr{
+ .last_part = parts.last() orelse
+ unreachable, // was definitely allocated
+ };
+ try hmr_transform_ctx.stmts.ensureTotalCapacity(p.allocator, prealloc_count: {
+ // get an estimate on how many statements there are going to be
+ const count = p.stmts.items.len;
+ break :prealloc_count count + 2;
+ });
+
+ _ = try js_parser.ImportScanner.scan(AstBuilder, p, p.stmts.items, false, true, &hmr_transform_ctx);
+
+ const new_parts = try hmr_transform_ctx.finalize(p, parts.slice());
+ // preserve original capacity
+ parts.len = @intCast(new_parts.len);
+ bun.assert(new_parts.ptr == parts.ptr);
+ } else {
+ const result = try js_parser.ImportScanner.scan(AstBuilder, p, p.stmts.items, false, false, {});
+ parts.mut(1).stmts = result.stmts;
+ }
+
+ parts.mut(1).declared_symbols = p.declared_symbols;
+ parts.mut(1).scopes = p.scopes.items;
+ parts.mut(1).import_record_indices = BabyList(u32).fromList(p.import_records_for_current_part);
+
+ return .{
+ .parts = parts,
+ .module_scope = module_scope.*,
+ .symbols = js_ast.Symbol.List.fromList(p.symbols),
+ .exports_ref = Ref.None,
+ .wrapper_ref = Ref.None,
+ .module_ref = p.module_ref,
+ .import_records = ImportRecord.List.fromList(p.import_records),
+ .export_star_import_records = &.{},
+ .approximate_newline_count = 1,
+ .exports_kind = .esm,
+ .named_imports = p.named_imports,
+ .named_exports = p.named_exports,
+ .top_level_symbols_to_parts = top_level_symbols_to_parts,
+ .char_freq = .{},
+ .flags = .{},
+ // .nested_scope_slot_counts = if (p.options.features.minify_identifiers)
+ // renamer.assignNestedScopeSlots(p.allocator, p.scopes.items[0], p.symbols.items)
+ // else
+ // js_ast.SlotCounts{},
+ };
+ }
+
+ // stub methods for ImportScanner duck typing
+
+ pub fn generateTempRef(ab: *AstBuilder, name: ?[]const u8) Ref {
+ return ab.newSymbol(.other, name orelse "temp") catch bun.outOfMemory();
+ }
+
+ pub fn recordExport(p: *AstBuilder, _: Logger.Loc, alias: []const u8, ref: Ref) !void {
+ if (p.named_exports.get(alias)) |_| {
+ // Duplicate exports are an error
+ Output.panic(
+ "In generated file, duplicate export \"{s}\"",
+ .{alias},
+ );
+ } else {
+ try p.named_exports.put(p.allocator, alias, .{ .alias_loc = Logger.Loc.Empty, .ref = ref });
+ }
+ }
+
+ pub fn recordExportedBinding(p: *AstBuilder, binding: Binding) void {
+ switch (binding.data) {
+ .b_missing => {},
+ .b_identifier => |ident| {
+ p.recordExport(binding.loc, p.symbols.items[ident.ref.innerIndex()].original_name, ident.ref) catch unreachable;
+ },
+ .b_array => |array| {
+ for (array.items) |prop| {
+ p.recordExportedBinding(prop.binding);
+ }
+ },
+ .b_object => |obj| {
+ for (obj.properties) |prop| {
+ p.recordExportedBinding(prop.value);
+ }
+ },
+ }
+ }
+
+ pub fn ignoreUsage(p: *AstBuilder, ref: Ref) void {
+ _ = p;
+ _ = ref;
+ }
+
+ pub fn panic(p: *AstBuilder, comptime fmt: []const u8, args: anytype) noreturn {
+ _ = p;
+ Output.panic(fmt, args);
+ }
+
+ pub fn @"module.exports"(p: *AstBuilder, loc: Logger.Loc) Expr {
+ return p.newExpr(E.Dot{ .name = "exports", .name_loc = loc, .target = p.newExpr(E.Identifier{ .ref = p.module_ref }) });
+ }
+};
diff --git a/src/bunfig.zig b/src/bunfig.zig
index bccc9aa750b35..6ad52eac40a07 100644
--- a/src/bunfig.zig
+++ b/src/bunfig.zig
@@ -684,20 +684,6 @@ pub const Bunfig = struct {
jsx.development = jsx_dev;
}
- switch (comptime cmd) {
- .AutoCommand, .BuildCommand => {
- if (json.get("publicDir")) |public_dir| {
- try this.expectString(public_dir);
- this.bunfig.router = Api.RouteConfig{
- .extensions = &.{},
- .dir = &.{},
- .static_dir = try public_dir.data.e_string.string(allocator),
- };
- }
- },
- else => {},
- }
-
if (json.get("debug")) |expr| {
if (expr.get("editor")) |editor| {
if (editor.asString(allocator)) |value| {
@@ -738,13 +724,6 @@ pub const Bunfig = struct {
}
}
- if (json.get("framework")) |expr| {
- try this.expectString(expr);
- this.bunfig.framework = Api.FrameworkConfig{
- .package = expr.asString(allocator).?,
- };
- }
-
if (json.get("loader")) |expr| {
try this.expect(expr, .e_object);
const properties = expr.data.e_object.properties.slice();
diff --git a/src/cli.zig b/src/cli.zig
index ad59352479eb1..67b44bdfbb775 100644
--- a/src/cli.zig
+++ b/src/cli.zig
@@ -253,7 +253,8 @@ pub const Arguments = struct {
clap.parseParam("--chunk-naming Customize chunk filenames. Defaults to \"[name]-[hash].[ext]\"") catch unreachable,
clap.parseParam("--asset-naming Customize asset filenames. Defaults to \"[name]-[hash].[ext]\"") catch unreachable,
clap.parseParam("--react-fast-refresh Enable React Fast Refresh transform (does not emit hot-module code, use this for testing)") catch unreachable,
- clap.parseParam("--server-components Enable React Server Components (experimental)") catch unreachable,
+ clap.parseParam("--server-components Enable Server Components (experimental)") catch unreachable,
+ clap.parseParam("--define-client ... When --server-components is set, these defines are applied to client components. Same format as --define") catch unreachable,
clap.parseParam("--no-bundle Transpile file only, do not bundle") catch unreachable,
clap.parseParam("--emit-dce-annotations Re-emit DCE annotations in bundles. Enabled by default unless --minify-whitespace is passed.") catch unreachable,
clap.parseParam("--minify Enable all minification flags") catch unreachable,
@@ -856,7 +857,19 @@ pub const Arguments = struct {
}
if (args.flag("--server-components")) {
- ctx.bundler_options.react_server_components = true;
+ if (!bun.FeatureFlags.cli_server_components) {
+ // TODO: I want to disable this in non-canary
+ // but I also want to have tests that can run for PRs
+ }
+ ctx.bundler_options.server_components = true;
+ if (opts.target) |target| {
+ if (!bun.options.Target.from(target).isServerSide()) {
+ bun.Output.errGeneric("Cannot use client-side --target={s} with --server-components", .{@tagName(target)});
+ Global.crash();
+ }
+ } else {
+ opts.target = .bun;
+ }
}
if (args.flag("--react-fast-refresh")) {
@@ -986,7 +999,7 @@ pub const Arguments = struct {
}
if (cmd == .BuildCommand) {
- if (opts.entry_points.len == 0 and opts.framework == null) {
+ if (opts.entry_points.len == 0) {
Output.prettyErrorln("bun build v" ++ Global.package_json_version_with_sha ++ "", .{});
Output.prettyError("error: Missing entrypoints. What would you like to bundle?\n\n", .{});
Output.flush();
@@ -1320,7 +1333,7 @@ pub const Command = struct {
entry_naming: []const u8 = "[dir]/[name].[ext]",
chunk_naming: []const u8 = "./[name]-[hash].[ext]",
asset_naming: []const u8 = "./[name]-[hash].[ext]",
- react_server_components: bool = false,
+ server_components: bool = false,
react_fast_refresh: bool = false,
code_splitting: bool = false,
transform_only: bool = false,
diff --git a/src/cli/build_command.zig b/src/cli/build_command.zig
index f5bb2afd6c141..5630ee9b14950 100644
--- a/src/cli/build_command.zig
+++ b/src/cli/build_command.zig
@@ -30,9 +30,7 @@ const bundler = bun.bundler;
const DotEnv = @import("../env_loader.zig");
const fs = @import("../fs.zig");
-const Router = @import("../router.zig");
const BundleV2 = @import("../bundler/bundle_v2.zig").BundleV2;
-var estimated_input_lines_of_code_: usize = undefined;
pub const BuildCommand = struct {
const compile_define_keys = &.{
@@ -40,13 +38,10 @@ pub const BuildCommand = struct {
"process.arch",
};
- pub fn exec(
- ctx: Command.Context,
- ) !void {
+ pub fn exec(ctx: Command.Context) !void {
Global.configureAllocator(.{ .long_running = true });
const allocator = ctx.allocator;
var log = ctx.log;
- estimated_input_lines_of_code_ = 0;
if (ctx.bundler_options.compile) {
// set this early so that externals are set up correctly and define is right
ctx.args.target = .bun;
@@ -56,12 +51,7 @@ pub const BuildCommand = struct {
if (ctx.bundler_options.compile) {
const compile_define_values = compile_target.defineValues();
- if (ctx.args.define == null) {
- ctx.args.define = .{
- .keys = compile_define_keys,
- .values = compile_define_values,
- };
- } else if (ctx.args.define) |*define| {
+ if (ctx.args.define) |*define| {
var keys = try std.ArrayList(string).initCapacity(bun.default_allocator, compile_define_keys.len + define.keys.len);
keys.appendSliceAssumeCapacity(compile_define_keys);
keys.appendSliceAssumeCapacity(define.keys);
@@ -71,6 +61,11 @@ pub const BuildCommand = struct {
define.keys = keys.items;
define.values = values.items;
+ } else {
+ ctx.args.define = .{
+ .keys = compile_define_keys,
+ .values = compile_define_values,
+ };
}
}
@@ -85,13 +80,14 @@ pub const BuildCommand = struct {
Global.exit(1);
return;
}
+
var outfile = ctx.bundler_options.outfile;
this_bundler.options.public_path = ctx.bundler_options.public_path;
this_bundler.options.entry_naming = ctx.bundler_options.entry_naming;
this_bundler.options.chunk_naming = ctx.bundler_options.chunk_naming;
this_bundler.options.asset_naming = ctx.bundler_options.asset_naming;
- this_bundler.options.react_server_components = ctx.bundler_options.react_server_components;
+ this_bundler.options.server_components = ctx.bundler_options.server_components;
this_bundler.options.react_fast_refresh = ctx.bundler_options.react_fast_refresh;
this_bundler.options.inline_entrypoint_import_meta_main = ctx.bundler_options.inline_entrypoint_import_meta_main;
this_bundler.options.code_splitting = ctx.bundler_options.code_splitting;
@@ -100,6 +96,12 @@ pub const BuildCommand = struct {
this_bundler.options.minify_identifiers = ctx.bundler_options.minify_identifiers;
this_bundler.options.emit_dce_annotations = ctx.bundler_options.emit_dce_annotations;
this_bundler.options.ignore_dce_annotations = ctx.bundler_options.ignore_dce_annotations;
+ this_bundler.options.output_dir = ctx.bundler_options.outdir;
+ this_bundler.options.output_format = ctx.bundler_options.output_format;
+
+ if (ctx.bundler_options.output_format == .internal_kit_dev) {
+ this_bundler.options.tree_shaking = false;
+ }
if (ctx.bundler_options.compile) {
if (ctx.bundler_options.code_splitting) {
@@ -161,9 +163,6 @@ pub const BuildCommand = struct {
}
}
- this_bundler.options.output_dir = ctx.bundler_options.outdir;
- this_bundler.options.output_format = ctx.bundler_options.output_format;
-
var src_root_dir_buf: bun.PathBuffer = undefined;
const src_root_dir: string = brk1: {
const path = brk2: {
@@ -194,28 +193,10 @@ pub const BuildCommand = struct {
this_bundler.options.code_splitting = ctx.bundler_options.code_splitting;
this_bundler.options.transform_only = ctx.bundler_options.transform_only;
- if (this_bundler.options.transform_only) {
- this_bundler.options.resolve_mode = .disable;
- }
-
- this_bundler.resolver.opts = this_bundler.options;
-
+ try this_bundler.configureDefines();
this_bundler.configureLinker();
- // This step is optional
- // If it fails for any reason, ignore it and continue bundling
- // This is partially a workaround for the 'error.MissingRoutesDir' error
- this_bundler.configureRouter(true) catch {
- this_bundler.options.routes.routes_enabled = false;
- this_bundler.options.framework = null;
- if (this_bundler.router) |*router| {
- router.config.routes_enabled = false;
- router.config.single_page_app_routing = false;
- router.config.static_dir_enabled = false;
- this_bundler.router = null;
- }
- };
-
+ this_bundler.resolver.opts = this_bundler.options;
this_bundler.options.jsx.development = !this_bundler.options.production;
this_bundler.resolver.opts.jsx.development = this_bundler.options.jsx.development;
@@ -229,6 +210,37 @@ pub const BuildCommand = struct {
.unspecified => {},
}
+ var client_bundler: bundler.Bundler = undefined;
+ if (this_bundler.options.server_components) {
+ client_bundler = try bundler.Bundler.init(allocator, log, ctx.args, null);
+ client_bundler.options = this_bundler.options;
+ client_bundler.options.target = .browser;
+ client_bundler.options.server_components = true;
+ try this_bundler.options.conditions.appendSlice(&.{"react-server"});
+ this_bundler.options.react_fast_refresh = false;
+ this_bundler.options.minify_syntax = true;
+ client_bundler.options.minify_syntax = true;
+ client_bundler.options.define = try options.Define.init(
+ allocator,
+ if (ctx.args.define) |user_defines|
+ try options.Define.Data.fromInput(try options.stringHashMapFromArrays(
+ options.defines.RawDefines,
+ allocator,
+ user_defines.keys,
+ user_defines.values,
+ ), log, allocator)
+ else
+ null,
+ null,
+ );
+
+ try bun.kit.addImportMetaDefines(allocator, this_bundler.options.define, .development, .server);
+ try bun.kit.addImportMetaDefines(allocator, client_bundler.options.define, .development, .client);
+
+ this_bundler.resolver.opts = this_bundler.options;
+ client_bundler.resolver.opts = client_bundler.options;
+ }
+
// var env_loader = this_bundler.env;
if (ctx.debug.dump_environment_variables) {
@@ -268,6 +280,7 @@ pub const BuildCommand = struct {
break :brk (BundleV2.generateFromCLI(
&this_bundler,
+ if (this_bundler.options.server_components) @panic("TODO") else null,
allocator,
bun.JSC.AnyEventLoop.init(ctx.allocator),
std.crypto.random.int(u64),
@@ -284,7 +297,6 @@ pub const BuildCommand = struct {
Output.flush();
exitOrWatch(1, ctx.debug.hot_reload == .watch);
- unreachable;
}).items;
};
const bundled_end = std.time.nanoTimestamp();
@@ -519,7 +531,7 @@ pub const BuildCommand = struct {
}
};
-fn exitOrWatch(code: u8, watch: bool) void {
+fn exitOrWatch(code: u8, watch: bool) noreturn {
if (watch) {
// the watcher thread will exit the process
std.time.sleep(std.math.maxInt(u64) - 1);
diff --git a/src/codegen/kit-codegen.ts b/src/codegen/kit-codegen.ts
index d3bb39f2da29d..2c122c61982fc 100644
--- a/src/codegen/kit-codegen.ts
+++ b/src/codegen/kit-codegen.ts
@@ -41,10 +41,14 @@ const results = await Promise.allSettled(
// @ts-ignore
let code = await result.outputs[0].text();
- // A second pass is used to convert global variables into parameters, while
- // allowing for renaming to properly function when minification is enabled.
- const in_names = ["input_graph", "config", mode === "server" && "server_fetch_function"].filter(Boolean);
- const combined_source = `
+ // A second pass is used to convert global variables into parameters, while
+ // allowing for renaming to properly function when minification is enabled.
+ const in_names = [
+ 'input_graph',
+ 'config',
+ mode === 'server' && 'server_exports'
+ ].filter(Boolean);
+ const combined_source = `
__marker__;
let ${in_names.join(",")};
__marker__(${in_names.join(",")});
diff --git a/src/crash_handler.zig b/src/crash_handler.zig
index f7d77d3d6e75f..39dc6d68c9b15 100644
--- a/src/crash_handler.zig
+++ b/src/crash_handler.zig
@@ -137,10 +137,10 @@ pub const Action = union(enum) {
\\ part range: {d}..{d}
,
.{
- data.linkerContext().graph.bundler_graph.input_files
+ data.linkerContext().parse_graph.input_files
.items(.source)[data.chunk.entry_point.source_index]
.path.text,
- data.linkerContext().graph.bundler_graph.input_files
+ data.linkerContext().parse_graph.input_files
.items(.source)[data.part_range.source_index.get()]
.path.text,
data.part_range.part_index_begin,
diff --git a/src/css/writer.zig b/src/css/writer.zig
deleted file mode 100644
index c557d9982e3b7..0000000000000
--- a/src/css/writer.zig
+++ /dev/null
@@ -1,68 +0,0 @@
-const std = @import("std");
-const Allocator = std.mem.Allocator;
-const bun = @import("root").bun;
-const logger = bun.logger;
-const Log = logger.Log;
-
-pub const css = @import("./css_parser.zig");
-
-/// Wrapper around std.io.GenericWriter
-pub fn GenericWriter(
- comptime Context: type,
- comptime WriteError: type,
- comptime writeFn: fn (context: Context, bytes: []const u8) WriteError!usize,
-) type {
- return struct {
- context: Context,
-
- const Self = @This();
- pub const Error = css.PrintErr;
-
- pub inline fn write(self: Self, bytes: []const u8) Error!usize {
- return writeFn(self.context, bytes) catch ;
- }
-
- pub inline fn writeAll(self: Self, bytes: []const u8) Error!void {
- return @errorCast(self.any().writeAll(bytes));
- }
-
- pub inline fn print(self: Self, comptime format: []const u8, args: anytype) Error!void {
- return @errorCast(self.any().print(format, args));
- }
-
- pub inline fn writeByte(self: Self, byte: u8) Error!void {
- return @errorCast(self.any().writeByte(byte));
- }
-
- pub inline fn writeByteNTimes(self: Self, byte: u8, n: usize) Error!void {
- return @errorCast(self.any().writeByteNTimes(byte, n));
- }
-
- pub inline fn writeBytesNTimes(self: Self, bytes: []const u8, n: usize) Error!void {
- return @errorCast(self.any().writeBytesNTimes(bytes, n));
- }
-
- pub inline fn writeInt(self: Self, comptime T: type, value: T, endian: std.builtin.Endian) Error!void {
- return @errorCast(self.any().writeInt(T, value, endian));
- }
-
- pub inline fn writeStruct(self: Self, value: anytype) Error!void {
- return @errorCast(self.any().writeStruct(value));
- }
-
- pub inline fn writeStructEndian(self: Self, value: anytype, endian: std.builtin.Endian) Error!void {
- return @errorCast(self.any().writeStructEndian(value, endian));
- }
-
- pub inline fn any(self: *const Self) AnyWriter {
- return .{
- .context = @ptrCast(&self.context),
- .writeFn = typeErasedWriteFn,
- };
- }
-
- fn typeErasedWriteFn(context: *const anyopaque, bytes: []const u8) anyerror!usize {
- const ptr: *const Context = @alignCast(@ptrCast(context));
- return writeFn(ptr.*, bytes);
- }
- };
diff --git a/src/darwin_c.zig b/src/darwin_c.zig
index cfea6b1f38671..6771f06ad8bce 100644
--- a/src/darwin_c.zig
+++ b/src/darwin_c.zig
@@ -826,9 +826,9 @@ pub const sockaddr_dl = extern struct {
sdl_slen: u8, // link layer selector length */
sdl_data: [12]u8, // minimum work area, can be larger; contains both if name and ll address */
//#ifndef __APPLE__
- // /* For TokenRing */
- // u_short sdl_rcf; /* source routing control */
- // u_short sdl_route[16]; /* source routing information */
+ // /* For TokenRing */
+ // u_short sdl_rcf; /* source routing control */
+ // u_short sdl_route[16]; /* source routing information */
//#endif
};
diff --git a/src/defines.zig b/src/defines.zig
index 43cdd144b6cce..504585e39e67c 100644
--- a/src/defines.zig
+++ b/src/defines.zig
@@ -236,7 +236,7 @@ pub const Define = struct {
}
}
- pub fn init(allocator: std.mem.Allocator, _user_defines: ?UserDefines, string_defines: ?UserDefinesArray) std.mem.Allocator.Error!*@This() {
+ pub fn init(allocator: std.mem.Allocator, _user_defines: ?UserDefines, string_defines: ?UserDefinesArray) bun.OOM!*@This() {
var define = try allocator.create(Define);
define.allocator = allocator;
define.identifiers = bun.StringHashMap(IdentifierDefine).init(allocator);
diff --git a/src/feature_flags.zig b/src/feature_flags.zig
index 9930474c10d3f..c02c0ac35f824 100644
--- a/src/feature_flags.zig
+++ b/src/feature_flags.zig
@@ -158,7 +158,11 @@ pub fn isLibdeflateEnabled() bool {
return !bun.getRuntimeFeatureFlag("BUN_FEATURE_FLAG_NO_LIBDEFLATE");
}
-/// Enable experimental bundler tools, codenamed "bun kit"
+/// Enable Bun Kit's experimental bundler tools
pub const kit = env.is_canary or env.isDebug;
+/// Enable --server-components
+pub const cli_server_components = kit;
+
+/// Enable CSS handling in `bun build`
pub const css = env.is_canary or env.isDebug;
diff --git a/src/fs.zig b/src/fs.zig
index 20c9317d6f526..d0628f5019be3 100644
--- a/src/fs.zig
+++ b/src/fs.zig
@@ -1403,7 +1403,7 @@ pub const FileSystem = struct {
return cache;
}
- // // Stores the file entries for directories we've listed before
+ // // Stores the file entries for directories we've listed before
// entries_mutex: std.Mutex
// entries map[string]entriesOrErr
@@ -1627,7 +1627,9 @@ threadlocal var join_buf: [1024]u8 = undefined;
pub const Path = struct {
pretty: string,
text: string,
+ // TODO(@paperdave): remove the default of this field.
namespace: string = "unspecified",
+ // TODO(@paperdave): investigate removing or simplifying this property
name: PathName,
is_disabled: bool = false,
is_symlink: bool = false,
@@ -1855,9 +1857,9 @@ pub const Path = struct {
};
}
- pub fn initWithNamespaceVirtual(comptime text: string, comptime namespace: string, comptime package: string) Path {
- return Path{
- .pretty = comptime "node:" ++ package,
+ pub inline fn initWithNamespaceVirtual(comptime text: string, comptime namespace: string, comptime package: string) Path {
+ return comptime Path{
+ .pretty = namespace ++ ":" ++ package,
.is_symlink = true,
.text = text,
.namespace = namespace,
@@ -1865,6 +1867,16 @@ pub const Path = struct {
};
}
+ pub inline fn initWithNamespaceComptime(comptime namespace: string, comptime package: string) Path {
+ return comptime Path{
+ .pretty = namespace ++ ":" ++ package,
+ .is_symlink = true,
+ .text = package,
+ .namespace = namespace,
+ .name = PathName.init(package),
+ };
+ }
+
pub fn isBefore(a: *Path, b: Path) bool {
return a.namespace > b.namespace ||
(a.namespace == b.namespace and (a.text < b.text ||
diff --git a/src/import_record.zig b/src/import_record.zig
index c21e2223c8f18..7e5ceba4a191c 100644
--- a/src/import_record.zig
+++ b/src/import_record.zig
@@ -7,27 +7,20 @@ const Index = @import("ast/base.zig").Index;
const Api = @import("./api/schema.zig").Api;
pub const ImportKind = enum(u8) {
- // An entry point provided by the user
+ /// An entry point provided by the user
entry_point,
-
- // An ES6 import or re-export statement
+ /// An ES6 import or re-export statement
stmt,
-
- // A call to "require()"
+ /// A call to "require()"
require,
-
- // An "import()" expression with a string argument
+ /// An "import()" expression with a string argument
dynamic,
-
/// A call to "require.resolve()"
require_resolve,
-
/// A CSS "@import" rule
at,
-
/// A CSS "@import" rule with import conditions
at_conditional,
-
/// A CSS "url(...)" token
url,
@@ -103,9 +96,8 @@ pub const ImportKind = enum(u8) {
pub const ImportRecord = struct {
range: logger.Range,
path: fs.Path,
-
- /// 0 is invalid
- module_id: u32 = 0,
+ kind: ImportKind,
+ tag: Tag = .none,
source_index: Index = Index.invalid,
@@ -147,9 +139,6 @@ pub const ImportRecord = struct {
/// calling the "__reExport()" helper function
calls_runtime_re_export_fn: bool = false,
- /// Tell the printer to use runtime code to resolve this import/export
- do_commonjs_transform_in_printer: bool = false,
-
/// True for require calls like this: "try { require() } catch {}". In this
/// case we shouldn't generate an error if the path could not be resolved.
is_inside_try_body: bool = false,
@@ -165,10 +154,6 @@ pub const ImportRecord = struct {
/// If true, this import can be removed if it's unused
is_external_without_side_effects: bool = false,
- kind: ImportKind,
-
- tag: Tag = Tag.none,
-
/// Tell the printer to print the record as "foo:my-path" instead of "path"
/// where "foo" is the namespace
///
@@ -185,32 +170,31 @@ pub const ImportRecord = struct {
}
pub const Tag = enum {
+ /// A normal import to a user's source file
none,
- /// JSX auto-import for React Fast Refresh
- react_refresh,
- /// JSX auto-import for jsxDEV or jsx
- jsx_import,
- /// JSX auto-import for Fragment or createElement
- jsx_classic,
- /// Uses the `bun` import specifier
- /// import {foo} from "bun";
+ /// An import to 'bun'
bun,
- /// Uses the `bun:test` import specifier
- /// import {expect} from "bun:test";
+ /// An import to 'bun:test'
bun_test,
+ /// A builtin module, such as `node:fs` or `bun:sqlite`
+ builtin,
+ /// An import to the internal runtime
runtime,
- hardcoded,
- /// A macro: import specifier OR a macro import
+ /// A 'macro:' import namespace or 'with { type: "macro" }'
macro,
- internal,
- /// Referenced "use client"; at the start of the file
+ // TODO: evaluate if the following two can be deleted
+ /// The imported file has "use client" at the start. This is
+ /// a boundary from server -> client side.
react_client_component,
-
- /// A file starting with "use client"; imported a server entry point
- /// We don't actually support this right now.
+ /// The imported file has "use server" at the start. This is
+ /// a boundary from client -> server side.
react_server_component,
+    /// For Bun Kit, if a module in the server graph should actually
+    /// cross over to the SSR graph. See kit.Framework.ServerComponents.separate_ssr_graph
+ kit_resolve_to_ssr_graph,
+
with_type_sqlite,
with_type_sqlite_embedded,
with_type_text,
@@ -239,14 +223,18 @@ pub const ImportRecord = struct {
pub fn isSQLite(this: Tag) bool {
return switch (this) {
- .with_type_sqlite, .with_type_sqlite_embedded => true,
+ .with_type_sqlite,
+ .with_type_sqlite_embedded,
+ => true,
else => false,
};
}
pub fn isReactReference(this: Tag) bool {
return switch (this) {
- .react_client_component, .react_server_component => true,
+ .react_client_component,
+ .react_server_component,
+ => true,
else => false,
};
}
@@ -261,8 +249,8 @@ pub const ImportRecord = struct {
pub fn useDirective(this: Tag) bun.JSAst.UseDirective {
return switch (this) {
- .react_client_component => .@"use client",
- .react_server_component => .@"use server",
+ .react_client_component => .client,
+ .react_server_component => .server,
else => .none,
};
}
diff --git a/src/js_ast.zig b/src/js_ast.zig
index bbd318cbe6093..6389df833ad48 100644
--- a/src/js_ast.zig
+++ b/src/js_ast.zig
@@ -12,8 +12,8 @@ const MutableString = bun.MutableString;
const stringZ = bun.stringZ;
const default_allocator = bun.default_allocator;
const C = bun.C;
-const Ref = @import("ast/base.zig").Ref;
-const Index = @import("ast/base.zig").Index;
+pub const Ref = @import("ast/base.zig").Ref;
+pub const Index = @import("ast/base.zig").Index;
const RefHashCtx = @import("ast/base.zig").RefHashCtx;
const ObjectPool = @import("./pool.zig").ObjectPool;
const ImportRecord = @import("import_record.zig").ImportRecord;
@@ -552,7 +552,7 @@ pub const B = union(Binding.Tag) {
};
pub const ClauseItem = struct {
- alias: string = "",
+ alias: string,
alias_loc: logger.Loc = logger.Loc.Empty,
name: LocRef,
@@ -838,16 +838,15 @@ pub const G = struct {
};
pub const Property = struct {
-
- // This is used when parsing a pattern that uses default values:
- //
- // [a = 1] = [];
- // ({a = 1} = {});
- //
- // It's also used for class fields:
- //
- // class Foo { a = 1 }
- //
+ /// This is used when parsing a pattern that uses default values:
+ ///
+ /// [a = 1] = [];
+ /// ({a = 1} = {});
+ ///
+ /// It's also used for class fields:
+ ///
+ /// class Foo { a = 1 }
+ ///
initializer: ?ExprNodeIndex = null,
kind: Kind = .normal,
flags: Flags.Property.Set = Flags.Property.None,
@@ -1153,55 +1152,53 @@ pub const Symbol = struct {
}
pub const Kind = enum {
-
- // An unbound symbol is one that isn't declared in the file it's referenced
- // in. For example, using "window" without declaring it will be unbound.
+ /// An unbound symbol is one that isn't declared in the file it's referenced
+ /// in. For example, using "window" without declaring it will be unbound.
unbound,
- // This has special merging behavior. You're allowed to re-declare these
- // symbols more than once in the same scope. These symbols are also hoisted
- // out of the scope they are declared in to the closest containing function
- // or module scope. These are the symbols with this kind:
- //
- // - Function arguments
- // - Function statements
- // - Variables declared using "var"
- //
+ /// This has special merging behavior. You're allowed to re-declare these
+ /// symbols more than once in the same scope. These symbols are also hoisted
+ /// out of the scope they are declared in to the closest containing function
+ /// or module scope. These are the symbols with this kind:
+ ///
+ /// - Function arguments
+ /// - Function statements
+ /// - Variables declared using "var"
hoisted,
hoisted_function,
- // There's a weird special case where catch variables declared using a simple
- // identifier (i.e. not a binding pattern) block hoisted variables instead of
- // becoming an error:
- //
- // var e = 0;
- // try { throw 1 } catch (e) {
- // print(e) // 1
- // var e = 2
- // print(e) // 2
- // }
- // print(e) // 0 (since the hoisting stops at the catch block boundary)
- //
- // However, other forms are still a syntax error:
- //
- // try {} catch (e) { let e }
- // try {} catch ({e}) { var e }
- //
- // This symbol is for handling this weird special case.
+ /// There's a weird special case where catch variables declared using a simple
+ /// identifier (i.e. not a binding pattern) block hoisted variables instead of
+ /// becoming an error:
+ ///
+ /// var e = 0;
+ /// try { throw 1 } catch (e) {
+ /// print(e) // 1
+ /// var e = 2
+ /// print(e) // 2
+ /// }
+ /// print(e) // 0 (since the hoisting stops at the catch block boundary)
+ ///
+ /// However, other forms are still a syntax error:
+ ///
+ /// try {} catch (e) { let e }
+ /// try {} catch ({e}) { var e }
+ ///
+ /// This symbol is for handling this weird special case.
catch_identifier,
- // Generator and async functions are not hoisted, but still have special
- // properties such as being able to overwrite previous functions with the
- // same name
+ /// Generator and async functions are not hoisted, but still have special
+ /// properties such as being able to overwrite previous functions with the
+ /// same name
generator_or_async_function,
- // This is the special "arguments" variable inside functions
+ /// This is the special "arguments" variable inside functions
arguments,
- // Classes can merge with TypeScript namespaces.
+ /// Classes can merge with TypeScript namespaces.
class,
- // A class-private identifier (i.e. "#foo").
+ /// A class-private identifier (i.e. "#foo").
private_field,
private_method,
private_get,
@@ -1213,25 +1210,26 @@ pub const Symbol = struct {
private_static_set,
private_static_get_set_pair,
- // Labels are in their own namespace
+ /// Labels are in their own namespace
label,
- // TypeScript enums can merge with TypeScript namespaces and other TypeScript
- // enums.
+ /// TypeScript enums can merge with TypeScript namespaces and other TypeScript
+ /// enums.
ts_enum,
- // TypeScript namespaces can merge with classes, functions, TypeScript enums,
- // and other TypeScript namespaces.
+ /// TypeScript namespaces can merge with classes, functions, TypeScript enums,
+ /// and other TypeScript namespaces.
ts_namespace,
- // In TypeScript, imports are allowed to silently collide with symbols within
- // the module. Presumably this is because the imports may be type-only.
+ /// In TypeScript, imports are allowed to silently collide with symbols within
+ /// the module. Presumably this is because the imports may be type-only.
+ /// Import statement namespace references should NOT have this set.
import,
- // Assigning to a "const" symbol will throw a TypeError at runtime
+ /// Assigning to a "const" symbol will throw a TypeError at runtime
constant,
- // This annotates all other symbols that don't have special behavior.
+ /// This annotates all other symbols that don't have special behavior.
other,
pub fn jsonStringify(self: @This(), writer: anytype) !void {
@@ -1294,7 +1292,7 @@ pub const Symbol = struct {
// single inner array, so you can join the maps together by just make a
// single outer array containing all of the inner arrays. See the comment on
// "Ref" for more detail.
- symbols_for_source: NestedList = NestedList{},
+ symbols_for_source: NestedList = .{},
pub fn dump(this: Map) void {
defer Output.flush();
@@ -1428,21 +1426,6 @@ pub const Symbol = struct {
pub inline fn isHoisted(self: *const Symbol) bool {
return Symbol.isKindHoisted(self.kind);
}
-
- pub fn isReactComponentishName(symbol: *const Symbol) bool {
- switch (symbol.kind) {
- .hoisted, .hoisted_function, .constant, .class, .other => {
- return switch (symbol.original_name[0]) {
- 'A'...'Z' => true,
- else => false,
- };
- },
-
- else => {
- return false;
- },
- }
- }
};
pub const OptionalChain = enum(u1) {
@@ -3257,6 +3240,14 @@ pub const Stmt = struct {
};
};
+ pub fn StoredData(tag: Tag) type {
+ const T = std.meta.FieldType(Data, tag);
+ return switch (@typeInfo(T)) {
+ .Pointer => |ptr| ptr.child,
+ else => T,
+ };
+ }
+
pub fn caresAboutScope(self: *Stmt) bool {
return switch (self.data) {
.s_block, .s_empty, .s_debugger, .s_expr, .s_if, .s_for, .s_for_in, .s_for_of, .s_do_while, .s_while, .s_with, .s_try, .s_switch, .s_return, .s_throw, .s_break, .s_continue, .s_directive => {
@@ -6265,6 +6256,14 @@ pub const Expr = struct {
return @as(Expr.Tag, self) == .e_string;
}
};
+
+ pub fn StoredData(tag: Tag) type {
+ const T = std.meta.FieldType(Data, tag);
+ return switch (@typeInfo(T)) {
+ .Pointer => |ptr| ptr.child,
+ else => T,
+ };
+ }
};
pub const EnumValue = struct {
@@ -6437,7 +6436,7 @@ pub const S = struct {
// when converting this module to a CommonJS module.
namespace_ref: Ref,
default_name: ?LocRef = null,
- items: []ClauseItem = &([_]ClauseItem{}),
+ items: []ClauseItem = &.{},
star_name_loc: ?logger.Loc = null,
import_record_index: u32,
is_single_line: bool = false,
@@ -6806,7 +6805,6 @@ pub const Ast = struct {
runtime_import_record_id: ?u32 = null,
needs_runtime: bool = false,
- externals: []u32 = &[_]u32{},
// This is a list of CommonJS features. When a file uses CommonJS features,
// it's not a candidate for "flat bundling" and must be wrapped in its own
// closure.
@@ -6831,7 +6829,6 @@ pub const Ast = struct {
hashbang: string = "",
directive: ?string = null,
- url_for_css: ?string = null,
parts: Part.List = Part.List{},
// This list may be mutated later, so we should store the capacity
symbols: Symbol.List = Symbol.List{},
@@ -6847,11 +6844,11 @@ pub const Ast = struct {
// These are used when bundling. They are filled in during the parser pass
// since we already have to traverse the AST then anyway and the parser pass
// is conveniently fully parallelized.
- named_imports: NamedImports = NamedImports.init(bun.failing_allocator),
- named_exports: NamedExports = NamedExports.init(bun.failing_allocator),
+ named_imports: NamedImports = .{},
+ named_exports: NamedExports = .{},
export_star_import_records: []u32 = &([_]u32{}),
- allocator: std.mem.Allocator,
+ // allocator: std.mem.Allocator,
top_level_symbols_to_parts: TopLevelSymbolToParts = .{},
commonjs_named_exports: CommonJSNamedExports = .{},
@@ -6875,15 +6872,14 @@ pub const Ast = struct {
};
pub const CommonJSNamedExports = bun.StringArrayHashMapUnmanaged(CommonJSNamedExport);
- pub const NamedImports = std.ArrayHashMap(Ref, NamedImport, RefHashCtx, true);
- pub const NamedExports = bun.StringArrayHashMap(NamedExport);
+ pub const NamedImports = std.ArrayHashMapUnmanaged(Ref, NamedImport, RefHashCtx, true);
+ pub const NamedExports = bun.StringArrayHashMapUnmanaged(NamedExport);
pub const ConstValuesMap = std.ArrayHashMapUnmanaged(Ref, Expr, RefHashCtx, false);
pub const TsEnumsMap = std.ArrayHashMapUnmanaged(Ref, bun.StringHashMapUnmanaged(InlinedEnumValue), RefHashCtx, false);
pub fn fromParts(parts: []Part) Ast {
return Ast{
.parts = Part.List.init(parts),
- .allocator = bun.default_allocator,
.runtime_imports = .{},
};
}
@@ -6891,12 +6887,11 @@ pub const Ast = struct {
pub fn initTest(parts: []Part) Ast {
return Ast{
.parts = Part.List.init(parts),
- .allocator = bun.default_allocator,
.runtime_imports = .{},
};
}
- pub const empty = Ast{ .parts = Part.List{}, .runtime_imports = .{}, .allocator = bun.default_allocator };
+ pub const empty = Ast{ .parts = Part.List{}, .runtime_imports = .{} };
pub fn toJSON(self: *const Ast, _: std.mem.Allocator, stream: anytype) !void {
const opts = std.json.StringifyOptions{ .whitespace = std.json.StringifyOptions.Whitespace{
@@ -6909,7 +6904,6 @@ pub const Ast = struct {
pub fn deinit(this: *Ast) void {
// TODO: assert mimalloc-owned memory
if (this.parts.len > 0) this.parts.deinitWithAllocator(bun.default_allocator);
- if (this.externals.len > 0) bun.default_allocator.free(this.externals);
if (this.symbols.len > 0) this.symbols.deinitWithAllocator(bun.default_allocator);
if (this.import_records.len > 0) this.import_records.deinitWithAllocator(bun.default_allocator);
}
@@ -6924,22 +6918,18 @@ pub const Ast = struct {
/// So we make a slimmer version of Ast for bundling that doesn't allocate as much memory
pub const BundledAst = struct {
approximate_newline_count: u32 = 0,
- nested_scope_slot_counts: SlotCounts = SlotCounts{},
- externals: []u32 = &[_]u32{},
+ nested_scope_slot_counts: SlotCounts = .{},
- exports_kind: ExportsKind = ExportsKind.none,
+ exports_kind: ExportsKind = .none,
/// These are stored at the AST level instead of on individual AST nodes so
/// they can be manipulated efficiently without a full AST traversal
import_records: ImportRecord.List = .{},
hashbang: string = "",
- directive: string = "",
- url_for_css: string = "",
- parts: Part.List = Part.List{},
- // This list may be mutated later, so we should store the capacity
- symbols: Symbol.List = Symbol.List{},
- module_scope: Scope = Scope{},
+ parts: Part.List = .{},
+ symbols: Symbol.List = .{},
+ module_scope: Scope = .{},
char_freq: CharFreq = undefined,
exports_ref: Ref = Ref.None,
module_ref: Ref = Ref.None,
@@ -6949,18 +6939,19 @@ pub const BundledAst = struct {
// These are used when bundling. They are filled in during the parser pass
// since we already have to traverse the AST then anyway and the parser pass
// is conveniently fully parallelized.
- named_imports: NamedImports = NamedImports.init(bun.failing_allocator),
- named_exports: NamedExports = NamedExports.init(bun.failing_allocator),
- export_star_import_records: []u32 = &([_]u32{}),
+ named_imports: NamedImports = .{},
+ named_exports: NamedExports = .{},
+ export_star_import_records: []u32 = &.{},
- allocator: std.mem.Allocator,
top_level_symbols_to_parts: TopLevelSymbolToParts = .{},
commonjs_named_exports: CommonJSNamedExports = .{},
redirect_import_record_index: u32 = std.math.maxInt(u32),
- /// Only populated when bundling
+ /// Only populated when bundling. When --server-components is passed, this
+ /// will be .browser when it is a client component, and the server's target
+ /// on the server.
target: bun.options.Target = .browser,
// const_values: ConstValuesMap = .{},
@@ -6996,15 +6987,12 @@ pub const BundledAst = struct {
return .{
.approximate_newline_count = this.approximate_newline_count,
.nested_scope_slot_counts = this.nested_scope_slot_counts,
- .externals = this.externals,
.exports_kind = this.exports_kind,
.import_records = this.import_records,
.hashbang = this.hashbang,
- .directive = this.directive,
- // .url_for_css = this.url_for_css,
.parts = this.parts,
// This list may be mutated later, so we should store the capacity
.symbols = this.symbols,
@@ -7022,7 +7010,6 @@ pub const BundledAst = struct {
.named_exports = this.named_exports,
.export_star_import_records = this.export_star_import_records,
- .allocator = this.allocator,
.top_level_symbols_to_parts = this.top_level_symbols_to_parts,
.commonjs_named_exports = this.commonjs_named_exports,
@@ -7048,14 +7035,12 @@ pub const BundledAst = struct {
return .{
.approximate_newline_count = @as(u32, @truncate(ast.approximate_newline_count)),
.nested_scope_slot_counts = ast.nested_scope_slot_counts,
- .externals = ast.externals,
.exports_kind = ast.exports_kind,
.import_records = ast.import_records,
.hashbang = ast.hashbang,
- .directive = ast.directive orelse "",
// .url_for_css = ast.url_for_css orelse "",
.parts = ast.parts,
// This list may be mutated later, so we should store the capacity
@@ -7074,7 +7059,7 @@ pub const BundledAst = struct {
.named_exports = ast.named_exports,
.export_star_import_records = ast.export_star_import_records,
- .allocator = ast.allocator,
+ // .allocator = ast.allocator,
.top_level_symbols_to_parts = ast.top_level_symbols_to_parts,
.commonjs_named_exports = ast.commonjs_named_exports,
@@ -7110,27 +7095,27 @@ pub const Span = struct {
/// block are merged into a single namespace while the non-exported code is
/// still scoped to just within that block:
///
-/// let x = 1;
-/// namespace Foo {
-/// let x = 2;
-/// export let y = 3;
-/// }
-/// namespace Foo {
-/// console.log(x); // 1
-/// console.log(y); // 3
-/// }
+/// let x = 1;
+/// namespace Foo {
+/// let x = 2;
+/// export let y = 3;
+/// }
+/// namespace Foo {
+/// console.log(x); // 1
+/// console.log(y); // 3
+/// }
///
/// Doing this also works inside an enum:
///
-/// enum Foo {
-/// A = 3,
-/// B = A + 1,
-/// }
-/// enum Foo {
-/// C = A + 2,
-/// }
-/// console.log(Foo.B) // 4
-/// console.log(Foo.C) // 5
+/// enum Foo {
+/// A = 3,
+/// B = A + 1,
+/// }
+/// enum Foo {
+/// C = A + 2,
+/// }
+/// console.log(Foo.B) // 4
+/// console.log(Foo.C) // 5
///
/// This is a form of identifier lookup that works differently than the
/// hierarchical scope-based identifier lookup in JavaScript. Lookup now needs
@@ -8468,14 +8453,22 @@ pub const ASTMemoryAllocator = struct {
}
};
-pub const UseDirective = enum {
+pub const UseDirective = enum(u2) {
+ // TODO: Remove this, and provide `UseDirective.Optional` instead
none,
- @"use client",
- @"use server",
+ /// "use client"
+ client,
+ /// "use server"
+ server,
+
+ pub const Boundering = enum(u2) {
+ client = @intFromEnum(UseDirective.client),
+ server = @intFromEnum(UseDirective.server),
+ };
pub const Flags = struct {
- is_client: bool = false,
- is_server: bool = false,
+ has_any_client: bool = false,
+ has_any_server: bool = false,
};
pub fn isBoundary(this: UseDirective, other: UseDirective) bool {
@@ -8485,22 +8478,13 @@ pub const UseDirective = enum {
return true;
}
- pub fn boundering(this: UseDirective, other: UseDirective) ?UseDirective {
+ pub fn boundering(this: UseDirective, other: UseDirective) ?Boundering {
if (this == other or other == .none)
return null;
-
- return other;
+ return @enumFromInt(@intFromEnum(other));
}
- pub const EntryPoint = struct {
- source_index: Index.Int,
- use_directive: UseDirective,
- };
-
- pub const List = std.MultiArrayList(UseDirective.EntryPoint);
-
- // TODO: remove this, add an onModuleDirective() callback to the parser
- pub fn parse(contents: []const u8) UseDirective {
+ pub fn parse(contents: []const u8) ?UseDirective {
const truncated = std.mem.trimLeft(u8, contents, " \t\n\r;");
if (truncated.len < "'use client';".len)
@@ -8515,30 +8499,124 @@ pub const UseDirective = enum {
const unquoted = directive_string[1 .. directive_string.len - 2];
- if (strings.eqlComptime(
- unquoted,
- "use client",
- )) {
- return .@"use client";
+ if (strings.eqlComptime(unquoted, "use client")) {
+ return .client;
}
- if (strings.eqlComptime(
- unquoted,
- "use server",
- )) {
- return .@"use server";
+ if (strings.eqlComptime(unquoted, "use server")) {
+ return .server;
}
- return .none;
+ return null;
}
+};
- pub fn target(this: UseDirective, default: bun.options.Target) bun.options.Target {
- return switch (this) {
- .none => default,
- .@"use client" => .browser,
- .@"use server" => .bun,
+/// Represents a boundary between client and server code. Every boundary
+/// gets bundled twice, once for the desired target, and once to generate
+/// a module of "references". Specifically, the generated file takes the
+/// canonical Ast as input to derive a wrapper. See `Framework.ServerComponents`
+/// for more details about this generated file.
+///
+/// This is sometimes abbreviated as SCB
+pub const ServerComponentBoundary = struct {
+ use_directive: UseDirective,
+
+ /// The index of the original file.
+ source_index: Index.Int,
+
+ /// Index to the file imported on the opposite platform, which is
+ /// generated by the bundler. For client components, this is the
+ /// server's code. For server actions, this is the client's code.
+ reference_source_index: Index.Int,
+
+ /// When `kit.Framework.ServerComponents.separate_ssr_graph` is enabled this
+ /// points to the separated module. When the SSR graph is not separate, this is
+ /// equal to `reference_source_index`
+ //
+    // TODO: Is this used for server actions?
+ ssr_source_index: Index.Int,
+
+ /// The requirements for this data structure is to have reasonable lookup
+ /// speed, but also being able to pull a `[]const Index.Int` of all
+ /// boundaries for iteration.
+ pub const List = struct {
+ list: std.MultiArrayList(ServerComponentBoundary) = .{},
+ /// Used to facilitate fast lookups into `items` by `.source_index`
+ map: Map = .{},
+
+ const Map = std.ArrayHashMapUnmanaged(void, void, struct {}, true);
+
+ /// Can only be called on the bundler thread.
+ pub fn put(
+ m: *List,
+ allocator: std.mem.Allocator,
+ source_index: Index.Int,
+ use_directive: UseDirective,
+ reference_source_index: Index.Int,
+ ssr_source_index: Index.Int,
+ ) !void {
+ try m.list.append(allocator, .{
+ .source_index = source_index,
+ .use_directive = use_directive,
+ .reference_source_index = reference_source_index,
+ .ssr_source_index = ssr_source_index,
+ });
+ const gop = try m.map.getOrPutAdapted(
+ allocator,
+ source_index,
+ Adapter{ .list = m.list.slice() },
+ );
+ bun.assert(!gop.found_existing);
+ }
+
+ /// Can only be called on the bundler thread.
+ pub fn getIndex(l: *const List, real_source_index: Index.Int) ?usize {
+ return l.map.getIndexAdapted(
+ real_source_index,
+ Adapter{ .list = l.list.slice() },
+ );
+ }
+
+ /// Use this to improve speed of accessing fields at the cost of
+ /// storing more pointers. Invalidated when input is mutated.
+ pub fn slice(l: List) Slice {
+ return .{ .list = l.list.slice(), .map = l.map };
+ }
+
+ pub const Slice = struct {
+ list: std.MultiArrayList(ServerComponentBoundary).Slice,
+ map: Map,
+
+ pub fn getIndex(l: *const Slice, real_source_index: Index.Int) ?usize {
+ return l.map.getIndexAdapted(
+ real_source_index,
+ Adapter{ .list = l.list },
+ ) orelse return null;
+ }
+
+ pub fn getReferenceSourceIndex(l: *const Slice, real_source_index: Index.Int) ?u32 {
+ const i = l.map.getIndexAdapted(
+ real_source_index,
+ Adapter{ .list = l.list },
+ ) orelse return null;
+ bun.unsafeAssert(l.list.capacity > 0); // optimize MultiArrayList.Slice.items
+ return l.list.items(.reference_source_index)[i];
+ }
};
- }
+
+ pub const Adapter = struct {
+ list: std.MultiArrayList(ServerComponentBoundary).Slice,
+
+ pub fn hash(_: Adapter, key: Index.Int) u32 {
+ return std.hash.uint32(key);
+ }
+
+ pub fn eql(adapt: Adapter, a: Index.Int, _: void, b_index: usize) bool {
+ bun.unsafeAssert(adapt.list.capacity > 0); // optimize MultiArrayList.Slice.items
+ return a == adapt.list.items(.source_index)[b_index];
+ }
+ };
+ };
};
pub const GlobalStoreHandle = struct {
diff --git a/src/js_parser.zig b/src/js_parser.zig
index 6b03e651aa669..047b6bfa33373 100644
--- a/src/js_parser.zig
+++ b/src/js_parser.zig
@@ -1061,7 +1061,7 @@ pub const ImportScanner = struct {
stmts: []Stmt,
will_transform_to_common_js: bool,
comptime hot_module_reloading_transformations: bool,
- hot_module_reloading_context: if (hot_module_reloading_transformations) *P.ConvertESMExportsForHmr else void,
+ hot_module_reloading_context: if (hot_module_reloading_transformations) *ConvertESMExportsForHmr else void,
) !ImportScanner {
var scanner = ImportScanner{};
var stmts_end: usize = 0;
@@ -1077,7 +1077,7 @@ pub const ImportScanner = struct {
st__.* = st;
}
- var record: *ImportRecord = &p.import_records.items[st.import_record_index];
+ const record: *ImportRecord = &p.import_records.items[st.import_record_index];
if (record.path.isMacro()) {
record.is_unused = true;
@@ -1272,7 +1272,7 @@ pub const ImportScanner = struct {
result.* = alias;
}
strings.sortDesc(sorted);
- p.named_imports.ensureUnusedCapacity(sorted.len) catch unreachable;
+ p.named_imports.ensureUnusedCapacity(p.allocator, sorted.len) catch bun.outOfMemory();
// Create named imports for these property accesses. This will
// cause missing imports to generate useful warnings.
@@ -1283,6 +1283,7 @@ pub const ImportScanner = struct {
for (sorted) |alias| {
const item = existing_items.get(alias).?;
p.named_imports.put(
+ p.allocator,
item.ref.?,
js_ast.NamedImport{
.alias = alias,
@@ -1290,7 +1291,7 @@ pub const ImportScanner = struct {
.namespace_ref = namespace_ref,
.import_record_index = st.import_record_index,
},
- ) catch unreachable;
+ ) catch bun.outOfMemory();
const name: LocRef = item;
const name_ref = name.ref.?;
@@ -1314,8 +1315,9 @@ pub const ImportScanner = struct {
}
p.named_imports.ensureUnusedCapacity(
+ p.allocator,
st.items.len + @as(usize, @intFromBool(st.default_name != null)) + @as(usize, @intFromBool(st.star_name_loc != null)),
- ) catch unreachable;
+ ) catch bun.outOfMemory();
if (st.star_name_loc) |loc| {
p.named_imports.putAssumeCapacity(
@@ -1370,7 +1372,7 @@ pub const ImportScanner = struct {
const name: LocRef = item.name;
const name_ref = name.ref.?;
- try p.named_imports.put(name_ref, js_ast.NamedImport{
+ try p.named_imports.put(p.allocator, name_ref, js_ast.NamedImport{
.alias = item.alias,
.alias_loc = name.loc,
.namespace_ref = namespace_ref,
@@ -1486,9 +1488,9 @@ pub const ImportScanner = struct {
// Rewrite this export to be:
// exports.default =
// But only if it's anonymous
- if (!hot_module_reloading_transformations and will_transform_to_common_js) {
+ if (!hot_module_reloading_transformations and will_transform_to_common_js and P != bun.bundle_v2.AstBuilder) {
const expr = st.value.toExpr();
- var export_default_args = p.allocator.alloc(Expr, 2) catch unreachable;
+ var export_default_args = try p.allocator.alloc(Expr, 2);
export_default_args[0] = p.@"module.exports"(expr.loc);
export_default_args[1] = expr;
stmt = p.s(S.SExpr{ .value = p.callRuntime(expr.loc, "__exportDefault", export_default_args) }, expr.loc);
@@ -1504,7 +1506,7 @@ pub const ImportScanner = struct {
if (st.alias) |alias| {
// "export * as ns from 'path'"
- try p.named_imports.put(st.namespace_ref, js_ast.NamedImport{
+ try p.named_imports.put(p.allocator, st.namespace_ref, js_ast.NamedImport{
.alias = null,
.alias_is_star = true,
.alias_loc = alias.loc,
@@ -1522,13 +1524,13 @@ pub const ImportScanner = struct {
},
.s_export_from => |st| {
try p.import_records_for_current_part.append(allocator, st.import_record_index);
- p.named_imports.ensureUnusedCapacity(st.items.len) catch unreachable;
+ p.named_imports.ensureUnusedCapacity(p.allocator, st.items.len) catch unreachable;
for (st.items) |item| {
const ref = item.name.ref orelse p.panic("Expected export from item to have a name {any}", .{st});
// Note that the imported alias is not item.Alias, which is the
// exported alias. This is somewhat confusing because each
// SExportFrom statement is basically SImport + SExportClause in one.
- try p.named_imports.put(ref, js_ast.NamedImport{
+ try p.named_imports.put(p.allocator, ref, js_ast.NamedImport{
.alias_is_star = false,
.alias = item.original_name,
.alias_loc = item.name.loc,
@@ -2842,7 +2844,7 @@ pub const ScanPassResult = struct {
pub fn init(allocator: Allocator) ScanPassResult {
return .{
.import_records = ListManaged(ImportRecord).init(allocator),
- .named_imports = js_ast.Ast.NamedImports.init(allocator),
+ .named_imports = .{},
.used_symbols = ParsePassSymbolUsageMap.init(allocator),
.import_records_to_keep = ListManaged(u32).init(allocator),
.approximate_newline_count = 0,
@@ -3715,7 +3717,6 @@ pub const Parser = struct {
part.symbol_uses = .{};
return js_ast.Result{
.ast = js_ast.Ast{
- .allocator = p.allocator,
.import_records = ImportRecord.List.init(p.import_records.items),
.redirect_import_record_index = id,
.named_imports = p.named_imports,
@@ -4421,7 +4422,7 @@ const ParserFeatures = struct {
scan_only: bool = false,
};
-const ImportItemForNamespaceMap = bun.StringArrayHashMap(LocRef);
+pub const ImportItemForNamespaceMap = bun.StringArrayHashMap(LocRef);
pub const KnownGlobal = enum {
WeakSet,
@@ -5797,7 +5798,7 @@ fn NewParser_(
}
}
- pub fn recordExport(p: *P, loc: logger.Loc, alias: string, ref: Ref) anyerror!void {
+ pub fn recordExport(p: *P, loc: logger.Loc, alias: string, ref: Ref) !void {
if (p.named_exports.get(alias)) |name| {
// Duplicate exports are an error
var notes = try p.allocator.alloc(logger.Data, 1);
@@ -5814,7 +5815,7 @@ fn NewParser_(
.{std.mem.trim(u8, alias, "\"'")},
);
} else if (!p.isDeoptimizedCommonJS()) {
- try p.named_exports.put(alias, js_ast.NamedExport{ .alias_loc = loc, .ref = ref });
+ try p.named_exports.put(p.allocator, alias, js_ast.NamedExport{ .alias_loc = loc, .ref = ref });
}
}
@@ -5865,7 +5866,6 @@ fn NewParser_(
},
.e_private_identifier => |private| {
return p.loadNameFromRef(private.ref);
- // return p.loadNameFromRef()
},
else => {
return "property";
@@ -6052,7 +6052,7 @@ fn NewParser_(
};
declared_symbols.appendAssumeCapacity(.{ .ref = ref, .is_top_level = true });
try p.is_import_item.put(allocator, ref, {});
- try p.named_imports.put(ref, js_ast.NamedImport{
+ try p.named_imports.put(allocator, ref, js_ast.NamedImport{
.alias = alias_name,
.alias_loc = logger.Loc{},
.namespace_ref = namespace_ref,
@@ -6147,7 +6147,7 @@ fn NewParser_(
declared_symbols.appendAssumeCapacity(.{ .ref = entry.ref, .is_top_level = true });
try p.module_scope.generated.push(allocator, entry.ref);
try p.is_import_item.put(allocator, entry.ref, {});
- try p.named_imports.put(entry.ref, .{
+ try p.named_imports.put(allocator, entry.ref, .{
.alias = entry.name,
.alias_loc = logger.Loc.Empty,
.namespace_ref = namespace_ref,
@@ -9200,6 +9200,8 @@ fn NewParser_(
}
}
}
+ } else if (import_tag == .kit_resolve_to_ssr_graph) {
+ p.import_records.items[stmt.import_record_index].tag = import_tag;
}
}
@@ -10206,7 +10208,7 @@ fn NewParser_(
isForAwait = false;
} else {
// TODO: improve error handling here
- // didGenerateError := p.markSyntaxFeature(compat.ForAwait, awaitRange)
+ // didGenerateError := p.markSyntaxFeature(compat.ForAwait, awaitRange)
if (p.fn_or_arrow_data_parse.is_top_level) {
p.top_level_await_keyword = await_range;
// p.markSyntaxFeature(compat.TopLevelAwait, awaitRange)
@@ -12209,6 +12211,7 @@ fn NewParser_(
const SupportedAttribute = enum {
type,
embed,
+ bunKitGraph,
};
var has_seen_embed_true = false;
@@ -12217,21 +12220,17 @@ fn NewParser_(
const supported_attribute: ?SupportedAttribute = brk: {
// Parse the key
if (p.lexer.isIdentifierOrKeyword()) {
- if (strings.eqlComptime(p.lexer.identifier, "type")) {
- break :brk .type;
- }
-
- if (strings.eqlComptime(p.lexer.identifier, "embed")) {
- break :brk .embed;
+ inline for (comptime std.enums.values(SupportedAttribute)) |t| {
+ if (strings.eqlComptime(p.lexer.identifier, @tagName(t))) {
+ break :brk t;
+ }
}
} else if (p.lexer.token == .t_string_literal) {
if (p.lexer.string_literal_is_ascii) {
- if (strings.eqlComptime(p.lexer.string_literal_slice, "type")) {
- break :brk .type;
- }
-
- if (strings.eqlComptime(p.lexer.string_literal_slice, "embed")) {
- break :brk .embed;
+ inline for (comptime std.enums.values(SupportedAttribute)) |t| {
+ if (strings.eqlComptime(p.lexer.string_literal_slice, @tagName(t))) {
+ break :brk t;
+ }
}
}
} else {
@@ -12275,6 +12274,13 @@ fn NewParser_(
}
}
},
+ .bunKitGraph => {
+ if (strings.eqlComptime(p.lexer.string_literal_slice, "ssr")) {
+ path.import_tag = .kit_resolve_to_ssr_graph;
+ } else {
+ try p.lexer.addRangeError(p.lexer.range(), "'bunKitGraph' can only be set to 'ssr'", .{}, true);
+ }
+ },
}
}
}
@@ -16225,10 +16231,10 @@ fn NewParser_(
return exp;
}
- // // Capture "this" inside arrow functions that will be lowered into normal
+ // // Capture "this" inside arrow functions that will be lowered into normal
// // function expressions for older language environments
// if p.fnOrArrowDataVisit.isArrow && p.options.unsupportedJSFeatures.Has(compat.Arrow) && p.fnOnlyDataVisit.isThisNested {
- // return js_ast.Expr{Loc: expr.Loc, Data: &js_ast.EIdentifier{Ref: p.captureThis()}}, exprOut{}
+ // return js_ast.Expr{Loc: expr.Loc, Data: &js_ast.EIdentifier{Ref: p.captureThis()}}, exprOut{}
// }
},
.e_import_meta => {
@@ -19931,7 +19937,7 @@ fn NewParser_(
data.cases[i].value = p.visitExpr(val);
// TODO: error messages
// Check("case", *c.Value, c.Value.Loc)
- // p.warnAboutTypeofAndString(s.Test, *c.Value)
+ // p.warnAboutTypeofAndString(s.Test, *c.Value)
}
var _stmts = ListManaged(Stmt).fromOwnedSlice(p.allocator, case.body);
p.visitStmts(&_stmts, StmtsKind.none) catch unreachable;
@@ -23488,15 +23494,14 @@ fn NewParser_(
parts_list.cap = @intCast(input_parts.len);
return .{
- .allocator = p.allocator,
.runtime_imports = p.runtime_imports,
.parts = parts_list,
.module_scope = p.module_scope.*,
- .symbols = js_ast.Symbol.List.init(p.symbols.items),
+ .symbols = js_ast.Symbol.List.fromList(p.symbols),
.exports_ref = p.exports_ref,
.wrapper_ref = wrapper_ref,
.module_ref = p.module_ref,
- .import_records = ImportRecord.List.init(p.import_records.items),
+ .import_records = ImportRecord.List.fromList(p.import_records),
.export_star_import_records = p.export_star_import_records.items,
.approximate_newline_count = p.lexer.approximate_newline_count,
.exports_kind = exports_kind,
@@ -23588,317 +23593,6 @@ fn NewParser_(
return false;
}
- const ConvertESMExportsForHmr = struct {
- last_part: *js_ast.Part,
- imports_seen: std.AutoArrayHashMapUnmanaged(u32, void) = .{},
- export_props: std.ArrayListUnmanaged(G.Property) = .{},
- stmts: std.ArrayListUnmanaged(Stmt) = .{},
-
- fn convertStmt(ctx: *ConvertESMExportsForHmr, p: *P, stmt: Stmt) !void {
- const new_stmt = switch (stmt.data) {
- else => stmt,
- .s_local => |st| stmt: {
- if (!st.is_export) break :stmt stmt;
-
- st.is_export = false;
-
- if (st.kind.isReassignable()) {
- for (st.decls.slice()) |decl| {
- try ctx.visitBindingForKitModuleExports(p, decl.binding, true);
- }
- } else {
- // TODO: remove this dupe
- var dupe_decls = try std.ArrayListUnmanaged(G.Decl).initCapacity(p.allocator, st.decls.len);
-
- for (st.decls.slice()) |decl| {
- bun.assert(decl.value != null); // const must be initialized
-
- switch (decl.binding.data) {
- .b_missing => @panic("binding missing"),
-
- .b_identifier => |id| {
- const symbol = p.symbols.items[id.ref.inner_index];
-
- // if the symbol is not used, we don't need to preserve
- // a binding in this scope. we can move it to the exports object.
- if (symbol.use_count_estimate != 0 or !decl.value.?.canBeMoved()) {
- dupe_decls.appendAssumeCapacity(decl);
- }
-
- try ctx.export_props.append(p.allocator, .{
- .key = Expr.init(E.String, .{ .data = symbol.original_name }, decl.binding.loc),
- .value = decl.value,
- });
- },
-
- else => {
- dupe_decls.appendAssumeCapacity(decl);
- try ctx.visitBindingForKitModuleExports(p, decl.binding, false);
- },
- }
- }
-
- if (dupe_decls.items.len == 0) {
- return;
- }
-
- st.decls = G.Decl.List.fromList(dupe_decls);
- }
-
- break :stmt stmt;
- },
- .s_export_default => |st| stmt: {
- // Simple case: we can move this to the default property of the exports object
- if (st.canBeMoved()) {
- try ctx.export_props.append(p.allocator, .{
- .key = Expr.init(E.String, .{ .data = "default" }, stmt.loc),
- .value = st.value.toExpr(),
- });
- // no statement emitted
- return;
- }
-
- // Otherwise, we need a temporary
- const temp_id = p.generateTempRef("default_export");
- try ctx.last_part.declared_symbols.append(p.allocator, .{ .ref = temp_id, .is_top_level = true });
- try ctx.last_part.symbol_uses.putNoClobber(p.allocator, temp_id, .{ .count_estimate = 1 });
- try p.module_scope.generated.push(p.allocator, temp_id);
-
- try ctx.export_props.append(p.allocator, .{
- .key = Expr.init(E.String, .{ .data = "default" }, stmt.loc),
- .value = Expr.initIdentifier(temp_id, stmt.loc),
- });
-
- break :stmt Stmt.alloc(S.Local, .{
- .kind = .k_const,
- .decls = try G.Decl.List.fromSlice(p.allocator, &.{
- .{
- .binding = Binding.alloc(p.allocator, B.Identifier{ .ref = temp_id }, stmt.loc),
- .value = st.value.toExpr(),
- },
- }),
- }, stmt.loc);
- },
- .s_class => |st| stmt: {
- // Strip the "export" keyword
- if (!st.is_export) break :stmt stmt;
-
- // Export as CommonJS
- try ctx.export_props.append(p.allocator, .{
- .key = Expr.init(E.String, .{
- .data = p.symbols.items[st.class.class_name.?.ref.?.inner_index].original_name,
- }, stmt.loc),
- .value = Expr.initIdentifier(st.class.class_name.?.ref.?, stmt.loc),
- });
-
- st.is_export = false;
-
- break :stmt stmt;
- },
- .s_function => |st| stmt: {
- // Strip the "export" keyword
- if (!st.func.flags.contains(.is_export)) break :stmt stmt;
-
- st.func.flags.remove(.is_export);
-
- // Export as CommonJS
- try ctx.export_props.append(p.allocator, .{
- .key = Expr.init(E.String, .{
- .data = p.symbols.items[st.func.name.?.ref.?.inner_index].original_name,
- }, stmt.loc),
- .value = Expr.initIdentifier(st.func.name.?.ref.?, stmt.loc),
- });
-
- break :stmt stmt;
- },
- .s_export_clause => |st| {
- for (st.items) |item| {
- try ctx.export_props.append(p.allocator, .{
- .key = Expr.init(E.String, .{
- .data = item.alias,
- }, stmt.loc),
- .value = Expr.initIdentifier(item.name.ref.?, item.name.loc),
- });
- }
-
- return; // do not emit a statement here
- },
-
- .s_export_from => |st| {
- _ = st; // autofix
- @panic("TODO s_export_from");
- },
- .s_export_star => |st| {
- _ = st; // autofix
- @panic("TODO s_export_star");
- },
-
- // De-duplicate import statements. It is okay to disregard
- // named/default imports here as we always rewrite them as
- // full qualified property accesses (need to so live-bindings)
- .s_import => |st| stmt: {
- const gop = try ctx.imports_seen.getOrPut(p.allocator, st.import_record_index);
- if (gop.found_existing) return;
- break :stmt stmt;
- },
- };
-
- try ctx.stmts.append(p.allocator, new_stmt);
- }
-
- fn visitBindingForKitModuleExports(
- ctx: *ConvertESMExportsForHmr,
- p: *P,
- binding: Binding,
- is_live_binding: bool,
- ) !void {
- switch (binding.data) {
- .b_missing => @panic("missing!"),
- .b_identifier => |id| {
- try ctx.visitRefForKitModuleExports(p, id.ref, binding.loc, is_live_binding);
- },
- .b_array => |array| {
- for (array.items) |item| {
- try ctx.visitBindingForKitModuleExports(p, item.binding, is_live_binding);
- }
- },
- .b_object => |object| {
- for (object.properties) |item| {
- try ctx.visitBindingForKitModuleExports(p, item.value, is_live_binding);
- }
- },
- }
- }
-
- fn visitRefForKitModuleExports(
- ctx: *ConvertESMExportsForHmr,
- p: *P,
- ref: Ref,
- loc: logger.Loc,
- is_live_binding: bool,
- ) !void {
- const symbol = p.symbols.items[ref.inner_index];
- const id = Expr.initIdentifier(ref, loc);
- if (is_live_binding) {
- const key = Expr.init(E.String, .{
- .data = symbol.original_name,
- }, loc);
-
- // This is technically incorrect in that we've marked this as a
- // top level symbol. but all we care about is preventing name
- // collisions, not necessarily the best minificaiton (dev only)
- const arg1 = p.generateTempRef(symbol.original_name);
- try ctx.last_part.declared_symbols.append(p.allocator, .{ .ref = arg1, .is_top_level = true });
- try ctx.last_part.symbol_uses.putNoClobber(p.allocator, arg1, .{ .count_estimate = 1 });
- try p.module_scope.generated.push(p.allocator, arg1);
-
- // Live bindings need to update the value internally and externally.
- // 'get abc() { return abc }'
- try ctx.export_props.append(p.allocator, .{
- .kind = .get,
- .key = key,
- .value = Expr.init(E.Function, .{ .func = .{
- .body = .{
- .stmts = try p.allocator.dupe(Stmt, &.{
- Stmt.alloc(S.Return, .{ .value = id }, loc),
- }),
- .loc = loc,
- },
- } }, loc),
- });
- // 'set abc(abc2) { abc = abc2 }'
- try ctx.export_props.append(p.allocator, .{
- .kind = .set,
- .key = key,
- .value = Expr.init(E.Function, .{ .func = .{
- .args = try p.allocator.dupe(G.Arg, &.{.{
- .binding = Binding.alloc(p.allocator, B.Identifier{ .ref = arg1 }, loc),
- }}),
- .body = .{
- .stmts = try p.allocator.dupe(Stmt, &.{
- Stmt.alloc(S.SExpr, .{
- .value = Expr.assign(id, Expr.initIdentifier(arg1, loc)),
- }, loc),
- }),
- .loc = loc,
- },
- } }, loc),
- });
- } else {
- // 'abc,'
- try ctx.export_props.append(p.allocator, .{
- .key = Expr.init(E.String, .{
- .data = symbol.original_name,
- }, loc),
- .value = id,
- });
- }
- }
-
- pub fn finalize(ctx: *ConvertESMExportsForHmr, p: *P, all_parts: []js_ast.Part) ![]js_ast.Part {
- if (ctx.export_props.items.len > 0) {
- // add a marker for the client runtime to tell that this is an ES module
- try ctx.stmts.append(p.allocator, Stmt.alloc(S.SExpr, .{
- .value = Expr.assign(
- Expr.init(E.Dot, .{
- .target = Expr.initIdentifier(p.module_ref, logger.Loc.Empty),
- .name = "__esModule",
- .name_loc = logger.Loc.Empty,
- }, logger.Loc.Empty),
- Expr.init(E.Boolean, .{ .value = true }, logger.Loc.Empty),
- ),
- }, logger.Loc.Empty));
-
- try ctx.stmts.append(p.allocator, Stmt.alloc(S.SExpr, .{
- .value = Expr.assign(
- Expr.init(E.Dot, .{
- .target = Expr.initIdentifier(p.module_ref, logger.Loc.Empty),
- .name = "exports",
- .name_loc = logger.Loc.Empty,
- }, logger.Loc.Empty),
- Expr.init(E.Object, .{
- .properties = G.Property.List.fromList(ctx.export_props),
- }, logger.Loc.Empty),
- ),
- }, logger.Loc.Empty));
-
- // mark a dependency on module_ref so it is renamed
- try ctx.last_part.symbol_uses.put(p.allocator, p.module_ref, .{ .count_estimate = 1 });
- try ctx.last_part.declared_symbols.append(p.allocator, .{ .ref = p.module_ref, .is_top_level = true });
- }
-
- // TODO: this is a tiny mess. it is honestly trying to hard to merge all parts into one
- for (all_parts[0 .. all_parts.len - 1]) |*part| {
- try ctx.last_part.declared_symbols.appendList(p.allocator, part.declared_symbols);
- try ctx.last_part.import_record_indices.append(p.allocator, part.import_record_indices.slice());
- for (part.symbol_uses.keys(), part.symbol_uses.values()) |k, v| {
- const gop = try ctx.last_part.symbol_uses.getOrPut(p.allocator, k);
- if (!gop.found_existing) {
- gop.value_ptr.* = v;
- } else {
- gop.value_ptr.count_estimate += v.count_estimate;
- }
- }
- part.stmts = &.{};
- part.declared_symbols.entries.len = 0;
- part.tag = .dead_due_to_inlining;
- part.dependencies.clearRetainingCapacity();
- try part.dependencies.push(p.allocator, .{
- .part_index = @intCast(all_parts.len - 1),
- .source_index = p.source.index,
- });
- }
-
- try ctx.last_part.import_record_indices.append(p.allocator, p.import_records_for_current_part.items);
- try ctx.last_part.declared_symbols.appendList(p.allocator, p.declared_symbols);
-
- ctx.last_part.stmts = ctx.stmts.items;
- ctx.last_part.tag = .none;
-
- return all_parts;
- }
- };
-
pub fn init(
allocator: Allocator,
log: *logger.Log,
@@ -23933,7 +23627,7 @@ fn NewParser_(
.define = define,
.import_records = undefined,
.named_imports = undefined,
- .named_exports = js_ast.Ast.NamedExports.init(allocator),
+ .named_exports = .{},
.log = log,
.allocator = allocator,
.options = opts,
@@ -23979,7 +23673,7 @@ fn NewParser_(
if (comptime !only_scan_imports_and_do_not_visit) {
this.import_records = @TypeOf(this.import_records).init(allocator);
- this.named_imports = NamedImportsType.init(allocator);
+ this.named_imports = .{};
}
this.to_expr_wrapper_namespace = Binding2ExprWrapper.Namespace.init(this);
@@ -24095,6 +23789,316 @@ const WrapMode = enum {
bun_commonjs,
};
+pub const ConvertESMExportsForHmr = struct {
+ last_part: *js_ast.Part,
+ imports_seen: std.AutoArrayHashMapUnmanaged(u32, void) = .{},
+ export_props: std.ArrayListUnmanaged(G.Property) = .{},
+ stmts: std.ArrayListUnmanaged(Stmt) = .{},
+
+ fn convertStmt(ctx: *ConvertESMExportsForHmr, p: anytype, stmt: Stmt) !void {
+ const new_stmt = switch (stmt.data) {
+ else => stmt,
+ .s_local => |st| stmt: {
+ if (!st.is_export) break :stmt stmt;
+
+ st.is_export = false;
+
+ if (st.kind.isReassignable()) {
+ for (st.decls.slice()) |decl| {
+ try ctx.visitBindingForKitModuleExports(p, decl.binding, true);
+ }
+ } else {
+ // TODO: remove this dupe
+ var dupe_decls = try std.ArrayListUnmanaged(G.Decl).initCapacity(p.allocator, st.decls.len);
+
+ for (st.decls.slice()) |decl| {
+ bun.assert(decl.value != null); // const must be initialized
+
+ switch (decl.binding.data) {
+ .b_missing => {},
+
+ .b_identifier => |id| {
+ const symbol = p.symbols.items[id.ref.inner_index];
+
+ // if the symbol is not used, we don't need to preserve
+ // a binding in this scope. we can move it to the exports object.
+ if (symbol.use_count_estimate == 0 and decl.value.?.canBeMoved()) {
+ try ctx.export_props.append(p.allocator, .{
+ .key = Expr.init(E.String, .{ .data = symbol.original_name }, decl.binding.loc),
+ .value = decl.value,
+ });
+ } else {
+ dupe_decls.appendAssumeCapacity(decl);
+ try ctx.visitBindingForKitModuleExports(p, decl.binding, false);
+ }
+ },
+
+ else => {
+ dupe_decls.appendAssumeCapacity(decl);
+ try ctx.visitBindingForKitModuleExports(p, decl.binding, false);
+ },
+ }
+ }
+
+ if (dupe_decls.items.len == 0) {
+ return;
+ }
+
+ st.decls = G.Decl.List.fromList(dupe_decls);
+ }
+
+ break :stmt stmt;
+ },
+ .s_export_default => |st| stmt: {
+ // Simple case: we can move this to the default property of the exports object
+ if (st.canBeMoved()) {
+ try ctx.export_props.append(p.allocator, .{
+ .key = Expr.init(E.String, .{ .data = "default" }, stmt.loc),
+ .value = st.value.toExpr(),
+ });
+ // no statement emitted
+ return;
+ }
+
+ // Otherwise, we need a temporary
+ const temp_id = p.generateTempRef("default_export");
+ try ctx.last_part.declared_symbols.append(p.allocator, .{ .ref = temp_id, .is_top_level = true });
+ try ctx.last_part.symbol_uses.putNoClobber(p.allocator, temp_id, .{ .count_estimate = 1 });
+ try p.current_scope.generated.push(p.allocator, temp_id);
+
+ try ctx.export_props.append(p.allocator, .{
+ .key = Expr.init(E.String, .{ .data = "default" }, stmt.loc),
+ .value = Expr.initIdentifier(temp_id, stmt.loc),
+ });
+
+ break :stmt Stmt.alloc(S.Local, .{
+ .kind = .k_const,
+ .decls = try G.Decl.List.fromSlice(p.allocator, &.{
+ .{
+ .binding = Binding.alloc(p.allocator, B.Identifier{ .ref = temp_id }, stmt.loc),
+ .value = st.value.toExpr(),
+ },
+ }),
+ }, stmt.loc);
+ },
+ .s_class => |st| stmt: {
+ // Strip the "export" keyword
+ if (!st.is_export) break :stmt stmt;
+
+ // Export as CommonJS
+ try ctx.export_props.append(p.allocator, .{
+ .key = Expr.init(E.String, .{
+ .data = p.symbols.items[st.class.class_name.?.ref.?.inner_index].original_name,
+ }, stmt.loc),
+ .value = Expr.initIdentifier(st.class.class_name.?.ref.?, stmt.loc),
+ });
+
+ st.is_export = false;
+
+ break :stmt stmt;
+ },
+ .s_function => |st| stmt: {
+ // Strip the "export" keyword
+ if (!st.func.flags.contains(.is_export)) break :stmt stmt;
+
+ st.func.flags.remove(.is_export);
+
+ // Export as CommonJS
+ try ctx.export_props.append(p.allocator, .{
+ .key = Expr.init(E.String, .{
+ .data = p.symbols.items[st.func.name.?.ref.?.inner_index].original_name,
+ }, stmt.loc),
+ .value = Expr.initIdentifier(st.func.name.?.ref.?, stmt.loc),
+ });
+
+ break :stmt stmt;
+ },
+ .s_export_clause => |st| {
+ for (st.items) |item| {
+ try ctx.export_props.append(p.allocator, .{
+ .key = Expr.init(E.String, .{
+ .data = item.alias,
+ }, stmt.loc),
+ .value = Expr.initIdentifier(item.name.ref.?, item.name.loc),
+ });
+ }
+
+ return; // do not emit a statement here
+ },
+
+ .s_export_from => {
+ bun.todoPanic(@src(), "hot-module-reloading instrumentation for 'export {{ ... }} from'", .{});
+ },
+ .s_export_star => {
+ bun.todoPanic(@src(), "hot-module-reloading instrumentation for 'export * from'", .{});
+ },
+
+ // De-duplicate import statements. It is okay to disregard
+ // named/default imports here as we always rewrite them as
+ // fully qualified property accesses (needed for live-bindings)
+ .s_import => |st| stmt: {
+ const gop = try ctx.imports_seen.getOrPut(p.allocator, st.import_record_index);
+ if (gop.found_existing) return;
+ break :stmt stmt;
+ },
+ };
+
+ try ctx.stmts.append(p.allocator, new_stmt);
+ }
+
+ fn visitBindingForKitModuleExports(
+ ctx: *ConvertESMExportsForHmr,
+ p: anytype,
+ binding: Binding,
+ is_live_binding: bool,
+ ) !void {
+ switch (binding.data) {
+ .b_missing => {},
+ .b_identifier => |id| {
+ try ctx.visitRefForKitModuleExports(p, id.ref, binding.loc, is_live_binding);
+ },
+ .b_array => |array| {
+ for (array.items) |item| {
+ try ctx.visitBindingForKitModuleExports(p, item.binding, is_live_binding);
+ }
+ },
+ .b_object => |object| {
+ for (object.properties) |item| {
+ try ctx.visitBindingForKitModuleExports(p, item.value, is_live_binding);
+ }
+ },
+ }
+ }
+
+ fn visitRefForKitModuleExports(
+ ctx: *ConvertESMExportsForHmr,
+ p: anytype,
+ ref: Ref,
+ loc: logger.Loc,
+ is_live_binding: bool,
+ ) !void {
+ const symbol = p.symbols.items[ref.inner_index];
+ const id = Expr.initIdentifier(ref, loc);
+ if (is_live_binding) {
+ const key = Expr.init(E.String, .{
+ .data = symbol.original_name,
+ }, loc);
+
+ // This is technically incorrect in that we've marked this as a
+ // top level symbol, but all we care about is preventing name
+ // collisions, not necessarily the best minification (dev only)
+ const arg1 = p.generateTempRef(symbol.original_name);
+ try ctx.last_part.declared_symbols.append(p.allocator, .{ .ref = arg1, .is_top_level = true });
+ try ctx.last_part.symbol_uses.putNoClobber(p.allocator, arg1, .{ .count_estimate = 1 });
+ try p.current_scope.generated.push(p.allocator, arg1);
+
+ // Live bindings need to update the value internally and externally.
+ // 'get abc() { return abc }'
+ try ctx.export_props.append(p.allocator, .{
+ .kind = .get,
+ .key = key,
+ .value = Expr.init(E.Function, .{ .func = .{
+ .body = .{
+ .stmts = try p.allocator.dupe(Stmt, &.{
+ Stmt.alloc(S.Return, .{ .value = id }, loc),
+ }),
+ .loc = loc,
+ },
+ } }, loc),
+ });
+ // 'set abc(abc2) { abc = abc2 }'
+ try ctx.export_props.append(p.allocator, .{
+ .kind = .set,
+ .key = key,
+ .value = Expr.init(E.Function, .{ .func = .{
+ .args = try p.allocator.dupe(G.Arg, &.{.{
+ .binding = Binding.alloc(p.allocator, B.Identifier{ .ref = arg1 }, loc),
+ }}),
+ .body = .{
+ .stmts = try p.allocator.dupe(Stmt, &.{
+ Stmt.alloc(S.SExpr, .{
+ .value = Expr.assign(id, Expr.initIdentifier(arg1, loc)),
+ }, loc),
+ }),
+ .loc = loc,
+ },
+ } }, loc),
+ });
+ } else {
+ // 'abc,'
+ try ctx.export_props.append(p.allocator, .{
+ .key = Expr.init(E.String, .{
+ .data = symbol.original_name,
+ }, loc),
+ .value = id,
+ });
+ }
+ }
+
+ pub fn finalize(ctx: *ConvertESMExportsForHmr, p: anytype, all_parts: []js_ast.Part) ![]js_ast.Part {
+ if (ctx.export_props.items.len > 0) {
+ // add a marker for the client runtime to tell that this is an ES module
+ try ctx.stmts.append(p.allocator, Stmt.alloc(S.SExpr, .{
+ .value = Expr.assign(
+ Expr.init(E.Dot, .{
+ .target = Expr.initIdentifier(p.module_ref, logger.Loc.Empty),
+ .name = "__esModule",
+ .name_loc = logger.Loc.Empty,
+ }, logger.Loc.Empty),
+ Expr.init(E.Boolean, .{ .value = true }, logger.Loc.Empty),
+ ),
+ }, logger.Loc.Empty));
+
+ try ctx.stmts.append(p.allocator, Stmt.alloc(S.SExpr, .{
+ .value = Expr.assign(
+ Expr.init(E.Dot, .{
+ .target = Expr.initIdentifier(p.module_ref, logger.Loc.Empty),
+ .name = "exports",
+ .name_loc = logger.Loc.Empty,
+ }, logger.Loc.Empty),
+ Expr.init(E.Object, .{
+ .properties = G.Property.List.fromList(ctx.export_props),
+ }, logger.Loc.Empty),
+ ),
+ }, logger.Loc.Empty));
+
+ // mark a dependency on module_ref so it is renamed
+ try ctx.last_part.symbol_uses.put(p.allocator, p.module_ref, .{ .count_estimate = 1 });
+ try ctx.last_part.declared_symbols.append(p.allocator, .{ .ref = p.module_ref, .is_top_level = true });
+ }
+
+ // TODO: this is a tiny mess. it is honestly trying too hard to merge all parts into one
+ for (all_parts[0 .. all_parts.len - 1]) |*part| {
+ try ctx.last_part.declared_symbols.appendList(p.allocator, part.declared_symbols);
+ try ctx.last_part.import_record_indices.append(p.allocator, part.import_record_indices.slice());
+ for (part.symbol_uses.keys(), part.symbol_uses.values()) |k, v| {
+ const gop = try ctx.last_part.symbol_uses.getOrPut(p.allocator, k);
+ if (!gop.found_existing) {
+ gop.value_ptr.* = v;
+ } else {
+ gop.value_ptr.count_estimate += v.count_estimate;
+ }
+ }
+ part.stmts = &.{};
+ part.declared_symbols.entries.len = 0;
+ part.tag = .dead_due_to_inlining;
+ part.dependencies.clearRetainingCapacity();
+ try part.dependencies.push(p.allocator, .{
+ .part_index = @intCast(all_parts.len - 1),
+ .source_index = p.source.index,
+ });
+ }
+
+ try ctx.last_part.import_record_indices.append(p.allocator, p.import_records_for_current_part.items);
+ try ctx.last_part.declared_symbols.appendList(p.allocator, p.declared_symbols);
+
+ ctx.last_part.stmts = ctx.stmts.items;
+ ctx.last_part.tag = .none;
+
+ return all_parts;
+ }
+};
+
/// Equivalent of esbuild's js_ast_helpers.ToInt32
fn floatToInt32(f: f64) i32 {
// Special-case non-finite numbers
diff --git a/src/js_printer.zig b/src/js_printer.zig
index 976a29ddd751b..6656faacb2e31 100644
--- a/src/js_printer.zig
+++ b/src/js_printer.zig
@@ -518,7 +518,6 @@ pub const Options = struct {
require_ref: ?Ref = null,
import_meta_ref: Ref = Ref.None,
indent: Indentation = .{},
- externals: []u32 = &[_]u32{},
runtime_imports: runtime.Runtime.Imports = runtime.Runtime.Imports{},
module_hash: u32 = 0,
source_path: ?fs.Path = null,
@@ -1092,11 +1091,6 @@ fn NewPrinter(
printInternalBunImport(p, import, @TypeOf("globalThis.Bun"), "globalThis.Bun");
}
- fn printHardcodedImportStatement(p: *Printer, import: S.Import) void {
- if (comptime !is_bun_platform) unreachable;
- printInternalBunImport(p, import, void, {});
- }
-
fn printInternalBunImport(p: *Printer, import: S.Import, comptime Statement: type, statement: Statement) void {
if (comptime !is_bun_platform) unreachable;
@@ -2039,7 +2033,9 @@ fn NewPrinter(
p.print(".require(");
{
const path = input_files[record.source_index.get()].path;
- p.printInlinedEnum(.{ .number = @floatFromInt(path.hashForKit()) }, path.pretty, level);
+ p.print('"');
+ p.printUTF8StringEscapedQuotes(path.pretty, '"');
+ p.print('"');
}
p.print(")");
} else if (!meta.was_unwrapped_require) {
@@ -2085,18 +2081,12 @@ fn NewPrinter(
return;
}
- const is_external = std.mem.indexOfScalar(
- u32,
- p.options.externals,
- import_record_index,
- ) != null;
-
// External "require()"
if (record.kind != .dynamic) {
p.printSpaceBeforeIdentifier();
if (p.options.inline_require_and_import_errors) {
- if (record.path.is_disabled and record.handles_import_errors and !is_external) {
+ if (record.path.is_disabled and record.handles_import_errors) {
p.printRequireError(record.path.text);
return;
}
@@ -2107,6 +2097,23 @@ fn NewPrinter(
}
}
+ if (p.options.module_type == .internal_kit_dev) {
+ p.printSpaceBeforeIdentifier();
+ p.printSymbol(p.options.commonjs_module_ref);
+ if (record.tag == .builtin)
+ p.print(".importBuiltin(")
+ else
+ p.print(".require(");
+ {
+ const path = record.path;
+ p.print('"');
+ p.printUTF8StringEscapedQuotes(path.pretty, '"');
+ p.print('"');
+ }
+ p.print(")");
+ return;
+ }
+
if (p.options.module_type == .esm and is_bun_platform) {
p.print("import.meta.require");
} else if (p.options.require_ref) |ref| {
@@ -2626,7 +2633,12 @@ fn NewPrinter(
p.printSpaceBeforeIdentifier();
p.addSourceMapping(expr.loc);
- p.print("import(");
+ if (p.options.module_type == .internal_kit_dev) {
+ p.printSymbol(p.options.commonjs_module_ref);
+ p.print(".dynamicImport(");
+ } else {
+ p.print("import(");
+ }
// TODO:
// if (e.leading_interior_comments.len > 0) {
// p.printNewline();
@@ -3110,6 +3122,10 @@ fn NewPrinter(
e.ref;
const symbol = p.symbols().get(ref).?;
+ // if (bun.strings.eql(symbol.original_name, "registerClientReference")) {
+ // @breakpoint();
+ // }
+
if (symbol.import_item_status == .missing) {
p.printUndefined(expr.loc, level);
didPrint = true;
@@ -3444,13 +3460,8 @@ fn NewPrinter(
}
}
- pub fn printNamespaceAlias(p: *Printer, import_record: ImportRecord, namespace: G.NamespaceAlias) void {
- if (import_record.module_id > 0 and !import_record.contains_import_star) {
- p.print("$");
- p.printModuleId(import_record.module_id);
- } else {
- p.printSymbol(namespace.namespace_ref);
- }
+ pub fn printNamespaceAlias(p: *Printer, _: ImportRecord, namespace: G.NamespaceAlias) void {
+ p.printSymbol(namespace.namespace_ref);
// In the case of code like this:
// module.exports = require("foo")
@@ -3710,9 +3721,10 @@ fn NewPrinter(
}
}
},
- // .e_import_identifier => |e| inner: {
- .e_import_identifier => |e| {
+ .e_import_identifier => |e| inner: {
const ref = p.symbols().follow(e.ref);
+ if (p.options.input_files_for_kit != null)
+ break :inner;
// if (p.options.const_values.count() > 0 and p.options.const_values.contains(ref))
// break :inner;
@@ -4345,62 +4357,6 @@ fn NewPrinter(
const import_record = p.importRecord(s.import_record_index);
- if (comptime is_bun_platform) {
- if (import_record.do_commonjs_transform_in_printer) {
- if (s.items.len == 0)
- return;
- p.print("var {");
- var symbol_counter: u32 = p.symbol_counter;
-
- for (s.items, 0..) |item, i| {
- if (i > 0) {
- p.print(",");
- }
-
- p.print(item.original_name);
- assert(item.original_name.len > 0);
- p.print(":");
- // this is unsound
- // this is technical debt
- // we need to handle symbol collisions for this
- p.print("$eXp0rT_");
- var buf: [16]u8 = undefined;
- p.print(std.fmt.bufPrint(&buf, "{}", .{bun.fmt.hexIntLower(symbol_counter)}) catch unreachable);
- symbol_counter +|= 1;
- }
-
- p.print("}=import.meta.require(");
- p.printImportRecordPath(import_record);
- p.print(")");
- p.printSemicolonAfterStatement();
- p.printWhitespacer(ws("export {"));
-
- // reset symbol counter back
- symbol_counter = p.symbol_counter;
-
- for (s.items, 0..) |item, i| {
- if (i > 0) {
- p.print(",");
- }
-
- // this is unsound
- // this is technical debt
- // we need to handle symbol collisions for this
- p.print("$eXp0rT_");
- var buf: [16]u8 = undefined;
- p.print(std.fmt.bufPrint(&buf, "{}", .{bun.fmt.hexIntLower(symbol_counter)}) catch unreachable);
- symbol_counter +|= 1;
- p.printWhitespacer(ws(" as "));
- p.print(item.alias);
- }
-
- p.print("}");
- p.printSemicolonAfterStatement();
- p.symbol_counter = symbol_counter;
- return;
- }
- }
-
p.printWhitespacer(ws("export {"));
if (!s.is_single_line) {
@@ -4755,47 +4711,17 @@ fn NewPrinter(
p.printGlobalBunImportStatement(s.*);
return;
},
- // .hardcoded => {
- // p.printHardcodedImportStatement(s.*);
- // return;
- // },
else => {},
}
}
- if (record.do_commonjs_transform_in_printer or record.path.is_disabled) {
- const require_ref = p.options.require_ref;
-
- const module_id = record.module_id;
-
- if (!record.path.is_disabled and std.mem.indexOfScalar(u32, p.imported_module_ids.items, module_id) == null) {
- p.printWhitespacer(ws("import * as"));
- p.print(" ");
- p.printModuleId(module_id);
- p.print(" ");
- p.printWhitespacer(ws("from "));
- p.print("\"");
- p.print(record.path.text);
- p.print("\"");
- p.printSemicolonAfterStatement();
- try p.imported_module_ids.append(module_id);
- }
-
+ if (record.path.is_disabled) {
if (record.contains_import_star) {
p.print("var ");
p.printSymbol(s.namespace_ref);
p.@"print = "();
- if (!record.path.is_disabled) {
- p.printSymbol(require_ref.?);
- p.print("(");
- p.printModuleId(module_id);
-
- p.print(");");
- p.printNewline();
- } else {
- p.printDisabledImport();
- p.printSemicolonAfterStatement();
- }
+ p.printDisabledImport();
+ p.printSemicolonAfterStatement();
}
if (s.items.len > 0 or s.default_name != null) {
@@ -4839,12 +4765,6 @@ fn NewPrinter(
if (record.contains_import_star) {
p.printSymbol(s.namespace_ref);
p.printSemicolonAfterStatement();
- } else if (!record.path.is_disabled) {
- p.printSymbol(require_ref.?);
- p.print("(");
- p.printModuleId(module_id);
- p.print(")");
- p.printSemicolonAfterStatement();
} else {
p.printDisabledImport();
p.printSemicolonAfterStatement();
@@ -5056,13 +4976,7 @@ fn NewPrinter(
unreachable;
const quote = bestQuoteCharForString(u8, import_record.path.text, false);
- if (import_record.print_namespace_in_path and import_record.module_id != 0) {
- p.print(quote);
- p.print(import_record.path.namespace);
- p.print(":");
- p.printModuleIdAssumeEnabled(import_record.module_id);
- p.print(quote);
- } else if (import_record.print_namespace_in_path and !import_record.path.isFile()) {
+ if (import_record.print_namespace_in_path and !import_record.path.isFile()) {
p.print(quote);
p.print(import_record.path.namespace);
p.print(":");
@@ -6433,8 +6347,9 @@ pub fn printWithWriterAndPlatform(
if (opts.module_type == .internal_kit_dev) {
printer.indent();
printer.printIndent();
- printer.fmt("{d}", .{source.path.hashForKit()}) catch bun.outOfMemory();
- printer.print(": function");
+ printer.print('"');
+ printer.printUTF8StringEscapedQuotes(source.path.pretty, '"');
+ printer.print('"');
printer.printFunc(parts[0].stmts[0].data.s_expr.value.data.e_function.func);
printer.print(",\n");
} else {
diff --git a/src/kit/DevServer.zig b/src/kit/DevServer.zig
index 804a8803b492c..4e9b6989e39b6 100644
--- a/src/kit/DevServer.zig
+++ b/src/kit/DevServer.zig
@@ -1,5 +1,5 @@
//! Instance of the development server. Controls an event loop, web server,
-//! bundling threads, and JavaScript VM instance. All data is held in memory.
+//! bundling state, and JavaScript VM instance. All work is cached in-memory.
//!
//! Currently does not have a `deinit()`, as it is assumed to be alive for the
//! remainder of this process' lifespan.
@@ -8,10 +8,11 @@ pub const DevServer = @This();
pub const Options = struct {
cwd: []u8,
routes: []Route,
+ framework: kit.Framework,
listen_config: uws.AppListenConfig = .{ .port = 3000 },
dump_sources: ?[]const u8 = if (Environment.isDebug) ".kit-debug" else null,
verbose_watcher: bool = false,
- // TODO: make it possible to inherit a js VM
+ // TODO: make it required to inherit a js VM
};
/// Accepting a custom allocator for all of DevServer would be misleading
@@ -19,7 +20,6 @@ pub const Options = struct {
const default_allocator = bun.default_allocator;
cwd: []const u8,
-dump_dir: ?std.fs.Dir,
// UWS App
app: *App,
@@ -33,16 +33,32 @@ listener: ?*App.ListenSocket,
// Server Runtime
server_global: *DevGlobalObject,
vm: *VirtualMachine,
+/// This is a handle to the server_fetch_function, which is shared
+/// across all loaded modules. Its type is `(Request, Id) => Response`
+server_fetch_function_callback: JSC.Strong,
+server_register_update_callback: JSC.Strong,
// Bundling
-bundle_thread: BundleThread,
+client_graph: IncrementalGraph(.client),
+server_graph: IncrementalGraph(.server),
+framework: kit.Framework,
+bun_watcher: *JSC.Watcher,
+server_bundler: Bundler,
+client_bundler: Bundler,
+ssr_bundler: Bundler,
+/// Stored and reused for bundling tasks
+log: Log,
+
+/// To reduce complexity of BundleV2's return type being different on
+/// compile-time logic, extra kit-specific metadata is returned through a
+/// pointer to DevServer, and writing directly to this field.
+///
+/// Only one bundle is run at a time (batched with all files needed),
+/// so there is never contention.
+bundle_result: ?ExtraBundleData,
-// // Watch + HMR
-// bun_watcher: *HotReloader.Watcher,
-/// Required by `bun.JSC.NewHotReloader`
-bundler: Bundler,
-/// Required by `Bundler`
-log_do_not_use: Log,
+// Debugging
+dump_dir: ?std.fs.Dir,
pub const internal_prefix = "/_bun";
pub const client_prefix = internal_prefix ++ "/client";
@@ -52,8 +68,10 @@ pub const Route = struct {
pattern: [:0]const u8,
entry_point: []const u8,
- server_bundle: BundlePromise(ServerBundle) = .unqueued,
- client_bundle: BundlePromise(ClientBundle) = .unqueued,
+ bundle: BundleState = .stale,
+ client_files: std.AutoArrayHashMapUnmanaged(IncrementalGraph(.client).Index, void) = .{},
+ server_files: std.AutoArrayHashMapUnmanaged(IncrementalGraph(.server).Index, void) = .{},
+ module_name_string: ?bun.String = null,
/// Assigned in DevServer.init
dev: *DevServer = undefined,
@@ -62,31 +80,46 @@ pub const Route = struct {
pub fn clientPublicPath(route: *const Route) []const u8 {
return route.client_bundled_url[0 .. route.client_bundled_url.len - "/client.js".len];
}
-};
-/// Prepared server-side bundle and loaded JavaScript module
-const ServerBundle = struct {
- files: []OutputFile,
- server_request_callback: JSC.JSValue,
+ pub const Index = enum(u32) { _ };
};
-/// Preparred client-side bundle.
-/// Namespaced to URL: `/_bun/client/:route_index/:file_path`
-const ClientBundle = struct {
- files: []OutputFile,
- /// Indexes into this are indexes into `files`.
- /// This is case insensitive because URL paths should be case insensitive.
- files_index: bun.CaseInsensitiveASCIIStringArrayHashMapUnmanaged(void),
-
- pub fn getFile(bundle: *ClientBundle, filename: []const u8) ?*OutputFile {
- return if (bundle.files_index.getIndex(filename)) |i|
- &bundle.files[i]
- else
- null;
+/// Three-way maybe state
+const BundleState = union(enum) {
+ /// Bundled assets are not prepared
+ stale,
+ /// Build failure
+ fail: Failure,
+
+ ready: Bundle,
+
+ fn reset(s: *BundleState) void {
+ switch (s.*) {
+ .stale => return,
+ .fail => |f| f.deinit(),
+ .ready => |b| b.deinit(),
+ }
+ s.* = .stale;
}
+
+ const NonStale = union(enum) {
+ /// Build failure
+ fail: Failure,
+ ready: Bundle,
+ };
+};
+
+const Bundle = struct {
+ /// Backed by default_allocator.
+ client_bundle: []const u8,
};
-pub fn init(options: Options) *DevServer {
+pub fn init(options: Options) !*DevServer {
+ {
+ @panic("Behavior Regressed due to Watcher Changes");
+ }
+
+ bun.analytics.Features.kit_dev +|= 1;
if (JSC.VirtualMachine.VMHolder.vm != null)
@panic("Cannot initialize kit.DevServer on a thread with an active JSC.VirtualMachine");
@@ -101,6 +134,8 @@ pub fn init(options: Options) *DevServer {
const app = App.create(.{});
+ const separate_ssr_graph = if (options.framework.server_components) |sc| sc.separate_ssr_graph else false;
+
const dev = bun.new(DevServer, .{
.cwd = options.cwd,
.app = app,
@@ -109,46 +144,44 @@ pub fn init(options: Options) *DevServer {
.port = @intCast(options.listen_config.port),
.hostname = options.listen_config.host orelse "localhost",
},
+ .server_fetch_function_callback = .{},
+ .server_register_update_callback = .{},
.listener = null,
- .bundle_thread = BundleThread.uninitialized,
+ .log = Log.init(default_allocator),
+ .client_graph = undefined,
+ .server_graph = undefined,
+ .dump_dir = dump_dir,
+ .framework = options.framework,
+ .bundle_result = null,
+
.server_global = undefined,
.vm = undefined,
- .dump_dir = dump_dir,
- // .bun_watcher = undefined,
- .bundler = undefined,
- .log_do_not_use = Log.init(bun.failing_allocator),
+ .bun_watcher = undefined,
+ .server_bundler = undefined,
+ .client_bundler = undefined,
+ .ssr_bundler = undefined,
});
- dev.bundler = bun.Bundler.init(
- default_allocator,
- &dev.log_do_not_use,
- std.mem.zeroes(bun.Schema.Api.TransformOptions),
- null, // TODO:
- ) catch bun.outOfMemory();
-
- const loaders = bun.options.loadersFromTransformOptions(default_allocator, null, .bun) catch
- bun.outOfMemory();
-
- dev.bundler.options = .{
- .entry_points = &.{},
- .define = dev.bundler.options.define,
- .loaders = loaders,
- .log = &dev.log_do_not_use,
- .output_dir = "", // this disables filesystem output
- .output_format = .internal_kit_dev,
- .out_extensions = bun.StringHashMap([]const u8).init(bun.failing_allocator),
-
- // unused by all code
- .resolve_mode = .dev,
- // technically used (in macro) but should be removed
- .transform_options = std.mem.zeroes(bun.Schema.Api.TransformOptions),
- };
- dev.bundler.configureLinker();
- dev.bundler.resolver.opts = dev.bundler.options;
+ dev.server_graph = .{ .owner = dev };
+ dev.client_graph = .{ .owner = dev };
- // const fs = bun.fs.FileSystem.init(options.cwd) catch @panic("Failed to init FileSystem");
+ // const fs = try bun.fs.FileSystem.init(options.cwd);
// dev.bun_watcher = HotReloader.init(dev, fs, options.verbose_watcher, false);
- // dev.bundler.resolver.watcher = dev.bun_watcher.getResolveWatcher();
+ // dev.server_bundler.resolver.watcher = dev.bun_watcher.getResolveWatcher();
+ // dev.client_bundler.resolver.watcher = dev.bun_watcher.getResolveWatcher();
+
+ try dev.initBundler(&dev.server_bundler, .server);
+ try dev.initBundler(&dev.client_bundler, .client);
+ if (separate_ssr_graph)
+ try dev.initBundler(&dev.ssr_bundler, .ssr);
+
+ dev.framework = dev.framework.resolve(
+ &dev.server_bundler.resolver,
+ &dev.client_bundler.resolver,
+ ) catch {
+ Output.errGeneric("Failed to resolve all imports required by the framework", .{});
+ return error.FrameworkInitialization;
+ };
dev.vm = VirtualMachine.initKit(.{
.allocator = default_allocator,
@@ -161,11 +194,6 @@ pub fn init(options: Options) *DevServer {
dev.vm.jsc = dev.vm.global.vm();
dev.vm.event_loop.ensureWaker();
- _ = JSC.WorkPool.get();
- const thread = dev.bundle_thread.spawn() catch |err|
- Output.panic("Failed to spawn bundler thread: {}", .{err});
- thread.detach();
-
var has_fallback = false;
for (options.routes, 0..) |*route, i| {
@@ -199,6 +227,57 @@ pub fn init(options: Options) *DevServer {
return dev;
}
+fn initBundler(dev: *DevServer, bundler: *Bundler, comptime renderer: kit.Renderer) !void {
+ const framework = dev.framework;
+
+ bundler.* = try bun.Bundler.init(
+ default_allocator, // TODO: this is likely a memory leak
+ &dev.log,
+ std.mem.zeroes(bun.Schema.Api.TransformOptions),
+ null, // TODO:
+ );
+
+ bundler.options.target = switch (renderer) {
+ .client => .browser,
+ .server, .ssr => .bun,
+ };
+ bundler.options.public_path = switch (renderer) {
+ .client => client_prefix,
+ .server, .ssr => dev.cwd,
+ };
+ bundler.options.entry_points = &.{};
+ bundler.options.log = &dev.log;
+ bundler.options.output_dir = ""; // this disables filesystem output;
+ bundler.options.entry_naming = "bundle.js"; // unused output file generation is skipped
+ bundler.options.output_format = .internal_kit_dev;
+ bundler.options.out_extensions = bun.StringHashMap([]const u8).init(bundler.allocator);
+ bundler.options.hot_module_reloading = true;
+
+ bundler.options.react_fast_refresh = renderer == .client and framework.react_fast_refresh != null;
+ bundler.options.server_components = framework.server_components != null;
+
+ bundler.options.conditions = try bun.options.ESMConditions.init(default_allocator, bundler.options.target.defaultConditions());
+ if (renderer == .server and framework.server_components != null) {
+ try bundler.options.conditions.appendSlice(&.{"react-server"});
+ }
+
+ bundler.options.tree_shaking = false;
+ bundler.options.minify_syntax = true;
+ bundler.options.minify_identifiers = false;
+ bundler.options.minify_whitespace = false;
+ bundler.options.kit = dev;
+
+ bundler.configureLinker();
+ try bundler.configureDefines();
+
+ try kit.addImportMetaDefines(default_allocator, bundler.options.define, .development, switch (renderer) {
+ .client => .client,
+ .server, .ssr => .server,
+ });
+
+ bundler.resolver.opts = bundler.options;
+}
+
pub fn runLoopForever(dev: *DevServer) noreturn {
const lock = dev.vm.jsc.getAPILock();
defer lock.release();
@@ -213,7 +292,7 @@ pub fn runLoopForever(dev: *DevServer) noreturn {
fn onListen(ctx: *DevServer, maybe_listen: ?*App.ListenSocket) void {
const listen: *App.ListenSocket = maybe_listen orelse {
- @panic("TODO: handle listen failure");
+ bun.todoPanic(@src(), "handle listen failure", .{});
};
ctx.listener = listen;
@@ -235,31 +314,196 @@ fn onAssetRequestInit(dev: *DevServer, req: *Request, resp: *Response) void {
return req.setYield(true);
break :route &dev.routes[i];
};
- const asset_name = req.parameter(1);
- dev.getOrEnqueueBundle(resp, route, .client, .{ .file_name = asset_name });
+ // const asset_name = req.parameter(1);
+ switch (route.dev.getRouteBundle(route)) {
+ .ready => |bundle| {
+ sendJavaScriptSource(bundle.client_bundle, resp);
+ },
+ .fail => |fail| {
+ fail.sendAsHttpResponse(resp, route);
+ },
+ }
}
fn onServerRequestInit(route: *Route, req: *Request, resp: *Response) void {
- _ = req;
- route.dev.getOrEnqueueBundle(resp, route, .server, .{});
+ switch (route.dev.getRouteBundle(route)) {
+ .ready => |ready| {
+ onServerRequestWithBundle(route, ready, req, resp);
+ },
+ .fail => |fail| {
+ fail.sendAsHttpResponse(resp, route);
+ },
+ }
}
-// uws with bundle handlers
+const ExtraBundleData = struct {};
+
+fn getRouteBundle(dev: *DevServer, route: *Route) BundleState.NonStale {
+ if (route.bundle == .stale) {
+ var fail: Failure = .{
+ .zig_error = error.FileNotFound,
+ };
+ route.bundle = bundle: {
+ const success = dev.performBundleAndWaitInner(route, &fail) catch |err| {
+ bun.handleErrorReturnTrace(err, @errorReturnTrace());
+ if (fail == .zig_error) {
+ if (dev.log.hasAny()) {
+ fail = Failure.fromLog(&dev.log);
+ } else {
+ fail = .{ .zig_error = err };
+ }
+ }
+ fail.printToConsole(route);
+ break :bundle .{ .fail = fail };
+ };
+ break :bundle .{ .ready = success };
+ };
+ }
+ return switch (route.bundle) {
+ .stale => unreachable,
+ .fail => |fail| .{ .fail = fail },
+ .ready => |ready| .{ .ready = ready },
+ };
+}
+
+/// Error handling is done either by writing to `fail` with a specific failure,
+/// or by appending to `dev.log`. The caller, `getRouteBundle`, will handle the
+/// error, including replying to the request as well as console logging.
+fn performBundleAndWaitInner(dev: *DevServer, route: *Route, fail: *Failure) !Bundle {
+ var heap = try ThreadlocalArena.init();
+ defer heap.deinit();
+
+ const allocator = heap.allocator();
+ var ast_memory_allocator = try allocator.create(bun.JSAst.ASTMemoryAllocator);
+ ast_memory_allocator.* = .{ .allocator = allocator };
+ ast_memory_allocator.reset();
+ ast_memory_allocator.push();
+
+ if (dev.framework.server_components == null) {
+ // The handling of the dependency graph is SLIGHTLY different. It's
+ // enough that it would be incorrect to let the current code execute at
+ // all.
+ bun.todoPanic(@src(), "support non-server components build", .{});
+ }
+
+ const bv2 = try BundleV2.init(
+ &dev.server_bundler,
+ if (dev.framework.server_components != null) .{
+ .framework = dev.framework,
+ .client_bundler = &dev.client_bundler,
+ .ssr_bundler = &dev.ssr_bundler,
+ } else @panic("TODO: support non-server components"),
+ allocator,
+ JSC.AnyEventLoop.init(allocator),
+ false, // reloading is handled separately
+ JSC.WorkPool.get(),
+ heap,
+ );
+ bv2.bun_watcher = dev.bun_watcher;
+ // this.plugins = completion.plugins;
+
+ defer {
+ if (bv2.graph.pool.pool.threadpool_context == @as(?*anyopaque, @ptrCast(bv2.graph.pool))) {
+ bv2.graph.pool.pool.threadpool_context = null;
+ }
+ ast_memory_allocator.pop();
+ bv2.deinit();
+ }
+
+ errdefer {
+ // Wait for wait groups to finish. There still may be ongoing work.
+ bv2.linker.source_maps.line_offset_wait_group.wait();
+ bv2.linker.source_maps.quoted_contents_wait_group.wait();
+ }
+
+ const output_files = try bv2.runFromJSInNewThread(&.{
+ route.entry_point,
+ dev.framework.entry_server.?,
+ }, &.{
+ dev.framework.entry_client.?,
+ });
-fn onAssetRequestWithBundle(route: *Route, resp: *Response, ctx: BundleKind.client.Context(), bundle: *ClientBundle) void {
- _ = route;
+ try dev.client_graph.ensureStaleBitCapacity();
+ try dev.server_graph.ensureStaleBitCapacity();
- const file = bundle.getFile(ctx.file_name) orelse
- return sendBuiltInNotFound(resp);
+ assert(output_files.items.len == 0);
- sendOutputFile(file, resp);
+ bv2.bundler.log.printForLogLevel(Output.errorWriter()) catch {};
+ bv2.client_bundler.log.printForLogLevel(Output.errorWriter()) catch {};
+
+ const server_bundle = try dev.server_graph.takeBundle(.initial_response);
+ defer default_allocator.free(server_bundle);
+
+ const client_bundle = try dev.client_graph.takeBundle(.initial_response);
+ errdefer default_allocator.free(client_bundle);
+
+ if (dev.log.hasAny()) {
+ dev.log.printForLogLevel(Output.errorWriter()) catch {};
+ }
+
+ const server_code = c.KitLoadServerCode(dev.server_global, bun.String.createLatin1(server_bundle));
+ dev.vm.waitForPromise(.{ .internal = server_code.promise });
+
+ switch (server_code.promise.unwrap(dev.vm.jsc, .mark_handled)) {
+ .pending => unreachable, // promise is settled
+ .rejected => |err| {
+ fail.* = Failure.fromJSServerLoad(err, dev.server_global.js());
+ return error.ServerJSLoad;
+ },
+ .fulfilled => |v| bun.assert(v == .undefined),
+ }
+
+ if (route.module_name_string == null) {
+ route.module_name_string = bun.String.createUTF8(bun.path.relative(dev.cwd, route.entry_point));
+ }
+
+ if (!dev.server_fetch_function_callback.has()) {
+ const default_export = c.KitGetRequestHandlerFromModule(dev.server_global, server_code.key);
+ if (!default_export.isObject())
+ @panic("Internal assertion failure: expected interface from HMR runtime to be an object");
+ const fetch_function: JSValue = default_export.get(dev.server_global.js(), "handleRequest") orelse
+ @panic("Internal assertion failure: expected interface from HMR runtime to contain handleRequest");
+ bun.assert(fetch_function.isCallable(dev.vm.jsc));
+ dev.server_fetch_function_callback = JSC.Strong.create(fetch_function, dev.server_global.js());
+ const register_update = default_export.get(dev.server_global.js(), "registerUpdate") orelse
+ @panic("Internal assertion failure: expected interface from HMR runtime to contain registerUpdate");
+ dev.server_register_update_callback = JSC.Strong.create(register_update, dev.server_global.js());
+
+ fetch_function.ensureStillAlive();
+ register_update.ensureStillAlive();
+ } else {
+ bun.todoPanic(@src(), "Kit: server's secondary bundle", .{});
+ }
+
+ return .{
+ .client_bundle = client_bundle,
+ };
}
-fn onServerRequestWithBundle(route: *Route, resp: *Response, ctx: BundleKind.server.Context(), bundle: *ServerBundle) void {
- _ = ctx; // autofix
+pub fn receiveChunk(
+ dev: *DevServer,
+ abs_path: []const u8,
+ side: kit.Renderer,
+ chunk: bun.bundle_v2.CompileResult,
+) !void {
+ return switch (side) {
+ .server => dev.server_graph.addChunk(abs_path, chunk, false),
+ .ssr => dev.server_graph.addChunk(abs_path, chunk, true),
+ .client => dev.client_graph.addChunk(abs_path, chunk, false),
+ };
+}
+
+// uws with bundle handlers
+
+fn onServerRequestWithBundle(route: *Route, bundle: Bundle, req: *Request, resp: *Response) void {
+ _ = bundle;
+ _ = req;
const dev = route.dev;
const global = dev.server_global.js();
+ const server_request_callback = dev.server_fetch_function_callback.get() orelse
+ unreachable; // did not bundle
+
const context = JSValue.createEmptyObject(global, 1);
context.put(
dev.server_global.js(),
@@ -267,18 +511,35 @@ fn onServerRequestWithBundle(route: *Route, resp: *Response, ctx: BundleKind.ser
bun.String.init(route.client_bundled_url).toJS(global),
);
- const result = bundle.server_request_callback.call(
+ var result = server_request_callback.call(
global,
.undefined,
- &.{context},
+ &.{
+ context,
+ route.module_name_string.?.toJS(dev.server_global.js()),
+ },
) catch |err| {
const exception = global.takeException(err);
const fail: Failure = .{ .request_handler = exception };
- fail.printToConsole(route, .server);
- fail.sendAsHttpResponse(resp, route, .server);
+ fail.printToConsole(route);
+ fail.sendAsHttpResponse(resp, route);
return;
};
+ if (result.asAnyPromise()) |promise| {
+ dev.vm.waitForPromise(promise);
+ switch (promise.unwrap(dev.vm.jsc, .mark_handled)) {
+ .pending => unreachable, // was waited for
+ .fulfilled => |r| result = r,
+ .rejected => |e| {
+ const fail: Failure = .{ .request_handler = e };
+ fail.printToConsole(route);
+ fail.sendAsHttpResponse(resp, route);
+ return;
+ },
+ }
+ }
+
// TODO: This interface and implementation is very poor. but fine until API
// considerations become important (as of writing, there are 3 dozen todo
// items before it)
@@ -290,8 +551,10 @@ fn onServerRequestWithBundle(route: *Route, resp: *Response, ctx: BundleKind.ser
// This would allow us to support all of the nice things `new Response` allows
const bun_string = result.toBunString(dev.server_global.js());
- if (bun_string.tag == .Dead) @panic("TODO NOT STRING");
defer bun_string.deref();
+ if (bun_string.tag == .Dead) {
+ bun.todoPanic(@src(), "Kit: support non-string return value", .{});
+ }
const utf8 = bun_string.toUTF8(default_allocator);
defer utf8.deinit();
@@ -326,357 +589,246 @@ fn sendOutputFile(file: *const OutputFile, resp: *Response) void {
}
}
+fn sendJavaScriptSource(code: []const u8, resp: *Response) void {
+ if (code.len == 0) {
+ resp.writeStatus("202 No Content");
+ resp.writeHeaderInt("Content-Length", 0);
+ resp.end("", true);
+ return;
+ }
+
+ resp.writeStatus("200 OK");
+ // TODO: CSS, Sourcemap
+ resp.writeHeader("Content-Type", MimeType.javascript.value);
+ resp.end(code, true); // TODO: You should never call res.end(huge buffer)
+}
+
fn sendBuiltInNotFound(resp: *Response) void {
const message = "404 Not Found";
resp.writeStatus("404 Not Found");
resp.end(message, true);
}
-// bundling
-
-const BundleKind = enum {
- client,
- server,
-
- fn Bundle(kind: BundleKind) type {
- return switch (kind) {
- .client => ClientBundle,
- .server => ServerBundle,
- };
- }
-
- /// Routing information from uws.Request is stack allocated.
- /// This union has no type tag because it can be inferred from surrounding data.
- fn Context(kind: BundleKind) type {
- return switch (kind) {
- .client => struct { file_name: []const u8 },
+/// The paradigm of Kit's incremental state is to store a separate list of files
+/// than the Graph in bundle_v2. When watch events happen, the bundler is run on
+/// the changed files, excluding non-stale files via `isFileStale`.
+///
+/// Upon bundle completion, both `client_graph` and `server_graph` have their
+/// `addChunk` methods called with all new chunks, counting the total length
+/// needed. A call to `takeBundle` joins all of the chunks, resulting in the
+/// code to send to client or evaluate on the server.
+///
+/// This approach was selected as it resulted in the fewest changes in the
+/// bundler. It also allows the bundler to avoid memory buildup by ensuring its
+/// arenas don't live too long.
+///
+/// Since all routes share the two graphs, bundling a new route that shared
+/// a module from a previously bundled route will perform the same exclusion
+/// behavior that rebuilds use. This also ensures that two routes on the server
+/// do not emit duplicate dependencies. By tracing `imports` on each file in
+/// the module graph recursively, the full bundle for any given route can
+/// be re-materialized (required when pressing Cmd+R after any client update)
+pub fn IncrementalGraph(side: kit.Side) type {
+ return struct {
+ owner: *DevServer,
+
+ bundled_files: bun.StringArrayHashMapUnmanaged(File) = .{},
+ stale_files: bun.bit_set.DynamicBitSetUnmanaged = .{},
+
+ server_is_rsc: if (side == .server) bun.bit_set.DynamicBitSetUnmanaged else void =
+ if (side == .server) .{},
+ server_is_ssr: if (side == .server) bun.bit_set.DynamicBitSetUnmanaged else void =
+ if (side == .server) .{},
+
+ /// Byte length of every file queued for concatenation
+ current_incremental_chunk_len: usize = 0,
+ current_incremental_chunk_parts: std.ArrayListUnmanaged(switch (side) {
+ .client => Index,
+ // these slices do not outlive the bundler, and must be joined
+ // before its arena is deinitialized.
+ .server => []const u8,
+ }) = .{},
+
+ /// An index into `bundled_files` or `stale_files`
+ pub const Index = enum(u32) { _ };
+
+ pub const File = switch (side) {
+ // The server's incremental graph does not store previously bundled
+ // code because there is only one instance of the server. Instead,
+ // it stores which
.server => struct {},
- };
- }
+ .client => struct {
+ /// allocated by default_allocator
+ code: []const u8,
+ // /// To re-assemble a stale bundle (browser hard-reload), follow this recursively
+ // imports: []Index,
- inline fn completionFunction(comptime kind: BundleKind) fn (*Route, *Response, kind.Context(), *kind.Bundle()) void {
- return switch (kind) {
- .client => onAssetRequestWithBundle,
- .server => onServerRequestWithBundle,
+ // routes: u32,
+ },
};
- }
- const AnyContext: type = @Type(.{
- .Union = .{
- .layout = .auto,
- .tag_type = null,
- .fields = &fields: {
- const values = std.enums.values(BundleKind);
- var fields: [values.len]std.builtin.Type.UnionField = undefined;
- for (&fields, values) |*field, kind| {
- field.* = .{
- .name = @tagName(kind),
- .type = kind.Context(),
- .alignment = @alignOf(kind.Context()),
- };
- }
- break :fields fields;
- },
- .decls = &.{},
- },
- });
+ pub fn addChunk(
+ g: *@This(),
+ abs_path: []const u8,
+ chunk: bun.bundle_v2.CompileResult,
+ is_ssr_graph: bool,
+ ) !void {
+ const code = chunk.code();
+ if (code.len == 0) return;
+
+ g.current_incremental_chunk_len += code.len;
+
+ if (g.owner.dump_dir) |dump_dir| {
+ const cwd = g.owner.cwd;
+ var a: bun.PathBuffer = undefined;
+ var b: [bun.MAX_PATH_BYTES * 2]u8 = undefined;
+ const rel_path = bun.path.relativeBufZ(&a, cwd, abs_path);
+ const size = std.mem.replacementSize(u8, rel_path, "../", "_.._/");
+ _ = std.mem.replace(u8, rel_path, "../", "_.._/", &b);
+ const rel_path_escaped = b[0..size];
+ dumpBundle(dump_dir, switch (side) {
+ .client => .client,
+ .server => if (is_ssr_graph) .ssr else .server,
+ }, rel_path_escaped, code, true) catch |err| {
+ bun.handleErrorReturnTrace(err, @errorReturnTrace());
+ Output.warn("Could not dump bundle: {}", .{err});
+ };
+ }
- inline fn initAnyContext(comptime kind: BundleKind, data: kind.Context()) AnyContext {
- return @unionInit(AnyContext, @tagName(kind), data);
- }
-};
+ const gop = try g.bundled_files.getOrPut(default_allocator, abs_path);
-/// This will either immediately call `kind.completionFunction()`, or schedule a
-/// task to call it when the bundle is ready. The completion function is allowed
-/// to use yield.
-fn getOrEnqueueBundle(
- dev: *DevServer,
- resp: *Response,
- route: *Route,
- comptime kind: BundleKind,
- ctx: kind.Context(),
-) void {
- // const bundler = &dev.bundler;
- const bundle = switch (kind) {
- .client => &route.client_bundle,
- .server => &route.server_bundle,
- };
+ switch (side) {
+ .client => {
+ if (gop.found_existing) {
+ bun.default_allocator.free(gop.value_ptr.code);
+ }
+ gop.value_ptr.* = .{
+ .code = chunk.code(),
+ // .imports = &.{},
+ };
+ try g.current_incremental_chunk_parts.append(default_allocator, @enumFromInt(gop.index));
+ },
+ .server => {
+ // TODO: THIS ALLOCATION STRATEGY SUCKS. IT DOESNT DESERVE TO SHIP
+ if (!gop.found_existing) {
+ try g.server_is_ssr.resize(default_allocator, gop.index + 1, false);
+ try g.server_is_rsc.resize(default_allocator, gop.index + 1, false);
+ }
- switch (bundle.*) {
- .unqueued => {
- // TODO: use an object pool for this. `bun.ObjectPool` needs a refactor before it can be used
- const cb = BundleTask.DeferredRequest.newNode(resp, kind.initAnyContext(ctx));
-
- const task = bun.new(BundleTask, .{
- .owner = dev,
- .route = route,
- .kind = kind,
- .plugins = null,
- .handlers = .{ .first = cb },
- });
- bundle.* = .{ .pending = task };
- dev.bundle_thread.enqueue(task);
- },
- .pending => |task| {
- const cb = BundleTask.DeferredRequest.newNode(resp, kind.initAnyContext(ctx));
- // This is not a data race, since this list is drained on
- // the same thread as this function is called.
- task.handlers.prepend(cb);
- },
- .failed => |fail| {
- fail.sendAsHttpResponse(resp, route, kind);
- },
- .value => |*val| {
- kind.completionFunction()(route, resp, ctx, val);
- },
- }
-}
+ try g.current_incremental_chunk_parts.append(default_allocator, chunk.code());
-const BundleThread = bun.bundle_v2.BundleThread(BundleTask);
-
-/// A request to bundle something for development. Has one or more pending HTTP requests.
-pub const BundleTask = struct {
- owner: *DevServer,
- route: *Route,
- kind: BundleKind,
- // env: *bun.DotEnv.Loader, // TODO
- plugins: ?*JSC.API.JSBundler.Plugin,
- handlers: DeferredRequest.List,
-
- next: ?*BundleTask = null,
- result: BundleV2.Result = .{ .pending = {} },
-
- // initialized in the task itself:
- concurrent_task: JSC.EventLoopTask = undefined,
- bundler: *BundleV2 = undefined,
- log: Log = undefined,
-
- /// There is no function pointer, route, or context on this struct as all of
- /// this information is inferable from the associated BundleTask
- const DeferredRequest = struct {
- /// When cancelled, this is set to null
- resp: ?*Response,
- /// Only valid if req is non-null
- ctx: BundleKind.AnyContext,
-
- fn newNode(resp: *Response, ctx: BundleKind.AnyContext) *DeferredRequest.List.Node {
- const node = bun.new(DeferredRequest.List.Node, .{
- .data = .{
- .resp = resp,
- .ctx = ctx,
+ const bitset = switch (is_ssr_graph) {
+ true => &g.server_is_ssr,
+ false => &g.server_is_rsc,
+ };
+ bitset.set(gop.index);
},
- });
- resp.onAborted(*DeferredRequest, onCancel, &node.data);
- return node;
+ }
}
- fn onCancel(node: *DeferredRequest, resp: *Response) void {
- node.resp = null;
- node.ctx = undefined;
- _ = resp;
+ pub fn ensureStaleBitCapacity(g: *@This()) !void {
+ try g.stale_files.resize(default_allocator, g.bundled_files.count(), false);
}
- const List = std.SinglyLinkedList(DeferredRequest);
- };
-
- pub fn completeOnMainThread(task: *BundleTask) void {
- switch (task.kind) {
- inline else => |kind| task.completeOnMainThreadWithKind(kind),
+ pub fn invalidate(g: *@This(), paths: []const []const u8, hashes: []const u32, out_paths: *DualArray([]const u8)) void {
+ for (paths, hashes) |path, hash| {
+ const ctx: bun.StringArrayHashMapContext.Prehashed = .{
+ .value = hash,
+ .input = path,
+ };
+ const index = g.bundled_files.getIndexAdapted(path, ctx) orelse
+ continue;
+ g.stale_files.set(index);
+ switch (side) {
+ .client => out_paths.appendLeft(path),
+ .server => out_paths.appendRight(path),
+ }
+ }
}
- }
- fn completeOnMainThreadWithKind(task: *BundleTask, comptime kind: BundleKind) void {
- const route = task.route;
- const bundle = switch (kind) {
- .client => &route.client_bundle,
- .server => &route.server_bundle,
+ const ChunkKind = enum {
+ initial_response,
+ hmr_chunk,
};
- assert(bundle.* == .pending);
-
- if (task.result == .err) {
- const fail = Failure.fromLog(&task.log);
- fail.printToConsole(route, kind);
- task.finishHttpRequestsFailure(&fail);
- bundle.* = .{ .failed = fail };
- return;
+ fn reset(g: *@This()) void {
+ g.current_incremental_chunk_len = 0;
+ g.current_incremental_chunk_parts.clearRetainingCapacity();
}
- if (task.log.hasAny()) {
- Output.warn("Warnings {s} for {s}", .{
- @tagName(task.kind),
- route.pattern,
- });
- task.log.printForLogLevel(Output.errorWriter()) catch {};
- }
-
- const files = task.result.value.output_files.items;
- bun.assert(files.len > 0);
-
- const dev = route.dev;
- if (dev.dump_dir) |dump_dir| {
- dumpBundle(dump_dir, route, kind, files) catch |err| {
- bun.handleErrorReturnTrace(err, @errorReturnTrace());
- Output.warn("Could not dump bundle: {}", .{err});
+ pub fn takeBundle(g: *@This(), kind: ChunkKind) ![]const u8 {
+ const runtime = switch (kind) {
+ .initial_response => bun.kit.getHmrRuntime(side),
+ .hmr_chunk => "({\n",
};
- }
-
- switch (kind) {
- .client => {
- // Set the capacity to the exact size required to avoid over-allocation
- var files_index: bun.CaseInsensitiveASCIIStringArrayHashMapUnmanaged(void) = .{};
- files_index.entries.setCapacity(default_allocator, files.len) catch bun.outOfMemory();
- files_index.entries.len = files.len;
- for (files_index.keys(), files) |*index_key, file| {
- var dest_path = file.dest_path;
- if (bun.strings.hasPrefixComptime(dest_path, "./")) {
- dest_path = dest_path[2..];
- }
- index_key.* = dest_path;
- }
- files_index.reIndex(default_allocator) catch bun.outOfMemory();
- bundle.* = .{ .value = .{
- .files = files,
- .files_index = files_index,
- } };
- },
- .server => {
- const entry_point = files[0];
- const code = entry_point.value.buffer.bytes;
-
- const server_code = c.KitLoadServerCode(dev.server_global, bun.String.createLatin1(code));
- dev.vm.waitForPromise(.{ .internal = server_code.promise });
-
- switch (server_code.promise.unwrap(dev.vm.jsc, .mark_handled)) {
- .pending => unreachable, // promise is settled
- .rejected => |err| {
- const fail = Failure.fromJSServerLoad(err, dev.server_global.js());
- fail.printToConsole(task.route, .server);
- task.finishHttpRequestsFailure(&fail);
- bundle.* = .{ .failed = fail };
- return;
+ // A small amount of metadata is present at the end of the chunk
+ // to inform the HMR runtime of some crucial entry-point info. The
+ // upper bound of this can be calculated, but 64kb is given to
+ // ensure no problems.
+ //
+ // TODO: is a higher upper bound required on Windows?
+ // Alternate solution: calculate the upper bound by summing
+ // framework paths and then reusing that allocation.
+ var end_buf: [65536]u8 = undefined;
+ const end = end: {
+ var fbs = std.io.fixedBufferStream(&end_buf);
+ const w = fbs.writer();
+ switch (kind) {
+ .initial_response => {
+ w.writeAll("}, {\n main: ") catch unreachable;
+ const entry = switch (side) {
+ .server => g.owner.framework.entry_server,
+ .client => g.owner.framework.entry_client,
+ } orelse bun.todoPanic(@src(), "non-framework provided entry-point", .{});
+ bun.js_printer.writeJSONString(
+ bun.path.relative(g.owner.cwd, entry),
+ @TypeOf(w),
+ w,
+ .utf8,
+ ) catch unreachable;
+ w.writeAll("\n});") catch unreachable;
+ },
+ .hmr_chunk => {
+ w.writeAll("\n})") catch unreachable;
},
- .fulfilled => |v| bun.assert(v == .undefined),
- }
-
- const handler = c.KitGetRequestHandlerFromModule(dev.server_global, server_code.key);
-
- if (!handler.isCallable(dev.vm.jsc)) {
- @panic("TODO: handle not callable");
}
+ break :end fbs.getWritten();
+ };
- bundle.* = .{ .value = .{
- .files = files,
- .server_request_callback = handler,
- } };
- },
- }
-
- task.finishHttpRequestsSuccess(kind, &bundle.value);
- }
-
- fn finishHttpRequestsSuccess(task: *BundleTask, comptime kind: BundleKind, bundle: *kind.Bundle()) void {
- const func = comptime kind.completionFunction();
-
- while (task.handlers.popFirst()) |node| {
- defer bun.destroy(node);
- if (node.data.resp) |resp| {
- func(task.route, resp, @field(node.data.ctx, @tagName(kind)), bundle);
+ const files = g.bundled_files.values();
+
+ // This function performs one allocation, right here
+ var chunk = try std.ArrayListUnmanaged(u8).initCapacity(
+ default_allocator,
+ g.current_incremental_chunk_len + runtime.len + end.len,
+ );
+
+ chunk.appendSliceAssumeCapacity(runtime);
+ for (g.current_incremental_chunk_parts.items) |entry| {
+ chunk.appendSliceAssumeCapacity(switch (side) {
+ // entry is an index into files
+ .client => files[@intFromEnum(entry)].code,
+ // entry is the '[]const u8' itself
+ .server => entry,
+ });
}
- }
- }
-
- fn finishHttpRequestsFailure(task: *BundleTask, failure: *const Failure) void {
- while (task.handlers.popFirst()) |node| {
- defer bun.destroy(node);
- if (node.data.resp) |resp| {
- failure.sendAsHttpResponse(resp, task.route, task.kind);
+ chunk.appendSliceAssumeCapacity(end);
+ assert(chunk.capacity == chunk.items.len);
+
+ if (g.owner.dump_dir) |dump_dir| {
+ const rel_path_escaped = "latest_chunk.js";
+ dumpBundle(dump_dir, switch (side) {
+ .client => .client,
+ .server => .server,
+ }, rel_path_escaped, chunk.items, false) catch |err| {
+ bun.handleErrorReturnTrace(err, @errorReturnTrace());
+ Output.warn("Could not dump bundle: {}", .{err});
+ };
}
- }
- }
- pub fn configureBundler(task: *BundleTask, bundler: *Bundler, allocator: Allocator) !void {
- const dev = task.route.dev;
-
- bundler.* = try bun.Bundler.init(
- allocator,
- &task.log,
- std.mem.zeroes(bun.Schema.Api.TransformOptions),
- null, // TODO:
- );
-
- const define = bundler.options.define;
- bundler.options = dev.bundler.options;
-
- bundler.options.define = define;
- bundler.options.entry_points = (&task.route.entry_point)[0..1];
- bundler.options.log = &task.log;
- bundler.options.output_dir = ""; // this disables filesystem outpu;
- bundler.options.output_format = .internal_kit_dev;
- bundler.options.out_extensions = bun.StringHashMap([]const u8).init(bundler.allocator);
- bundler.options.react_fast_refresh = task.kind == .client;
-
- bundler.options.public_path = switch (task.kind) {
- .client => task.route.clientPublicPath(),
- .server => task.route.dev.cwd,
- };
- bundler.options.target = switch (task.kind) {
- .client => .browser,
- .server => .bun,
- };
- bundler.options.entry_naming = switch (task.kind) {
- // Always name it "client.{js/css}" so that the server can know
- // the entry-point script without waiting on a client bundle.
- .client => "client.[ext]",
- // For uniformity
- .server => "server.[ext]",
- };
- bundler.options.tree_shaking = false;
- bundler.options.minify_syntax = true;
-
- bundler.configureLinker();
- try bundler.configureDefines();
-
- // The following are from Vite: https://vitejs.dev/guide/env-and-mode
- // TODO: MODE, BASE_URL
- try bundler.options.define.insert(
- allocator,
- "import.meta.env.DEV",
- Define.Data.initBoolean(true),
- );
- try bundler.options.define.insert(
- allocator,
- "import.meta.env.PROD",
- Define.Data.initBoolean(false),
- );
- try bundler.options.define.insert(
- allocator,
- "import.meta.env.SSR",
- Define.Data.initBoolean(task.kind == .server),
- );
-
- bundler.resolver.opts = bundler.options;
- bundler.resolver.watcher = dev.bundler.resolver.watcher;
- }
-
- pub fn completeMini(task: *BundleTask, _: *void) void {
- task.completeOnMainThread();
- }
-
- pub fn completeOnBundleThread(task: *BundleTask) void {
- task.route.dev.vm.event_loop.enqueueTaskConcurrent(task.concurrent_task.js.from(task, .manual_deinit));
- }
-};
-
-/// Bundling should be concurrent, deduplicated, and cached.
-/// This acts as a sort of "native promise"
-fn BundlePromise(T: type) type {
- return union(enum) {
- unqueued,
- pending: *BundleTask,
- failed: Failure,
- value: T,
+ return chunk.items;
+ }
};
}
@@ -687,6 +839,7 @@ fn BundlePromise(T: type) type {
/// In the case a route was not able to fully compile, the `Failure` is stored
/// so that a browser refreshing the page can display this failure.
const Failure = union(enum) {
+ zig_error: anyerror,
/// Bundler and module resolution use `bun.logger` to report multiple errors at once.
bundler: std.ArrayList(bun.logger.Msg),
/// Thrown JavaScript exception while loading server code.
@@ -711,15 +864,14 @@ const Failure = union(enum) {
// TODO: deduplicate the two methods here. that isnt trivial because one has to
// style with ansi codes, and the other has to style with HTML.
- fn printToConsole(fail: *const Failure, route: *const Route, kind: BundleKind) void {
+ fn printToConsole(fail: *const Failure, route: *const Route) void {
defer Output.flush();
Output.prettyErrorln("", .{});
switch (fail.*) {
.bundler => |msgs| {
- Output.prettyErrorln("Errors while bundling {s}-side for '{s}'", .{
- @tagName(kind),
+ Output.prettyErrorln("Errors while bundling '{s}'", .{
route.pattern,
});
Output.flush();
@@ -730,6 +882,13 @@ const Failure = union(enum) {
Output.enable_ansi_colors_stderr,
) catch {};
},
+ .zig_error => |err| {
+ Output.prettyErrorln("Error while bundling '{s}': {s}", .{
+ route.pattern,
+ @errorName(err),
+ });
+ Output.flush();
+ },
.server_load => |strong| {
Output.prettyErrorln("Server route handler for '{s}' threw while loading", .{
route.pattern,
@@ -750,7 +909,7 @@ const Failure = union(enum) {
}
}
- fn sendAsHttpResponse(fail: *const Failure, resp: *Response, route: *const Route, kind: BundleKind) void {
+ fn sendAsHttpResponse(fail: *const Failure, resp: *Response, route: *const Route) void {
resp.writeStatus("500 Internal Server Error");
var buffer: [32768]u8 = undefined;
@@ -760,8 +919,7 @@ const Failure = union(enum) {
switch (fail.*) {
.bundler => |msgs| {
- writer.print("Errors while bundling {s}-side for '{s}'\n\n", .{
- @tagName(kind),
+ writer.print("Errors while bundling '{s}'\n\n", .{
route.pattern,
}) catch break :message null;
@@ -769,6 +927,9 @@ const Failure = union(enum) {
log.printForLogLevelWithEnableAnsiColors(writer, false) catch
break :message null;
},
+ .zig_error => |err| {
+ writer.print("Error while bundling '{s}': {s}\n", .{ route.pattern, @errorName(err) }) catch break :message null;
+ },
.server_load => |strong| {
writer.print("Server route handler for '{s}' threw while loading\n\n", .{
route.pattern,
@@ -795,22 +956,53 @@ const Failure = union(enum) {
};
// For debugging, it is helpful to be able to see bundles.
-fn dumpBundle(dump_dir: std.fs.Dir, route: *Route, kind: BundleKind, files: []OutputFile) !void {
- for (files) |file| {
- const name = bun.path.joinAbsString("/", &.{
- route.pattern,
- @tagName(kind),
- file.dest_path,
- }, .auto)[1..];
- var inner_dir = try dump_dir.makeOpenPath(bun.Dirname.dirname(u8, name).?, .{});
- defer inner_dir.close();
-
- switch (file.value) {
- .buffer => |buf| {
- try inner_dir.writeFile(.{ .data = buf.bytes, .sub_path = bun.path.basename(name) });
- },
- else => |t| Output.panic("TODO: implement dumping .{s}", .{@tagName(t)}),
- }
+fn dumpBundle(dump_dir: std.fs.Dir, side: kit.Renderer, rel_path: []const u8, chunk: []const u8, wrap: bool) !void {
+ const name = bun.path.joinAbsString("/", &.{
+ @tagName(side),
+ rel_path,
+ }, .auto)[1..];
+ var inner_dir = try dump_dir.makeOpenPath(bun.Dirname.dirname(u8, name).?, .{});
+ defer inner_dir.close();
+
+ const file = try inner_dir.createFile(bun.path.basename(name), .{});
+ defer file.close();
+
+ var bufw = std.io.bufferedWriter(file.writer());
+
+ try bufw.writer().print("// {s} bundled for {s}\n", .{
+ bun.fmt.quote(rel_path),
+ @tagName(side),
+ });
+ try bufw.writer().print("// Bundled at {d}, Bun " ++ bun.Global.package_json_version_with_canary ++ "\n", .{
+ std.time.nanoTimestamp(),
+ });
+
+ // Wrap in an object to make it valid syntax. Regardless, these files
+ // are never executable on their own as they contain only a single module.
+
+ if (wrap)
+ try bufw.writer().writeAll("({\n");
+
+ try bufw.writer().writeAll(chunk);
+
+ if (wrap)
+ try bufw.writer().writeAll("});\n");
+
+ try bufw.flush();
+}
+
+pub fn isFileStale(dev: *DevServer, path: []const u8, side: kit.Renderer) bool {
+ switch (side) {
+ inline else => |side_comptime| {
+ const g = switch (side_comptime) {
+ .client => &dev.client_graph,
+ .server => &dev.server_graph,
+ .ssr => &dev.server_graph,
+ };
+ const index = g.bundled_files.getIndex(path) orelse
+ return true; // non-existent files are considered stale
+ return g.stale_files.isSet(index);
+ },
}
}
@@ -843,18 +1035,23 @@ const DevWebSocket = struct {
dev: *DevServer,
pub fn onOpen(dw: *DevWebSocket, ws: AnyWebSocket) void {
- _ = ws.send("bun!", .binary, false, false);
- std.debug.print("open {*} {}\n", .{ dw, ws });
+ _ = dw; // autofix
+ _ = ws.send("bun!", .text, false, true);
+ _ = ws.subscribe("TODO");
}
pub fn onMessage(dw: *DevWebSocket, ws: AnyWebSocket, msg: []const u8, opcode: uws.Opcode) void {
- std.debug.print("message {*} {} {} '{s}'\n", .{ dw, ws, opcode, msg });
+ _ = dw; // autofix
+ _ = ws; // autofix
+ _ = msg; // autofix
+ _ = opcode; // autofix
}
pub fn onClose(dw: *DevWebSocket, ws: AnyWebSocket, exit_code: i32, message: []const u8) void {
+ _ = ws; // autofix
+ _ = exit_code; // autofix
+ _ = message; // autofix
defer bun.destroy(dw);
-
- std.debug.print("close {*} {} {} '{s}'\n", .{ dw, ws, exit_code, message });
}
};
@@ -882,11 +1079,101 @@ pub const c = struct {
extern fn KitGetRequestHandlerFromModule(global: *DevGlobalObject, module: *JSC.JSString) JSValue;
};
-pub fn reload(dev: *DevServer) void {
- // TODO: given no arguments, this method is absolutely useless. The watcher
- // must be augmented with more information.
- _ = dev;
- Output.warn("TODO: initiate hot reload", .{});
+pub fn reload(dev: *DevServer, reload_task: *const HotReloadTask) void {
+ dev.reloadWrap(reload_task) catch bun.todoPanic(@src(), "handle hot-reloading error", .{});
+}
+
+pub fn reloadWrap(dev: *DevServer, reload_task: *const HotReloadTask) !void {
+ const sfb = default_allocator;
+
+ const changed_file_paths = reload_task.paths[0..reload_task.count];
+ const changed_hashes = reload_task.hashes[0..reload_task.count];
+
+ defer for (changed_file_paths) |path| default_allocator.free(path);
+
+ var files_to_bundle = try DualArray([]const u8).initCapacity(sfb, changed_file_paths.len * 2);
+ defer files_to_bundle.deinit(sfb);
+ inline for (.{ &dev.server_graph, &dev.client_graph }) |g| {
+ g.invalidate(changed_file_paths, changed_hashes, &files_to_bundle);
+ }
+
+ bun.todoPanic(@src(), "rewire hot-bundling code", .{});
+
+ // const route = &dev.routes[0];
+
+ // const bundle_task = bun.new(BundleTask, .{
+ // .owner = dev,
+ // .route = route,
+ // .kind = .client,
+ // .plugins = null,
+ // .handlers = .{ .first = null },
+ // });
+ // assert(route.client_bundle != .pending); // todo: rapid reloads
+ // route.client_bundle = .{ .pending = bundle_task };
+ // dev.bundle_thread.enqueue(bundle_task);
+}
+
+pub fn bustDirCache(dev: *DevServer, path: []const u8) bool {
+ const a = dev.server_bundler.resolver.bustDirCache(path);
+ const b = dev.client_bundler.resolver.bustDirCache(path);
+ return a or b;
+}
+
+pub fn getLoaders(dev: *DevServer) *bun.options.Loader.HashTable {
+ // The watcher needs to know what loader to use for a file;
+ // therefore, we must ensure that server and client options
+ // use the same loader set.
+ return &dev.server_bundler.options.loaders;
+}
+
+/// A data structure to represent two arrays that share a known upper bound.
+/// The "left" array starts at the allocation start, and the "right" array
+/// starts at the allocation end.
+///
+/// An example use-case is having a list of files, but categorizing them
+/// into server/client. The total number of files is known.
+pub fn DualArray(T: type) type {
+ return struct {
+ items: []T,
+ left_end: u32,
+ right_start: u32,
+
+ pub fn initCapacity(allocator: Allocator, cap: usize) !@This() {
+ return .{
+ .items = try allocator.alloc(T, cap),
+ .left_end = 0,
+ .right_start = @intCast(cap),
+ };
+ }
+
+ pub fn deinit(a: @This(), allocator: Allocator) void {
+ allocator.free(a.items);
+ }
+
+ fn hasAny(a: @This()) bool {
+ return a.left_end != 0 or a.right_start != a.items.len;
+ }
+
+ pub fn left(a: @This()) []T {
+ return a.items[0..a.left_end];
+ }
+
+ pub fn right(a: @This()) []T {
+ return a.items[a.right_start..];
+ }
+
+ pub fn appendLeft(a: *@This(), item: T) void {
+ assert(a.left_end < a.right_start);
+ a.items[a.left_end] = item;
+ a.left_end += 1;
+ }
+
+ pub fn appendRight(a: *@This(), item: T) void {
+ assert(a.right_start > a.left_end);
+ a.right_start -= 1;
+ a.items[a.right_start] = item;
+ }
+ };
}
const std = @import("std");
@@ -896,6 +1183,8 @@ const bun = @import("root").bun;
const Environment = bun.Environment;
const assert = bun.assert;
+const kit = bun.kit;
+
const Log = bun.logger.Log;
const Bundler = bun.bundler.Bundler;
@@ -903,7 +1192,6 @@ const BundleV2 = bun.bundle_v2.BundleV2;
const Define = bun.options.Define;
const OutputFile = bun.options.OutputFile;
-// TODO: consider if using system output is not fit
const Output = bun.Output;
const uws = bun.uws;
@@ -923,3 +1211,5 @@ const JSInternalPromise = JSC.JSInternalPromise;
pub const HotReloader = JSC.NewHotReloader(DevServer, JSC.EventLoop, false);
pub const HotReloadTask = HotReloader.HotReloadTask;
+
+const ThreadlocalArena = @import("../mimalloc_arena.zig").Arena;
diff --git a/src/kit/KitDevGlobalObject.cpp b/src/kit/KitDevGlobalObject.cpp
index 7e3aee09777c1..8716fa3790dfd 100644
--- a/src/kit/KitDevGlobalObject.cpp
+++ b/src/kit/KitDevGlobalObject.cpp
@@ -1,36 +1,50 @@
#include "KitDevGlobalObject.h"
#include "JSNextTickQueue.h"
#include "JavaScriptCore/GlobalObjectMethodTable.h"
+#include "JavaScriptCore/JSInternalPromise.h"
#include "headers-handwritten.h"
namespace Kit {
-#define INHERIT_HOOK_METHOD(name) \
- Zig::GlobalObject::s_globalObjectMethodTable.name
+JSC::JSInternalPromise* moduleLoaderImportModule(
+ JSC::JSGlobalObject* jsGlobalObject,
+ JSC::JSModuleLoader*,
+ JSC::JSString* moduleNameValue,
+ JSC::JSValue parameters,
+ const JSC::SourceOrigin& sourceOrigin)
+{
+ // TODO: forward this to the runtime
+ JSC::VM&vm=jsGlobalObject->vm();
+ auto err = JSC::createTypeError(jsGlobalObject, WTF::makeString("Dynamic import should have been replaced with a hook into the module runtime"_s));
+ auto* promise = JSC::JSInternalPromise::create(vm, jsGlobalObject->internalPromiseStructure());
+ promise->reject(jsGlobalObject, err);
+ return promise;
+}
+
+#define INHERIT_HOOK_METHOD(name) Zig::GlobalObject::s_globalObjectMethodTable. name
-const JSC::GlobalObjectMethodTable DevGlobalObject::s_globalObjectMethodTable =
- {
- INHERIT_HOOK_METHOD(supportsRichSourceInfo),
- INHERIT_HOOK_METHOD(shouldInterruptScript),
- INHERIT_HOOK_METHOD(javaScriptRuntimeFlags),
- INHERIT_HOOK_METHOD(queueMicrotaskToEventLoop),
- INHERIT_HOOK_METHOD(shouldInterruptScriptBeforeTimeout),
- INHERIT_HOOK_METHOD(moduleLoaderImportModule),
- INHERIT_HOOK_METHOD(moduleLoaderResolve),
- INHERIT_HOOK_METHOD(moduleLoaderFetch),
- INHERIT_HOOK_METHOD(moduleLoaderCreateImportMetaProperties),
- INHERIT_HOOK_METHOD(moduleLoaderEvaluate),
- INHERIT_HOOK_METHOD(promiseRejectionTracker),
- INHERIT_HOOK_METHOD(reportUncaughtExceptionAtEventLoop),
- INHERIT_HOOK_METHOD(currentScriptExecutionOwner),
- INHERIT_HOOK_METHOD(scriptExecutionStatus),
- INHERIT_HOOK_METHOD(reportViolationForUnsafeEval),
- INHERIT_HOOK_METHOD(defaultLanguage),
- INHERIT_HOOK_METHOD(compileStreaming),
- INHERIT_HOOK_METHOD(instantiateStreaming),
- INHERIT_HOOK_METHOD(deriveShadowRealmGlobalObject),
- INHERIT_HOOK_METHOD(codeForEval),
- INHERIT_HOOK_METHOD(canCompileStrings),
+const JSC::GlobalObjectMethodTable DevGlobalObject::s_globalObjectMethodTable = {
+ INHERIT_HOOK_METHOD(supportsRichSourceInfo),
+ INHERIT_HOOK_METHOD(shouldInterruptScript),
+ INHERIT_HOOK_METHOD(javaScriptRuntimeFlags),
+ INHERIT_HOOK_METHOD(queueMicrotaskToEventLoop),
+ INHERIT_HOOK_METHOD(shouldInterruptScriptBeforeTimeout),
+ moduleLoaderImportModule,
+ INHERIT_HOOK_METHOD(moduleLoaderResolve),
+ INHERIT_HOOK_METHOD(moduleLoaderFetch),
+ INHERIT_HOOK_METHOD(moduleLoaderCreateImportMetaProperties),
+ INHERIT_HOOK_METHOD(moduleLoaderEvaluate),
+ INHERIT_HOOK_METHOD(promiseRejectionTracker),
+ INHERIT_HOOK_METHOD(reportUncaughtExceptionAtEventLoop),
+ INHERIT_HOOK_METHOD(currentScriptExecutionOwner),
+ INHERIT_HOOK_METHOD(scriptExecutionStatus),
+ INHERIT_HOOK_METHOD(reportViolationForUnsafeEval),
+ INHERIT_HOOK_METHOD(defaultLanguage),
+ INHERIT_HOOK_METHOD(compileStreaming),
+ INHERIT_HOOK_METHOD(instantiateStreaming),
+ INHERIT_HOOK_METHOD(deriveShadowRealmGlobalObject),
+ INHERIT_HOOK_METHOD(codeForEval),
+ INHERIT_HOOK_METHOD(canCompileStrings),
};
DevGlobalObject *
diff --git a/src/kit/KitSourceProvider.cpp b/src/kit/KitSourceProvider.cpp
index e28e1c4fd175e..4127efaa9ae65 100644
--- a/src/kit/KitSourceProvider.cpp
+++ b/src/kit/KitSourceProvider.cpp
@@ -15,7 +15,7 @@ namespace Kit {
extern "C" LoadServerCodeResult KitLoadServerCode(DevGlobalObject* global, BunString source) {
- String string = "kit://server/0/index.js"_s;
+ String string = "kit://server"_s;
JSC::SourceOrigin origin = JSC::SourceOrigin(WTF::URL(string));
JSC::SourceCode sourceCode = JSC::SourceCode(KitSourceProvider::create(
source.toWTFString(),
diff --git a/src/kit/bun-framework-rsc/client.tsx b/src/kit/bun-framework-rsc/client.tsx
new file mode 100644
index 0000000000000..30277f62a57ed
--- /dev/null
+++ b/src/kit/bun-framework-rsc/client.tsx
@@ -0,0 +1 @@
+console.log('incredible');
\ No newline at end of file
diff --git a/src/kit/bun-framework-rsc/server.tsx b/src/kit/bun-framework-rsc/server.tsx
new file mode 100644
index 0000000000000..7c713dac40dab
--- /dev/null
+++ b/src/kit/bun-framework-rsc/server.tsx
@@ -0,0 +1,33 @@
+///
+import type { Kit } from "bun";
+import React from "react";
+import { PassThrough } from "node:stream";
+// @ts-ignore
+import { renderToPipeableStream } from "react-server-dom-webpack/server";
+import { renderToHtml } from './ssr' with { bunKitGraph: 'ssr' };
+import { serverManifest } from 'bun:kit/server';
+
+export default async function (request: Request, route: any, meta: Kit.RouteMetadata): Promise {
+ const Route = route.default;
+ const page = (
+
+
+
+ Bun + React Server Components
+ {meta.styles.map(url => )}
+
+
+
+ {meta.scripts.map(url => )}
+
+
+ );
+
+ const { pipe } = renderToPipeableStream(page, serverManifest);
+ const rscPayload = pipe(new PassThrough());
+ return new Response(await renderToHtml(rscPayload), {
+ headers: {
+ 'Content-Type': 'text/html; charset=utf8',
+ }
+ });
+}
diff --git a/src/kit/bun-framework-rsc/ssr.tsx b/src/kit/bun-framework-rsc/ssr.tsx
new file mode 100644
index 0000000000000..96e896e585a0e
--- /dev/null
+++ b/src/kit/bun-framework-rsc/ssr.tsx
@@ -0,0 +1,21 @@
+///
+import type { PassThrough } from 'node:stream';
+// @ts-ignore
+import { use } from "react";
+// @ts-ignore
+import { createFromNodeStream } from "react-server-dom-webpack/client";
+import { renderToReadableStream } from "react-dom/server";
+import { clientManifest } from 'bun:kit/server';
+
+export function renderToHtml(rscPayload: PassThrough): Promise {
+ // TODO: this does not implement proper streaming
+ const promise = createFromNodeStream(rscPayload, {
+ moduleMap: clientManifest,
+ moduleLoading: {
+ prefix: ""
+ }
+ });
+ const Async = () => use(promise);
+ // @ts-ignore
+ return renderToReadableStream();
+}
diff --git a/src/kit/hmr-module.ts b/src/kit/hmr-module.ts
index e21e80688cdf4..4516b6dcb4bb1 100644
--- a/src/kit/hmr-module.ts
+++ b/src/kit/hmr-module.ts
@@ -5,33 +5,61 @@ const registry = new Map();
export type ModuleLoadFunction = (module: HotModule) => void;
export type ExportsCallbackFunction = (new_exports: any) => void;
+export const enum State {
+ Loading,
+ Error,
+}
+
+export const enum LoadModuleType {
+ AssertPresent,
+ UserDynamic,
+}
+
/**
* This object is passed as the CommonJS "module", but has a bunch of
* non-standard properties that are used for implementing hot-module
- * reloading. It is unacceptable to depend
+ * reloading. It is unacceptable to depend on these properties, and
+ * it will not be considered a breaking change.
+ *
+ * TODO: consider property mangling on this to prevent people from
+ * depending on the HMR internals
*/
export class HotModule {
exports: any = {};
+ _state = State.Loading;
_ext_exports = undefined;
__esModule = false;
- _import_meta?: ImportMeta;
+ _import_meta: ImportMeta | undefined = undefined;
+ _cached_failure: any = undefined;
constructor(public id: Id) {}
require(id: Id, onReload: null | ExportsCallbackFunction) {
- return loadModule(id).exports;
+ return loadModule(id, LoadModuleType.UserDynamic).exports;
}
importSync(id: Id, onReload: null | ExportsCallbackFunction) {
- const module = loadModule(id);
+ const module = loadModule(id, LoadModuleType.AssertPresent);
const { exports, __esModule } = module;
return __esModule ? exports : (module._ext_exports ??= { ...exports, default: exports });
}
+ async dynamicImport(specifier: string, opts?: ImportCallOptions) {
+ const module = loadModule(specifier, LoadModuleType.UserDynamic);
+ const { exports, __esModule } = module;
+ return __esModule
+ ? exports
+ : module._ext_exports ??= { ...exports, default: exports };
+ }
+
importMeta() {
return (this._import_meta ??= initImportMeta(this));
}
+
+ importBuiltin(id: string) {
+ return import.meta.require(id);
+ }
}
function initImportMeta(m: HotModule): ImportMeta {
@@ -45,21 +73,44 @@ function initImportMeta(m: HotModule): ImportMeta {
// registry.set(0, runtime);
// }
-export function loadModule(key: Id): HotModule {
+export function loadModule(key: Id, type: LoadModuleType): HotModule {
let module = registry.get(key);
- if (module) return module;
+ if (module) {
+ // Preserve failures until they are re-saved.
+ if (module._state == State.Error)
+ throw module._cached_failure;
+
+ return module;
+ }
module = new HotModule(key);
- registry.set(key, module);
const load = input_graph[key];
if (!load) {
- throw new Error(
- `Failed to load bundled module '${key}'. This is not a dynamic import, and therefore is a bug in Bun`,
- );
+ if(type == LoadModuleType.AssertPresent) {
+ throw new Error(`Failed to load bundled module '${key}'. This is not a dynamic import, and therefore is a bug in Bun Kit's bundler.`);
+ } else {
+ throw new Error(`Failed to resolve dynamic import '${key}'. In Bun Kit, all imports must be statically known at compile time so that the bundler can trace everything.`);
+ }
+ }
+ try {
+ registry.set(key, module);
+ load(module);
+ } catch (err) {
+ module._cached_failure = err;
+ module._state = State.Error;
+ throw err;
}
- load(module);
return module;
}
-runtimeHelpers.__name(HotModule.prototype.importSync, " importSync");
-runtimeHelpers.__name(HotModule.prototype.require, " require");
-runtimeHelpers.__name(loadModule, " loadModule");
+export function replaceModule(key: Id, load: ModuleLoadFunction) {
+ const module = registry.get(key);
+ if (module) {
+ module.exports = {};
+ load(module);
+ // TODO: repair live bindings
+ }
+}
+
+runtimeHelpers.__name(HotModule.prototype.importSync, ' importSync')
+runtimeHelpers.__name(HotModule.prototype.require, ' require')
+runtimeHelpers.__name(loadModule, ' loadModule')
diff --git a/src/kit/hmr-runtime-types.d.ts b/src/kit/hmr-runtime-types.d.ts
index f80f1d3cde6b7..03835f8d57ab4 100644
--- a/src/kit/hmr-runtime-types.d.ts
+++ b/src/kit/hmr-runtime-types.d.ts
@@ -1,9 +1,4 @@
-/*
- * A module id is an unsigned 52-bit numeric hash of the filepath.
- *
- * TODO: how resistant to hash collision is this? if it is not, an alternate approach must be taken.
- */
-type Id = number;
+type Id = string;
interface Config {
main: Id;
@@ -22,11 +17,19 @@ declare const config: Config;
* The runtime is bundled for server and client, which influences
* how hmr connection should be established, as well if there is
* a window to visually display errors with.
+ *
+ * TODO: rename this "side" to align with other code
*/
declare const mode: "client" | "server";
-/* What should be `export default`'d */
-declare var server_fetch_function: any;
+/*
+ * This variable becomes the default export. Kit uses this
+ * interface as opposed to a WebSocket connection.
+ */
+declare var server_exports: {
+ handleRequest: (req: any, id: Id) => any,
+ registerUpdate: (modules: any) => void,
+};
/*
* If you are running a debug build of Bun. These debug builds should provide
diff --git a/src/kit/hmr-runtime.ts b/src/kit/hmr-runtime.ts
index 2b6b4f8d763f7..15c15a96aa4ea 100644
--- a/src/kit/hmr-runtime.ts
+++ b/src/kit/hmr-runtime.ts
@@ -1,47 +1,90 @@
// This file is the entrypoint to the hot-module-reloading runtime
// In the browser, this uses a WebSocket to communicate with the bundler.
// On the server, communication is facilitated using a secret global.
-import { showErrorOverlay } from "./client/overlay";
-import { loadModule } from "./hmr-module";
+import { loadModule, LoadModuleType, replaceModule } from './hmr-module';
+import { showErrorOverlay } from './client/overlay';
if (typeof IS_BUN_DEVELOPMENT !== "boolean") {
throw new Error("DCE is configured incorrectly");
}
// Initialize client-side features.
-if (mode === "client") {
- const { refresh } = config;
+if (mode === 'client') {
+ var refresh_runtime: any;
+ // var { refresh } = config;
+ var refresh = "node_modules/react-refresh/cjs/react-refresh-runtime.development.js";
if (refresh) {
- const runtime = loadModule(refresh).exports;
- runtime.injectIntoGlobalHook(window);
+ refresh_runtime = loadModule(refresh, LoadModuleType.AssertPresent).exports;
+ refresh_runtime.injectIntoGlobalHook(window);
}
}
// Load the entry point module
try {
- const main = loadModule(config.main);
-
- // export it on the server side
- if (mode === "server") server_fetch_function = main.exports.default;
-
- if (mode === "client") {
- const ws = new WebSocket("/_bun/hmr");
- ws.onopen = ev => {
- console.log(ev);
- };
- ws.onmessage = ev => {
- console.log(ev);
- };
- ws.onclose = ev => {
- console.log(ev);
- };
- ws.onerror = ev => {
- console.log(ev);
+ const main = loadModule(config.main, LoadModuleType.AssertPresent);
+
+ if (mode === 'server') {
+ server_exports = {
+ async handleRequest({ clientEntryPoint }: any, requested_id: Id) {
+ const serverRenderer = main.exports.default;
+ if (!serverRenderer) {
+ throw new Error('Framework server entrypoint is missing a "default" export.');
+ }
+ if (typeof serverRenderer !== 'function') {
+ throw new Error('Framework server entrypoint\'s "default" export is not a function.');
+ }
+ // TODO: create the request object in Native code, consume Response in Native code
+ // The API that i have in mind is faked here for the time being.
+ const response = await serverRenderer(
+ new Request('http://localhost:3000'),
+ loadModule(requested_id, LoadModuleType.AssertPresent).exports,
+ {
+ styles: [],
+ scripts: [clientEntryPoint],
+ }
+ );
+ if (!(response instanceof Response)) {
+ throw new Error(`Server-side request handler was expected to return a Response object.`);
+ }
+ // TODO: support streaming
+ return await response.text();
+ },
+ registerUpdate(modules) {
+ throw new Error('TODO')
+ },
};
}
+
+ if (mode === 'client') {
+ const ws = new WebSocket('/_bun/hmr');
+ ws.onopen = (ev) => {
+ console.log('Open!');
+ }
+ ws.onmessage = (ev) => {
+ if(typeof ev.data === 'string') {
+ console.log(ev.data);
+ if(ev.data !== 'bun!') {
+ const evaluated = (0, eval)(ev.data);
+ for (const k in evaluated) {
+ input_graph[k] = evaluated[k];
+ }
+ for (const k in evaluated) {
+ replaceModule(k, evaluated[k]);
+ }
+ if (refresh) {
+ refresh_runtime.performReactRefresh();
+ }
+ }
+ }
+ }
+ ws.onclose = (ev) => {
+ console.log("Closed");
+ }
+ ws.onerror = (ev) => {
+ console.error(ev);
+ }
+ }
} catch (e) {
if (mode !== "client") throw e;
showErrorOverlay(e);
}
-
-export {};
diff --git a/src/kit/kit.d.ts b/src/kit/kit.d.ts
new file mode 100644
index 0000000000000..cc3a91631f5ed
--- /dev/null
+++ b/src/kit/kit.d.ts
@@ -0,0 +1,208 @@
+declare module 'bun' {
+ declare namespace Kit {
+ interface Options {
+ /**
+ * Use "react" to use the built-in React framework preset.
+ */
+ framework?: Framework | 'react' | undefined;
+
+ /**
+ * Route patterns must be statically known.
+ * TODO: Static at dev-server start is bad and this API must be revisited
+ */
+ routes: Record;
+
+ // TODO: expose bundler options. things like minifySyntax may not be disabled
+ }
+
+ /**
+ * A "Framework" in our eyes is simply a set of bundler options that a
+ * framework author would set in order to integrate framework code with the
+ * application. Many of the configuration options are paths, which are
+ * resolved as import specifiers. The first thing the bundler does is
+ * ensure that all import specifiers are fully resolved.
+ */
+ interface Framework {
+ /**
+ * This file is the true entrypoint of the server application. This module
+ * must `export default` a fetch function, which takes a request and the
+ * bundled route module, and returns a response. See `ServerEntryPoint`
+ *
+ * When `serverComponents` is configured, this can access the component
+ * manifest using the special 'bun:kit/server' import:
+ *
+ * import { serverManifest } from 'bun:kit/server'
+ */
+ serverEntryPoint: ImportSource;
+ /**
+ * This file is the true entrypoint of the client application.
+ *
+ * When `serverComponents` is configured, this can access the component
+ * manifest using the special 'bun:kit/client' import:
+ *
+ * import { clientManifest } from 'bun:kit/client'
+ */
+ clientEntryPoint: ImportSource;
+ /**
+ * Bun offers integration for React's Server Components with an
+ * interface that is generic enough to adapt to any framework.
+ */
+ serverComponents?: ServerComponentsOptions | undefined;
+ /**
+ * While it is unlikely that Fast Refresh is useful outside of
+ * React, it can be enabled regardless.
+ */
+ reactFastRefresh?: ReactFastRefreshOptions | undefined;
+ }
+
+ /**
+ * A high-level overview of what server components mean exists
+ * in the React Docs: https://react.dev/reference/rsc/server-components
+ *
+ * When enabled, files with "use server" and "use client" directives will get
+ * special processing according to this object, in combination with the
+ * framework-specified entry points for server rendering and browser
+ * interactivity.
+ */
+ interface ServerComponentsOptions {
+ /**
+ * If you are unsure what to set this to for a custom server components
+ * framework, choose 'false'.
+ *
+ * When set to `true`, when bundling "use client" components for SSR, these
+ * files will be placed in a separate bundling graph where `conditions` does
+ * not include `react-server`.
+ *
+ * The built-in framework config for React enables this flag so that server
+ * components and client components utilize their own versions of React,
+ * despite running in the same process. This facilitates different aspects
+ * of the server and client react runtimes, such as `async` components only
+ * being available on the server.
+ *
+ * To cross from the server graph to the SSR graph, use the bun_kit_graph
+ * import attribute:
+ *
+ * import * as ReactDOM from 'react-dom/server' with { bun_kit_graph: 'ssr' };
+ *
+ * Since these models are so subtly different, there is no default value
+ * provided for this.
+ */
+ separateSSRGraph: boolean;
+ /** Server components runtime for the server */
+ serverRuntimeImportSource: ImportSource;
+ /**
+ * When server code imports client code, a stub module is generated,
+ * where every export calls this export from `serverRuntimeImportSource`.
+ * This is used to implement client components on the server.
+ *
+ * The call is given three arguments:
+ *
+ * export const ClientComp = registerClientReference(
+ * // A function which may be passed through, it throws an error
+ * function () { throw new Error('Cannot call client-component on the server') },
+ *
+ * // The file path. In production, these use hashed strings for
+ * // compactness and code privacy.
+ * "src/components/Client.tsx",
+ *
+ * // The instance id. This is not guaranteed to match the export
+ * // name the user has given.
+ * "ClientComp",
+ * );
+ *
+ * Additionally, the bundler will assemble a component manifest to be used
+ * during rendering.
+ */
+ serverRegisterClientReferenceExport: string | undefined;
+ }
+
+ /** Customize the React Fast Refresh transform. */
+ interface ReactFastRefreshOptions {
+ /** @default "react-refresh/runtime" */
+ importSource: ImportSource | undefined;
+ }
+
+ /// Will be resolved from the point of view of the framework user's project root
+ /// Examples: `react-dom`, `./entry_point.tsx`, `/absolute/path.js`
+ type ImportSource = string;
+
+ interface ServerEntryPoint {
+ /// The framework implementation decides and enforces the shape
+ /// of the route module. Bun passes it as an opaque value.
+ default: (request: Request, routeModule: unknown, routeMetadata: RouteMetadata) => Response;
+ }
+
+ interface RouteMetadata {
+ /** A list of css files that the route will need to be styled */
+ styles: string[];
+ /** A list of js files that the route will need to be interactive */
+ scripts: string[];
+ }
+ }
+
+ // declare class Kit {
+ // constructor(options: Kit.Options);
+ // }
+}
+
+declare module 'bun:kit/server' {
+ // NOTE: The format of these manifests will likely be customizable in the future.
+
+ /**
+ * Entries in this manifest can be loaded by using dynamic `await import()` or
+ * `require`. The bundler always ensures that all modules are ready on the server.
+ */
+ declare const clientManifest: ReactClientManifest;
+ /**
+ * This follows the requirements for React's Server Components manifest, which
+ * does not actually include usable module specifiers. Calling `import()` on
+ * these specifiers won't work, but they will work client-side. Use
+ * `clientManifest` on the server for SSR.
+ */
+ declare const serverManifest: ReactServerManifest;
+
+ /** (insert teaser trailer) */
+ declare const actionManifest: never;
+}
+
+declare module 'bun:kit/client' {
+ /**
+ * Entries in this manifest can be loaded by using dynamic `await import()` or
+ * `require`. The bundler currently ensures that all modules are ready.
+ */
+ declare const clientManifest: ReactClientManifest;
+}
+
+declare interface ReactClientManifest {
+ [id: string]: {
+ [name: string]: {
+ /** Valid specifier to import */
+ specifier: string,
+ /** Export name */
+ name: string,
+ };
+ };
+}
+
+declare interface ReactServerManifest {
+ /**
+ * Concatenation of the component file ID and the instance id with '#'
+ * Example: 'components/Navbar.tsx#default' (dev) or 'l2#a' (prod/minified)
+ *
+ * The component file ID and the instance id are both passed to `registerClientReference`
+ */
+ [combinedComponentId: string]: {
+ /**
+ * The `id` in ReactClientManifest.
+ * Correlates but is not required to be the filename
+ */
+ id: string;
+ /**
+ * The `name` in ReactClientManifest
+ * Correlates but is not required to be the export name
+ */
+ name: string;
+ /** Currently not implemented; always an empty array */
+ chunks: [];
+ };
+}
\ No newline at end of file
diff --git a/src/kit/kit.zig b/src/kit/kit.zig
index 1ab5e7b4f654e..62d890e13fc6c 100644
--- a/src/kit/kit.zig
+++ b/src/kit/kit.zig
@@ -19,7 +19,7 @@ pub fn jsWipDevServer(global: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) JS
if (!bun.FeatureFlags.kit) return .undefined;
bun.Output.warn(
- \\Be advised that Kit is highly experimental, and its API is subject to change
+ \\Be advised that Bun Kit is experimental, and its API is likely to change.
, .{});
bun.Output.flush();
@@ -38,6 +38,86 @@ pub fn jsWipDevServer(global: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) JS
}
}
+/// A "Framework" in our eyes is simply a set of bundler options that a framework
+/// author would set in order to integrate the framework with the application.
+///
+/// Full documentation on these fields is located in the TypeScript definitions.
+pub const Framework = struct {
+ entry_client: ?[]const u8 = null,
+ entry_server: ?[]const u8 = null,
+
+ server_components: ?ServerComponents = null,
+ react_fast_refresh: ?ReactFastRefresh = null,
+
+ /// Bun provides built-in support for using React as a framework
+ pub fn react() Framework {
+ return .{
+ .server_components = .{
+ .separate_ssr_graph = true,
+ .server_runtime_import = "react-server-dom-webpack/server",
+ .client_runtime_import = "react-server-dom-webpack/client",
+ },
+ .react_fast_refresh = .{
+ .import_source = "react-refresh/runtime",
+ },
+ // TODO: embed these in bun
+ .entry_client = "./entry_client.tsx",
+ .entry_server = "./entry_server.tsx",
+ };
+ }
+
+ const ServerComponents = struct {
+ separate_ssr_graph: bool = false,
+ server_runtime_import: []const u8,
+ client_runtime_import: []const u8,
+ server_register_client_reference: []const u8 = "registerClientReference",
+ server_register_server_reference: []const u8 = "registerServerReference",
+ client_register_server_reference: []const u8 = "registerServerReference",
+ };
+
+ const ReactFastRefresh = struct {
+ import_source: []const u8,
+ register_export: []const u8 = "register",
+ create_signature_export: []const u8 = "createSignatureFunctionForTransform",
+ inject_export: []const u8 = "injectIntoGlobalHook",
+ };
+
+ /// Given a Framework configuration, this returns another one with all modules resolved.
+ ///
+ /// If any module fails to resolve, error.ModuleNotFound is returned.
+ /// Details are written into `r.log`.
+ pub fn resolve(f: Framework, server: *bun.resolver.Resolver, client: *bun.resolver.Resolver) !Framework {
+ var clone = f;
+ var had_errors: bool = false;
+
+ if (clone.entry_client) |*path| resolveHelper(client, path, &had_errors);
+ if (clone.entry_server) |*path| resolveHelper(server, path, &had_errors);
+
+ if (clone.react_fast_refresh) |*react_fast_refresh| {
+ resolveHelper(client, &react_fast_refresh.import_source, &had_errors);
+ }
+
+ if (clone.server_components) |*sc| {
+ resolveHelper(server, &sc.server_runtime_import, &had_errors);
+ resolveHelper(client, &sc.client_runtime_import, &had_errors);
+ }
+
+ if (had_errors) return error.ModuleNotFound;
+
+ return clone;
+ }
+
+ inline fn resolveHelper(r: *bun.resolver.Resolver, path: *[]const u8, had_errors: *bool) void {
+ var result = r.resolve(r.fs.top_level_dir, path.*, .stmt) catch |err| {
+ bun.Output.err(err, "Failed to resolve '{s}' for framework", .{path.*});
+ had_errors.* = true;
+
+ return;
+ };
+ path.* = result.path().?.text; // TODO: what is the lifetime of this string
+ }
+};
+
// TODO: this function leaks memory and bad error handling, but that is OK since
// this API is not finalized.
fn devServerOptionsFromJs(global: *JSC.JSGlobalObject, options: JSValue) !DevServer.Options {
@@ -59,7 +139,7 @@ fn devServerOptionsFromJs(global: *JSC.JSGlobalObject, options: JSValue) !DevSer
const pattern = pattern_js.toBunString(global).toUTF8(bun.default_allocator);
defer pattern.deinit();
- // this dupe is stupid
+ // TODO: this dupe is stupid
const pattern_z = try bun.default_allocator.dupeZ(u8, pattern.slice());
const entry_point = entry_point_js.toBunString(global).toUTF8(bun.default_allocator).slice(); // leak
@@ -72,6 +152,7 @@ fn devServerOptionsFromJs(global: *JSC.JSGlobalObject, options: JSValue) !DevSer
return .{
.cwd = bun.getcwdAlloc(bun.default_allocator) catch bun.outOfMemory(),
.routes = routes,
+ .framework = Framework.react(),
};
}
@@ -83,11 +164,17 @@ export fn Bun__getTemporaryDevServer(global: *JSC.JSGlobalObject) JSValue {
pub fn wipDevServer(options: DevServer.Options) noreturn {
bun.Output.Source.configureNamedThread("Dev Server");
- const dev = DevServer.init(options);
+ const dev = DevServer.init(options) catch |err| switch (err) {
+ error.FrameworkInitialization => bun.Global.exit(1),
+ else => {
+ bun.handleErrorReturnTrace(err, @errorReturnTrace());
+ bun.Output.panic("Failed to init DevServer: {}", .{err});
+ },
+ };
dev.runLoopForever();
}
-pub fn getHmrRuntime(mode: enum { server, client }) []const u8 {
+pub fn getHmrRuntime(mode: Side) []const u8 {
return if (Environment.embed_code)
switch (mode) {
.client => @embedFile("kit-codegen/kit.client.js"),
@@ -98,6 +185,56 @@ pub fn getHmrRuntime(mode: enum { server, client }) []const u8 {
};
}
+pub const Mode = enum { production, development };
+pub const Side = enum { client, server };
+pub const Renderer = enum {
+ client,
+ server,
+ /// Only used when the Framework has .server_components.separate_ssr_graph set
+ ssr,
+};
+
+pub fn addImportMetaDefines(
+ allocator: std.mem.Allocator,
+ define: *bun.options.Define,
+ mode: Mode,
+ side: Side,
+) !void {
+ const Define = bun.options.Define;
+
+ // The following are from Vite: https://vitejs.dev/guide/env-and-mode
+ // TODO: MODE, BASE_URL
+ try define.insert(
+ allocator,
+ "import.meta.env.DEV",
+ Define.Data.initBoolean(mode == .development),
+ );
+ try define.insert(
+ allocator,
+ "import.meta.env.PROD",
+ Define.Data.initBoolean(mode == .production),
+ );
+ try define.insert(
+ allocator,
+ "import.meta.env.SSR",
+ Define.Data.initBoolean(side == .server),
+ );
+}
+
+pub const server_virtual_source: bun.logger.Source = .{
+ .path = bun.fs.Path.initWithNamespaceComptime("bun", "kit/server"),
+ .key_path = bun.fs.Path.initWithNamespaceComptime("bun", "kit/server"),
+ .contents = "", // Virtual
+ .index = bun.JSAst.Index.kit_server_data,
+};
+
+pub const client_virtual_source: bun.logger.Source = .{
+ .path = bun.fs.Path.initWithNamespaceComptime("bun", "kit/client"),
+ .key_path = bun.fs.Path.initWithNamespaceComptime("bun", "kit/client"),
+ .contents = "", // Virtual
+ .index = bun.JSAst.Index.kit_client_data,
+};
+
pub const DevServer = @import("./DevServer.zig");
const std = @import("std");
diff --git a/src/libarchive/libarchive.zig b/src/libarchive/libarchive.zig
index 5443964f95a4d..2cc794de95d55 100644
--- a/src/libarchive/libarchive.zig
+++ b/src/libarchive/libarchive.zig
@@ -713,7 +713,7 @@ pub const Archive = struct {
}
}
// archive_read_data_into_fd reads in chunks of 1 MB
- // #define MAX_WRITE (1024 * 1024)
+ // #define MAX_WRITE (1024 * 1024)
if (comptime Environment.isLinux) {
if (size > 1_000_000) {
C.preallocate_file(
diff --git a/src/linker.zig b/src/linker.zig
index 074ab79dfbeb0..a90ebb9cee3bb 100644
--- a/src/linker.zig
+++ b/src/linker.zig
@@ -63,8 +63,6 @@ pub const Linker = struct {
plugin_runner: ?*PluginRunner = null,
- onImportCSS: ?OnImportCallback = null,
-
pub const runtime_source_path = "bun:wrap";
pub const TaggedResolution = struct {
@@ -599,7 +597,7 @@ pub const Linker = struct {
else => {},
}
if (had_resolve_errors) return error.ResolveMessage;
- result.ast.externals = try externals.toOwnedSlice();
+ externals.clearAndFree();
}
fn whenModuleNotFound(
@@ -621,7 +619,7 @@ pub const Linker = struct {
}
if (import_record.path.text.len > 0 and Resolver.isPackagePath(import_record.path.text)) {
- if (linker.options.target.isWebLike() and Options.ExternalModules.isNodeBuiltin(import_record.path.text)) {
+ if (linker.options.target == .browser and Options.ExternalModules.isNodeBuiltin(import_record.path.text)) {
try linker.log.addResolveError(
&result.source,
import_record.range,
@@ -764,7 +762,7 @@ pub const Linker = struct {
return Fs.Path.init(try origin.joinAlloc(
linker.allocator,
- linker.options.routes.asset_prefix_path,
+ "",
dirname,
basename,
absolute_pathname.ext,
@@ -792,7 +790,7 @@ pub const Linker = struct {
import_record.path = try linker.generateImportPath(
source_dir,
- if (path.is_symlink and import_path_format == .absolute_url and linker.options.target.isNotBun()) path.pretty else path.text,
+ if (path.is_symlink and import_path_format == .absolute_url and !linker.options.target.isBun()) path.pretty else path.text,
loader == .file or loader == .wasm,
path.namespace,
origin,
@@ -804,9 +802,6 @@ pub const Linker = struct {
if (!linker.options.target.isBun())
_ = try linker.enqueueResolveResult(resolve_result);
- if (linker.onImportCSS) |callback| {
- callback(resolve_result, import_record, origin);
- }
// This saves us a less reliable string check
import_record.print_mode = .css;
},
@@ -820,7 +815,7 @@ pub const Linker = struct {
// if we're building for bun
// it's more complicated
// loader plugins could be executed between when this is called and the import is evaluated
- // but we want to preserve the semantics of "file" returning import paths for compatibiltiy with frontend frameworkss
+ // but we want to preserve the semantics of "file" returning import paths for compatibility with frontend frameworks
if (!linker.options.target.isBun()) {
import_record.print_mode = .import_path;
}
diff --git a/src/logger.zig b/src/logger.zig
index f8e0865364643..04266db7d282f 100644
--- a/src/logger.zig
+++ b/src/logger.zig
@@ -1305,6 +1305,7 @@ pub inline fn usize2Loc(loc: usize) Loc {
pub const Source = struct {
path: fs.Path,
+ // TODO(@paperdave): delete key_path
key_path: fs.Path,
contents: string,
diff --git a/src/node_fallbacks.zig b/src/node_fallbacks.zig
index 1bb9a91ae99a1..b2808744a0366 100644
--- a/src/node_fallbacks.zig
+++ b/src/node_fallbacks.zig
@@ -61,6 +61,7 @@ const assert_package_json = PackageJSON{
.main_fields = undefined,
.browser_map = undefined,
.source = logger.Source.initPathString("/bun-vfs/node_modules/assert/package.json", ""),
+ .side_effects = .false,
};
const buffer_package_json = PackageJSON{
.name = "buffer",
@@ -70,6 +71,7 @@ const buffer_package_json = PackageJSON{
.main_fields = undefined,
.browser_map = undefined,
.source = logger.Source.initPathString("/bun-vfs/node_modules/buffer/package.json", ""),
+ .side_effects = .false,
};
const console_package_json = PackageJSON{
.name = "console",
@@ -79,6 +81,7 @@ const console_package_json = PackageJSON{
.main_fields = undefined,
.browser_map = undefined,
.source = logger.Source.initPathString("/bun-vfs/node_modules/console/package.json", ""),
+ .side_effects = .false,
};
const constants_package_json = PackageJSON{
.name = "constants",
@@ -88,6 +91,7 @@ const constants_package_json = PackageJSON{
.main_fields = undefined,
.browser_map = undefined,
.source = logger.Source.initPathString("/bun-vfs/node_modules/constants/package.json", ""),
+ .side_effects = .false,
};
const crypto_package_json = PackageJSON{
.name = "crypto",
@@ -97,6 +101,7 @@ const crypto_package_json = PackageJSON{
.main_fields = undefined,
.browser_map = undefined,
.source = logger.Source.initPathString("/bun-vfs/node_modules/crypto/package.json", ""),
+ .side_effects = .false,
};
const domain_package_json = PackageJSON{
.name = "domain",
@@ -106,6 +111,7 @@ const domain_package_json = PackageJSON{
.main_fields = undefined,
.browser_map = undefined,
.source = logger.Source.initPathString("/bun-vfs/node_modules/domain/package.json", ""),
+ .side_effects = .false,
};
const events_package_json = PackageJSON{
.name = "events",
@@ -115,6 +121,7 @@ const events_package_json = PackageJSON{
.main_fields = undefined,
.browser_map = undefined,
.source = logger.Source.initPathString("/bun-vfs/node_modules/events/package.json", ""),
+ .side_effects = .false,
};
const http_package_json = PackageJSON{
.name = "http",
@@ -124,6 +131,7 @@ const http_package_json = PackageJSON{
.main_fields = undefined,
.browser_map = undefined,
.source = logger.Source.initPathString("/bun-vfs/node_modules/http/package.json", ""),
+ .side_effects = .false,
};
const https_package_json = PackageJSON{
.name = "https",
@@ -133,6 +141,7 @@ const https_package_json = PackageJSON{
.main_fields = undefined,
.browser_map = undefined,
.source = logger.Source.initPathString("/bun-vfs/node_modules/https/package.json", ""),
+ .side_effects = .false,
};
const net_package_json = PackageJSON{
.name = "net",
@@ -142,6 +151,7 @@ const net_package_json = PackageJSON{
.main_fields = undefined,
.browser_map = undefined,
.source = logger.Source.initPathString("/bun-vfs/node_modules/net/package.json", ""),
+ .side_effects = .false,
};
const os_package_json = PackageJSON{
.name = "os",
@@ -151,6 +161,7 @@ const os_package_json = PackageJSON{
.main_fields = undefined,
.browser_map = undefined,
.source = logger.Source.initPathString("/bun-vfs/node_modules/os/package.json", ""),
+ .side_effects = .false,
};
const path_package_json = PackageJSON{
.name = "path",
@@ -160,6 +171,7 @@ const path_package_json = PackageJSON{
.main_fields = undefined,
.browser_map = undefined,
.source = logger.Source.initPathString("/bun-vfs/node_modules/path/package.json", ""),
+ .side_effects = .false,
};
const process_package_json = PackageJSON{
.name = "process",
@@ -169,6 +181,7 @@ const process_package_json = PackageJSON{
.main_fields = undefined,
.browser_map = undefined,
.source = logger.Source.initPathString("/bun-vfs/node_modules/process/package.json", ""),
+ .side_effects = .false,
};
const punycode_package_json = PackageJSON{
.name = "punycode",
@@ -178,6 +191,7 @@ const punycode_package_json = PackageJSON{
.main_fields = undefined,
.browser_map = undefined,
.source = logger.Source.initPathString("/bun-vfs/node_modules/punycode/package.json", ""),
+ .side_effects = .false,
};
const querystring_package_json = PackageJSON{
.name = "querystring",
@@ -187,6 +201,7 @@ const querystring_package_json = PackageJSON{
.main_fields = undefined,
.browser_map = undefined,
.source = logger.Source.initPathString("/bun-vfs/node_modules/querystring/package.json", ""),
+ .side_effects = .false,
};
const stream_package_json = PackageJSON{
.name = "stream",
@@ -196,6 +211,7 @@ const stream_package_json = PackageJSON{
.main_fields = undefined,
.browser_map = undefined,
.source = logger.Source.initPathString("/bun-vfs/node_modules/stream/package.json", ""),
+ .side_effects = .false,
};
const string_decoder_package_json = PackageJSON{
.name = "string_decoder",
@@ -205,12 +221,11 @@ const string_decoder_package_json = PackageJSON{
@setEvalBranchQuota(9999);
break :brk @as(u32, @truncate(bun.hash("string_decoder@0.0.0-polyfill")));
},
-
.main_fields = undefined,
.browser_map = undefined,
.source = logger.Source.initPathString("/bun-vfs/node_modules/string_decoder/package.json", ""),
+ .side_effects = .false,
};
-
const sys_package_json = PackageJSON{
.name = "sys",
.version = "0.0.0-polyfill",
@@ -219,6 +234,7 @@ const sys_package_json = PackageJSON{
.main_fields = undefined,
.browser_map = undefined,
.source = logger.Source.initPathString("/bun-vfs/node_modules/sys/package.json", ""),
+ .side_effects = .false,
};
const timers_package_json = PackageJSON{
.name = "timers",
@@ -228,6 +244,7 @@ const timers_package_json = PackageJSON{
.main_fields = undefined,
.browser_map = undefined,
.source = logger.Source.initPathString("/bun-vfs/node_modules/timers/package.json", ""),
+ .side_effects = .false,
};
const tty_package_json = PackageJSON{
.name = "tty",
@@ -237,6 +254,7 @@ const tty_package_json = PackageJSON{
.main_fields = undefined,
.browser_map = undefined,
.source = logger.Source.initPathString("/bun-vfs/node_modules/tty/package.json", ""),
+ .side_effects = .false,
};
const url_package_json = PackageJSON{
.name = "url",
@@ -246,6 +264,7 @@ const url_package_json = PackageJSON{
.main_fields = undefined,
.browser_map = undefined,
.source = logger.Source.initPathString("/bun-vfs/node_modules/url/package.json", ""),
+ .side_effects = .false,
};
const util_package_json = PackageJSON{
.name = "util",
@@ -255,6 +274,7 @@ const util_package_json = PackageJSON{
.main_fields = undefined,
.browser_map = undefined,
.source = logger.Source.initPathString("/bun-vfs/node_modules/util/package.json", ""),
+ .side_effects = .false,
};
const zlib_package_json = PackageJSON{
.name = "zlib",
@@ -264,6 +284,7 @@ const zlib_package_json = PackageJSON{
.main_fields = undefined,
.browser_map = undefined,
.source = logger.Source.initPathString("/bun-vfs/node_modules/zlib/package.json", ""),
+ .side_effects = .false,
};
pub const FallbackModule = struct {
diff --git a/src/options.zig b/src/options.zig
index 280d4e031185b..fa402cfa9500b 100644
--- a/src/options.zig
+++ b/src/options.zig
@@ -7,7 +7,6 @@ const Fs = @import("fs.zig");
const resolver = @import("./resolver/resolver.zig");
const api = @import("./api/schema.zig");
const Api = api.Api;
-const defines = @import("./defines.zig");
const resolve_path = @import("./resolver/resolve_path.zig");
const URL = @import("./url.zig").URL;
const ConditionsMap = @import("./resolver/package_json.zig").ESModule.ConditionsMap;
@@ -29,6 +28,7 @@ const Analytics = @import("./analytics/analytics_thread.zig");
const MacroRemap = @import("./resolver/package_json.zig").MacroMap;
const DotEnv = @import("./env_loader.zig");
+pub const defines = @import("./defines.zig");
pub const Define = defines.Define;
const assert = bun.assert;
@@ -384,12 +384,15 @@ pub const Target = enum {
bun_macro,
node,
+ /// This is used by kit.Framework.ServerComponents.separate_ssr_graph
+ kit_server_components_ssr,
+
pub const Map = bun.ComptimeStringMap(Target, .{
- .{ "browser", Target.browser },
- .{ "bun", Target.bun },
- .{ "bun_macro", Target.bun_macro },
- .{ "macro", Target.bun_macro },
- .{ "node", Target.node },
+ .{ "browser", .browser },
+ .{ "bun", .bun },
+ .{ "bun_macro", .bun_macro },
+ .{ "macro", .bun_macro },
+ .{ "node", .node },
});
pub fn fromJS(global: *JSC.JSGlobalObject, value: JSC.JSValue, exception: JSC.C.ExceptionRef) ?Target {
@@ -405,39 +408,25 @@ pub const Target = enum {
return switch (this) {
.node => .node,
.browser => .browser,
- .bun => .bun,
+ .bun, .kit_server_components_ssr => .bun,
.bun_macro => .bun_macro,
};
}
pub inline fn isServerSide(this: Target) bool {
return switch (this) {
- .bun_macro, .node, .bun => true,
+ .bun_macro, .node, .bun, .kit_server_components_ssr => true,
else => false,
};
}
pub inline fn isBun(this: Target) bool {
return switch (this) {
- .bun_macro, .bun => true,
+ .bun_macro, .bun, .kit_server_components_ssr => true,
else => false,
};
}
- pub inline fn isNotBun(this: Target) bool {
- return switch (this) {
- .bun_macro, .bun => false,
- else => true,
- };
- }
-
- pub inline fn isClient(this: Target) bool {
- return switch (this) {
- .bun_macro, .bun => false,
- else => true,
- };
- }
-
pub inline fn isNode(this: Target) bool {
return switch (this) {
.node => true,
@@ -445,60 +434,38 @@ pub const Target = enum {
};
}
- pub inline fn supportsBrowserField(this: Target) bool {
- return switch (this) {
- .browser => true,
- else => false,
- };
- }
-
- const browser_define_value_true = "true";
- const browser_define_value_false = "false";
-
pub inline fn processBrowserDefineValue(this: Target) ?string {
return switch (this) {
- .browser => browser_define_value_true,
- .bun_macro, .bun, .node => browser_define_value_false,
+ .browser => "true",
+ else => "false",
};
}
- pub inline fn isWebLike(target: Target) bool {
+ pub fn kitRenderer(target: Target) bun.kit.Renderer {
return switch (target) {
- .browser => true,
- else => false,
+ .browser => .client,
+ .kit_server_components_ssr => .ssr,
+ .bun_macro, .bun, .node => .server,
};
}
- pub const Extensions = struct {
- pub const In = struct {
- pub const JavaScript = [_]string{ ".js", ".cjs", ".mts", ".cts", ".ts", ".tsx", ".jsx", ".json" };
- };
- pub const Out = struct {
- pub const JavaScript = [_]string{
- ".js",
- ".mjs",
- };
- };
- };
-
pub fn outExtensions(target: Target, allocator: std.mem.Allocator) bun.StringHashMap(string) {
var exts = bun.StringHashMap(string).init(allocator);
- const js = Extensions.Out.JavaScript[0];
- const mjs = Extensions.Out.JavaScript[1];
+ const out_extensions_list = [_][]const u8{ ".js", ".cjs", ".mts", ".cts", ".ts", ".tsx", ".jsx", ".json" };
if (target == .node) {
- exts.ensureTotalCapacity(Extensions.In.JavaScript.len * 2) catch unreachable;
- for (Extensions.In.JavaScript) |ext| {
- exts.put(ext, mjs) catch unreachable;
+ exts.ensureTotalCapacity(out_extensions_list.len * 2) catch unreachable;
+ for (out_extensions_list) |ext| {
+ exts.put(ext, ".mjs") catch unreachable;
}
} else {
- exts.ensureTotalCapacity(Extensions.In.JavaScript.len + 1) catch unreachable;
- exts.put(mjs, js) catch unreachable;
+ exts.ensureTotalCapacity(out_extensions_list.len + 1) catch unreachable;
+ exts.put(".mjs", ".js") catch unreachable;
}
- for (Extensions.In.JavaScript) |ext| {
- exts.put(ext, js) catch unreachable;
+ for (out_extensions_list) |ext| {
+ exts.put(ext, ".js") catch unreachable;
}
return exts;
@@ -556,6 +523,7 @@ pub const Target = enum {
array.set(Target.browser, &listc);
array.set(Target.bun, &listd);
array.set(Target.bun_macro, &listd);
+ array.set(Target.kit_server_components_ssr, &listd);
// Original comment:
// The neutral target is for people that don't want esbuild to try to
@@ -566,21 +534,25 @@ pub const Target = enum {
break :brk array;
};
- pub const DefaultConditions: std.EnumArray(Target, []const string) = brk: {
+ pub const default_conditions: std.EnumArray(Target, []const string) = brk: {
var array = std.EnumArray(Target, []const string).initUndefined();
- array.set(Target.node, &[_]string{
+ array.set(Target.node, &.{
"node",
});
- array.set(Target.browser, &[_]string{
+ array.set(Target.browser, &.{
"browser",
"module",
});
- array.set(Target.bun, &[_]string{
+ array.set(Target.bun, &.{
+ "bun",
+ "node",
+ });
+ array.set(Target.kit_server_components_ssr, &.{
"bun",
"node",
});
- array.set(Target.bun_macro, &[_]string{
+ array.set(Target.bun_macro, &.{
"macro",
"bun",
"node",
@@ -588,6 +560,10 @@ pub const Target = enum {
break :brk array;
};
+
+ pub fn defaultConditions(t: Target) []const []const u8 {
+ return default_conditions.get(t);
+ }
};
pub const Format = enum {
@@ -607,12 +583,12 @@ pub const Format = enum {
/// Kit's uses a special module format for Hot-module-reloading. It includes a
/// runtime payload, sourced from src/kit/hmr-runtime.ts.
///
- /// ((input_graph, entry_point_key) => {
+ /// ((input_graph, config) => {
/// ... runtime code ...
- /// })([
- /// "module1.ts"(require, module) { ... },
- /// "module2.ts"(require, module) { ... },
- /// ], "module1.ts");
+ /// })({
+ /// "module1.ts"(module) { ... },
+ /// "module2.ts"(module) { ... },
+ /// }, { metadata });
internal_kit_dev,
pub fn keepES6ImportExportSyntax(this: Format) bool {
@@ -993,10 +969,6 @@ pub const JSX = struct {
/// /** @jsxImportSource @emotion/core */
classic_import_source: string = "react",
package_name: []const u8 = "react",
- // https://github.com/facebook/react/commit/2f26eb85d657a08c21edbac1e00f9626d68f84ae
- refresh_runtime: string = "react-refresh/runtime",
- supports_fast_refresh: bool = true,
- use_embedded_refresh_runtime: bool = false,
development: bool = true,
parse: bool = true,
@@ -1042,7 +1014,9 @@ pub const JSX = struct {
}
pub fn isReactLike(pragma: *const Pragma) bool {
- return strings.eqlComptime(pragma.package_name, "react") or strings.eqlComptime(pragma.package_name, "@emotion/jsx") or strings.eqlComptime(pragma.package_name, "@emotion/react");
+ return strings.eqlComptime(pragma.package_name, "react") or
+ strings.eqlComptime(pragma.package_name, "@emotion/jsx") or
+ strings.eqlComptime(pragma.package_name, "@emotion/react");
}
pub fn setImportSource(pragma: *Pragma, allocator: std.mem.Allocator) void {
@@ -1143,7 +1117,6 @@ pub const JSX = struct {
pragma.classic_import_source = pragma.package_name;
}
- pragma.supports_fast_refresh = if (pragma.runtime == .solid) false else pragma.supports_fast_refresh;
pragma.development = jsx.development;
pragma.parse = true;
return pragma;
@@ -1153,18 +1126,6 @@ pub const JSX = struct {
pub const Runtime = api.Api.JsxRuntime;
};
-const TypeScript = struct {
- parse: bool = false,
-};
-
-pub const Timings = struct {
- resolver: i128 = 0,
- parse: i128 = 0,
- print: i128 = 0,
- http: i128 = 0,
- read_file: i128 = 0,
-};
-
pub const DefaultUserDefines = struct {
// This must be globally scoped so it doesn't disappear
pub const NodeEnv = struct {
@@ -1452,8 +1413,7 @@ pub const BundleOptions = struct {
trim_unused_imports: ?bool = null,
mark_builtins_as_external: bool = false,
- react_server_components: bool = false,
- react_server_components_boundary: string = "",
+ server_components: bool = false,
hot_module_reloading: bool = false,
react_fast_refresh: bool = false,
inject: ?[]string = null,
@@ -1468,7 +1428,6 @@ pub const BundleOptions = struct {
write: bool = false,
preserve_symlinks: bool = false,
preserve_extensions: bool = false,
- timings: Timings = Timings{},
production: bool = false,
serve: bool = false,
@@ -1477,7 +1436,6 @@ pub const BundleOptions = struct {
append_package_version_in_query_string: bool = false,
- resolve_mode: api.Api.ResolveMode,
tsconfig_override: ?string = null,
target: Target = Target.browser,
main_fields: []const string = Target.DefaultMainFields.get(Target.browser),
@@ -1492,8 +1450,6 @@ pub const BundleOptions = struct {
main_field_extension_order: []const string = &Defaults.MainFieldExtensionOrder,
out_extensions: bun.StringHashMap(string),
import_path_format: ImportPathFormat = ImportPathFormat.relative,
- framework: ?Framework = null,
- routes: RouteConfig = RouteConfig.zero(),
defines_loaded: bool = false,
env: Env = Env{},
transform_options: Api.TransformOptions,
@@ -1534,6 +1490,10 @@ pub const BundleOptions = struct {
compile: bool = false,
+ /// Set when Kit is bundling. This changes the interface of the bundler
+ /// from emitting OutputFile to emitting the lower level []CompileResult
+ kit: ?*bun.kit.DevServer = null,
+
/// This is a list of packages which even when require() is used, we will
/// instead convert to ESM import statements.
///
@@ -1564,10 +1524,6 @@ pub const BundleOptions = struct {
pub inline fn cssImportBehavior(this: *const BundleOptions) Api.CssInJsBehavior {
switch (this.target) {
.browser => {
- if (this.framework) |framework| {
- return framework.client_css_in_js;
- }
-
return .auto_onimportcss;
},
else => return .facade,
@@ -1611,11 +1567,6 @@ pub const BundleOptions = struct {
return this.loaders.get(ext) orelse .file;
}
- pub fn isFrontendFrameworkEnabled(this: *const BundleOptions) bool {
- const framework: *const Framework = &(this.framework orelse return false);
- return framework.resolved and (framework.client.isEnabled() or framework.fallback.isEnabled());
- }
-
pub const ImportPathFormat = enum {
relative,
absolute_url,
@@ -1698,7 +1649,6 @@ pub const BundleOptions = struct {
) !BundleOptions {
var opts: BundleOptions = BundleOptions{
.log = log,
- .resolve_mode = transform.resolve orelse .dev,
.define = undefined,
.loaders = try loadersFromTransformOptions(allocator, transform.loaders, Target.from(transform.target)),
.output_dir = transform.output_dir orelse "out",
@@ -1735,7 +1685,7 @@ pub const BundleOptions = struct {
opts.main_fields = Target.DefaultMainFields.get(opts.target);
}
- opts.conditions = try ESMConditions.init(allocator, Target.DefaultConditions.get(opts.target));
+ opts.conditions = try ESMConditions.init(allocator, opts.target.defaultConditions());
if (transform.conditions.len > 0) {
opts.conditions.appendSlice(transform.conditions) catch bun.outOfMemory();
@@ -1759,14 +1709,6 @@ pub const BundleOptions = struct {
else => {},
}
- if (transform.framework) |_framework| {
- opts.framework = try Framework.fromApi(_framework, allocator);
- }
-
- if (transform.router) |routes| {
- opts.routes = try RouteConfig.fromApi(routes, allocator);
- }
-
if (transform.main_fields.len > 0) {
opts.main_fields = transform.main_fields;
}
@@ -1794,7 +1736,6 @@ pub const BundleOptions = struct {
opts.polyfill_node_globals = opts.target == .browser;
- Analytics.Features.filesystem_router += @as(usize, @intFromBool(opts.routes.routes_enabled));
Analytics.Features.macros += @as(usize, @intFromBool(opts.target == .bun_macro));
Analytics.Features.external += @as(usize, @intFromBool(transform.external.len > 0));
return opts;
@@ -2389,127 +2330,6 @@ pub const EntryPoint = struct {
}
};
-pub const Framework = struct {
- client: EntryPoint = EntryPoint{},
- server: EntryPoint = EntryPoint{},
- fallback: EntryPoint = EntryPoint{},
-
- display_name: string = "",
- /// "version" field in package.json
- version: string = "",
- /// "name" field in package.json
- package: string = "",
- development: bool = true,
- resolved: bool = false,
- from_bundle: bool = false,
-
- resolved_dir: string = "",
- override_modules: Api.StringMap,
- override_modules_hashes: []u64 = &[_]u64{},
-
- client_css_in_js: Api.CssInJsBehavior = .auto_onimportcss,
-
- pub const fallback_html: string = @embedFile("./fallback.html");
-
- pub fn platformEntryPoint(this: *const Framework, target: Target) ?*const EntryPoint {
- const entry: *const EntryPoint = switch (target) {
- .browser => &this.client,
- .bun => &this.server,
- .node => return null,
- };
-
- if (entry.kind == .disabled) return null;
- return entry;
- }
-
- pub fn fromLoadedFramework(loaded: Api.LoadedFramework, allocator: std.mem.Allocator) !Framework {
- var framework = Framework{
- .package = loaded.package,
- .development = loaded.development,
- .from_bundle = true,
- .client_css_in_js = loaded.client_css_in_js,
- .display_name = loaded.display_name,
- .override_modules = loaded.override_modules,
- };
-
- if (loaded.entry_points.fallback) |fallback| {
- try framework.fallback.fromLoaded(fallback, allocator, .fallback);
- }
-
- if (loaded.entry_points.client) |client| {
- try framework.client.fromLoaded(client, allocator, .client);
- }
-
- if (loaded.entry_points.server) |server| {
- try framework.server.fromLoaded(server, allocator, .server);
- }
-
- return framework;
- }
-
- pub fn toAPI(
- this: *const Framework,
- allocator: std.mem.Allocator,
- toplevel_path: string,
- ) !?Api.LoadedFramework {
- if (this.client.kind == .disabled and this.server.kind == .disabled and this.fallback.kind == .disabled) return null;
-
- return Api.LoadedFramework{
- .package = this.package,
- .development = this.development,
- .display_name = this.display_name,
- .entry_points = .{
- .client = try this.client.toAPI(allocator, toplevel_path, .client),
- .fallback = try this.fallback.toAPI(allocator, toplevel_path, .fallback),
- .server = try this.server.toAPI(allocator, toplevel_path, .server),
- },
- .client_css_in_js = this.client_css_in_js,
- .override_modules = this.override_modules,
- };
- }
-
- pub fn needsResolveFromPackage(this: *const Framework) bool {
- return !this.resolved and this.package.len > 0;
- }
-
- pub fn fromApi(
- transform: Api.FrameworkConfig,
- allocator: std.mem.Allocator,
- ) !Framework {
- var client = EntryPoint{};
- var server = EntryPoint{};
- var fallback = EntryPoint{};
-
- if (transform.client) |_client| {
- try client.fromAPI(_client, allocator, .client);
- }
-
- if (transform.server) |_server| {
- try server.fromAPI(_server, allocator, .server);
- }
-
- if (transform.fallback) |_fallback| {
- try fallback.fromAPI(_fallback, allocator, .fallback);
- }
-
- return Framework{
- .client = client,
- .server = server,
- .fallback = fallback,
- .package = transform.package orelse "",
- .display_name = transform.display_name orelse "",
- .development = transform.development orelse true,
- .override_modules = transform.override_modules orelse .{ .keys = &.{}, .values = &.{} },
- .resolved = false,
- .client_css_in_js = switch (transform.client_css_in_js orelse .auto_onimportcss) {
- .facade_onimportcss => .facade_onimportcss,
- .facade => .facade,
- else => .auto_onimportcss,
- },
- };
- }
-};
-
pub const RouteConfig = struct {
dir: string = "",
possible_dirs: []const string = &[_]string{},
diff --git a/src/resolver/package_json.zig b/src/resolver/package_json.zig
index 95c64776f14ce..cbeb1d2351b8c 100644
--- a/src/resolver/package_json.zig
+++ b/src/resolver/package_json.zig
@@ -148,9 +148,9 @@ pub const PackageJSON = struct {
pub const SideEffects = union(enum) {
/// either `package.json` is missing "sideEffects", it is true, or some
/// other unsupported value. Treat all files as side effects
- unspecified: void,
+ unspecified,
/// "sideEffects": false
- false: void,
+ false,
/// "sideEffects": ["file.js", "other.js"]
map: Map,
// /// "sideEffects": ["side_effects/*.js"]
@@ -727,7 +727,7 @@ pub const PackageJSON = struct {
}
// Read the "browser" property, but only when targeting the browser
- if (r.opts.target.supportsBrowserField()) {
+ if (r.opts.target == .browser) {
// We both want the ability to have the option of CJS vs. ESM and the
// option of having node vs. browser. The way to do this is to use the
// object literal form of the "browser" field like this:
diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig
index 6f444080e56d8..ee5e5c320c2d4 100644
--- a/src/resolver/resolver.zig
+++ b/src/resolver/resolver.zig
@@ -155,7 +155,6 @@ pub const PathPair = struct {
}
};
-// this is ripped from esbuild, comments included
pub const SideEffects = enum {
/// The default value conservatively considers all files to have side effects.
has_side_effects,
@@ -621,7 +620,7 @@ pub const Resolver = struct {
.fs = _fs,
.log = log,
.extension_order = opts.extension_order.default.default,
- .care_about_browser_field = opts.target.isWebLike(),
+ .care_about_browser_field = opts.target == .browser,
};
}
diff --git a/src/router.zig b/src/router.zig
index 1d903befbdedf..f5316982ed651 100644
--- a/src/router.zig
+++ b/src/router.zig
@@ -997,7 +997,6 @@ pub const Test = struct {
}
const opts = Options.BundleOptions{
- .resolve_mode = .lazy,
.target = .browser,
.loaders = undefined,
.define = undefined,
@@ -1054,7 +1053,6 @@ pub const Test = struct {
}
const opts = Options.BundleOptions{
- .resolve_mode = .lazy,
.target = .browser,
.loaders = undefined,
.define = undefined,
diff --git a/src/runtime/errors.ts b/src/runtime/errors.ts
deleted file mode 100644
index 830ed3881d819..0000000000000
--- a/src/runtime/errors.ts
+++ /dev/null
@@ -1,79 +0,0 @@
-// @ts-nocheck
-var __BuildLog;
-var __ResolveLog;
-var __ImportKind;
-{
- enum ImportKind {
- entry_point = 0,
- stmt = 1,
- require = 2,
- dynamic = 3,
- require_resolve = 4,
- at = 5,
- at_conditional = 6,
- url = 7,
- }
-
- type ErrorPosition = {
- file: string;
- namespace: string;
- line: number; // 1-based
- column: number; // 0-based, byte offset relative to lineText
- length: number; // in bytes
- /** line of text, possibly empty */
- lineText: string;
- /** byte offset relative to the start of the file */
- offset: number;
- };
-
- interface BuildErrorImplementation {
- position: ErrorPosition;
- name: string;
- message: string;
- }
-
- interface ResolveErrorImplementation extends BuildErrorImplementation {
- specifier: string;
- importKind: ImportKind;
- }
-
- class BuildMessage extends Error {
- constructor(data: BuildErrorImplementation) {
- super(data.message);
- this.name = data.name;
- this.data = data;
- }
- data: BuildLogImplementation;
-
- get position() {
- return this.data.position;
- }
-
- get [Symbol.toStringTag]() {
- return `${this.name}: ${this.message}`;
- }
- }
-
- class ResolveMessage extends BuildMessage {
- constructor(data: ResolveErrorImplementation) {
- super(data);
- this.name = data.name;
- this.data = data;
- }
- data: ResolveErrorImplementation;
-
- get importKind() {
- return this.data.importKind;
- }
-
- get specifier() {
- return this.data.specifier || "";
- }
- }
-
- __ResolveLog = ResolveMessage;
- __BuildLog = BuildMessage;
- __ImportKind = ImportKind;
-}
-
-export { __BuildLog as BuildMessage, __ImportKind as ImportKind, __ResolveLog as ResolveMessage };
diff --git a/src/runtime/hmr.ts b/src/runtime/hmr.ts
deleted file mode 100644
index 61dc9a93a5b38..0000000000000
--- a/src/runtime/hmr.ts
+++ /dev/null
@@ -1,1672 +0,0 @@
-// @ts-nocheck
-import { ByteBuffer } from "peechy";
-import * as API from "../api/schema";
-
-var __HMRModule, __FastRefreshModule, __HMRClient, __injectFastRefresh;
-if (typeof window !== "undefined") {
- var textEncoder: TextEncoder;
- // We add a scope here to minimize chances of namespace collisions
- var runOnce = false;
- var clientStartTime = 0;
- var empty: Uint8Array;
-
- function formatDuration(duration: number) {
- return Math.round(duration * 1000) / 1000;
- }
-
- enum CSSImportState {
- Pending,
- Loading,
- Loaded,
- }
- var registryMap = new Map();
- type HTMLStylableElement = HTMLLinkElement | HTMLStyleElement;
- type CSSHMRInsertionPoint = {
- id: number;
- node?: HTMLStylableElement;
- file: string;
- bundle_id: number;
- sheet: CSSStyleSheet;
- };
-
- enum CSSUpdateMethod {
- // CSS OM allows synchronous style updates
- cssObjectModel,
- // Blob URLs allow us to skip converting to JavaScript strings
- // However, they run asynchronously. Frequent updates cause FOUC
- blobURL,
- }
-
- enum ReloadBehavior {
- fullReload,
- hotReload,
- ignore,
- }
-
- const FastRefreshLoader = {
- RefreshRuntime: null,
- isUpdateInProgress: false,
- hasInjectedFastRefresh: false,
-
- performFullRefresh() {
- HMRClient.client.performFullReload();
- },
-
- async hotReload() {
- if (FastRefreshLoader.isUpdateInProgress) return;
-
- try {
- FastRefreshLoader.isUpdateInProgress = true;
- } finally {
- FastRefreshLoader.isUpdateInProgress = false;
- }
- },
- };
-
- const BUN_ERROR_FAVICON =
- "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAIwAAACMCAMAAACZHrEMAAAAnFBMVEUAAAD////////////////////////////////////////////////////////////////////////////////////vjo798/Psdnb2wMD62dn3wMDrdnb3wcHjNzf4zc3mUFD////+8vL85ubugoLqaWnxm5vkRET1tLT75ubtgoLlQ0Pwj4/zp6foXV3vj4/zqKjsdXXnUFD62tqV2W1BAAAAFnRSTlMAYN8gEEDvv6B/gHAwz59Qr4+Qb7DQOIES+QAABFRJREFUeF7smNluwjAQRYEskJKwiPfxlg3o3v//t0owTlMV2+LBNw/lfMHRoHtIMvv3PHiQlFmWJbPpyYplrq5Um+2UJmnBIpZ8k0yuMmYanXKhbpKv8S4H5WSfgl0K5aFKp3PRRyGEdthAXUzb0AVZCzWwxLmUPyoNjZC9shSwTQ87eu/oN7VWTIb+kWr6gzS2xxiXRDFvRB4bTG5W7NLSTRobvxR4GEMOXtjmCSBTsUxDDs6aT5NEd5mzS09OatbdRJexs5bkRoDmffAehmkwHU74MEYSTX6a1ZAYL5JlFtBZh+a9BhympgBnHb18Gbt8UZDWUT7srJmzseXD9S5cvh38MPh5F45/61D5gL0Ln2YLnHW4fCmwd+F5Aw+DL9+WXQTR/afZA2YdxrBNAuodft5p4DDh8kF6hz/NMOuO7qVnmzlq1t3r6dQ4PKW25YP0rmuvOeklpHw732GkUZbaXz7ArNnF/ZL5bMsXv3e1GiG8umX0wwg1pos8b/sYo2/3Tqsxjb98sXvHFrm68OEtXxX7MebIGeHL+Mu3jtA7r0yEebsOE5aJWL6lLUhAhvkMfsyK+hgjHDIRPmY5eueXiTTvb2bNbidhIIjClkJBEUGDUH5k+2NLAQUj7/9uXnCx6elCppOdkXNPcjKzc2b3o7YwRDMEmCWJqSowI1aaV8g7nhmEWVKYCs3kJJglh6m+wYwQzHom8buSYgaTjz/WR7oZQ4RZUphqBmZIMEsKU+1JZnC8mZjqZ9PCzEIAZkHe0c1UZJglQ2NiMMOGWfy8w8pjT/njzS8MmiklYFaPjqmOYIYMsyQwVQJmfMMszDu6mU8GsfHJ7w50M5h8Apgqgwel39KEUSssX+HSpsMs//S5IAwTGWbx8w4vEWcGxveO5S9fFGWlf4wfwAImaVsUvyQbCLPIYy0ihFkSWF6EVXfFC4PJR8s7ndKMSFheWgiz+GONiHyx25kKdgFjvLl5h29K3B/85ENMtW7j5WzXwfzIKU3oYaxxGdDPGsIsb3mXpDWdWOPtqzBx3YzxlHxhh30MrXIPyQd5J24GYRZzrFF7fpsQZvHzDu/jzF9nztJ0I1ZhEKO13a+lc6ACwPJkHZapVdz615lroKbYc7qbCr4nZ5QGNlQEPW+jpFpm6W41g/htVZqeg2xuN9oyzXkaQ5f0NGsuqCHAHj3FzRP8JHTbJPOdzl2YSQhmtCszuoszUzRTL/jvaZo2c2aub+bDASQiWE1qsgls1Qf2IC28ldd20xDgg5Zy19buAoZWLkzgfsCdVBMvd1+Ce/Aq1NA2vwb3JupukjP84QP4V7NTRZ7CiYGBumiZKLTI3uSdj+03yxPMZyLqZL2ylN82CSfKKjOmkpGpfQDUCa/gxUmqK/SCnVLWe3iLA3dUvbzdBvbhQM9KH1iI006kZYWk8WAkfGwnw+5fO3BsAgAAwkAwgukC7r+tM1jY/ekgnvrhtK4AAMACYkDwBoJSQAkAAAAASUVORK5CYII=";
- const BunError = {
- module: null,
- prom: null,
- cancel: false,
- lastError: null,
- previousFavicon: "",
- setErrorFavicon() {
- if (typeof document === "undefined" || BunError.previousFavicon) return;
-
- // we wrap this in a try / catch because if for some reason an error occurs because of this
- // we don't want it to break the rest of the error handling code
- // if they set a CSP header, that could cause this to fail
- try {
- let linkTag = document.querySelector("link[rel='icon']");
- BunError.previousFavicon = (linkTag && linkTag.getAttribute("href")) || "/favicon.ico";
- if (!linkTag) {
- linkTag = document.createElement("link");
- linkTag.setAttribute("rel", "icon");
- linkTag.setAttribute("href", BUN_ERROR_FAVICON);
- document.head.appendChild(linkTag);
- return;
- }
-
- linkTag.setAttribute("href", BUN_ERROR_FAVICON);
- } catch (e) {}
- },
-
- clearErrorFavicon() {
- if (typeof document === "undefined") return;
- if (BunError.previousFavicon) {
- try {
- const linkTag = document.querySelector("link[rel='icon']");
- if (linkTag) {
- linkTag.setAttribute("href", BunError.previousFavicon);
- }
-
- BunError.previousFavicon = "";
- } catch (exception) {}
- }
- },
-
- render(error, cwd) {
- BunError.setErrorFavicon();
- if ("__BunRenderBuildError" in globalThis) {
- globalThis.__BunRenderBuildError(error, cwd);
- return;
- }
-
- BunError.lastError = [error, cwd];
- BunError.cancel = false;
-
- if (!BunError.module) {
- if (BunError.prom) return;
- BunError.prom = import("/bun:error.js").then(mod => {
- BunError.module = mod;
- !BunError.cancel && BunError.render(BunError.lastError[0], BunError.lastError[1]);
- });
- return;
- }
-
- const { renderBuildFailure, renderRuntimeError } = BunError.module;
- if (typeof BunError.lastError[0] === "string" || BunError.lastError[0] instanceof Error) {
- renderRuntimeError(BunError.lastError[0], BunError.lastError[1]);
- } else {
- renderBuildFailure(BunError.lastError[0], BunError.lastError[1]);
- }
- },
-
- clear() {
- BunError.lastError = null;
- BunError.cancel = true;
- BunError.clearErrorFavicon();
-
- if (BunError.module) {
- const { clearBuildFailure } = BunError.module;
- clearBuildFailure();
- } else if ("__BunClearBuildFailure" in globalThis) {
- globalThis.__BunClearBuildFailure();
- }
- },
- };
-
- class CSSLoader {
- hmr: HMRClient;
- private static cssLoadId: CSSHMRInsertionPoint = {
- id: 0,
- bundle_id: 0,
- node: null,
- file: "",
- sheet: null,
- };
-
- updateMethod: CSSUpdateMethod;
- decoder: TextDecoder;
-
- constructor() {
- if ("replaceSync" in CSSStyleSheet.prototype) {
- this.updateMethod = CSSUpdateMethod.cssObjectModel;
- } else {
- this.updateMethod = CSSUpdateMethod.blobURL;
- }
- }
-
- // This is a separate function because calling a small function 2000 times is more likely to cause it to be JIT'd
- // We want it to be JIT'd
- // It's possible that returning null may be a de-opt though.
- private findMatchingSupportsRule(
- rule: CSSSupportsRule,
- id: number,
- sheet: CSSStyleSheet,
- ): CSSHMRInsertionPoint | null {
- switch (rule.type) {
- // 12 is result.SUPPORTS_RULE
- case 12: {
- if (!rule.conditionText.startsWith("(hmr-wid:")) {
- return null;
- }
-
- const startIndex = "hmr-wid:".length + 1;
- const endIDRegion = rule.conditionText.indexOf(")", startIndex);
- if (endIDRegion === -1) return null;
-
- const int = parseInt(rule.conditionText.substring(startIndex, endIDRegion), 10);
-
- if (int !== id) {
- return null;
- }
-
- let startFileRegion = rule.conditionText.indexOf('(hmr-file:"', endIDRegion);
- if (startFileRegion === -1) return null;
- startFileRegion += '(hmr-file:"'.length + 1;
-
- const endFileRegion = rule.conditionText.indexOf('"', startFileRegion);
- if (endFileRegion === -1) return null;
- // Empty file strings are invalid
- if (endFileRegion - startFileRegion <= 0) return null;
-
- CSSLoader.cssLoadId.id = int;
- CSSLoader.cssLoadId.node = sheet.ownerNode as HTMLStylableElement;
- CSSLoader.cssLoadId.sheet = sheet;
- CSSLoader.cssLoadId.file = rule.conditionText.substring(startFileRegion - 1, endFileRegion);
-
- return CSSLoader.cssLoadId;
- }
- default: {
- return null;
- }
- }
- }
-
- bundleId(): number {
- return CSSLoader.cssLoadId.bundle_id;
- }
-
- findCSSLinkTag(id: number): CSSHMRInsertionPoint | null {
- let count = 0;
- let match: CSSHMRInsertionPoint = null;
-
- const adoptedStyles = document.adoptedStyleSheets;
-
- if (this.updateMethod === CSSUpdateMethod.cssObjectModel) {
- if (adoptedStyles.length > 0) {
- count = adoptedStyles.length;
-
- for (let i = 0; i < count && match === null; i++) {
- let cssRules: CSSRuleList;
- let sheet: CSSStyleSheet;
- let ruleCount = 0;
- // Non-same origin stylesheets will potentially throw "Security error"
- // We will ignore those stylesheets and look at others.
- try {
- sheet = adoptedStyles[i];
- cssRules = sheet.rules;
- ruleCount = sheet.rules.length;
- } catch (exception) {
- continue;
- }
-
- if (sheet.disabled || sheet.rules.length === 0) {
- continue;
- }
-
- const bundleIdRule = cssRules[0] as CSSSupportsRule;
- if (bundleIdRule.type !== 12 || !bundleIdRule.conditionText.startsWith("(hmr-bid:")) {
- continue;
- }
-
- const bundleIdEnd = bundleIdRule.conditionText.indexOf(")", "(hmr-bid:".length + 1);
- if (bundleIdEnd === -1) continue;
-
- CSSLoader.cssLoadId.bundle_id = parseInt(
- bundleIdRule.conditionText.substring("(hmr-bid:".length, bundleIdEnd),
- 10,
- );
-
- for (let j = 1; j < ruleCount && match === null; j++) {
- match = this.findMatchingSupportsRule(cssRules[j] as CSSSupportsRule, id, sheet);
- }
- }
- }
- }
-
- count = document.styleSheets.length;
-
- for (let i = 0; i < count && match === null; i++) {
- let cssRules: CSSRuleList;
- let sheet: CSSStyleSheet;
- let ruleCount = 0;
- // Non-same origin stylesheets will potentially throw "Security error"
- // We will ignore those stylesheets and look at others.
- try {
- sheet = document.styleSheets.item(i);
- cssRules = sheet.rules;
- ruleCount = sheet.rules.length;
- } catch (exception) {
- continue;
- }
-
- if (sheet.disabled || sheet.rules.length === 0) {
- continue;
- }
-
- const bundleIdRule = cssRules[0] as CSSSupportsRule;
- if (bundleIdRule.type !== 12 || !bundleIdRule.conditionText.startsWith("(hmr-bid:")) {
- continue;
- }
-
- const bundleIdEnd = bundleIdRule.conditionText.indexOf(")", "(hmr-bid:".length + 1);
- if (bundleIdEnd === -1) continue;
-
- CSSLoader.cssLoadId.bundle_id = parseInt(
- bundleIdRule.conditionText.substring("(hmr-bid:".length, bundleIdEnd),
- 10,
- );
-
- for (let j = 1; j < ruleCount && match === null; j++) {
- match = this.findMatchingSupportsRule(cssRules[j] as CSSSupportsRule, id, sheet);
- }
- }
-
- // Ensure we don't leak the HTMLLinkElement
- if (match === null) {
- CSSLoader.cssLoadId.file = "";
- CSSLoader.cssLoadId.bundle_id = CSSLoader.cssLoadId.id = 0;
- CSSLoader.cssLoadId.node = null;
- CSSLoader.cssLoadId.sheet = null;
- }
-
- return match;
- }
-
- handleBuildSuccess(bytes: Uint8Array, build: API.WebsocketMessageBuildSuccess, timestamp: number) {
- const start = performance.now();
- var update = this.findCSSLinkTag(build.id);
- // The last 4 bytes of the build message are the hash of the module
- // Currently, this hash is only used for ensuring we reload the source-map
-
- if (update === null) {
- __hmrlog.debug("Skipping unused CSS.");
- return;
- }
-
- if (bytes.length === 0) {
- __hmrlog.debug("Skipping empty file");
- return;
- }
-
- let filepath = update.file;
- // We cannot safely do this because the hash would change on the server
- if (filepath.startsWith(this.hmr.cwd)) {
- filepath = filepath.substring(this.hmr.cwd.length);
- }
- const _timestamp = timestamp;
- const from_timestamp = build.from_timestamp;
- function onLoadHandler() {
- const localDuration = formatDuration(performance.now() - start);
- const fsDuration = _timestamp - from_timestamp;
- __hmrlog.log("Reloaded in", `${localDuration + fsDuration}ms`, "-", filepath);
-
- update = null;
- filepath = null;
- }
-
- // Whenever
- switch (this.updateMethod) {
- case CSSUpdateMethod.blobURL: {
- let blob = new Blob([bytes], { type: "text/css" });
-
- const blobURL = URL.createObjectURL(blob);
- // onLoad doesn't fire in Chrome.
- // I'm not sure why.
- // Guessing it only triggers when an element is added/removed, not when the href just changes
- // So we say on the next tick, we're loaded.
- setTimeout(onLoadHandler.bind(update.node), 0);
- update.node.setAttribute("href", blobURL);
- blob = null;
- URL.revokeObjectURL(blobURL);
- break;
- }
- case CSSUpdateMethod.cssObjectModel: {
- if (!this.decoder) {
- this.decoder = new TextDecoder("UTF8");
- }
-
- // This is an adoptedStyleSheet, call replaceSync and be done with it.
- if (!update.node || update.node.tagName === "HTML") {
- update.sheet.replaceSync(this.decoder.decode(bytes));
- } else if (update.node.tagName === "LINK" || update.node.tagName === "STYLE") {
- // This might cause CSS specifity issues....
- // I'm not 100% sure this is a safe operation
- const sheet = new CSSStyleSheet();
- const decoded = this.decoder.decode(bytes);
-
- sheet.replaceSync(decoded);
- update.node.remove();
-
- document.adoptedStyleSheets = [...document.adoptedStyleSheets, sheet];
- }
- break;
- }
- }
-
- bytes = null;
- }
-
- filePath(file_change_notification: API.WebsocketMessageFileChangeNotification): string | null {
- if (file_change_notification.loader !== API.Loader.css) return null;
- const tag = this.findCSSLinkTag(file_change_notification.id);
-
- if (!tag) {
- return null;
- }
-
- return tag.file;
- }
- }
-
- class HMRClient {
- static client: HMRClient;
- socket: WebSocket;
- hasWelcomed: boolean = false;
- reconnect: number = 0;
- // Server timestamps are relative to the time the server's HTTP server launched
- // This so we can send timestamps as uint32 instead of 128-bit integers
- epoch: number = 0;
- javascriptReloader: API.Reloader = API.Reloader.disable;
- loaders = {
- css: new CSSLoader(),
- };
- assetPrefixPath: string = "";
-
- sessionId: number;
-
- get dependencies() {
- return HMRModule.dependencies;
- }
-
- start() {
- if (runOnce) {
- __hmrlog.warn("Attempted to start HMR client multiple times. This may be a bug.");
- return;
- }
-
- this.loaders.css.hmr = this;
- runOnce = true;
- this.connect();
-
- // Explicitly send a socket close event so the thread doesn't have to wait for a timeout
- var origUnload = globalThis.onbeforeunload;
- globalThis.onbeforeunload = (ev: Event) => {
- this.disableReconnect = true;
-
- if (this.socket && this.socket.readyState === this.socket.OPEN) {
- this.socket.close(4990, "unload");
- }
- origUnload && origUnload.call(globalThis, [ev]);
- };
- }
-
- nextReconnectAttempt = 0;
- reconnectDelay = 16;
- debouncedReconnect = () => {
- if (
- this.socket &&
- (this.socket.readyState == this.socket.OPEN || this.socket.readyState == this.socket.CONNECTING)
- )
- return;
-
- this.nextReconnectAttempt = setTimeout(this.attemptReconnect, this.reconnectDelay);
- };
-
- attemptReconnect = () => {
- globalThis.clearTimeout(this.nextReconnectAttempt);
- if (
- this.socket &&
- (this.socket.readyState == this.socket.OPEN || this.socket.readyState == this.socket.CONNECTING)
- )
- return;
- this.connect();
- this.reconnectDelay += Math.floor(Math.random() * 128);
- };
-
- connect() {
- if (
- this.socket &&
- (this.socket.readyState == this.socket.OPEN || this.socket.readyState == this.socket.CONNECTING)
- )
- return;
-
- clientStartTime = performance.now();
-
- const baseURL = new URL(location.origin + "/bun:_api.hmr");
- baseURL.protocol = location.protocol === "https:" ? "wss" : "ws";
- this.socket = new WebSocket(baseURL.toString(), ["bun-hmr"]);
- this.socket.binaryType = "arraybuffer";
- this.socket.onclose = this.handleClose;
- this.socket.onerror = this.handleError;
- this.socket.onopen = this.handleOpen;
- this.socket.onmessage = this.handleMessage;
- }
-
- // key: module id
- // value: server-timestamp
- builds = new Map();
- cwd: string;
-
- indexOfModuleId(id: number): number {
- return HMRModule.dependencies.graph.indexOf(id);
- }
-
- static cssQueue = [];
- static cssState = CSSImportState.Pending;
- static cssAutoFOUC = false;
-
- static processPendingCSSImports() {
- const pending = HMRClient.cssQueue.slice();
- HMRClient.cssQueue.length = 0;
- return Promise.all(pending).then(() => {
- if (HMRClient.cssQueue.length > 0) {
- const _pending = HMRClient.cssQueue.slice();
- HMRClient.cssQueue.length = 0;
- return Promise.all(_pending).then(HMRClient.processPendingCSSImports);
- } else {
- return true;
- }
- });
- }
-
- static importCSS(promise: Promise) {
- switch (HMRClient.cssState) {
- case CSSImportState.Pending: {
- this.cssState = CSSImportState.Loading;
- // This means we can import without risk of FOUC
- if (document.documentElement.innerText === "" && !HMRClient.cssAutoFOUC) {
- if (document.body) document.body.style.visibility = "hidden";
- HMRClient.cssAutoFOUC = true;
- }
-
- promise.then(this.processPendingCSSImports).finally(() => {
- if (HMRClient.cssAutoFOUC) {
- // "delete" doesn't work here. Not sure why.
- if (document.body) {
- // Force layout
- window.getComputedStyle(document.body);
-
- document.body.style.visibility = "visible";
- }
- HMRClient.cssAutoFOUC = false;
- }
- this.cssState = CSSImportState.Loaded;
- });
- break;
- }
- case CSSImportState.Loaded: {
- promise.then(
- () => {},
- () => {},
- );
- break;
- }
- case CSSImportState.Loading: {
- this.cssQueue.push(promise);
- break;
- }
- }
- }
-
- static allImportedStyles = new Set();
- static onCSSImport(event) {
- HMRClient.allImportedStyles.add(event.detail);
-
- if (globalThis["Bun_disableCSSImports"]) {
- return;
- }
-
- const url = event.detail;
-
- if (typeof url !== "string" || url.length === 0) {
- console.warn("[CSS Importer] Received invalid CSS import", url);
- return;
- }
-
- const thisURL = new URL(url, location.origin);
-
- for (let i = 0; i < document.styleSheets.length; i++) {
- const sheet = document.styleSheets[i];
- if (!sheet.href) continue;
-
- if (sheet.href === url) {
- // Already imported
- return;
- }
-
- try {
- const _url1 = new URL(sheet.href, location.origin);
-
- if (_url1.pathname === thisURL.pathname) {
- // Already imported
- return;
- }
- } catch (e) {}
- }
-
- const urlString = thisURL.toString();
- HMRClient.importCSS(
- new Promise((resolve, reject) => {
- if (globalThis["Bun_disableCSSImports"]) {
- return;
- }
-
- var link = document.createElement("link");
- link.rel = "stylesheet";
- link.href = urlString;
- link.onload = () => {
- resolve();
- };
-
- link.onerror = evt => {
- console.error(`[CSS Importer] Error loading CSS file: ${urlString}\n`, evt.toString());
- reject();
- };
- document.head.appendChild(link);
- }).then(() => Promise.resolve()),
- );
- }
- static onError(event: ErrorEvent) {
- if ("error" in event && !!event.error) {
- BunError.render(event.error, HMRClient.client ? HMRClient.client.cwd : "");
- }
- }
-
- static activate(verboseOrFastRefresh: boolean = false) {
- // Support browser-like envirnments where location and WebSocket exist
- // Maybe it'll work in Deno! Who knows.
- if (this.client || !("location" in globalThis) || !("WebSocket" in globalThis)) {
- return;
- }
-
- globalThis.addEventListener("error", HMRClient.onError);
- this.client = new HMRClient();
- // if (
- // "sessionStorage" in globalThis &&
- // globalThis.sessionStorage.getItem("bun-hmr-session-id")
- // ) {
- // this.client.sessionId = parseInt(
- // globalThis.sessionStorage.getItem("bun-hmr-session-id"),
- // 16
- // );
- // } else {
- // this.client.sessionId = Math.floor(Math.random() * 65534);
- // if ("sessionStorage" in globalThis) {
- // try {
- // globalThis.sessionStorage.setItem(
- // "bun-hmr-session-id",
- // this.client.sessionId.toString(16)
- // );
- // } catch (exception) {}
- // }
- // }
- this.client.verbose = verboseOrFastRefresh;
- this.client.start();
- globalThis["__BUN_HMR"] = this.client;
- }
-
- handleBuildFailure(buffer: ByteBuffer, timestamp: number) {
- const build = API.decodeWebsocketMessageBuildFailure(buffer);
- const id = build.id;
-
- // const index = this.indexOfModuleId(id);
- // // Ignore build failures of modules that are not loaded
- // if (index === -1) {
- // this.maybeReportBuildFailure(build);
- // return;
- // }
-
- // // Build failed for a module we didn't request?
- // const minTimestamp = this.builds.get(index);
- // if (!minTimestamp) {
- // return;
- // }
- // const fail = API.decodeWebsocketMessageBuildFailure(buffer);
-
- this.reportBuildFailure(build);
- }
-
- maybeReportBuildFailure(failure: API.WebsocketMessageBuildFailure) {
- BunError.render(failure, this.cwd);
- }
-
- needsConsoleClear = false;
-
- reportBuildFailure(failure: API.WebsocketMessageBuildFailure) {
- BunError.render(failure, this.cwd);
-
- console.group(`Build failed: ${failure.module_path} (${failure.log.errors} errors)`);
- this.needsConsoleClear = true;
- for (let msg of failure.log.msgs) {
- var logFunction;
- switch (msg.level) {
- case API.MessageLevel.err: {
- logFunction = console.error;
- break;
- }
- case API.MessageLevel.warn: {
- logFunction = console.warn;
- break;
- }
- default: {
- logFunction = console.info;
- break;
- }
- }
-
- const { text, location } = msg.data;
- var output = "";
-
- if (location) {
- if (location.line > -1 && location.column > -1) {
- output += `${location.file}:${location.line}:${location.column}`;
- } else if (location.line > -1) {
- output += `${location.file}:${location.line}`;
- } else if (location.file.length > 0) {
- output += `${location.file}`;
- }
- }
- if (location && location.line_text) {
- output = output.trimRight() + "\n" + location.line_text.trim();
- }
-
- output = output.trimRight() + "\n " + text;
-
- logFunction(output.trim());
- }
- console.groupEnd();
- }
-
- verbose = false;
-
- handleError = (error: ErrorEvent) => {
- __hmrlog.error("Websocket error", error.error);
- if (this.reconnect !== 0) {
- return;
- }
- this.debouncedReconnect();
- };
-
- handleBuildSuccess(buffer: ByteBuffer, timestamp: number) {
- const build = API.decodeWebsocketMessageBuildSuccess(buffer);
-
- // Ignore builds of modules we expect a later version of
- const currentVersion = this.builds.get(build.id) || -Infinity;
-
- // allow 4ms of leeway
- if (currentVersion > build.from_timestamp + 4) {
- if (this.verbose) {
- __hmrlog.debug(
- `Ignoring outdated update for "${build.module_path}".\n Expected: >=`,
- currentVersion,
- `\n Received:`,
- build.from_timestamp,
- );
- }
- return;
- }
- var bytes =
- buffer.data.byteOffset + buffer.index + build.blob_length <= buffer.data.buffer.byteLength
- ? new Uint8Array(buffer.data.buffer, buffer.data.byteOffset + buffer.index, build.blob_length)
- : (empty ||= new Uint8Array(0));
-
- if (build.loader === API.Loader.css) {
- BunError.clear();
- if (this.needsConsoleClear) {
- console.clear();
- this.needsConsoleClear = false;
- }
- return this.loaders.css.handleBuildSuccess(bytes, build, timestamp);
- }
-
- const id = build.id;
- const index = this.indexOfModuleId(id);
- // Ignore builds of modules that are not loaded
- if (index === -1) {
- if (this.verbose) {
- __hmrlog.debug(`Skipping reload for unknown module id:`, id);
- }
-
- return;
- }
-
- if (this.verbose) {
- var filepath = HMRModule.dependencies.modules[index].file_path;
- if (filepath.startsWith(this.cwd)) {
- filepath = filepath.substring(this.cwd.length);
- }
- __hmrlog.debug("Preparing to reload", filepath);
- }
-
- // The last 4 bytes of the build message are the hash of the module
- // Currently, this hash is only used for ensuring we reload the source-map
- var end = buffer.index + build.blob_length;
- var hash = 0;
- if (end > buffer.data.length && end > 4) {
- end = buffer.data.length - 4;
- }
-
- if (end > 4 && buffer.data.length >= end + 4) {
- new Uint8Array(this.hashBuffer.buffer).set(buffer.data.subarray(end, end + 4));
- hash = this.hashBuffer[0];
- }
-
- // These are the bytes!!
-
- var reload = new HotReload(build.id, index, build, bytes, ReloadBehavior.hotReload, hash || 0);
- bytes = null;
- reload.timings.notify = timestamp - build.from_timestamp;
-
- BunError.clear();
-
- reload.run().then(
- ([module, timings]) => {
- var filepath = module.file_path;
-
- if (filepath.startsWith(this.cwd)) {
- filepath = filepath.substring(this.cwd.length);
- }
-
- if (this.needsConsoleClear) {
- console.clear();
- this.needsConsoleClear = false;
- }
-
- __hmrlog.log(`[${formatDuration(timings.total)}ms] Reloaded`, filepath);
- },
- err => {
- if (typeof err === "object" && err && err instanceof ThrottleModuleUpdateError) {
- return;
- }
- __hmrlog.error("Hot Module Reload failed!", err);
- debugger;
- },
- );
- }
-
- performFullReload() {
- if (typeof location !== "undefined") {
- if (this.socket.readyState === this.socket.OPEN) {
- // Disable reconnecting
- this.reconnect = 1;
- this.socket.close();
- }
- location.reload();
- }
- }
-
- handleFileChangeNotification(buffer: ByteBuffer, timestamp: number, copy_file_path: boolean) {
- const notification = API.decodeWebsocketMessageFileChangeNotification(buffer);
- let file_path = "";
- switch (notification.loader) {
- case API.Loader.css: {
- file_path = this.loaders.css.filePath(notification);
- break;
- }
-
- case API.Loader.js:
- case API.Loader.jsx:
- case API.Loader.tsx:
- case API.Loader.ts:
- case API.Loader.json: {
- const index = HMRModule.dependencies.graph
- .subarray(0, HMRModule.dependencies.graph_used)
- .indexOf(notification.id);
-
- if (index > -1) {
- file_path = HMRModule.dependencies.modules[index].file_path;
- }
- break;
- }
-
- default: {
- return;
- }
- }
-
- return this.handleFileChangeNotificationBase(timestamp, notification, file_path, copy_file_path);
- }
-
- private handleFileChangeNotificationBase(
- timestamp: number,
- notification: API.WebsocketMessageFileChangeNotification,
- file_path: string,
- copy_file_path: boolean,
- ) {
- const accept = file_path && file_path.length > 0;
-
- if (!accept) {
- if (this.verbose) {
- __hmrlog.debug("Unknown module changed, skipping");
- }
- return;
- }
-
- if ((this.builds.get(notification.id) || -Infinity) > timestamp) {
- __hmrlog.debug(`Received stale update for ${file_path}`);
- return;
- }
-
- let reloadBehavior = ReloadBehavior.ignore;
-
- switch (notification.loader) {
- // CSS always supports hot reloading
- case API.Loader.css: {
- this.builds.set(notification.id, timestamp);
- // When we're dealing with CSS, even though the watch event happened for a file in the bundle
- // We want it to regenerate the entire bundle
- // So we must swap out the ID we send for the ID of the corresponding bundle.
- notification.id = this.loaders.css.bundleId();
- this.builds.set(notification.id, timestamp);
- reloadBehavior = ReloadBehavior.hotReload;
- break;
- }
- // The backend will detect if they have react-refresh in their bundle
- // If so, it will use it.
- // Else, it will fall back to live reloading.
- case API.Loader.js:
- case API.Loader.jsx:
- case API.Loader.json:
- case API.Loader.ts:
- case API.Loader.tsx: {
- switch (this.javascriptReloader) {
- case API.Reloader.disable: {
- break;
- }
- case API.Reloader.fast_refresh: {
- this.builds.set(notification.id, timestamp);
- reloadBehavior = ReloadBehavior.hotReload;
- break;
- }
- case API.Reloader.live: {
- reloadBehavior = ReloadBehavior.fullReload;
- break;
- }
- }
- break;
- }
- }
-
- switch (reloadBehavior) {
- // This is the same command/logic for both JS and CSS hot reloading.
- case ReloadBehavior.hotReload: {
- if (copy_file_path && !this.buildCommandBufWithFilePath) {
- // on Linux, max file path length is 4096 bytes
- // on macOS & Windows, max file path length is 1024 bytes
- // 256 is extra breathing room
- this.buildCommandBufWithFilePath = new Uint8Array(4096 + 256);
- }
-
- const writeBuffer = !copy_file_path ? this.buildCommandBuf : this.buildCommandBufWithFilePath;
- writeBuffer[0] = !copy_file_path
- ? API.WebsocketCommandKind.build
- : API.WebsocketCommandKind.build_with_file_path;
- this.buildCommandUArray[0] = timestamp;
- writeBuffer.set(this.buildCommandUArrayEight, 1);
- this.buildCommandUArray[0] = notification.id;
- writeBuffer.set(this.buildCommandUArrayEight, 5);
-
- if (copy_file_path) {
- if (!textEncoder) {
- textEncoder = new TextEncoder();
- }
-
- this.buildCommandUArray[0] = file_path.length;
- writeBuffer.set(this.buildCommandUArrayEight, 9);
-
- const out = textEncoder.encodeInto(file_path, writeBuffer.subarray(13));
- this.socket.send(this.buildCommandBufWithFilePath.subarray(0, 13 + out.written));
- } else {
- this.socket.send(this.buildCommandBuf);
- }
-
- if (this.verbose) {
- __hmrlog.debug(`Requesting update for ${file_path}`);
- }
- break;
- }
-
- case ReloadBehavior.fullReload: {
- this.performFullReload();
- break;
- }
- }
- }
-
- buildCommandBuf = new Uint8Array(9);
- buildCommandUArray = new Uint32Array(1);
- buildCommandUArrayEight = new Uint8Array(this.buildCommandUArray.buffer);
- hashBuffer = new Uint32Array(1);
-
- // lazily allocate because it's going to be much larger than 9 bytes
- buildCommandBufWithFilePath: Uint8Array;
-
- // On open, reset the delay for reconnecting
- handleOpen = (event: Event) => {
- globalThis.clearTimeout(this.nextReconnectAttempt);
- setTimeout(() => {
- if (this.socket && this.socket.readyState == this.socket.OPEN) {
- globalThis.clearTimeout(this.nextReconnectAttempt);
- this.reconnectDelay = 16;
- }
- }, 16);
- };
-
- handleMessage = (event: MessageEvent) => {
- const data = new Uint8Array(event.data);
- const message_header_byte_buffer = new ByteBuffer(data);
- const header = API.decodeWebsocketMessage(message_header_byte_buffer);
- const buffer = new ByteBuffer(data.subarray(message_header_byte_buffer.index));
-
- switch (header.kind) {
- case API.WebsocketMessageKind.build_fail: {
- this.handleBuildFailure(buffer, header.timestamp);
- break;
- }
- case API.WebsocketMessageKind.build_success: {
- this.handleBuildSuccess(buffer, header.timestamp);
- break;
- }
-
- case API.WebsocketMessageKind.resolve_file: {
- const { id } = API.decodeWebsocketMessageResolveID(buffer);
- const timestamp = this.builds.get(id) || 0;
-
- if (timestamp == 0 && HotReload.VERBOSE) {
- __hmrlog.debug(`Unknown module? ${id}`);
- return;
- }
-
- const index = HMRModule.dependencies.graph.subarray(0, HMRModule.dependencies.graph_used).indexOf(id);
- var file_path: string = "";
- var loader = API.Loader.js;
- if (index > -1) {
- file_path = HMRModule.dependencies.modules[index].file_path;
- } else {
- const tag = this.loaders.css.findCSSLinkTag(id);
- if (tag && tag.file.length) {
- file_path = tag.file;
- }
- }
-
- if (!file_path || file_path.length === 0) {
- if (HotReload.VERBOSE) {
- __hmrlog.debug(`Unknown module? ${id}`);
- }
- return;
- }
-
- switch (file_path.substring(file_path.lastIndexOf("."))) {
- case ".css": {
- loader = API.Loader.css;
- break;
- }
-
- case ".mjs":
- case ".cjs":
- case ".js": {
- loader = API.Loader.js;
- break;
- }
-
- case ".json": {
- loader = API.Loader.json;
- break;
- }
-
- case ".cts":
- case ".mts":
- case ".ts": {
- loader = API.Loader.ts;
- break;
- }
-
- case ".tsx": {
- loader = API.Loader.tsx;
- break;
- }
-
- case ".jsx": {
- loader = API.Loader.jsx;
- break;
- }
-
- default: {
- loader = API.Loader.file;
- break;
- }
- }
-
- this.handleFileChangeNotificationBase(timestamp, { id, loader }, file_path, true);
- break;
- }
- case API.WebsocketMessageKind.file_change_notification: {
- this.handleFileChangeNotification(buffer, header.timestamp, false);
- break;
- }
- case API.WebsocketMessageKind.file_change_notification_with_hint: {
- this.handleFileChangeNotification(buffer, header.timestamp, true);
- break;
- }
- case API.WebsocketMessageKind.welcome: {
- const now = performance.now();
-
- this.hasWelcomed = true;
- const welcome = API.decodeWebsocketMessageWelcome(buffer);
- this.epoch = welcome.epoch;
- this.javascriptReloader = welcome.javascriptReloader;
- this.cwd = welcome.cwd;
- this.assetPrefixPath = welcome.assetPrefix;
-
- switch (this.javascriptReloader) {
- case API.Reloader.fast_refresh: {
- __hmrlog.log("HMR connected in", formatDuration(now - clientStartTime), "ms");
- break;
- }
- case API.Reloader.live: {
- __hmrlog.log("Live reload connected in", formatDuration(now - clientStartTime), "ms");
- break;
- }
- default: {
- __hmrlog.log("Bun connected in", formatDuration(now - clientStartTime), "ms");
- break;
- }
- }
- clientStartTime = now;
- if (!this.epoch) {
- __hmrlog.warn("Internal HMR error");
- }
- break;
- }
- }
- };
-
- disableReconnect = false;
-
- handleClose = (event: CloseEvent) => {
- if (this.reconnect !== 0 || this.disableReconnect) {
- return;
- }
-
- this.debouncedReconnect();
- };
- }
- let pendingUpdateCount = 0;
-
- class ThrottleModuleUpdateError extends Error {
- constructor(message) {
- super(message);
- }
- }
-
- class HotReload {
- module_id: number = 0;
- module_index: number = 0;
- build: API.WebsocketMessageBuildSuccess;
- hash: number = 0 | 0;
-
- timings = {
- notify: 0,
- decode: 0,
- import: 0,
- callbacks: 0,
- total: 0,
- start: 0,
- };
- static VERBOSE = false;
- bytes: Uint8Array;
- reloader: ReloadBehavior;
-
- constructor(
- module_id: HotReload["module_id"],
- module_index: HotReload["module_index"],
- build: HotReload["build"],
- bytes: Uint8Array,
- reloader: ReloadBehavior,
- hash: number,
- ) {
- this.module_id = module_id;
- this.module_index = module_index;
- this.build = build;
- this.bytes = bytes;
- this.reloader = reloader;
- this.hash = hash;
- }
-
- async run() {
- pendingUpdateCount++;
- let result: [HMRModule, HotReload["timings"]];
- try {
- result = await this._run();
- } finally {
- pendingUpdateCount--;
- }
-
- return result;
- }
-
- private async _run(): Promise<[HMRModule, HotReload["timings"]]> {
- const currentPendingUpdateCount = pendingUpdateCount;
-
- const importStart = performance.now();
- let orig_deps = HMRModule.dependencies;
- // we must preserve the updater since that holds references to the real exports.
- // this is a fundamental limitation of using esmodules for HMR.
- // we cannot export new modules. we can only mutate existing ones.
-
- const oldGraphUsed = HMRModule.dependencies.graph_used;
- var oldModule =
- HMRModule.dependencies.modules.length > this.module_index && HMRModule.dependencies.modules[this.module_index];
- HMRModule.dependencies = orig_deps.fork(this.module_index);
- var blobURL = "";
-
- // We inject the source map URL into the end of the file.
- // We do that here for a few reasons:
- // 1. It is hard to correctly set the path in here to what the browser expects.
- // 2.
- const modulePathWithoutLeadingSlash =
- this.build.module_path.length > 0 && this.build.module_path[0] === "/"
- ? this.build.module_path.substring(1)
- : this.build.module_path;
- const sourceMapURL =
- this.hash > 0 && this.build.module_path.length > 0
- ? `\n//# sourceMappingURL=${
- // location.origin does not have a trailing slash
- globalThis.location.origin
- }/${modulePathWithoutLeadingSlash}.map?b=${this.hash.toString(16)}`
- : "";
-
- try {
- const blob = new Blob(sourceMapURL.length > 0 ? [this.bytes, sourceMapURL] : [this.bytes], {
- type: "text/javascript",
- });
- blobURL = URL.createObjectURL(blob);
- HMRModule.dependencies.blobToID.set(blobURL, this.module_id);
- await import(blobURL);
- this.bytes = null;
- URL.revokeObjectURL(blobURL);
- this.timings.import = performance.now() - importStart;
- } catch (exception) {
- HMRModule.dependencies = orig_deps;
- URL.revokeObjectURL(blobURL);
- // Ensure we don't keep the bytes around longer than necessary
- this.bytes = null;
-
- if ("__BunRenderHMRError" in globalThis) {
- globalThis.__BunRenderHMRError(exception, oldModule.file_path, oldModule.id);
- }
-
- oldModule = null;
- throw exception;
- }
-
- // We didn't import any new modules, so we resume as before.
- if (HMRModule.dependencies.graph_used === this.module_index) {
- HMRModule.dependencies.graph_used = oldGraphUsed;
- } else {
- // If we do import a new module, we have to do a full page reload for now
- }
-
- blobURL = "";
- // Ensure we don't keep the bytes around longer than necessary
- this.bytes = null;
-
- if (HotReload.VERBOSE) {
- __hmrlog.debug(
- "Re-imported",
- HMRModule.dependencies.modules[this.module_index].file_path,
- "in",
- formatDuration(this.timings.import),
- ". Running callbacks",
- );
- }
-
- const callbacksStart = performance.now();
- const origUpdaters = oldModule
- ? new Set(oldModule.additional_updaters)
- : new Set();
- try {
- switch (this.reloader) {
- case ReloadBehavior.hotReload: {
- let foundBoundary = false;
-
- const isOldModuleDead =
- oldModule &&
- oldModule.previousVersion &&
- oldModule.previousVersion.id === oldModule.id &&
- oldModule.hasSameExports(oldModule.previousVersion);
-
- var thisMod = HMRModule.dependencies.modules[this.module_index];
- if (oldModule && oldModule._update !== thisMod._update) {
- // ESM-based HMR has a disadvantage against CommonJS HMR
- // ES Namespace objects are not [[Configurable]]
- // That means we have to loop through all previous versions of updated modules that that have unique export names
- // and updates those exports specifically
- // Otherwise, changes will not be reflected properly
- // However, we only need to loop through modules that add or remove exports, i.e. those are ones which have "real" exports
- if (!isOldModuleDead) {
- oldModule.boundUpdate ||= oldModule.update.bind(oldModule);
-
- if (thisMod.additional_updaters) thisMod.additional_updaters.add(oldModule.boundUpdate);
- else thisMod.additional_updaters = new Set([oldModule.boundUpdate]);
-
- thisMod.previousVersion = oldModule;
- } else {
- if (oldModule.previousVersion) thisMod.previousVersion = oldModule.previousVersion;
-
- thisMod.additional_updaters = origUpdaters;
- }
- }
-
- const end = Math.min(this.module_index + 1, HMRModule.dependencies.graph_used);
- // -- For generic hot reloading --
- // ES Modules delay execution until all imports are parsed
- // They execute depth-first
- // If you load N modules and append each module ID to the array, 0 is the *last* unique module imported.
- // modules.length - 1 is the first.
- // Therefore, to reload all the modules in the correct order, we traverse the graph backwards
- // This only works when the graph is up to date.
- // If the import order changes, we need to regenerate the entire graph
- // Which sounds expensive, until you realize that we are mostly talking about an array that will be typically less than 1024 elements
- // Computers can create an array of < 1024 pointer-sized elements in < 1ms easy!
- // --
-
- // -- For React Fast Refresh --
- // We must find a React Refresh boundary. This is a module that only exports React components.
- // If we do not find a React Refresh boundary, we must instead perform a full page reload.
- for (
- let i = 0;
- i <= end;
- i++ // let i = HMRModule.dependencies.graph_used - 1; // i > this.module_index; // i--
- ) {
- const mod = HMRModule.dependencies.modules[i];
- if (!mod) continue; // this array is holey sometimes
- let handled = false;
-
- if (!mod.exports.__hmrDisable) {
- if (typeof mod.dispose === "function") {
- mod.dispose();
- handled = true;
- }
- if (typeof mod.accept === "function") {
- mod.accept();
- handled = true;
- }
-
- // If we don't find a boundary, we will need to do a full page load
- if ((mod as FastRefreshModule).isRefreshBoundary) {
- foundBoundary = true;
- }
-
- // Automatically re-initialize the dependency
- if (!handled) {
- mod.update();
- }
- }
- }
-
- // By the time we get here, it's entirely possible that another update is waiting
- // Instead of scheduling it, we are going to just ignore this update.
- // But we still need to re-initialize modules regardless because otherwise a dependency may not reload properly
- if (pendingUpdateCount === currentPendingUpdateCount && foundBoundary) {
- FastRefreshLoader.RefreshRuntime.performReactRefresh();
- // Remove potential memory leak
- if (isOldModuleDead) oldModule.previousVersion = null;
- } else if (pendingUpdateCount === currentPendingUpdateCount) {
- FastRefreshLoader.performFullRefresh();
- } else {
- return Promise.reject(
- new ThrottleModuleUpdateError(
- `Expected pendingUpdateCount: ${currentPendingUpdateCount} but received: ${pendingUpdateCount}`,
- ),
- );
- }
-
- break;
- }
- }
- } catch (exception) {
- HMRModule.dependencies = orig_deps;
- HMRModule.dependencies.modules[this.module_index].additional_updaters = origUpdaters;
- throw exception;
- }
- this.timings.callbacks = performance.now() - callbacksStart;
-
- if (HotReload.VERBOSE) {
- __hmrlog.debug(
- "Ran callbacks",
- HMRModule.dependencies.modules[this.module_index].file_path,
- "in",
- formatDuration(this.timings.callbacks),
- "ms",
- );
- }
-
- orig_deps = null;
- this.timings.total = this.timings.import + this.timings.callbacks + this.timings.notify;
- return Promise.resolve([HMRModule.dependencies.modules[this.module_index], this.timings]);
- }
- }
-
- type AnyHMRModule = HMRModule | FastRefreshModule;
- class DependencyGraph {
- modules: AnyHMRModule[];
- graph: Uint32Array;
- graph_used = 0;
- blobToID = new Map();
-
- getModuleByBlobURL(url: string): AnyHMRModule | null {
- const id = this.blobToID.get(url);
- return Number.isFinite(id) && this.getModuleByID(id);
- }
-
- getFilePathFromBlob(url: string): string | null {
- const module = this.getModuleByBlobURL(url);
- if (!module) return null;
-
- let filepath = module.file_path;
- // We cannot safely do this because the hash would change on the server
- if (filepath.startsWith(HMRClient.client.cwd)) {
- filepath = filepath.substring(HMRClient.client.cwd.length);
- }
-
- return filepath;
- }
-
- getModuleByID(id: number): AnyHMRModule | null {
- const index = this.graph.indexOf(id);
- return index > -1 ? this.modules[index] : null;
- }
-
- loadDefaults() {
- this.modules = new Array(32);
- this.graph = new Uint32Array(32);
- this.graph_used = 0;
- }
-
- static loadWithDefaults() {
- const graph = new DependencyGraph();
- graph.loadDefaults();
- return graph;
- }
-
- fork(offset: number) {
- const graph = new DependencyGraph();
- graph.modules = this.modules.slice();
- graph.graph_used = offset;
- graph.graph = this.graph.slice();
- return graph;
- }
- }
-
- class HMRModule {
- constructor(id: number, file_path: string) {
- this.id = id;
- this.file_path = file_path;
-
- Object.defineProperty(this, "name", {
- get() {
- return this.file_path;
- },
- configurable: false,
- enumerable: false,
- });
-
- if (!HMRModule.dependencies) {
- HMRModule.dependencies = HMRModule._dependencies;
- }
-
- this.graph_index = HMRModule.dependencies.graph_used++;
-
- // Grow the dependencies graph
- if (HMRModule.dependencies.graph.length <= this.graph_index) {
- const new_graph = new Uint32Array(HMRModule.dependencies.graph.length * 4);
- new_graph.set(HMRModule.dependencies.graph);
- HMRModule.dependencies.graph = new_graph;
-
- // In-place grow. This creates a holey array, which is bad, but less bad than pushing potentially 1000 times
- HMRModule.dependencies.modules.length = new_graph.length;
- }
-
- HMRModule.dependencies.modules[this.graph_index] = this;
- HMRModule.dependencies.graph[this.graph_index] = this.id;
- }
-
- previousVersion = null;
-
- hasSameExports(that: AnyHMRModule) {
- const thisKeys = Object.keys(this.exports);
- const thatKeys = Object.keys(that.exports);
- if (thisKeys.length !== thatKeys.length) {
- return false;
- }
-
- for (let i = 0; i < thisKeys.length; i++) {
- if (thisKeys[i] !== thatKeys[i]) {
- return false;
- }
- }
-
- return true;
- }
- #updateFunction;
- get _update() {
- return this.#updateFunction;
- }
- set _update(value) {
- this.#updateFunction = value;
- var existing = registryMap.get(this.file_path);
- }
- boundUpdate;
- update() {
- var updaters = registryMap.get(this.id);
- if (updaters?.length) {
- for (let update of updaters) {
- update(this.exports);
- }
- }
-
- this._update(this.exports);
- }
-
- static _dependencies = DependencyGraph.loadWithDefaults();
- exportAll(object: Object) {
- // object[alias] must be a function
- for (let alias in object) {
- this._exports[alias] = object[alias];
- Object.defineProperty(this.exports, alias, {
- get: this._exports[alias],
- configurable: true,
- enumerable: true,
- });
- }
- }
-
- static dependencies: DependencyGraph;
- file_path: string;
- _load = function () {};
- id = 0;
- graph_index = 0;
- _exports = {};
- exports = {};
- }
-
- function injectFastRefresh(RefreshRuntime) {
- if (!FastRefreshLoader.hasInjectedFastRefresh) {
- RefreshRuntime.injectIntoGlobalHook(globalThis);
- FastRefreshLoader.hasInjectedFastRefresh = true;
- }
- }
-
- class FastRefreshModule extends HMRModule {
- constructor(id: number, file_path: string, RefreshRuntime: any) {
- super(id, file_path);
-
- // 4,000,000,000 in base36 occupies 7 characters
- // file path is probably longer
- // small strings are better strings
- this.refreshRuntimeBaseID = (this.file_path.length > 7 ? this.id.toString(36) : this.file_path) + "/";
- FastRefreshLoader.RefreshRuntime = FastRefreshLoader.RefreshRuntime || RefreshRuntime;
-
- if (!FastRefreshLoader.hasInjectedFastRefresh) {
- RefreshRuntime.injectIntoGlobalHook(globalThis);
- FastRefreshLoader.hasInjectedFastRefresh = true;
- }
- }
-
- refreshRuntimeBaseID: string;
- isRefreshBoundary = false;
-
- // $RefreshReg$
- $r_(Component: any, id: string) {
- FastRefreshLoader.RefreshRuntime.register(Component, this.refreshRuntimeBaseID + id);
- }
- // $RefreshReg$(Component, Component.name || Component.displayName)
- $r(Component: any) {
- if (!FastRefreshLoader.RefreshRuntime.isLikelyComponentType(Component)) {
- return;
- }
-
- this.$r_(Component, Component.name || Component.displayName);
- }
-
- // Auto-register exported React components so we only have to manually register the non-exported ones
- // This is what Metro does: https://github.com/facebook/metro/blob/9f2b1210a0f66378dd93e5fcaabc464c86c9e236/packages/metro-runtime/src/polyfills/require.js#L905
- exportAll(object: any) {
- super.exportAll(object);
-
- // One thing I'm unsure of:
- // Do we need to register the exports object iself? Is it important for some namespacing thing?
- // Metro seems to do that. However, that might be an artifact of CommonJS modules. People do module.exports = SomeReactComponent.
- var hasExports = false;
- var onlyExportsComponents = true;
- for (const key in object) {
- if (key === "__esModule") {
- continue;
- }
-
- hasExports = true;
-
- // Everything in here should always be a function
- // exportAll({blah: () => blah})
- // If you see exception right here, please file an issue and include the source file in the issue.
- const Component = object[key]();
-
- // Ensure exported React components always have names
- // This is for simpler debugging
- if (Component && typeof Component === "function" && !("name" in Component) && Object.isExtensible(Component)) {
- const named = {
- get() {
- return key;
- },
- enumerable: false,
- configurable: true,
- };
- // Ignore any errors if it turns out this was already set as not configurable
- try {
- // "name" is the official JavaScript way
- // "displayName" is the legacy React way
- Object.defineProperties(Component, {
- name: named,
- displayName: named,
- });
- } catch (exception) {}
- }
-
- if (!FastRefreshLoader.RefreshRuntime.isLikelyComponentType(Component)) {
- onlyExportsComponents = false;
- // We can't stop here because we may have other exports which are components that need to be registered.
- continue;
- }
-
- this.$r_(Component, key);
- }
-
- this.isRefreshBoundary = hasExports && onlyExportsComponents;
- }
-
- loaded(_onUpdate) {
- this._update = _onUpdate;
- }
- }
-
- var __hmrlog = {
- debug(...args) {
- // console.debug("[bun]", ...args);
- console.debug(...args);
- },
- error(...args) {
- // console.error("[bun]", ...args);
- console.error(...args);
- },
- log(...args) {
- // console.log("[bun]", ...args);
- console.log(...args);
- },
- warn(...args) {
- // console.warn("[bun]", ...args);
- console.warn(...args);
- },
- };
-
- // __HMRModule = FastRefreshModule;
- __HMRModule = HMRModule;
- __FastRefreshModule = FastRefreshModule;
- __HMRClient = HMRClient;
- __injectFastRefresh = injectFastRefresh;
- if ("document" in globalThis) {
- document.addEventListener("onimportcss", HMRClient.onCSSImport, {
- passive: true,
- });
-
- // window.addEventListener("error", HMRClient.onError, { passive: true });
- }
- globalThis["__BUN"] = HMRClient;
- globalThis["__BUN_ERROR"] = BunError;
-}
-
-export { __FastRefreshModule, __HMRClient, __HMRModule, __injectFastRefresh };
diff --git a/src/runtime/index-with-refresh.ts b/src/runtime/index-with-refresh.ts
deleted file mode 100644
index 670caa8bf1ae9..0000000000000
--- a/src/runtime/index-with-refresh.ts
+++ /dev/null
@@ -1,12 +0,0 @@
-// @ts-nocheck
-export * as __FastRefreshRuntime from "../react-refresh";
-export * from "./errors";
-export * from "./hmr";
-export * from "./index-without-hmr";
-
-globalThis.process ||= {
- env: {},
- cwd() {
- return "/bun-fake-dir/";
- },
-} as any;
diff --git a/src/runtime/index-without-hmr.ts b/src/runtime/index-without-hmr.ts
deleted file mode 100644
index 514d6a23a018a..0000000000000
--- a/src/runtime/index-without-hmr.ts
+++ /dev/null
@@ -1,3 +0,0 @@
-// @ts-nocheck
-export * from "../runtime.js";
-// export { default as regeneratorRuntime } from "./regenerator";
diff --git a/src/runtime/index.ts b/src/runtime/index.ts
deleted file mode 100644
index 313abe8f5b142..0000000000000
--- a/src/runtime/index.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-// @ts-nocheck
-export * from "./errors";
-export * from "./hmr";
-export * from "./index-without-hmr";
-
-globalThis.process ||= {
- env: {},
- cwd() {
- return "/bun-fake-dir/";
- },
-};
diff --git a/src/runtime/regenerator.ts b/src/runtime/regenerator.ts
deleted file mode 100644
index a3a7ba75ed59c..0000000000000
--- a/src/runtime/regenerator.ts
+++ /dev/null
@@ -1,741 +0,0 @@
-// @ts-nocheck
-/**
- * Copyright (c) 2014-present, Facebook, Inc.
- *
- * This source code is licensed under the MIT license found in the
- * LICENSE file in the root directory of this source tree.
- */
-
-var runtime = (function (exports) {
- "use strict";
-
- var Op = Object.prototype;
- var hasOwn = Op.hasOwnProperty;
- var undefined; // More compressible than void 0.
- var $Symbol = typeof Symbol === "function" ? Symbol : {};
- var iteratorSymbol = $Symbol.iterator || "@@iterator";
- var asyncIteratorSymbol = $Symbol.asyncIterator || "@@asyncIterator";
- var toStringTagSymbol = $Symbol.toStringTag || "@@toStringTag";
-
- function define(obj, key, value) {
- Object.defineProperty(obj, key, {
- value: value,
- enumerable: true,
- configurable: true,
- writable: true,
- });
- return obj[key];
- }
- try {
- // IE 8 has a broken Object.defineProperty that only works on DOM objects.
- define({}, "");
- } catch (err) {
- define = function (obj, key, value) {
- return (obj[key] = value);
- };
- }
-
- function wrap(innerFn, outerFn, self, tryLocsList) {
- // If outerFn provided and outerFn.prototype is a Generator, then outerFn.prototype instanceof Generator.
- var protoGenerator = outerFn && outerFn.prototype instanceof Generator ? outerFn : Generator;
- var generator = Object.create(protoGenerator.prototype);
- var context = new Context(tryLocsList || []);
-
- // The ._invoke method unifies the implementations of the .next,
- // .throw, and .return methods.
- generator._invoke = makeInvokeMethod(innerFn, self, context);
-
- return generator;
- }
- exports.wrap = wrap;
-
- // Try/catch helper to minimize deoptimizations. Returns a completion
- // record like context.tryEntries[i].completion. This interface could
- // have been (and was previously) designed to take a closure to be
- // invoked without arguments, but in all the cases we care about we
- // already have an existing method we want to call, so there's no need
- // to create a new function object. We can even get away with assuming
- // the method takes exactly one argument, since that happens to be true
- // in every case, so we don't have to touch the arguments object. The
- // only additional allocation required is the completion record, which
- // has a stable shape and so hopefully should be cheap to allocate.
- function tryCatch(fn, obj, arg) {
- try {
- return { type: "normal", arg: fn.call(obj, arg) };
- } catch (err) {
- return { type: "throw", arg: err };
- }
- }
-
- var GenStateSuspendedStart = "suspendedStart";
- var GenStateSuspendedYield = "suspendedYield";
- var GenStateExecuting = "executing";
- var GenStateCompleted = "completed";
-
- // Returning this object from the innerFn has the same effect as
- // breaking out of the dispatch switch statement.
- var ContinueSentinel = {};
-
- // Dummy constructor functions that we use as the .constructor and
- // .constructor.prototype properties for functions that return Generator
- // objects. For full spec compliance, you may wish to configure your
- // minifier not to mangle the names of these two functions.
- function Generator() {}
- function GeneratorFunction() {}
- function GeneratorFunctionPrototype() {}
-
- // This is a polyfill for %IteratorPrototype% for environments that
- // don't natively support it.
- var IteratorPrototype = {};
- define(IteratorPrototype, iteratorSymbol, function () {
- return this;
- });
-
- var getProto = Object.getPrototypeOf;
- var NativeIteratorPrototype = getProto && getProto(getProto(values([])));
- if (
- NativeIteratorPrototype &&
- NativeIteratorPrototype !== Op &&
- hasOwn.call(NativeIteratorPrototype, iteratorSymbol)
- ) {
- // This environment has a native %IteratorPrototype%; use it instead
- // of the polyfill.
- IteratorPrototype = NativeIteratorPrototype;
- }
-
- var Gp = (GeneratorFunctionPrototype.prototype = Generator.prototype = Object.create(IteratorPrototype));
- GeneratorFunction.prototype = GeneratorFunctionPrototype;
- define(Gp, "constructor", GeneratorFunctionPrototype);
- define(GeneratorFunctionPrototype, "constructor", GeneratorFunction);
- GeneratorFunction.displayName = define(GeneratorFunctionPrototype, toStringTagSymbol, "GeneratorFunction");
-
- // Helper for defining the .next, .throw, and .return methods of the
- // Iterator interface in terms of a single ._invoke method.
- function defineIteratorMethods(prototype) {
- ["next", "throw", "return"].forEach(function (method) {
- define(prototype, method, function (arg) {
- return this._invoke(method, arg);
- });
- });
- }
-
- exports.isGeneratorFunction = function (genFun) {
- var ctor = typeof genFun === "function" && genFun.constructor;
- return ctor
- ? ctor === GeneratorFunction ||
- // For the native GeneratorFunction constructor, the best we can
- // do is to check its .name property.
- (ctor.displayName || ctor.name) === "GeneratorFunction"
- : false;
- };
-
- exports.mark = function (genFun) {
- if (Object.setPrototypeOf) {
- Object.setPrototypeOf(genFun, GeneratorFunctionPrototype);
- } else {
- genFun.__proto__ = GeneratorFunctionPrototype;
- define(genFun, toStringTagSymbol, "GeneratorFunction");
- }
- genFun.prototype = Object.create(Gp);
- return genFun;
- };
-
- // Within the body of any async function, `await x` is transformed to
- // `yield regeneratorRuntime.awrap(x)`, so that the runtime can test
- // `hasOwn.call(value, "__await")` to determine if the yielded value is
- // meant to be awaited.
- exports.awrap = function (arg) {
- return { __await: arg };
- };
-
- function AsyncIterator(generator, PromiseImpl) {
- function invoke(method, arg, resolve, reject) {
- var record = tryCatch(generator[method], generator, arg);
- if (record.type === "throw") {
- reject(record.arg);
- } else {
- var result = record.arg;
- var value = result.value;
- if (value && typeof value === "object" && hasOwn.call(value, "__await")) {
- return PromiseImpl.resolve(value.__await).then(
- function (value) {
- invoke("next", value, resolve, reject);
- },
- function (err) {
- invoke("throw", err, resolve, reject);
- },
- );
- }
-
- return PromiseImpl.resolve(value).then(
- function (unwrapped) {
- // When a yielded Promise is resolved, its final value becomes
- // the .value of the Promise<{value,done}> result for the
- // current iteration.
- result.value = unwrapped;
- resolve(result);
- },
- function (error) {
- // If a rejected Promise was yielded, throw the rejection back
- // into the async generator function so it can be handled there.
- return invoke("throw", error, resolve, reject);
- },
- );
- }
- }
-
- var previousPromise;
-
- function enqueue(method, arg) {
- function callInvokeWithMethodAndArg() {
- return new PromiseImpl(function (resolve, reject) {
- invoke(method, arg, resolve, reject);
- });
- }
-
- return (previousPromise =
- // If enqueue has been called before, then we want to wait until
- // all previous Promises have been resolved before calling invoke,
- // so that results are always delivered in the correct order. If
- // enqueue has not been called before, then it is important to
- // call invoke immediately, without waiting on a callback to fire,
- // so that the async generator function has the opportunity to do
- // any necessary setup in a predictable way. This predictability
- // is why the Promise constructor synchronously invokes its
- // executor callback, and why async functions synchronously
- // execute code before the first await. Since we implement simple
- // async functions in terms of async generators, it is especially
- // important to get this right, even though it requires care.
- previousPromise
- ? previousPromise.then(
- callInvokeWithMethodAndArg,
- // Avoid propagating failures to Promises returned by later
- // invocations of the iterator.
- callInvokeWithMethodAndArg,
- )
- : callInvokeWithMethodAndArg());
- }
-
- // Define the unified helper method that is used to implement .next,
- // .throw, and .return (see defineIteratorMethods).
- this._invoke = enqueue;
- }
-
- defineIteratorMethods(AsyncIterator.prototype);
- define(AsyncIterator.prototype, asyncIteratorSymbol, function () {
- return this;
- });
- exports.AsyncIterator = AsyncIterator;
-
- // Note that simple async functions are implemented on top of
- // AsyncIterator objects; they just return a Promise for the value of
- // the final result produced by the iterator.
- exports.async = function (innerFn, outerFn, self, tryLocsList, PromiseImpl) {
- if (PromiseImpl === void 0) PromiseImpl = Promise;
-
- var iter = new AsyncIterator(wrap(innerFn, outerFn, self, tryLocsList), PromiseImpl);
-
- return exports.isGeneratorFunction(outerFn)
- ? iter // If outerFn is a generator, return the full iterator.
- : iter.next().then(function (result) {
- return result.done ? result.value : iter.next();
- });
- };
-
- function makeInvokeMethod(innerFn, self, context) {
- var state = GenStateSuspendedStart;
-
- return function invoke(method, arg) {
- if (state === GenStateExecuting) {
- throw new Error("Generator is already running");
- }
-
- if (state === GenStateCompleted) {
- if (method === "throw") {
- throw arg;
- }
-
- // Be forgiving, per 25.3.3.3.3 of the spec:
- // https://people.mozilla.org/~jorendorff/es6-draft.html#sec-generatorresume
- return doneResult();
- }
-
- context.method = method;
- context.arg = arg;
-
- while (true) {
- var delegate = context.delegate;
- if (delegate) {
- var delegateResult = maybeInvokeDelegate(delegate, context);
- if (delegateResult) {
- if (delegateResult === ContinueSentinel) continue;
- return delegateResult;
- }
- }
-
- if (context.method === "next") {
- // Setting context._sent for legacy support of Babel's
- // function.sent implementation.
- context.sent = context._sent = context.arg;
- } else if (context.method === "throw") {
- if (state === GenStateSuspendedStart) {
- state = GenStateCompleted;
- throw context.arg;
- }
-
- context.dispatchException(context.arg);
- } else if (context.method === "return") {
- context.abrupt("return", context.arg);
- }
-
- state = GenStateExecuting;
-
- var record = tryCatch(innerFn, self, context);
- if (record.type === "normal") {
- // If an exception is thrown from innerFn, we leave state ===
- // GenStateExecuting and loop back for another invocation.
- state = context.done ? GenStateCompleted : GenStateSuspendedYield;
-
- if (record.arg === ContinueSentinel) {
- continue;
- }
-
- return {
- value: record.arg,
- done: context.done,
- };
- } else if (record.type === "throw") {
- state = GenStateCompleted;
- // Dispatch the exception by looping back around to the
- // context.dispatchException(context.arg) call above.
- context.method = "throw";
- context.arg = record.arg;
- }
- }
- };
- }
-
- // Call delegate.iterator[context.method](context.arg) and handle the
- // result, either by returning a { value, done } result from the
- // delegate iterator, or by modifying context.method and context.arg,
- // setting context.delegate to null, and returning the ContinueSentinel.
- function maybeInvokeDelegate(delegate, context) {
- var method = delegate.iterator[context.method];
- if (method === undefined) {
- // A .throw or .return when the delegate iterator has no .throw
- // method always terminates the yield* loop.
- context.delegate = null;
-
- if (context.method === "throw") {
- // Note: ["return"] must be used for ES3 parsing compatibility.
- if (delegate.iterator["return"]) {
- // If the delegate iterator has a return method, give it a
- // chance to clean up.
- context.method = "return";
- context.arg = undefined;
- maybeInvokeDelegate(delegate, context);
-
- if (context.method === "throw") {
- // If maybeInvokeDelegate(context) changed context.method from
- // "return" to "throw", let that override the TypeError below.
- return ContinueSentinel;
- }
- }
-
- context.method = "throw";
- context.arg = new TypeError("The iterator does not provide a 'throw' method");
- }
-
- return ContinueSentinel;
- }
-
- var record = tryCatch(method, delegate.iterator, context.arg);
-
- if (record.type === "throw") {
- context.method = "throw";
- context.arg = record.arg;
- context.delegate = null;
- return ContinueSentinel;
- }
-
- var info = record.arg;
-
- if (!info) {
- context.method = "throw";
- context.arg = new TypeError("iterator result is not an object");
- context.delegate = null;
- return ContinueSentinel;
- }
-
- if (info.done) {
- // Assign the result of the finished delegate to the temporary
- // variable specified by delegate.resultName (see delegateYield).
- context[delegate.resultName] = info.value;
-
- // Resume execution at the desired location (see delegateYield).
- context.next = delegate.nextLoc;
-
- // If context.method was "throw" but the delegate handled the
- // exception, let the outer generator proceed normally. If
- // context.method was "next", forget context.arg since it has been
- // "consumed" by the delegate iterator. If context.method was
- // "return", allow the original .return call to continue in the
- // outer generator.
- if (context.method !== "return") {
- context.method = "next";
- context.arg = undefined;
- }
- } else {
- // Re-yield the result returned by the delegate method.
- return info;
- }
-
- // The delegate iterator is finished, so forget it and continue with
- // the outer generator.
- context.delegate = null;
- return ContinueSentinel;
- }
-
- // Define Generator.prototype.{next,throw,return} in terms of the
- // unified ._invoke helper method.
- defineIteratorMethods(Gp);
-
- define(Gp, toStringTagSymbol, "Generator");
-
- // A Generator should always return itself as the iterator object when the
- // @@iterator function is called on it. Some browsers' implementations of the
- // iterator prototype chain incorrectly implement this, causing the Generator
- // object to not be returned from this call. This ensures that doesn't happen.
- // See https://github.com/facebook/regenerator/issues/274 for more details.
- define(Gp, iteratorSymbol, function () {
- return this;
- });
-
- define(Gp, "toString", function () {
- return "[object Generator]";
- });
-
- function pushTryEntry(locs) {
- var entry = { tryLoc: locs[0] };
-
- if (1 in locs) {
- entry.catchLoc = locs[1];
- }
-
- if (2 in locs) {
- entry.finallyLoc = locs[2];
- entry.afterLoc = locs[3];
- }
-
- this.tryEntries.push(entry);
- }
-
- function resetTryEntry(entry) {
- var record = entry.completion || {};
- record.type = "normal";
- delete record.arg;
- entry.completion = record;
- }
-
- function Context(tryLocsList) {
- // The root entry object (effectively a try statement without a catch
- // or a finally block) gives us a place to store values thrown from
- // locations where there is no enclosing try statement.
- this.tryEntries = [{ tryLoc: "root" }];
- tryLocsList.forEach(pushTryEntry, this);
- this.reset(true);
- }
-
- exports.keys = function (object) {
- var keys = [];
- for (var key in object) {
- keys.push(key);
- }
- keys.reverse();
-
- // Rather than returning an object with a next method, we keep
- // things simple and return the next function itself.
- return function next() {
- while (keys.length) {
- var key = keys.pop();
- if (key in object) {
- next.value = key;
- next.done = false;
- return next;
- }
- }
-
- // To avoid creating an additional object, we just hang the .value
- // and .done properties off the next function object itself. This
- // also ensures that the minifier will not anonymize the function.
- next.done = true;
- return next;
- };
- };
-
- function values(iterable) {
- if (iterable) {
- var iteratorMethod = iterable[iteratorSymbol];
- if (iteratorMethod) {
- return iteratorMethod.call(iterable);
- }
-
- if (typeof iterable.next === "function") {
- return iterable;
- }
-
- if (!isNaN(iterable.length)) {
- var i = -1,
- next = function next() {
- while (++i < iterable.length) {
- if (hasOwn.call(iterable, i)) {
- next.value = iterable[i];
- next.done = false;
- return next;
- }
- }
-
- next.value = undefined;
- next.done = true;
-
- return next;
- };
-
- return (next.next = next);
- }
- }
-
- // Return an iterator with no values.
- return { next: doneResult };
- }
- exports.values = values;
-
- function doneResult() {
- return { value: undefined, done: true };
- }
-
- Context.prototype = {
- constructor: Context,
-
- reset: function (skipTempReset) {
- this.prev = 0;
- this.next = 0;
- // Resetting context._sent for legacy support of Babel's
- // function.sent implementation.
- this.sent = this._sent = undefined;
- this.done = false;
- this.delegate = null;
-
- this.method = "next";
- this.arg = undefined;
-
- this.tryEntries.forEach(resetTryEntry);
-
- if (!skipTempReset) {
- for (var name in this) {
- // Not sure about the optimal order of these conditions:
- if (name.charAt(0) === "t" && hasOwn.call(this, name) && !isNaN(+name.slice(1))) {
- this[name] = undefined;
- }
- }
- }
- },
-
- stop: function () {
- this.done = true;
-
- var rootEntry = this.tryEntries[0];
- var rootRecord = rootEntry.completion;
- if (rootRecord.type === "throw") {
- throw rootRecord.arg;
- }
-
- return this.rval;
- },
-
- dispatchException: function (exception) {
- if (this.done) {
- throw exception;
- }
-
- var context = this;
- function handle(loc, caught) {
- record.type = "throw";
- record.arg = exception;
- context.next = loc;
-
- if (caught) {
- // If the dispatched exception was caught by a catch block,
- // then let that catch block handle the exception normally.
- context.method = "next";
- context.arg = undefined;
- }
-
- return !!caught;
- }
-
- for (var i = this.tryEntries.length - 1; i >= 0; --i) {
- var entry = this.tryEntries[i];
- var record = entry.completion;
-
- if (entry.tryLoc === "root") {
- // Exception thrown outside of any try block that could handle
- // it, so set the completion value of the entire function to
- // throw the exception.
- return handle("end");
- }
-
- if (entry.tryLoc <= this.prev) {
- var hasCatch = hasOwn.call(entry, "catchLoc");
- var hasFinally = hasOwn.call(entry, "finallyLoc");
-
- if (hasCatch && hasFinally) {
- if (this.prev < entry.catchLoc) {
- return handle(entry.catchLoc, true);
- } else if (this.prev < entry.finallyLoc) {
- return handle(entry.finallyLoc);
- }
- } else if (hasCatch) {
- if (this.prev < entry.catchLoc) {
- return handle(entry.catchLoc, true);
- }
- } else if (hasFinally) {
- if (this.prev < entry.finallyLoc) {
- return handle(entry.finallyLoc);
- }
- } else {
- throw new Error("try statement without catch or finally");
- }
- }
- }
- },
-
- abrupt: function (type, arg) {
- for (var i = this.tryEntries.length - 1; i >= 0; --i) {
- var entry = this.tryEntries[i];
- if (entry.tryLoc <= this.prev && hasOwn.call(entry, "finallyLoc") && this.prev < entry.finallyLoc) {
- var finallyEntry = entry;
- break;
- }
- }
-
- if (
- finallyEntry &&
- (type === "break" || type === "continue") &&
- finallyEntry.tryLoc <= arg &&
- arg <= finallyEntry.finallyLoc
- ) {
- // Ignore the finally entry if control is not jumping to a
- // location outside the try/catch block.
- finallyEntry = null;
- }
-
- var record = finallyEntry ? finallyEntry.completion : {};
- record.type = type;
- record.arg = arg;
-
- if (finallyEntry) {
- this.method = "next";
- this.next = finallyEntry.finallyLoc;
- return ContinueSentinel;
- }
-
- return this.complete(record);
- },
-
- complete: function (record, afterLoc) {
- if (record.type === "throw") {
- throw record.arg;
- }
-
- if (record.type === "break" || record.type === "continue") {
- this.next = record.arg;
- } else if (record.type === "return") {
- this.rval = this.arg = record.arg;
- this.method = "return";
- this.next = "end";
- } else if (record.type === "normal" && afterLoc) {
- this.next = afterLoc;
- }
-
- return ContinueSentinel;
- },
-
- finish: function (finallyLoc) {
- for (var i = this.tryEntries.length - 1; i >= 0; --i) {
- var entry = this.tryEntries[i];
- if (entry.finallyLoc === finallyLoc) {
- this.complete(entry.completion, entry.afterLoc);
- resetTryEntry(entry);
- return ContinueSentinel;
- }
- }
- },
-
- catch: function (tryLoc) {
- for (var i = this.tryEntries.length - 1; i >= 0; --i) {
- var entry = this.tryEntries[i];
- if (entry.tryLoc === tryLoc) {
- var record = entry.completion;
- if (record.type === "throw") {
- var thrown = record.arg;
- resetTryEntry(entry);
- }
- return thrown;
- }
- }
-
- // The context.catch method must only be called with a location
- // argument that corresponds to a known catch block.
- throw new Error("illegal catch attempt");
- },
-
- delegateYield: function (iterable, resultName, nextLoc) {
- this.delegate = {
- iterator: values(iterable),
- resultName: resultName,
- nextLoc: nextLoc,
- };
-
- if (this.method === "next") {
- // Deliberately forget the last sent value so that we don't
- // accidentally pass it on to the delegate.
- this.arg = undefined;
- }
-
- return ContinueSentinel;
- },
- };
-
- // Regardless of whether this script is executing as a CommonJS module
- // or not, return the runtime object so that we can declare the variable
- // regeneratorRuntime in the outer scope, which allows this module to be
- // injected easily by `bin/regenerator --include-runtime script.js`.
- return exports;
-})(
- // If this script is executing as a CommonJS module, use module.exports
- // as the regeneratorRuntime namespace. Otherwise create a new empty
- // object. Either way, the resulting object will be used to initialize
- // the regeneratorRuntime variable at the top of this file.
- typeof module === "object" ? module.exports : {},
-);
-
-try {
- regeneratorRuntime = runtime;
-} catch (accidentalStrictMode) {
- // This module should not be running in strict mode, so the above
- // assignment should always work unless something is misconfigured. Just
- // in case runtime.js accidentally runs in strict mode, in modern engines
- // we can explicitly access globalThis. In older engines we can escape
- // strict mode using a global Function call. This could conceivably fail
- // if a Content Security Policy forbids using Function, but in that case
- // the proper solution is to fix the accidental strict mode problem. If
- // you've misconfigured your bundler to force strict mode and applied a
- // CSP to forbid Function, and you're not willing to fix either of those
- // problems, please detail your unique predicament in a GitHub issue.
- if (typeof globalThis === "object") {
- globalThis.regeneratorRuntime = runtime;
- } else {
- Function("r", "regeneratorRuntime = r")(runtime);
- }
-}
diff --git a/src/sql/postgres.zig b/src/sql/postgres.zig
index 009146cdf9b47..dd90a550e1cb5 100644
--- a/src/sql/postgres.zig
+++ b/src/sql/postgres.zig
@@ -3514,13 +3514,13 @@ pub const PostgresSQLConnection = struct {
}
}
- // #define pg_hton16(x) (x)
- // #define pg_hton32(x) (x)
- // #define pg_hton64(x) (x)
+ // #define pg_hton16(x) (x)
+ // #define pg_hton32(x) (x)
+ // #define pg_hton64(x) (x)
- // #define pg_ntoh16(x) (x)
- // #define pg_ntoh32(x) (x)
- // #define pg_ntoh64(x) (x)
+ // #define pg_ntoh16(x) (x)
+ // #define pg_ntoh32(x) (x)
+ // #define pg_ntoh64(x) (x)
fn pg_ntoT(comptime IntSize: usize, i: anytype) std.meta.Int(.unsigned, IntSize) {
@setRuntimeSafety(false);
diff --git a/src/string_immutable.zig b/src/string_immutable.zig
index 277c9e5fdafe9..191d5b97c771d 100644
--- a/src/string_immutable.zig
+++ b/src/string_immutable.zig
@@ -4649,6 +4649,12 @@ pub fn trim(slice: anytype, comptime values_to_strip: []const u8) @TypeOf(slice)
return slice[begin..end];
}
+pub fn isAllWhitespace(slice: []const u8) bool {
+ var begin: usize = 0;
+ while (begin < slice.len and std.mem.indexOfScalar(u8, &whitespace_chars, slice[begin]) != null) : (begin += 1) {}
+ return begin == slice.len;
+}
+
pub const whitespace_chars = [_]u8{ ' ', '\t', '\n', '\r', std.ascii.control_code.vt, std.ascii.control_code.ff };
pub fn lengthOfLeadingWhitespaceASCII(slice: string) usize {
diff --git a/test/bundler/bundler_browser.test.ts b/test/bundler/bundler_browser.test.ts
index 40ddd09226cae..49bb5f6853744 100644
--- a/test/bundler/bundler_browser.test.ts
+++ b/test/bundler/bundler_browser.test.ts
@@ -55,6 +55,9 @@ describe("bundler", () => {
run: {
stdout: "function\nfunction\nundefined",
},
+ onAfterBundle(api) {
+ api.expectFile('out.js').not.toInclude('import ');
+ },
});
itBundled("browser/NodeTTY", {
files: {
@@ -69,6 +72,9 @@ describe("bundler", () => {
run: {
stdout: "function\nfunction\nfalse",
},
+ onAfterBundle(api) {
+ api.expectFile('out.js').not.toInclude('import ');
+ },
});
// TODO: use nodePolyfillList to generate the code in here.
const NodePolyfills = itBundled("browser/NodePolyfills", {
diff --git a/test/bundler/esbuild/tsconfig.test.ts b/test/bundler/esbuild/tsconfig.test.ts
index b62a6d76947e4..0b108dfc9c2eb 100644
--- a/test/bundler/esbuild/tsconfig.test.ts
+++ b/test/bundler/esbuild/tsconfig.test.ts
@@ -367,7 +367,6 @@ describe("bundler", () => {
},
});
itBundled("tsconfig/JSX", {
- // GENERATED
files: {
"/Users/user/project/entry.tsx": `console.log(<>>)`,
"/Users/user/project/node_modules/react/jsx-dev-runtime.ts": `
@@ -399,7 +398,6 @@ describe("bundler", () => {
},
});
itBundled("tsconfig/ReactJSXNotReact", {
- // GENERATED
files: {
"/Users/user/project/entry.tsx": `console.log(<>>)`,
"/Users/user/project/node_modules/notreact/jsx-runtime.ts": `
@@ -428,7 +426,6 @@ describe("bundler", () => {
},
});
itBundled("tsconfig/ReactJSXNotReactScoped", {
- // GENERATED
files: {
"/Users/user/project/entry.tsx": `console.log(<>>)`,
"/Users/user/project/node_modules/@notreact/jsx/jsx-runtime.ts": `
@@ -457,7 +454,6 @@ describe("bundler", () => {
},
});
itBundled("tsconfig/ReactJSXDevNotReact", {
- // GENERATED
files: {
"/Users/user/project/entry.tsx": `console.log(<>>)`,
"/Users/user/project/node_modules/notreact/jsx-dev-runtime.ts": `
@@ -484,7 +480,6 @@ describe("bundler", () => {
},
});
itBundled("tsconfig/ReactJSXDev", {
- // GENERATED
files: {
"/Users/user/project/entry.tsx": `console.log(<>>)`,
"/Users/user/project/node_modules/react/jsx-dev-runtime.ts": `
@@ -510,7 +505,6 @@ describe("bundler", () => {
},
});
itBundled("tsconfig/ReactJSXDevTSConfigProduction", {
- // GENERATED
files: {
"/Users/user/project/entry.tsx": `console.log(<>>)`,
"/Users/user/project/node_modules/react/jsx-dev-runtime.ts": `
@@ -539,7 +533,6 @@ describe("bundler", () => {
},
});
itBundled("tsconfig/ReactJSX", {
- // GENERATED
files: {
"/Users/user/project/entry.tsx": `console.log(<>>)`,
"/Users/user/project/node_modules/react/jsx-runtime.ts": `
@@ -562,7 +555,6 @@ describe("bundler", () => {
});
itBundled("tsconfig/ReactJSXClassic", {
- // GENERATED
files: {
"/Users/user/project/entry.tsx": `console.log(<>>)`,
"/Users/user/project/node_modules/react/jsx-dev-runtime.ts": `
@@ -589,7 +581,6 @@ describe("bundler", () => {
},
});
itBundled("tsconfig/ReactJSXClassicWithNODE_ENV=Production", {
- // GENERATED
files: {
"/Users/user/project/entry.tsx": `console.log(<>>)`,
"/Users/user/project/node_modules/react/jsx-dev-runtime.ts": `
@@ -620,7 +611,6 @@ describe("bundler", () => {
});
itBundled("tsconfig/ReactJSXClassicWithNODE_ENV=Development", {
- // GENERATED
files: {
"/Users/user/project/entry.tsx": `console.log(<>>)`,
"/Users/user/project/node_modules/react/jsx-dev-runtime.ts": `