From ad920cb58016ed69115b0a21eaa45746c0e49d34 Mon Sep 17 00:00:00 2001 From: Jarred Sumner <709451+Jarred-Sumner@users.noreply.github.com> Date: Fri, 12 Jan 2024 17:08:52 -0800 Subject: [PATCH 01/74] Skeleton code for `bun run --workspace` --- src/cli.zig | 14 ++++++++++++++ src/cli/run_command.zig | 35 +++++++++++++++++++++++++++++++++++ 2 files changed, 49 insertions(+) diff --git a/src/cli.zig b/src/cli.zig index 0fba3078d6b40..69789fb02adae 100644 --- a/src/cli.zig +++ b/src/cli.zig @@ -196,6 +196,7 @@ pub const Arguments = struct { const run_only_params = [_]ParamType{ clap.parseParam("--silent Don't print the script command") catch unreachable, clap.parseParam("-b, --bun Force a script or package to use Bun's runtime instead of Node.js (via symlinking node)") catch unreachable, + clap.parseParam("-w, --workspace Run a script from a workspace member package") catch unreachable, }; pub const run_params = run_only_params ++ runtime_params_ ++ transpiler_params_ ++ base_params_; @@ -517,6 +518,10 @@ pub const Arguments = struct { if (cmd == .AutoCommand or cmd == .RunCommand or cmd == .TestCommand or cmd == .RunAsNodeCommand) { const preloads = args.options("--preload"); + if (comptime cmd == .RunCommand) { + ctx.run_options.workspace = args.option("--workspace") orelse ""; + } + if (args.flag("--hot")) { ctx.debug.hot_reload = .hot; } else if (args.flag("--watch")) { @@ -1086,10 +1091,19 @@ pub const Command = struct { test_options: TestOptions = TestOptions{}, bundler_options: BundlerOptions = BundlerOptions{}, runtime_options: RuntimeOptions = RuntimeOptions{}, + run_options: RunOptions = RunOptions{}, preloads: []const string = &[_]string{}, has_loaded_global_config: bool = false, + pub const RunOptions = struct { + workspace: []const u8 = "", + + pub fn hasWorkspace(this: *const RunOptions) bool { + return this.workspace.len > 0; + } + }; + pub const BundlerOptions = struct { compile: bool = false, diff --git a/src/cli/run_command.zig 
b/src/cli/run_command.zig index 21c1f2706d996..ea09585f34c9f 100644 --- a/src/cli/run_command.zig +++ b/src/cli/run_command.zig @@ -1026,6 +1026,41 @@ pub const RunCommand = struct { const passthrough = ctx.passthrough; const force_using_bun = ctx.debug.run_in_bun; + // is there a --workspace flag set? + if (ctx.run_options.hasWorkspace()) { + var manager = try PackageManager.init(ctx, PackageManager.Subcommand.pm); + var load_lockfile = manager.lockfile.loadFromDisk(ctx.allocator, ctx.log, "bun.lockb"); + + if (load_lockfile == .not_found) { + var root = Lockfile.Package{}; + try manager.lockfile.initEmpty(ctx.allocator); + try root.parseMain( + manager.lockfile, + ctx.allocator, + ctx.log, + package_json_source, + Features.main, + ); + + _ = try manager.lockfile.appendPackage(root); + } + + if (manager.lockfile.workspace_paths.get(bun.hash(ctx.run_options.workspace))) |path_ptr| { + const path = manager.lockfile.str(path_ptr); + var local_buf: [bun.MAX_PATH_BYTES]u8 = undefined; + const package_json_path = Path.joinZBuf( + &local_buf, + &[_]string{ path, "package.json" }, + .auto, + ); + // check if package.json exists! 
+ // if it does exist, then let's set cwd + std.os.chdir(path) catch |err| { + // print some message saying we couldn't change directory + Global.crash(); + }; + } // else if it's not found, try the passed in directory as a package.json + } // This doesn't cover every case if ((script_name_to_search.len == 1 and script_name_to_search[0] == '.') or From 6be006e57ebc7c4fb44eb1a0ad554112badc2b1d Mon Sep 17 00:00:00 2001 From: Jarred Sumner <709451+Jarred-Sumner@users.noreply.github.com> Date: Fri, 12 Jan 2024 17:11:53 -0800 Subject: [PATCH 02/74] Update run_command.zig --- src/cli/run_command.zig | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig index ea09585f34c9f..0b2362454181e 100644 --- a/src/cli/run_command.zig +++ b/src/cli/run_command.zig @@ -1038,6 +1038,16 @@ pub const RunCommand = struct { manager.lockfile, ctx.allocator, ctx.log, + // people should be able to use `bun run --workspace` from + // directories which do not immediately contain a + // package.json or bun.lockb + // + // + // for example, if I'm in bun/src, i should be able to run + // + // > bun run --workspace=test foo + // + // see also https://github.com/oven-sh/bun/blob/fdb095a32f542e109c278d02f9bbca4552ffde97/src/install/install.zig#L6080-L6118 package_json_source, Features.main, ); From a734de88e51aa2525a78230e8be4aae6046555bf Mon Sep 17 00:00:00 2001 From: Georgijs Vilums Date: Sat, 13 Jan 2024 17:01:03 -0800 Subject: [PATCH 03/74] implement directory traversal to find workspace root --- CMakeLists.txt | 2 + src/cli/run_command.zig | 228 +++++++++++++++++++++++++++++++++------- 2 files changed, 193 insertions(+), 37 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 73813a0d43684..488fc4a7f5509 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -5,6 +5,8 @@ cmake_policy(SET CMP0067 NEW) set(Bun_VERSION "1.0.23") set(WEBKIT_TAG b4de09f41b83e9e5c0e43ef414f1aee5968b6f7c) +set(CMAKE_EXPORT_COMPILE_COMMANDS ON) + 
set(BUN_WORKDIR "${CMAKE_CURRENT_BINARY_DIR}") message(STATUS "Configuring Bun ${Bun_VERSION} in ${BUN_WORKDIR}") diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig index 0b2362454181e..4982620536d2e 100644 --- a/src/cli/run_command.zig +++ b/src/cli/run_command.zig @@ -34,6 +34,9 @@ const resolve_path = @import("../resolver/resolve_path.zig"); const configureTransformOptionsForBun = @import("../bun.js/config.zig").configureTransformOptionsForBun; const bundler = bun.bundler; +const Path = @import("../resolver/resolve_path.zig"); +const Fs = @import("../fs.zig"); +const Package = @import("../install/lockfile.zig").Package; const DotEnv = @import("../env_loader.zig"); const which = @import("../which.zig").which; const Run = @import("../bun_js.zig").Run; @@ -55,6 +58,11 @@ const Lockfile = @import("../install/lockfile.zig"); const LifecycleScriptSubprocess = bun.install.LifecycleScriptSubprocess; +pub fn initializeStore() void { + bun.JSAst.Expr.Data.Store.create(default_allocator); + bun.JSAst.Stmt.Data.Store.create(default_allocator); +} + pub const RunCommand = struct { const shells_to_search = &[_]string{ "bash", @@ -1010,6 +1018,70 @@ pub const RunCommand = struct { Output.flush(); } + fn findWorkspaceRoot(ctx: *Command.Context) !Package.WorkspaceMap { + var package_json_cwd_buf: [bun.MAX_PATH_BYTES]u8 = undefined; + var workdir_buf: [bun.MAX_PATH_BYTES]u8 = undefined; + var workdir: []const u8 = try std.os.getcwd(workdir_buf[0..]); + + while (true) { + // std.debug.print("workdir: {s}\n", .{workdir}); + const parent_trimmed = strings.withoutTrailingSlash(workdir); + var buf2: [bun.MAX_PATH_BYTES + 1]u8 = undefined; + @memcpy(buf2[0..parent_trimmed.len], parent_trimmed); + buf2[parent_trimmed.len..buf2.len][0.."/package.json".len].* = "/package.json".*; + buf2[parent_trimmed.len + "/package.json".len] = 0; + + const json_file = std.fs.cwd().openFileZ( + buf2[0 .. 
parent_trimmed.len + "/package.json".len :0].ptr, + .{ .mode = .read_only }, + ) catch { + workdir = std.fs.path.dirname(workdir) orelse return error.MissingPackageJSON; + continue; + }; + defer json_file.close(); + + // std.debug.print("found package.json at {s}\n", .{workdir}); + + const json_stat_size = try json_file.getEndPos(); + const json_buf = try ctx.allocator.alloc(u8, json_stat_size + 64); + defer ctx.allocator.free(json_buf); + const json_len = try json_file.preadAll(json_buf, 0); + const json_path = try bun.getFdPath(json_file.handle, &package_json_cwd_buf); + const json_source = logger.Source.initPathString(json_path, json_buf[0..json_len]); + initializeStore(); + const json = try json_parser.ParseJSONUTF8(&json_source, ctx.log, ctx.allocator); + if (json.asProperty("workspaces")) |prop| { + // std.debug.print("package.json has workspace property\n", .{}); + const json_array = switch (prop.expr.data) { + .e_array => |arr| arr, + .e_object => |obj| if (obj.get("packages")) |packages| switch (packages.data) { + .e_array => |arr| arr, + else => break, + } else break, + else => break, + }; + var workspace_names = Package.WorkspaceMap.init(ctx.allocator); + _ = Package.processWorkspaceNamesArray( + &workspace_names, + ctx.allocator, + ctx.log, + json_array, + &json_source, + prop.loc, + null, + ) catch |err| { + // std.debug.print("error: {s}\n", .{@errorName(err)}); + // break; + return err; + }; + + return workspace_names; + } + } + + return error.MissingPackageJSON; + } + pub fn exec(ctx_: Command.Context, comptime bin_dirs_only: bool, comptime log_errors: bool) !bool { var ctx = ctx_; // Step 1. Figure out what we're trying to run @@ -1026,50 +1098,129 @@ pub const RunCommand = struct { const passthrough = ctx.passthrough; const force_using_bun = ctx.debug.run_in_bun; + // is there a --workspace flag set? 
if (ctx.run_options.hasWorkspace()) { var manager = try PackageManager.init(ctx, PackageManager.Subcommand.pm); - var load_lockfile = manager.lockfile.loadFromDisk(ctx.allocator, ctx.log, "bun.lockb"); + const load_lockfile = manager.lockfile.loadFromDisk(ctx.allocator, ctx.log, "bun.lockb"); if (load_lockfile == .not_found) { - var root = Lockfile.Package{}; - try manager.lockfile.initEmpty(ctx.allocator); - try root.parseMain( - manager.lockfile, - ctx.allocator, - ctx.log, - // people should be able to use `bun run --workspace` from - // directories which do not immediately contain a - // package.json or bun.lockb - // - // - // for example, if I'm in bun/src, i should be able to run - // - // > bun run --workspace=test foo - // - // see also https://github.com/oven-sh/bun/blob/fdb095a32f542e109c278d02f9bbca4552ffde97/src/install/install.zig#L6080-L6118 - package_json_source, - Features.main, - ); - - _ = try manager.lockfile.appendPackage(root); - } + std.debug.print("no lockfile found\n", .{}); + // if there is no lockfile, search for a package.json with a workspace decl - if (manager.lockfile.workspace_paths.get(bun.hash(ctx.run_options.workspace))) |path_ptr| { - const path = manager.lockfile.str(path_ptr); - var local_buf: [bun.MAX_PATH_BYTES]u8 = undefined; - const package_json_path = Path.joinZBuf( - &local_buf, - &[_]string{ path, "package.json" }, - .auto, - ); - // check if package.json exists! 
- // if it does exist, then let's set cwd - std.os.chdir(path) catch |err| { - // print some message saying we couldn't change directory - Global.crash(); + const wsmap = findWorkspaceRoot(&ctx) catch |err| { + if (err == error.MissingPackageJSON) { + Output.err("error", "no package.json found", .{}); + Global.crash(); + } + std.debug.print("error: {s}\n", .{@errorName(err)}); + return false; }; - } // else if it's not found, try the passed in directory as a package.json + + initializeStore(); + for (wsmap.keys()) |path| { + var local_buf: [bun.MAX_PATH_BYTES + 1]u8 = undefined; + const pkgstring = "/package.json"; + @memcpy(local_buf[0..path.len], path); + local_buf[path.len..local_buf.len][0..pkgstring.len].* = pkgstring.*; + local_buf[path.len + pkgstring.len] = 0; + + std.debug.print("checking package.json at: {s}\n", .{local_buf[0 .. path.len + pkgstring.len :0]}); + + const json_file = std.fs.cwd().openFileZ( + local_buf[0 .. path.len + pkgstring.len :0].ptr, + .{ .mode = .read_only }, + ) catch { + std.debug.print("error opening package.json\n", .{}); + continue; + }; + defer json_file.close(); + + // std.debug.print("found package.json at {s}\n", .{workdir}); + + const json_stat_size = try json_file.getEndPos(); + const json_buf = try ctx.allocator.alloc(u8, json_stat_size + 64); + defer ctx.allocator.free(json_buf); + const json_len = try json_file.preadAll(json_buf, 0); + var local_buf2: [bun.MAX_PATH_BYTES]u8 = undefined; + const json_path = try bun.getFdPath(json_file.handle, &local_buf2); + const json_source = logger.Source.initPathString(json_path, json_buf[0..json_len]); + initializeStore(); + const json = try json_parser.ParseJSONUTF8(&json_source, ctx.log, ctx.allocator); + if (json.asProperty("name")) |prop| { + // std.debug.print("package.json has workspace property\n", .{}); + const name = switch (prop.expr.data) { + .e_string => |str| str.data, + else => { + // fail stating that name is not a stringo + Output.err("error", "package.json name is 
not a string", .{}); + continue; + }, + }; + // check if this is the workspace where we want to run the command + + std.debug.print("package name: {s}\n", .{name}); + if (strings.eql(name, ctx.run_options.workspace)) { + std.os.chdir(path) catch |err| { + // print some message saying we couldn't change directory + Output.err("error", "failed to change directory to target workspace: {s}", .{@errorName(err)}); + Global.crash(); + }; + std.debug.print("changed directory to {s}\n", .{path}); + break; + } + } + } + + // defer file.close(); + // const bytes = try file.readToEndAlloc(ctx.allocator, std.math.maxInt(usize)); + // defer ctx.allocator.free(bytes); + // const source = logger.Source.initPathString("", bytes); + + // var root = Lockfile.Package{}; + // try manager.lockfile.initEmpty(ctx.allocator); + // try root.parseMain( + // manager.lockfile, + // ctx.allocator, + // ctx.log, + // // people should be able to use `bun run --workspace` from + // // directories which do not immediately contain a + // // package.json or bun.lockb + // // + // // + // // for example, if I'm in bun/src, i should be able to run + // // + // // > bun run --workspace=test foo + // // + // // see also https://github.com/oven-sh/bun/blob/fdb095a32f542e109c278d02f9bbca4552ffde97/src/install/install.zig#L6080-L6118 + // source, + + // Features.main, + // ); + + // _ = try manager.lockfile.appendPackage(root); + } else { + std.debug.print("found lockfile\n", .{}); + + // if (manager.lockfile.workspace_paths.get(bun.hash(ctx.run_options.workspace))) |path_ptr| { + // const path = manager.lockfile.str(path_ptr); + // var local_buf: [bun.MAX_PATH_BYTES]u8 = undefined; + // const package_json_path = Path.joinZBuf( + // &local_buf, + // &[_]string{ path, "package.json" }, + // .auto, + // ); + // _ = package_json_path; // autofix + // // check if package.json exists! 
+ // // if it does exist, then let's set cwd + // std.os.chdir(path) catch |err| { + // _ = err; // autofix + + // // print some message saying we couldn't change directory + // Global.crash(); + // }; + // } // else if it's not found, try the passed in directory as a package.json + } } // This doesn't cover every case @@ -1340,6 +1491,9 @@ pub const RunCommand = struct { if (comptime log_errors) { Output.prettyError("error: Script not found \"{s}\"\n", .{script_name_to_search}); + // var workdir_buf: [bun.MAX_PATH_BYTES]u8 = undefined; + // const workdir: []const u8 = try std.os.getcwd(workdir_buf[0..]); + // Output.err("error", "Current working directory: {s}", .{workdir}); Global.exit(1); } From c83507bf7327089e6ac8ea1f88cf5dc5192c2eac Mon Sep 17 00:00:00 2001 From: Georgijs Vilums Date: Mon, 15 Jan 2024 10:11:06 -0800 Subject: [PATCH 04/74] finish --workspace implementation --- src/cli.zig | 148 ++++++++++++++++++++++++++++--- src/cli/run_command.zig | 189 ---------------------------------------- 2 files changed, 134 insertions(+), 203 deletions(-) diff --git a/src/cli.zig b/src/cli.zig index 69789fb02adae..30cd747147517 100644 --- a/src/cli.zig +++ b/src/cli.zig @@ -21,6 +21,8 @@ const js_ast = bun.JSAst; const linker = @import("linker.zig"); const RegularExpression = bun.RegularExpression; +const Package = @import("install/lockfile.zig").Package; + const sync = @import("./sync.zig"); const Api = @import("api/schema.zig").Api; const resolve_path = @import("./resolver/resolve_path.zig"); @@ -148,6 +150,7 @@ pub const Arguments = struct { const base_params_ = [_]ParamType{ clap.parseParam("--env-file ... Load environment variables from the specified file(s)") catch unreachable, clap.parseParam("--cwd Absolute path to resolve files & entry points from. This just changes the process' cwd.") catch unreachable, + clap.parseParam("-w, --workspace Run a script from a workspace member package") catch unreachable, clap.parseParam("-c, --config ? 
Specify path to Bun config file. Default $cwd/bunfig.toml") catch unreachable, clap.parseParam("-h, --help Display this menu and exit") catch unreachable, clap.parseParam("...") catch unreachable, @@ -196,7 +199,6 @@ pub const Arguments = struct { const run_only_params = [_]ParamType{ clap.parseParam("--silent Don't print the script command") catch unreachable, clap.parseParam("-b, --bun Force a script or package to use Bun's runtime instead of Node.js (via symlinking node)") catch unreachable, - clap.parseParam("-w, --workspace Run a script from a workspace member package") catch unreachable, }; pub const run_params = run_only_params ++ runtime_params_ ++ transpiler_params_ ++ base_params_; @@ -367,6 +369,92 @@ pub const Arguments = struct { return try loadConfig(allocator, args.option("--config"), ctx, comptime cmd); } + fn findWorkspaceRoot(allocator: std.mem.Allocator, log: *logger.Log, workspace_map: *Package.WorkspaceMap, workdir_: []const u8) !void { + var workdir = workdir_; + var package_json_cwd_buf: [bun.MAX_PATH_BYTES]u8 = undefined; + + while (true) : (workdir = std.fs.path.dirname(workdir) orelse return error.MissingPackageJSON) { + const parent_trimmed = strings.withoutTrailingSlash(workdir); + var buf2: [bun.MAX_PATH_BYTES + 1]u8 = undefined; + @memcpy(buf2[0..parent_trimmed.len], parent_trimmed); + buf2[parent_trimmed.len..buf2.len][0.."/package.json".len].* = "/package.json".*; + buf2[parent_trimmed.len + "/package.json".len] = 0; + + const json_file = std.fs.cwd().openFileZ( + buf2[0 .. 
parent_trimmed.len + "/package.json".len :0].ptr, + .{ .mode = .read_only }, + ) catch continue; + defer json_file.close(); + + const json_stat_size = try json_file.getEndPos(); + const json_buf = try allocator.alloc(u8, json_stat_size + 64); + defer allocator.free(json_buf); + const json_len = try json_file.preadAll(json_buf, 0); + const json_path = try bun.getFdPath(json_file.handle, &package_json_cwd_buf); + const json_source = logger.Source.initPathString(json_path, json_buf[0..json_len]); + const json = try json_parser.ParseJSONUTF8(&json_source, log, allocator); + + const prop = json.asProperty("workspaces") orelse continue; + + const json_array = switch (prop.expr.data) { + .e_array => |arr| arr, + .e_object => |obj| if (obj.get("packages")) |packages| switch (packages.data) { + .e_array => |arr| arr, + else => break, + } else break, + else => break, + }; + _ = Package.processWorkspaceNamesArray( + workspace_map, + allocator, + log, + json_array, + &json_source, + prop.loc, + null, + ) catch |err| { + return err; + }; + return; + } + + return error.MissingPackageJSON; + } + + fn packageMatchesName(allocator: std.mem.Allocator, log: *logger.Log, path: []const u8, target_name: []const u8) !bool { + var local_buf: [bun.MAX_PATH_BYTES + 1]u8 = undefined; + const pkgstring = "/package.json"; + @memcpy(local_buf[0..path.len], path); + local_buf[path.len..local_buf.len][0..pkgstring.len].* = pkgstring.*; + local_buf[path.len + pkgstring.len] = 0; + + const json_file = std.fs.cwd().openFileZ( + local_buf[0 .. 
path.len + pkgstring.len :0].ptr, + .{ .mode = .read_only }, + ) catch { + return false; + }; + defer json_file.close(); + + const json_stat_size = try json_file.getEndPos(); + const json_buf = try allocator.alloc(u8, json_stat_size + 64); + defer allocator.free(json_buf); + const json_len = try json_file.preadAll(json_buf, 0); + var local_buf2: [bun.MAX_PATH_BYTES]u8 = undefined; + const json_path = try bun.getFdPath(json_file.handle, &local_buf2); + const json_source = logger.Source.initPathString(json_path, json_buf[0..json_len]); + const json = try json_parser.ParseJSONUTF8(&json_source, log, allocator); + const prop = json.asProperty("name") orelse return false; + const name = switch (prop.expr.data) { + .e_string => |str| str.data, + else => { + Output.err("error", "package.json name is not a string", .{}); + return false; + }, + }; + return strings.eql(name, target_name); + } + pub fn parse(allocator: std.mem.Allocator, ctx: *Command.Context, comptime cmd: Command.Tag) !Api.TransformOptions { var diag = clap.Diagnostic{}; const params_to_parse = comptime cmd.params(); @@ -417,6 +505,51 @@ pub const Arguments = struct { cwd = try bun.getcwdAlloc(allocator); } + if (args.option("--workspace")) |package_name| { + // TODO in the future we can try loading the lockfile to get the workspace information more quickly + // var manager = try PackageManager.init(ctx, PackageManager.Subcommand.pm); + // const load_lockfile = manager.lockfile.loadFromDisk(ctx.allocator, ctx.log, "bun.lockb"); + // if (load_lockfile == .not_found) { + + bun.JSAst.Expr.Data.Store.create(default_allocator); + bun.JSAst.Stmt.Data.Store.create(default_allocator); + + var wsmap = Package.WorkspaceMap.init(allocator); + defer wsmap.deinit(); + // find the root package.json of the workspace and load the child packages into workspace map + findWorkspaceRoot(allocator, ctx.log, &wsmap, cwd) catch |err| { + Output.prettyErrorln("error resolving --workspace: {s}", .{@errorName(err)}); + 
Global.exit(1); + }; + + var found = false; + for (wsmap.keys()) |path| { + // check if the package at the path matches the name provided as an argument + const matches = packageMatchesName(allocator, ctx.log, path, package_name) catch |err| { + Output.prettyErrorln("error resolving --workspace: {s}", .{@errorName(err)}); + Global.exit(1); + }; + if (matches) { + cwd = try allocator.dupe(u8, path); + found = true; + break; + } + } + + bun.JSAst.Expr.Data.Store.reset(); + bun.JSAst.Stmt.Data.Store.reset(); + + // if we didn't find a package with the name provided, try to resolve it as a path + if (!found) { + var outbuf: [bun.MAX_PATH_BYTES]u8 = undefined; + const out = std.os.realpath(package_name, &outbuf) catch { + Output.prettyErrorln("error resolving --workspace: no package found with name {s}", .{package_name}); + Global.exit(1); + }; + cwd = try allocator.dupe(u8, out); + } + } + if (cmd == .TestCommand) { if (args.option("--timeout")) |timeout_ms| { if (timeout_ms.len > 0) { @@ -518,10 +651,6 @@ pub const Arguments = struct { if (cmd == .AutoCommand or cmd == .RunCommand or cmd == .TestCommand or cmd == .RunAsNodeCommand) { const preloads = args.options("--preload"); - if (comptime cmd == .RunCommand) { - ctx.run_options.workspace = args.option("--workspace") orelse ""; - } - if (args.flag("--hot")) { ctx.debug.hot_reload = .hot; } else if (args.flag("--watch")) { @@ -1091,19 +1220,10 @@ pub const Command = struct { test_options: TestOptions = TestOptions{}, bundler_options: BundlerOptions = BundlerOptions{}, runtime_options: RuntimeOptions = RuntimeOptions{}, - run_options: RunOptions = RunOptions{}, preloads: []const string = &[_]string{}, has_loaded_global_config: bool = false, - pub const RunOptions = struct { - workspace: []const u8 = "", - - pub fn hasWorkspace(this: *const RunOptions) bool { - return this.workspace.len > 0; - } - }; - pub const BundlerOptions = struct { compile: bool = false, diff --git a/src/cli/run_command.zig 
b/src/cli/run_command.zig index 4982620536d2e..b0e553829aabc 100644 --- a/src/cli/run_command.zig +++ b/src/cli/run_command.zig @@ -1003,7 +1003,6 @@ pub const RunCommand = struct { } // Output.prettyln("\n{d} scripts", .{scripts.count()}); - Output.flush(); // return true; @@ -1018,70 +1017,6 @@ pub const RunCommand = struct { Output.flush(); } - fn findWorkspaceRoot(ctx: *Command.Context) !Package.WorkspaceMap { - var package_json_cwd_buf: [bun.MAX_PATH_BYTES]u8 = undefined; - var workdir_buf: [bun.MAX_PATH_BYTES]u8 = undefined; - var workdir: []const u8 = try std.os.getcwd(workdir_buf[0..]); - - while (true) { - // std.debug.print("workdir: {s}\n", .{workdir}); - const parent_trimmed = strings.withoutTrailingSlash(workdir); - var buf2: [bun.MAX_PATH_BYTES + 1]u8 = undefined; - @memcpy(buf2[0..parent_trimmed.len], parent_trimmed); - buf2[parent_trimmed.len..buf2.len][0.."/package.json".len].* = "/package.json".*; - buf2[parent_trimmed.len + "/package.json".len] = 0; - - const json_file = std.fs.cwd().openFileZ( - buf2[0 .. 
parent_trimmed.len + "/package.json".len :0].ptr, - .{ .mode = .read_only }, - ) catch { - workdir = std.fs.path.dirname(workdir) orelse return error.MissingPackageJSON; - continue; - }; - defer json_file.close(); - - // std.debug.print("found package.json at {s}\n", .{workdir}); - - const json_stat_size = try json_file.getEndPos(); - const json_buf = try ctx.allocator.alloc(u8, json_stat_size + 64); - defer ctx.allocator.free(json_buf); - const json_len = try json_file.preadAll(json_buf, 0); - const json_path = try bun.getFdPath(json_file.handle, &package_json_cwd_buf); - const json_source = logger.Source.initPathString(json_path, json_buf[0..json_len]); - initializeStore(); - const json = try json_parser.ParseJSONUTF8(&json_source, ctx.log, ctx.allocator); - if (json.asProperty("workspaces")) |prop| { - // std.debug.print("package.json has workspace property\n", .{}); - const json_array = switch (prop.expr.data) { - .e_array => |arr| arr, - .e_object => |obj| if (obj.get("packages")) |packages| switch (packages.data) { - .e_array => |arr| arr, - else => break, - } else break, - else => break, - }; - var workspace_names = Package.WorkspaceMap.init(ctx.allocator); - _ = Package.processWorkspaceNamesArray( - &workspace_names, - ctx.allocator, - ctx.log, - json_array, - &json_source, - prop.loc, - null, - ) catch |err| { - // std.debug.print("error: {s}\n", .{@errorName(err)}); - // break; - return err; - }; - - return workspace_names; - } - } - - return error.MissingPackageJSON; - } - pub fn exec(ctx_: Command.Context, comptime bin_dirs_only: bool, comptime log_errors: bool) !bool { var ctx = ctx_; // Step 1. Figure out what we're trying to run @@ -1099,130 +1034,6 @@ pub const RunCommand = struct { const passthrough = ctx.passthrough; const force_using_bun = ctx.debug.run_in_bun; - // is there a --workspace flag set? 
- if (ctx.run_options.hasWorkspace()) { - var manager = try PackageManager.init(ctx, PackageManager.Subcommand.pm); - const load_lockfile = manager.lockfile.loadFromDisk(ctx.allocator, ctx.log, "bun.lockb"); - - if (load_lockfile == .not_found) { - std.debug.print("no lockfile found\n", .{}); - // if there is no lockfile, search for a package.json with a workspace decl - - const wsmap = findWorkspaceRoot(&ctx) catch |err| { - if (err == error.MissingPackageJSON) { - Output.err("error", "no package.json found", .{}); - Global.crash(); - } - std.debug.print("error: {s}\n", .{@errorName(err)}); - return false; - }; - - initializeStore(); - for (wsmap.keys()) |path| { - var local_buf: [bun.MAX_PATH_BYTES + 1]u8 = undefined; - const pkgstring = "/package.json"; - @memcpy(local_buf[0..path.len], path); - local_buf[path.len..local_buf.len][0..pkgstring.len].* = pkgstring.*; - local_buf[path.len + pkgstring.len] = 0; - - std.debug.print("checking package.json at: {s}\n", .{local_buf[0 .. path.len + pkgstring.len :0]}); - - const json_file = std.fs.cwd().openFileZ( - local_buf[0 .. 
path.len + pkgstring.len :0].ptr, - .{ .mode = .read_only }, - ) catch { - std.debug.print("error opening package.json\n", .{}); - continue; - }; - defer json_file.close(); - - // std.debug.print("found package.json at {s}\n", .{workdir}); - - const json_stat_size = try json_file.getEndPos(); - const json_buf = try ctx.allocator.alloc(u8, json_stat_size + 64); - defer ctx.allocator.free(json_buf); - const json_len = try json_file.preadAll(json_buf, 0); - var local_buf2: [bun.MAX_PATH_BYTES]u8 = undefined; - const json_path = try bun.getFdPath(json_file.handle, &local_buf2); - const json_source = logger.Source.initPathString(json_path, json_buf[0..json_len]); - initializeStore(); - const json = try json_parser.ParseJSONUTF8(&json_source, ctx.log, ctx.allocator); - if (json.asProperty("name")) |prop| { - // std.debug.print("package.json has workspace property\n", .{}); - const name = switch (prop.expr.data) { - .e_string => |str| str.data, - else => { - // fail stating that name is not a stringo - Output.err("error", "package.json name is not a string", .{}); - continue; - }, - }; - // check if this is the workspace where we want to run the command - - std.debug.print("package name: {s}\n", .{name}); - if (strings.eql(name, ctx.run_options.workspace)) { - std.os.chdir(path) catch |err| { - // print some message saying we couldn't change directory - Output.err("error", "failed to change directory to target workspace: {s}", .{@errorName(err)}); - Global.crash(); - }; - std.debug.print("changed directory to {s}\n", .{path}); - break; - } - } - } - - // defer file.close(); - // const bytes = try file.readToEndAlloc(ctx.allocator, std.math.maxInt(usize)); - // defer ctx.allocator.free(bytes); - // const source = logger.Source.initPathString("", bytes); - - // var root = Lockfile.Package{}; - // try manager.lockfile.initEmpty(ctx.allocator); - // try root.parseMain( - // manager.lockfile, - // ctx.allocator, - // ctx.log, - // // people should be able to use `bun run 
--workspace` from - // // directories which do not immediately contain a - // // package.json or bun.lockb - // // - // // - // // for example, if I'm in bun/src, i should be able to run - // // - // // > bun run --workspace=test foo - // // - // // see also https://github.com/oven-sh/bun/blob/fdb095a32f542e109c278d02f9bbca4552ffde97/src/install/install.zig#L6080-L6118 - // source, - - // Features.main, - // ); - - // _ = try manager.lockfile.appendPackage(root); - } else { - std.debug.print("found lockfile\n", .{}); - - // if (manager.lockfile.workspace_paths.get(bun.hash(ctx.run_options.workspace))) |path_ptr| { - // const path = manager.lockfile.str(path_ptr); - // var local_buf: [bun.MAX_PATH_BYTES]u8 = undefined; - // const package_json_path = Path.joinZBuf( - // &local_buf, - // &[_]string{ path, "package.json" }, - // .auto, - // ); - // _ = package_json_path; // autofix - // // check if package.json exists! - // // if it does exist, then let's set cwd - // std.os.chdir(path) catch |err| { - // _ = err; // autofix - - // // print some message saying we couldn't change directory - // Global.crash(); - // }; - // } // else if it's not found, try the passed in directory as a package.json - } - } - // This doesn't cover every case if ((script_name_to_search.len == 1 and script_name_to_search[0] == '.') or (script_name_to_search.len == 2 and @as(u16, @bitCast(script_name_to_search[0..2].*)) == @as(u16, @bitCast([_]u8{ '.', '/' })))) From 1868ad366583f5ee77bdbea7bacce8b5a98b35cc Mon Sep 17 00:00:00 2001 From: Georgijs Vilums Date: Mon, 15 Jan 2024 10:18:57 -0800 Subject: [PATCH 05/74] clean up changes in run_command.zig --- src/cli/run_command.zig | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig index b0e553829aabc..3c172351020e5 100644 --- a/src/cli/run_command.zig +++ b/src/cli/run_command.zig @@ -34,9 +34,6 @@ const resolve_path = @import("../resolver/resolve_path.zig"); const 
configureTransformOptionsForBun = @import("../bun.js/config.zig").configureTransformOptionsForBun; const bundler = bun.bundler; -const Path = @import("../resolver/resolve_path.zig"); -const Fs = @import("../fs.zig"); -const Package = @import("../install/lockfile.zig").Package; const DotEnv = @import("../env_loader.zig"); const which = @import("../which.zig").which; const Run = @import("../bun_js.zig").Run; @@ -58,11 +55,6 @@ const Lockfile = @import("../install/lockfile.zig"); const LifecycleScriptSubprocess = bun.install.LifecycleScriptSubprocess; -pub fn initializeStore() void { - bun.JSAst.Expr.Data.Store.create(default_allocator); - bun.JSAst.Stmt.Data.Store.create(default_allocator); -} - pub const RunCommand = struct { const shells_to_search = &[_]string{ "bash", @@ -1302,9 +1294,6 @@ pub const RunCommand = struct { if (comptime log_errors) { Output.prettyError("error: Script not found \"{s}\"\n", .{script_name_to_search}); - // var workdir_buf: [bun.MAX_PATH_BYTES]u8 = undefined; - // const workdir: []const u8 = try std.os.getcwd(workdir_buf[0..]); - // Output.err("error", "Current working directory: {s}", .{workdir}); Global.exit(1); } From 496bac068f4690a6943624126f7cb22a8e9c4c8c Mon Sep 17 00:00:00 2001 From: Georgijs Vilums Date: Mon, 15 Jan 2024 10:53:43 -0800 Subject: [PATCH 06/74] add workspace tests, update harness to handle nested dirs --- test/cli/run/workspace.test.ts | 100 +++++++++++++++++++++++++++++++++ test/harness.ts | 28 +++++---- 2 files changed, 117 insertions(+), 11 deletions(-) create mode 100644 test/cli/run/workspace.test.ts diff --git a/test/cli/run/workspace.test.ts b/test/cli/run/workspace.test.ts new file mode 100644 index 0000000000000..93e78b76e8de0 --- /dev/null +++ b/test/cli/run/workspace.test.ts @@ -0,0 +1,100 @@ +// @known-failing-on-windows: 1 failing +import { describe, test, expect, beforeAll } from "bun:test"; +import { spawnSync } from "bun"; +import { bunEnv, bunExe, tempDirWithFiles } from "harness"; + +let cwd_root: 
string; +let cwd_packages: string; +let cwd_a: string; +let cwd_b: string; + +beforeAll(() => { + var path = require("path"); + cwd_root = tempDirWithFiles("testworkspace", { + "packages": { + "a": { + "package.json": JSON.stringify({ + "name": "a", + "scripts": { + "present": "echo 1234", + }, + }), + }, + "b": { + "package.json": JSON.stringify({ + "name": "b", + "scripts": { + "present": "echo 4321", + }, + }), + }, + }, + "package.json": JSON.stringify({ + "name": "ws", + "workspaces": [ + "packages/a", + "packages/b", + ], + }), + }); + cwd_packages = path.join(cwd_root, "packages"); + cwd_a = path.join(cwd_packages, "a"); + cwd_b = path.join(cwd_packages, "b"); +}); + +function runInCwdSuccess(cwd: string, pkgname: string, result: RegExp) { + const { exitCode, stdout, stderr } = spawnSync({ + cwd: cwd, + cmd: [bunExe(), "run", "--workspace", pkgname, "present"], + env: bunEnv, + stdout: "pipe", + stderr: "pipe", + }); + expect(stdout.toString()).toMatch(result); + // expect(stderr.toString()).toBeEmpty(); + expect(exitCode).toBe(0); +} + +function runInCwdFailure(cwd: string, pkgname: string, result: RegExp) { + const { exitCode, stdout, stderr } = spawnSync({ + cwd: cwd, + cmd: [bunExe(), "run", "--workspace", pkgname, "present"], + env: bunEnv, + stdout: "pipe", + stderr: "pipe", + }); + expect(stdout.toString()).toBeEmpty(); + expect(stderr.toString()).toMatch(result); + expect(exitCode).toBe(1); +} + +describe("bun", () => { + test("resolve 'a' from root", () => { + runInCwdSuccess(cwd_root, "a", /1234/); + }); + test("resolve 'b' from root", () => { + runInCwdSuccess(cwd_root, "b", /4321/); + }); + test("resolve 'a' from middle", () => { + runInCwdSuccess(cwd_packages, "a", /1234/); + }); + test("resolve 'b' from middle", () => { + runInCwdSuccess(cwd_packages, "b", /4321/); + }); + test("resolve 'a' from self", () => { + runInCwdSuccess(cwd_a, "a", /1234/); + }); + test("resolve 'b' from self", () => { + runInCwdSuccess(cwd_b, "b", /4321/); + }); + 
test("resolve 'a' from other", () => { + runInCwdSuccess(cwd_b, "a", /1234/); + }); + test("resolve 'b' from other", () => { + runInCwdSuccess(cwd_a, "b", /4321/); + }); + + test("should error with missing workspace", () => { + runInCwdFailure(cwd_root, "notpresent", /workspace/); + }); +}); diff --git a/test/harness.ts b/test/harness.ts index 2e2c64eeaf2f3..2def0d67b15db 100644 --- a/test/harness.ts +++ b/test/harness.ts @@ -85,24 +85,30 @@ export function hideFromStackTrace(block: CallableFunction) { }); } -export function tempDirWithFiles(basename: string, files: Record>): string { +type DirectoryTree = { + [name: string]: string | DirectoryTree; +}; + +export function tempDirWithFiles(basename: string, files: DirectoryTree): string { var fs = require("fs"); var path = require("path"); var { tmpdir } = require("os"); - const dir = fs.mkdtempSync(path.join(fs.realpathSync(tmpdir()), basename + "_")); - for (const [name, contents] of Object.entries(files)) { - if (typeof contents === "object") { - for (const [_name, _contents] of Object.entries(contents)) { - fs.mkdirSync(path.dirname(path.join(dir, name, _name)), { recursive: true }); - fs.writeFileSync(path.join(dir, name, _name), _contents); + function makeTree(base: string, tree: DirectoryTree) { + for (const [name, contents] of Object.entries(tree)) { + if (typeof contents === "object") { + fs.mkdirSync(path.join(base, name), { recursive: true }); + makeTree(path.join(base, name), contents); + continue; } - continue; + fs.mkdirSync(path.dirname(path.join(base, name)), { recursive: true }); + fs.writeFileSync(path.join(base, name), contents); } - fs.mkdirSync(path.dirname(path.join(dir, name)), { recursive: true }); - fs.writeFileSync(path.join(dir, name), contents); } - return dir; + + const base = fs.mkdtempSync(path.join(fs.realpathSync(tmpdir()), basename + "_")); + makeTree(base, files); + return base; } export function bunRun(file: string, env?: Record) { From 1b659b331187976cf490df1b6b7e655b64b50762 Mon 
Sep 17 00:00:00 2001 From: "autofix-ci[bot]" <114827586+autofix-ci[bot]@users.noreply.github.com> Date: Mon, 15 Jan 2024 18:57:52 +0000 Subject: [PATCH 07/74] [autofix.ci] apply automated fixes --- src/cli.zig | 2 +- test/cli/run/workspace.test.ts | 9 +++------ 2 files changed, 4 insertions(+), 7 deletions(-) diff --git a/src/cli.zig b/src/cli.zig index 30cd747147517..68e29627e3cec 100644 --- a/src/cli.zig +++ b/src/cli.zig @@ -452,7 +452,7 @@ pub const Arguments = struct { return false; }, }; - return strings.eql(name, target_name); + return strings.eql(name, target_name); } pub fn parse(allocator: std.mem.Allocator, ctx: *Command.Context, comptime cmd: Command.Tag) !Api.TransformOptions { diff --git a/test/cli/run/workspace.test.ts b/test/cli/run/workspace.test.ts index 93e78b76e8de0..27fddd5677181 100644 --- a/test/cli/run/workspace.test.ts +++ b/test/cli/run/workspace.test.ts @@ -16,7 +16,7 @@ beforeAll(() => { "package.json": JSON.stringify({ "name": "a", "scripts": { - "present": "echo 1234", + "present": "echo 1234", }, }), }, @@ -24,17 +24,14 @@ beforeAll(() => { "package.json": JSON.stringify({ "name": "b", "scripts": { - "present": "echo 4321", + "present": "echo 4321", }, }), }, }, "package.json": JSON.stringify({ "name": "ws", - "workspaces": [ - "packages/a", - "packages/b", - ], + "workspaces": ["packages/a", "packages/b"], }), }); cwd_packages = path.join(cwd_root, "packages"); From 2f336776d0937ae6ce754755f8e9577973f2573b Mon Sep 17 00:00:00 2001 From: Georgijs Vilums Date: Mon, 15 Jan 2024 18:18:32 -0800 Subject: [PATCH 08/74] basic filtering --- src/api/schema.zig | 10 ++--- src/bundler.zig | 2 +- src/bundler/bundle_v2.zig | 2 +- src/cli.zig | 88 +++++++++++++++++++++------------------ src/cli/run_command.zig | 5 +++ 5 files changed, 60 insertions(+), 47 deletions(-) diff --git a/src/api/schema.zig b/src/api/schema.zig index d618522fe7381..a22523d51702f 100644 --- a/src/api/schema.zig +++ b/src/api/schema.zig @@ -1689,8 +1689,8 @@ pub const Api = 
struct { /// origin origin: ?[]const u8 = null, - /// absolute_working_dir - absolute_working_dir: ?[]const u8 = null, + /// cwd_override + cwd_override: ?[]const u8 = null, /// define define: ?StringMap = null, @@ -1774,7 +1774,7 @@ pub const Api = struct { this.origin = try reader.readValue([]const u8); }, 5 => { - this.absolute_working_dir = try reader.readValue([]const u8); + this.cwd_override = try reader.readValue([]const u8); }, 6 => { this.define = try reader.readValue(StringMap); @@ -1861,9 +1861,9 @@ pub const Api = struct { try writer.writeFieldID(4); try writer.writeValue(@TypeOf(origin), origin); } - if (this.absolute_working_dir) |absolute_working_dir| { + if (this.cwd_override) |cwd_override| { try writer.writeFieldID(5); - try writer.writeValue(@TypeOf(absolute_working_dir), absolute_working_dir); + try writer.writeValue(@TypeOf(cwd_override), cwd_override); } if (this.define) |define| { try writer.writeFieldID(6); diff --git a/src/bundler.zig b/src/bundler.zig index 4ae07ace9a89c..f43e1a1f96d84 100644 --- a/src/bundler.zig +++ b/src/bundler.zig @@ -388,7 +388,7 @@ pub const Bundler = struct { js_ast.Expr.Data.Store.create(allocator); js_ast.Stmt.Data.Store.create(allocator); const fs = try Fs.FileSystem.init( - opts.absolute_working_dir, + opts.cwd_override, ); const bundle_options = try options.BundleOptions.fromApi( allocator, diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index a4263182c39e6..7ab84f1ab7179 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -1605,7 +1605,7 @@ pub const BundleV2 = struct { .define = if (config.define.count() > 0) config.define.toAPI() else null, .entry_points = config.entry_points.keys(), .target = config.target.toAPI(), - .absolute_working_dir = if (config.dir.list.items.len > 0) config.dir.toOwnedSliceLeaky() else null, + .cwd_override = if (config.dir.list.items.len > 0) config.dir.toOwnedSliceLeaky() else null, .inject = &.{}, .external = config.external.keys(), 
.main_fields = &.{}, diff --git a/src/cli.zig b/src/cli.zig index 68e29627e3cec..8210985edf44a 100644 --- a/src/cli.zig +++ b/src/cli.zig @@ -150,7 +150,8 @@ pub const Arguments = struct { const base_params_ = [_]ParamType{ clap.parseParam("--env-file ... Load environment variables from the specified file(s)") catch unreachable, clap.parseParam("--cwd Absolute path to resolve files & entry points from. This just changes the process' cwd.") catch unreachable, - clap.parseParam("-w, --workspace Run a script from a workspace member package") catch unreachable, + // clap.parseParam("-w, --workspace Perform the command on the specified workspace member package") catch unreachable, + clap.parseParam("-F, --filter ... Perform the command on all workspace member packages that match the pattern") catch unreachable, clap.parseParam("-c, --config ? Specify path to Bun config file. Default $cwd/bunfig.toml") catch unreachable, clap.parseParam("-h, --help Display this menu and exit") catch unreachable, clap.parseParam("...") catch unreachable, @@ -339,16 +340,17 @@ pub const Arguments = struct { config_buf[config_path_.len] = 0; config_path = config_buf[0..config_path_.len :0]; } else { - if (ctx.args.absolute_working_dir == null) { - var secondbuf: [bun.MAX_PATH_BYTES]u8 = undefined; - const cwd = bun.getcwd(&secondbuf) catch return; - - ctx.args.absolute_working_dir = try allocator.dupe(u8, cwd); + var cwd_buf: [bun.MAX_PATH_BYTES]u8 = undefined; + var cwd: []const u8 = undefined; + if (ctx.args.cwd_override) |cwd_val| { + cwd = cwd_val; + } else { + cwd = try bun.getcwd(&cwd_buf); } - var parts = [_]string{ ctx.args.absolute_working_dir.?, config_path_ }; + var parts = [_]string{ cwd, config_path_ }; config_path_ = resolve_path.joinAbsStringBuf( - ctx.args.absolute_working_dir.?, + cwd, &config_buf, &parts, .auto, @@ -491,26 +493,33 @@ pub const Arguments = struct { } } - var cwd: []u8 = undefined; + var cwd_buf: [bun.MAX_PATH_BYTES]u8 = undefined; + var cwd: []const u8 = 
undefined; if (args.option("--cwd")) |cwd_| { - cwd = brk: { - var outbuf: [bun.MAX_PATH_BYTES]u8 = undefined; - const out = std.os.realpath(cwd_, &outbuf) catch |err| { - Output.prettyErrorln("error resolving --cwd: {s}", .{@errorName(err)}); - Global.exit(1); - }; - break :brk try allocator.dupe(u8, out); + var outbuf: [bun.MAX_PATH_BYTES]u8 = undefined; + const out = std.os.realpath(cwd_, &outbuf) catch |err| { + Output.prettyErrorln("error resolving --cwd: {s}", .{@errorName(err)}); + Global.exit(1); }; + std.os.chdir(out) catch |err| { + Output.prettyErrorln("error setting --cwd: {s}", .{@errorName(err)}); + Global.exit(1); + }; + cwd = try allocator.dupe(u8, out); + ctx.args.cwd_override = cwd; } else { - cwd = try bun.getcwdAlloc(allocator); + cwd = try bun.getcwd(&cwd_buf); } - if (args.option("--workspace")) |package_name| { + + const filters = args.options("--filter"); + if (filters.len > 0) { // TODO in the future we can try loading the lockfile to get the workspace information more quickly // var manager = try PackageManager.init(ctx, PackageManager.Subcommand.pm); // const load_lockfile = manager.lockfile.loadFromDisk(ctx.allocator, ctx.log, "bun.lockb"); // if (load_lockfile == .not_found) { + // find the paths of all projects that match this filter bun.JSAst.Expr.Data.Store.create(default_allocator); bun.JSAst.Stmt.Data.Store.create(default_allocator); @@ -518,36 +527,34 @@ pub const Arguments = struct { defer wsmap.deinit(); // find the root package.json of the workspace and load the child packages into workspace map findWorkspaceRoot(allocator, ctx.log, &wsmap, cwd) catch |err| { - Output.prettyErrorln("error resolving --workspace: {s}", .{@errorName(err)}); + Output.prettyErrorln("error resolving --filter: {s}", .{@errorName(err)}); Global.exit(1); }; - var found = false; - for (wsmap.keys()) |path| { - // check if the package at the path matches the name provided as an argument - const matches = packageMatchesName(allocator, ctx.log, path, 
package_name) catch |err| { - Output.prettyErrorln("error resolving --workspace: {s}", .{@errorName(err)}); - Global.exit(1); - }; - if (matches) { - cwd = try allocator.dupe(u8, path); - found = true; - break; + var list = std.ArrayListUnmanaged([]u8){}; + + for (filters) |pattern| { + std.debug.print("pattern: {s}\n", .{pattern}); + for (wsmap.keys()) |path| { + // check if the package at the path matches the name provided as an argument + const matches = packageMatchesName(allocator, ctx.log, path, pattern) catch |err| { + Output.prettyErrorln("error resolving --filter: {s}", .{@errorName(err)}); + Global.exit(1); + }; + if (matches) { + try list.append(allocator, try allocator.dupe(u8, path)); + } } } - bun.JSAst.Expr.Data.Store.reset(); bun.JSAst.Stmt.Data.Store.reset(); - // if we didn't find a package with the name provided, try to resolve it as a path - if (!found) { - var outbuf: [bun.MAX_PATH_BYTES]u8 = undefined; - const out = std.os.realpath(package_name, &outbuf) catch { - Output.prettyErrorln("error resolving --workspace: no package found with name {s}", .{package_name}); - Global.exit(1); - }; - cwd = try allocator.dupe(u8, out); + if (list.items.len == 0) { + Output.prettyErrorln("error resolving --filter: no packages matched the filter", .{}); + Global.exit(1); } + + ctx.workspace_paths = list.items; } if (cmd == .TestCommand) { @@ -606,7 +613,6 @@ pub const Arguments = struct { ctx.test_options.only = args.flag("--only"); } - ctx.args.absolute_working_dir = cwd; ctx.positionals = args.positionals(); if (comptime Command.Tag.loads_config.get(cmd)) { @@ -1221,6 +1227,8 @@ pub const Command = struct { bundler_options: BundlerOptions = BundlerOptions{}, runtime_options: RuntimeOptions = RuntimeOptions{}, + workspace_paths: [][]const u8 = &[_][]const u8{}, + preloads: []const string = &[_]string{}, has_loaded_global_config: bool = false, diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig index 3c172351020e5..c34653de4d440 100644 --- 
a/src/cli/run_command.zig +++ b/src/cli/run_command.zig @@ -1011,6 +1011,11 @@ pub const RunCommand = struct { pub fn exec(ctx_: Command.Context, comptime bin_dirs_only: bool, comptime log_errors: bool) !bool { var ctx = ctx_; + + // for (ctx.workspace_paths) |path| { + // } + std.debug.print("found {d} workspace paths\n", .{ctx.workspace_paths.len}); + // Step 1. Figure out what we're trying to run var positionals = ctx.positionals; if (positionals.len > 0 and strings.eqlComptime(positionals[0], "run") or strings.eqlComptime(positionals[0], "r")) { From ea8ea8d9d2bc2f816c2127de5f7664c7124b7f76 Mon Sep 17 00:00:00 2001 From: "autofix-ci[bot]" <114827586+autofix-ci[bot]@users.noreply.github.com> Date: Tue, 16 Jan 2024 03:06:17 +0000 Subject: [PATCH 09/74] [autofix.ci] apply automated fixes --- src/cli.zig | 1 - 1 file changed, 1 deletion(-) diff --git a/src/cli.zig b/src/cli.zig index 8210985edf44a..8e1360e7fdea7 100644 --- a/src/cli.zig +++ b/src/cli.zig @@ -511,7 +511,6 @@ pub const Arguments = struct { cwd = try bun.getcwd(&cwd_buf); } - const filters = args.options("--filter"); if (filters.len > 0) { // TODO in the future we can try loading the lockfile to get the workspace information more quickly From d8d0ffb8cbee31d02e2aca5b6f01800472176046 Mon Sep 17 00:00:00 2001 From: Georgijs Vilums Date: Tue, 16 Jan 2024 14:56:56 -0800 Subject: [PATCH 10/74] working filter without patterns --- src/cli.zig | 12 ++----- src/cli/run_command.zig | 60 +++++++++++++++++++++++++--------- test/cli/run/workspace.test.ts | 28 +++++++++++++--- 3 files changed, 69 insertions(+), 31 deletions(-) diff --git a/src/cli.zig b/src/cli.zig index 8e1360e7fdea7..dade2baac28f6 100644 --- a/src/cli.zig +++ b/src/cli.zig @@ -152,6 +152,7 @@ pub const Arguments = struct { clap.parseParam("--cwd Absolute path to resolve files & entry points from. 
This just changes the process' cwd.") catch unreachable, // clap.parseParam("-w, --workspace Perform the command on the specified workspace member package") catch unreachable, clap.parseParam("-F, --filter ... Perform the command on all workspace member packages that match the pattern") catch unreachable, + // clap.parseParam("--fail-if-no-match Fail if no packages match the filter") catch unreachable, clap.parseParam("-c, --config ? Specify path to Bun config file. Default $cwd/bunfig.toml") catch unreachable, clap.parseParam("-h, --help Display this menu and exit") catch unreachable, clap.parseParam("...") catch unreachable, @@ -548,11 +549,6 @@ pub const Arguments = struct { bun.JSAst.Expr.Data.Store.reset(); bun.JSAst.Stmt.Data.Store.reset(); - if (list.items.len == 0) { - Output.prettyErrorln("error resolving --filter: no packages matched the filter", .{}); - Global.exit(1); - } - ctx.workspace_paths = list.items; } @@ -1749,11 +1745,7 @@ pub const Command = struct { const ctx = try Command.Context.create(allocator, log, .RunCommand); if (ctx.positionals.len > 0) { - if (try RunCommand.exec(ctx, false, true)) { - return; - } - - Global.exit(1); + try RunCommand.execAll(ctx, false); } }, .RunAsNodeCommand => { diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig index c34653de4d440..1f29c35cda66d 100644 --- a/src/cli/run_command.zig +++ b/src/cli/run_command.zig @@ -340,8 +340,7 @@ pub const RunCommand = struct { Output.prettyErrorln("error: script \"{s}\" exited with code {d}", .{ name, code }); Output.flush(); } - - Global.exit(code); + return false; } }, .Signal => |signal| { @@ -417,13 +416,13 @@ pub const RunCommand = struct { if (@errorReturnTrace()) |trace| { std.debug.dumpStackTrace(trace.*); } - Global.exit(1); + return false; } } } } Output.prettyErrorln("error: Failed to run \"{s}\" due to error {s}", .{ basenameOrBun(executable), @errorName(err) }); - Global.exit(1); + return false; }; switch (result) { .Exited => |code| { @@ -452,7 +451,7 
@@ pub const RunCommand = struct { Output.errGeneric("\"{s}\" exited with code {d}", .{ basenameOrBun(executable), code }); } } - Global.exit(code); + return false; }, .Signal, .Stopped => |sig| { // forward the signal to the shell / parent process @@ -468,7 +467,7 @@ pub const RunCommand = struct { if (!silent) { Output.errGeneric("\"{s}\" stopped with unknown state {d}", .{ basenameOrBun(executable), sig }); } - Global.exit(1); + return false; }, } @@ -1009,13 +1008,43 @@ pub const RunCommand = struct { Output.flush(); } + pub fn execAll(ctx: Command.Context, comptime bin_dirs_only: bool) !void { + // if there are no workspace paths specified, run in the current directory + if (ctx.workspace_paths.len == 0) { + _ = try exec(ctx, bin_dirs_only, true); + return; + } + const fsinstance = try bun.fs.FileSystem.init(null); + const olddir = fsinstance.top_level_dir; + defer { + // change back to the original directory once we're done + fsinstance.top_level_dir = olddir; + std.os.chdir(olddir) catch |err| { + Output.prettyErrorln("error: Failed to change directory to {s} due to error {s}", .{ olddir, @errorName(err) }); + Global.crash(); + }; + } + var ok = true; + for (ctx.workspace_paths) |path| { + std.os.chdir(path) catch |err| { + Output.prettyErrorln("error: Failed to change directory to {s} due to error {s}", .{ path, @errorName(err) }); + continue; + }; + fsinstance.top_level_dir = path; + const res = exec(ctx, bin_dirs_only, true) catch |err| { + Output.prettyErrorln("error: Failed to run {s} due to error {s}", .{ path, @errorName(err) }); + continue; + }; + ok = ok and res; + } + if (!ok) { + Global.exit(1); + } + } + pub fn exec(ctx_: Command.Context, comptime bin_dirs_only: bool, comptime log_errors: bool) !bool { var ctx = ctx_; - // for (ctx.workspace_paths) |path| { - // } - std.debug.print("found {d} workspace paths\n", .{ctx.workspace_paths.len}); - // Step 1. 
Figure out what we're trying to run var positionals = ctx.positionals; if (positionals.len > 0 and strings.eqlComptime(positionals[0], "run") or strings.eqlComptime(positionals[0], "r")) { @@ -1049,7 +1078,7 @@ pub const RunCommand = struct { if (@errorReturnTrace()) |trace| { std.debug.dumpStackTrace(trace.*); } - Global.exit(1); + return false; }; return true; } @@ -1117,7 +1146,7 @@ pub const RunCommand = struct { const shebang_size = file.pread(&shebang_buf, 0) catch |err| { if (!ctx.debug.silent) Output.prettyErrorln("error: Failed to read file {s} due to error {s}", .{ file_path, @errorName(err) }); - Global.exit(1); + return false; }; var shebang: string = shebang_buf[0..shebang_size]; @@ -1149,7 +1178,7 @@ pub const RunCommand = struct { if (@errorReturnTrace()) |trace| { std.debug.dumpStackTrace(trace.*); } - Global.exit(1); + return false; }; return true; @@ -1236,7 +1265,7 @@ pub const RunCommand = struct { if (@errorReturnTrace()) |trace| { std.debug.dumpStackTrace(trace.*); } - Global.exit(1); + return false; }; } }, @@ -1247,7 +1276,7 @@ pub const RunCommand = struct { if (script_name_to_search.len == 0) { if (comptime log_errors) { Output.prettyError("No \"scripts\" in package.json found.\n", .{}); - Global.exit(0); + return false; } return false; @@ -1299,7 +1328,6 @@ pub const RunCommand = struct { if (comptime log_errors) { Output.prettyError("error: Script not found \"{s}\"\n", .{script_name_to_search}); - Global.exit(1); } return false; diff --git a/test/cli/run/workspace.test.ts b/test/cli/run/workspace.test.ts index 27fddd5677181..80bb465b0aec7 100644 --- a/test/cli/run/workspace.test.ts +++ b/test/cli/run/workspace.test.ts @@ -39,15 +39,27 @@ beforeAll(() => { cwd_b = path.join(cwd_packages, "b"); }); -function runInCwdSuccess(cwd: string, pkgname: string, result: RegExp) { +function runInCwdSuccess(cwd: string, pkgname: string | string[], result: RegExp | RegExp[]) { + let cmd = [bunExe(), "run"] + if (pkgname instanceof Array) { + for (let 
p of pkgname) { + cmd.push("-F", p) + } + } else { + cmd.push("-F", pkgname) + } + cmd.push("present") const { exitCode, stdout, stderr } = spawnSync({ cwd: cwd, - cmd: [bunExe(), "run", "--workspace", pkgname, "present"], + cmd: cmd, env: bunEnv, stdout: "pipe", stderr: "pipe", }); - expect(stdout.toString()).toMatch(result); + const stdoutval = stdout.toString() + for (let r of result instanceof Array ? result : [result]) { + expect(stdoutval).toMatch(r); + } // expect(stderr.toString()).toBeEmpty(); expect(exitCode).toBe(0); } @@ -55,7 +67,7 @@ function runInCwdSuccess(cwd: string, pkgname: string, result: RegExp) { function runInCwdFailure(cwd: string, pkgname: string, result: RegExp) { const { exitCode, stdout, stderr } = spawnSync({ cwd: cwd, - cmd: [bunExe(), "run", "--workspace", pkgname, "present"], + cmd: [bunExe(), "run", "-F", pkgname, "present"], env: bunEnv, stdout: "pipe", stderr: "pipe", @@ -90,8 +102,14 @@ describe("bun", () => { test("resolve 'b' from other", () => { runInCwdSuccess(cwd_a, "b", /4321/); }); + test("resolve 'a' and 'b' from 'a'", () => { + runInCwdSuccess(cwd_a, ["a", "b"], [/1234/, /4321/]); + }); + test("resolve 'a' and 'b' from 'b'", () => { + runInCwdSuccess(cwd_a, ["a", "b"], [/1234/, /4321/]); + }); test("should error with missing workspace", () => { - runInCwdFailure(cwd_root, "notpresent", /workspace/); + runInCwdFailure(cwd_root, "notpresent", /filter/); }); }); From 0c9c555bbd3d49a86417e1fa9910d6f868fcc57e Mon Sep 17 00:00:00 2001 From: Georgijs Vilums Date: Tue, 16 Jan 2024 19:22:39 -0800 Subject: [PATCH 11/74] update tests, filter mostly working --- src/cli.zig | 143 ++++++++++++++++++++++++++-------------- src/cli/run_command.zig | 5 +- src/logger.zig | 8 +++ 3 files changed, 105 insertions(+), 51 deletions(-) diff --git a/src/cli.zig b/src/cli.zig index dade2baac28f6..5a44fbdfe0292 100644 --- a/src/cli.zig +++ b/src/cli.zig @@ -20,6 +20,7 @@ const js_printer = bun.js_printer; const js_ast = bun.JSAst; const linker = 
@import("linker.zig"); const RegularExpression = bun.RegularExpression; +const Glob = @import("glob.zig"); const Package = @import("install/lockfile.zig").Package; @@ -151,7 +152,7 @@ pub const Arguments = struct { clap.parseParam("--env-file ... Load environment variables from the specified file(s)") catch unreachable, clap.parseParam("--cwd Absolute path to resolve files & entry points from. This just changes the process' cwd.") catch unreachable, // clap.parseParam("-w, --workspace Perform the command on the specified workspace member package") catch unreachable, - clap.parseParam("-F, --filter ... Perform the command on all workspace member packages that match the pattern") catch unreachable, + clap.parseParam("--filter ... Perform the command on all workspace member packages that match the pattern") catch unreachable, // clap.parseParam("--fail-if-no-match Fail if no packages match the filter") catch unreachable, clap.parseParam("-c, --config ? Specify path to Bun config file. Default $cwd/bunfig.toml") catch unreachable, clap.parseParam("-h, --help Display this menu and exit") catch unreachable, @@ -374,7 +375,6 @@ pub const Arguments = struct { fn findWorkspaceRoot(allocator: std.mem.Allocator, log: *logger.Log, workspace_map: *Package.WorkspaceMap, workdir_: []const u8) !void { var workdir = workdir_; - var package_json_cwd_buf: [bun.MAX_PATH_BYTES]u8 = undefined; while (true) : (workdir = std.fs.path.dirname(workdir) orelse return error.MissingPackageJSON) { const parent_trimmed = strings.withoutTrailingSlash(workdir); @@ -382,6 +382,10 @@ pub const Arguments = struct { @memcpy(buf2[0..parent_trimmed.len], parent_trimmed); buf2[parent_trimmed.len..buf2.len][0.."/package.json".len].* = "/package.json".*; buf2[parent_trimmed.len + "/package.json".len] = 0; + const json_path = buf2[0 .. parent_trimmed.len + "/package.json".len]; + log.msgs.clearRetainingCapacity(); + log.errors = 0; + log.warnings = 0; const json_file = std.fs.cwd().openFileZ( buf2[0 .. 
parent_trimmed.len + "/package.json".len :0].ptr, @@ -393,7 +397,6 @@ pub const Arguments = struct { const json_buf = try allocator.alloc(u8, json_stat_size + 64); defer allocator.free(json_buf); const json_len = try json_file.preadAll(json_buf, 0); - const json_path = try bun.getFdPath(json_file.handle, &package_json_cwd_buf); const json_source = logger.Source.initPathString(json_path, json_buf[0..json_len]); const json = try json_parser.ParseJSONUTF8(&json_source, log, allocator); @@ -421,41 +424,9 @@ pub const Arguments = struct { return; } - return error.MissingPackageJSON; - } + // if we were not able to find a workspace root, try globbing for package.json files - fn packageMatchesName(allocator: std.mem.Allocator, log: *logger.Log, path: []const u8, target_name: []const u8) !bool { - var local_buf: [bun.MAX_PATH_BYTES + 1]u8 = undefined; - const pkgstring = "/package.json"; - @memcpy(local_buf[0..path.len], path); - local_buf[path.len..local_buf.len][0..pkgstring.len].* = pkgstring.*; - local_buf[path.len + pkgstring.len] = 0; - - const json_file = std.fs.cwd().openFileZ( - local_buf[0 .. 
path.len + pkgstring.len :0].ptr, - .{ .mode = .read_only }, - ) catch { - return false; - }; - defer json_file.close(); - - const json_stat_size = try json_file.getEndPos(); - const json_buf = try allocator.alloc(u8, json_stat_size + 64); - defer allocator.free(json_buf); - const json_len = try json_file.preadAll(json_buf, 0); - var local_buf2: [bun.MAX_PATH_BYTES]u8 = undefined; - const json_path = try bun.getFdPath(json_file.handle, &local_buf2); - const json_source = logger.Source.initPathString(json_path, json_buf[0..json_len]); - const json = try json_parser.ParseJSONUTF8(&json_source, log, allocator); - const prop = json.asProperty("name") orelse return false; - const name = switch (prop.expr.data) { - .e_string => |str| str.data, - else => { - Output.err("error", "package.json name is not a string", .{}); - return false; - }, - }; - return strings.eql(name, target_name); + return error.MissingPackageJSON; } pub fn parse(allocator: std.mem.Allocator, ctx: *Command.Context, comptime cmd: Command.Tag) !Api.TransformOptions { @@ -514,6 +485,7 @@ pub const Arguments = struct { const filters = args.options("--filter"); if (filters.len > 0) { + ctx.has_filter = true; // TODO in the future we can try loading the lockfile to get the workspace information more quickly // var manager = try PackageManager.init(ctx, PackageManager.Subcommand.pm); // const load_lockfile = manager.lockfile.loadFromDisk(ctx.allocator, ctx.log, "bun.lockb"); @@ -527,29 +499,99 @@ pub const Arguments = struct { defer wsmap.deinit(); // find the root package.json of the workspace and load the child packages into workspace map findWorkspaceRoot(allocator, ctx.log, &wsmap, cwd) catch |err| { - Output.prettyErrorln("error resolving --filter: {s}", .{@errorName(err)}); + if (comptime Environment.allow_assert) { + if (@errorReturnTrace()) |trace| { + std.debug.print("Error: {s}\n{}\n", .{ @errorName(err), trace }); + } + } + Output.err(err, "Failed to find workspace root in {s}", .{cwd}); + 
ctx.log.printForLogLevelColorsRuntime(Output.errorWriter(), Output.enable_ansi_colors) catch {}; Global.exit(1); }; - var list = std.ArrayListUnmanaged([]u8){}; + var matched_paths = std.ArrayListUnmanaged([]u8){}; + const PkgInfo = struct { + // borrowed + path: []const u8, + // owned + name: []const u8, + }; - for (filters) |pattern| { - std.debug.print("pattern: {s}\n", .{pattern}); - for (wsmap.keys()) |path| { - // check if the package at the path matches the name provided as an argument - const matches = packageMatchesName(allocator, ctx.log, path, pattern) catch |err| { - Output.prettyErrorln("error resolving --filter: {s}", .{@errorName(err)}); - Global.exit(1); - }; - if (matches) { - try list.append(allocator, try allocator.dupe(u8, path)); + var packages = std.ArrayList(PkgInfo).init(allocator); + defer { + for (packages.items) |package| { + allocator.free(package.name); + } + packages.deinit(); + } + + bun.JSAst.Expr.Data.Store.create(allocator); + bun.JSAst.Stmt.Data.Store.create(allocator); + + // for each path in the workspace map, load the name from package.json + for (wsmap.keys()) |path| { + var local_buf: [bun.MAX_PATH_BYTES + 1]u8 = undefined; + const pkgstring = "/package.json"; + @memcpy(local_buf[0..path.len], path); + local_buf[path.len..local_buf.len][0..pkgstring.len].* = pkgstring.*; + local_buf[path.len + pkgstring.len] = 0; + const json_path = local_buf[0 .. path.len + pkgstring.len]; + + const json_file = std.fs.cwd().openFileZ( + local_buf[0 .. 
path.len + pkgstring.len :0].ptr, + .{ .mode = .read_only }, + ) catch { + continue; + }; + defer json_file.close(); + + const json_stat_size = try json_file.getEndPos(); + const json_buf = try allocator.alloc(u8, json_stat_size + 64); + defer allocator.free(json_buf); + + const json_len = try json_file.preadAll(json_buf, 0); + const json_source = logger.Source.initPathString(json_path, json_buf[0..json_len]); + + var parser = try json_parser.PackageJSONVersionChecker.init(allocator, &json_source, ctx.log); + _ = try parser.parseExpr(); + if (!parser.has_found_name) { + continue; + } + try packages.append(PkgInfo{ .path = path, .name = try allocator.dupe(u8, parser.found_name) }); + } + + var pattern_stack = std.heap.stackFallback(4096, bun.default_allocator); + var pattern = std.ArrayList(u32).init(pattern_stack.get()); + defer pattern.deinit(); + + // check each pattern against each package name + for (filters) |pattern_utf8_| { + var pattern_utf8 = pattern_utf8_; + var path_buf: [bun.MAX_PATH_BYTES]u8 = undefined; + + const is_file_pattern = pattern_utf8.len > 0 and pattern_utf8[0] == '.'; + if (is_file_pattern) { + const parts = [_]string{pattern_utf8}; + pattern_utf8 = bun.path.joinAbsStringBuf(cwd, &path_buf, &parts, .auto); + } + + pattern.clearRetainingCapacity(); + var codepointer_iter = strings.UnsignedCodepointIterator.init(pattern_utf8); + var cursor = strings.UnsignedCodepointIterator.Cursor{}; + while (codepointer_iter.next(&cursor)) { + try pattern.append(cursor.c); + } + for (packages.items) |package| { + const target = if (is_file_pattern) package.path else package.name; + if (Glob.matchImpl(pattern.items, target)) { + try matched_paths.append(allocator, try allocator.dupe(u8, package.path)); } } } bun.JSAst.Expr.Data.Store.reset(); bun.JSAst.Stmt.Data.Store.reset(); - ctx.workspace_paths = list.items; + ctx.workspace_paths = matched_paths.items; } if (cmd == .TestCommand) { @@ -1223,6 +1265,7 @@ pub const Command = struct { runtime_options: 
RuntimeOptions = RuntimeOptions{}, workspace_paths: [][]const u8 = &[_][]const u8{}, + has_filter: bool = false, preloads: []const string = &[_]string{}, has_loaded_global_config: bool = false, diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig index 1f29c35cda66d..f7cf467fd2557 100644 --- a/src/cli/run_command.zig +++ b/src/cli/run_command.zig @@ -1010,7 +1010,7 @@ pub const RunCommand = struct { pub fn execAll(ctx: Command.Context, comptime bin_dirs_only: bool) !void { // if there are no workspace paths specified, run in the current directory - if (ctx.workspace_paths.len == 0) { + if (!ctx.has_filter) { _ = try exec(ctx, bin_dirs_only, true); return; } @@ -1026,6 +1026,7 @@ pub const RunCommand = struct { } var ok = true; for (ctx.workspace_paths) |path| { + Output.prettyErrorln("In {s}:", .{path}); std.os.chdir(path) catch |err| { Output.prettyErrorln("error: Failed to change directory to {s} due to error {s}", .{ path, @errorName(err) }); continue; @@ -1036,6 +1037,8 @@ pub const RunCommand = struct { continue; }; ok = ok and res; + // flush outputs to ensure that stdout and stderr are in the correct order for each of the paths + Output.flush(); } if (!ok) { Global.exit(1); diff --git a/src/logger.zig b/src/logger.zig index 604ebfa83f567..46848ed7bc4af 100644 --- a/src/logger.zig +++ b/src/logger.zig @@ -1257,6 +1257,14 @@ pub const Log = struct { if (needs_newline) _ = try to.write("\n"); } + pub fn printForLogLevelColorsRuntime(self: *Log, to: anytype, enable_ansi_colors: bool) !void { + if (enable_ansi_colors) { + return self.printForLogLevelWithEnableAnsiColors(to, true); + } else { + return self.printForLogLevelWithEnableAnsiColors(to, false); + } + } + pub fn toZigException(this: *const Log, allocator: std.mem.Allocator) *js.ZigException.Holder { var holder = try allocator.create(js.ZigException.Holder); holder.* = js.ZigException.Holder.init(); From f74aad979927e82c6ae40067e589090d6a52d286 Mon Sep 17 00:00:00 2001 From: Georgijs Vilums Date: 
Tue, 16 Jan 2024 19:53:56 -0800 Subject: [PATCH 12/74] simplify package name parsing, commit tests --- src/cli.zig | 121 ++++++++++++--------- test/cli/run/filter-workspace.test.ts | 145 ++++++++++++++++++++++++++ test/cli/run/workspace.test.ts | 115 -------------------- 3 files changed, 214 insertions(+), 167 deletions(-) create mode 100644 test/cli/run/filter-workspace.test.ts delete mode 100644 test/cli/run/workspace.test.ts diff --git a/src/cli.zig b/src/cli.zig index 5a44fbdfe0292..97972adb6db54 100644 --- a/src/cli.zig +++ b/src/cli.zig @@ -424,6 +424,22 @@ pub const Arguments = struct { return; } + // var walker = Glob.BunGlobWalker{}; + // var arena = std.heap.ArenaAllocator.init(allocator); + // try walker.init(&walker, &arena, "**/package.json", true, true, false, true); + // defer walker.deinit(true); + + // var iter = Glob.BunGlobWalker.Iterator{ .walker = &walker }; + // _ = try iter.init(); + + // while (switch (try iter.next()) { + // .err => |err| return .{ .err = err }, + // .result => |matched_path| matched_path, + // }) |path| { + // // try workspace_map.insert() + // try this.matchedPaths.append(this.arena.allocator(), BunString.fromBytes(path)); + // } + // if we were not able to find a workspace root, try globbing for package.json files return error.MissingPackageJSON; @@ -510,55 +526,56 @@ pub const Arguments = struct { }; var matched_paths = std.ArrayListUnmanaged([]u8){}; - const PkgInfo = struct { - // borrowed - path: []const u8, - // owned - name: []const u8, - }; - - var packages = std.ArrayList(PkgInfo).init(allocator); - defer { - for (packages.items) |package| { - allocator.free(package.name); - } - packages.deinit(); - } - - bun.JSAst.Expr.Data.Store.create(allocator); - bun.JSAst.Stmt.Data.Store.create(allocator); + // const PkgInfo = struct { + // // borrowed + // path: []const u8, + // // owned + // name: []const u8, + // }; + + // var packages = std.ArrayList(PkgInfo).init(allocator); + // defer { + // for (packages.items) 
|package| { + // allocator.free(package.name); + // } + // packages.deinit(); + // } + + // bun.JSAst.Expr.Data.Store.create(allocator); + // bun.JSAst.Stmt.Data.Store.create(allocator); // for each path in the workspace map, load the name from package.json - for (wsmap.keys()) |path| { - var local_buf: [bun.MAX_PATH_BYTES + 1]u8 = undefined; - const pkgstring = "/package.json"; - @memcpy(local_buf[0..path.len], path); - local_buf[path.len..local_buf.len][0..pkgstring.len].* = pkgstring.*; - local_buf[path.len + pkgstring.len] = 0; - const json_path = local_buf[0 .. path.len + pkgstring.len]; - - const json_file = std.fs.cwd().openFileZ( - local_buf[0 .. path.len + pkgstring.len :0].ptr, - .{ .mode = .read_only }, - ) catch { - continue; - }; - defer json_file.close(); - - const json_stat_size = try json_file.getEndPos(); - const json_buf = try allocator.alloc(u8, json_stat_size + 64); - defer allocator.free(json_buf); - - const json_len = try json_file.preadAll(json_buf, 0); - const json_source = logger.Source.initPathString(json_path, json_buf[0..json_len]); - - var parser = try json_parser.PackageJSONVersionChecker.init(allocator, &json_source, ctx.log); - _ = try parser.parseExpr(); - if (!parser.has_found_name) { - continue; - } - try packages.append(PkgInfo{ .path = path, .name = try allocator.dupe(u8, parser.found_name) }); - } + // for (wsmap.keys(), wsmap.values()) |path, entry| { + // // std.debug.print("path: {s}\n, name: {s}\n", .{ path, entry.name }); + // var local_buf: [bun.MAX_PATH_BYTES + 1]u8 = undefined; + // const pkgstring = "/package.json"; + // @memcpy(local_buf[0..path.len], path); + // local_buf[path.len..local_buf.len][0..pkgstring.len].* = pkgstring.*; + // local_buf[path.len + pkgstring.len] = 0; + // const json_path = local_buf[0 .. path.len + pkgstring.len]; + + // const json_file = std.fs.cwd().openFileZ( + // local_buf[0 .. 
path.len + pkgstring.len :0].ptr, + // .{ .mode = .read_only }, + // ) catch { + // continue; + // }; + // defer json_file.close(); + + // const json_stat_size = try json_file.getEndPos(); + // const json_buf = try allocator.alloc(u8, json_stat_size + 64); + // defer allocator.free(json_buf); + + // const json_len = try json_file.preadAll(json_buf, 0); + // const json_source = logger.Source.initPathString(json_path, json_buf[0..json_len]); + + // var parser = try json_parser.PackageJSONVersionChecker.init(allocator, &json_source, ctx.log); + // _ = try parser.parseExpr(); + // if (!parser.has_found_name) { + // continue; + // } + // try packages.append(PkgInfo{ .path = path, .name = try allocator.dupe(u8, parser.found_name) }); + // } var pattern_stack = std.heap.stackFallback(4096, bun.default_allocator); var pattern = std.ArrayList(u32).init(pattern_stack.get()); @@ -581,15 +598,15 @@ pub const Arguments = struct { while (codepointer_iter.next(&cursor)) { try pattern.append(cursor.c); } - for (packages.items) |package| { - const target = if (is_file_pattern) package.path else package.name; + for (wsmap.keys(), wsmap.values()) |path, entry| { + const target = if (is_file_pattern) path else entry.name; if (Glob.matchImpl(pattern.items, target)) { - try matched_paths.append(allocator, try allocator.dupe(u8, package.path)); + try matched_paths.append(allocator, try allocator.dupe(u8, path)); } } } - bun.JSAst.Expr.Data.Store.reset(); - bun.JSAst.Stmt.Data.Store.reset(); + // bun.JSAst.Expr.Data.Store.reset(); + // bun.JSAst.Stmt.Data.Store.reset(); ctx.workspace_paths = matched_paths.items; } diff --git a/test/cli/run/filter-workspace.test.ts b/test/cli/run/filter-workspace.test.ts new file mode 100644 index 0000000000000..e97f73de05f66 --- /dev/null +++ b/test/cli/run/filter-workspace.test.ts @@ -0,0 +1,145 @@ +// @known-failing-on-windows: 1 failing +import { describe, test, expect, beforeAll } from "bun:test"; +import { spawnSync } from "bun"; +import { bunEnv, 
bunExe, tempDirWithFiles } from "harness"; +import { join } from "path"; + +let cwd_root = tempDirWithFiles("testworkspace", { + "packages": { + "pkga": { + "package.json": JSON.stringify({ + "name": "pkga", + "scripts": { + "present": "echo 1234", + }, + }), + }, + "pkgb": { + "package.json": JSON.stringify({ + "name": "pkgb", + "scripts": { + "present": "echo 4321", + }, + }), + }, + "dirname": { + "package.json": JSON.stringify({ + "name": "pkgc", + "scripts": { + "present": "echo 5678", + }, + }), + }, + }, + "package.json": JSON.stringify({ + "name": "ws", + "workspaces": ["packages/pkga", "packages/pkgb", "packages/dirname"], + }), +}); + +let cwd_packages = join(cwd_root, "packages"); +let cwd_a = join(cwd_packages, "pkga"); +let cwd_b = join(cwd_packages, "pkgb"); +let cwd_c = join(cwd_packages, "dirname"); + +function runInCwdSuccess( + cwd: string, + pattern: string | string[], + target_pattern: RegExp | RegExp[], + antipattern?: RegExp | RegExp[], +) { + let cmd = [bunExe(), "run"]; + if (pattern instanceof Array) { + for (let p of pattern) { + cmd.push("--filter", p); + } + } else { + cmd.push("--filter", pattern); + } + cmd.push("present"); + console.log(cmd); + console.log(cwd); + console.log(cwd_root); + const { exitCode, stdout, stderr } = spawnSync({ + cwd: cwd, + cmd: cmd, + env: bunEnv, + stdout: "pipe", + stderr: "pipe", + }); + const stdoutval = stdout.toString(); + console.log(stdoutval, stderr.toString()); + for (let r of target_pattern instanceof Array ? target_pattern : [target_pattern]) { + expect(stdoutval).toMatch(r); + } + if (antipattern !== undefined) { + for (let r of antipattern instanceof Array ? 
antipattern : [antipattern]) {
+      expect(stdoutval).not.toMatch(r);
+    }
+  }
+  // expect(stderr.toString()).toBeEmpty();
+  expect(exitCode).toBe(0);
+}
+
+function runInCwdFailure(cwd: string, pkgname: string, scriptname: string, result: RegExp) {
+  const { exitCode, stdout, stderr } = spawnSync({
+    cwd: cwd,
+    cmd: [bunExe(), "run", "--filter", pkgname, scriptname],
+    env: bunEnv,
+    stdout: "pipe",
+    stderr: "pipe",
+  });
+  expect(stdout.toString()).toBeEmpty();
+  expect(stderr.toString()).toMatch(result);
+  expect(exitCode).toBe(1);
+}
+
+describe("bun", () => {
+  let dirs = [cwd_root, cwd_packages, cwd_a, cwd_b, cwd_c];
+  let packages = [
+    {
+      name: "pkga",
+      output: /1234/,
+    },
+    {
+      name: "pkgb",
+      output: /4321/,
+    },
+    {
+      name: "pkgc",
+      output: /5678/,
+    },
+  ];
+
+  let names = packages.map(p => p.name);
+  for (let d of dirs) {
+    for (let { name, output } of packages) {
+      test(`resolve ${name} from ${d}`, () => {
+        runInCwdSuccess(d, name, output);
+      });
+    }
+  }
+
+  for (let d of dirs) {
+    test(`resolve '*' from ${d}`, () => {
+      runInCwdSuccess(d, "*", [/1234/, /4321/, /5678/]);
+    });
+    test(`resolve all from ${d}`, () => {
+      runInCwdSuccess(d, names, [/1234/, /4321/, /5678/]);
+    });
+  }
+
+  test("resolve all with glob", () => {
+    runInCwdSuccess(cwd_root, "./packages/*", [/1234/, /4321/, /5678/]);
+  });
+  test("resolve all with recursive glob", () => {
+    runInCwdSuccess(cwd_root, "./**", [/1234/, /4321/, /5678/]);
+  });
+  test("resolve 'pkga' and 'pkgb' but not 'pkgc' with targeted glob", () => {
+    runInCwdSuccess(cwd_root, "./packages/pkg*", [/1234/, /4321/], /5678/);
+  });
+
+  test("should error with missing script", () => {
+    runInCwdFailure(cwd_root, "*", "notpresent", /found/);
+  });
+});
diff --git a/test/cli/run/workspace.test.ts b/test/cli/run/workspace.test.ts
deleted file mode 100644
index 80bb465b0aec7..0000000000000
--- a/test/cli/run/workspace.test.ts
+++ /dev/null
@@ -1,115 +0,0 @@
-// @known-failing-on-windows: 1 failing
-import { describe, 
test, expect, beforeAll } from "bun:test"; -import { spawnSync } from "bun"; -import { bunEnv, bunExe, tempDirWithFiles } from "harness"; - -let cwd_root: string; -let cwd_packages: string; -let cwd_a: string; -let cwd_b: string; - -beforeAll(() => { - var path = require("path"); - cwd_root = tempDirWithFiles("testworkspace", { - "packages": { - "a": { - "package.json": JSON.stringify({ - "name": "a", - "scripts": { - "present": "echo 1234", - }, - }), - }, - "b": { - "package.json": JSON.stringify({ - "name": "b", - "scripts": { - "present": "echo 4321", - }, - }), - }, - }, - "package.json": JSON.stringify({ - "name": "ws", - "workspaces": ["packages/a", "packages/b"], - }), - }); - cwd_packages = path.join(cwd_root, "packages"); - cwd_a = path.join(cwd_packages, "a"); - cwd_b = path.join(cwd_packages, "b"); -}); - -function runInCwdSuccess(cwd: string, pkgname: string | string[], result: RegExp | RegExp[]) { - let cmd = [bunExe(), "run"] - if (pkgname instanceof Array) { - for (let p of pkgname) { - cmd.push("-F", p) - } - } else { - cmd.push("-F", pkgname) - } - cmd.push("present") - const { exitCode, stdout, stderr } = spawnSync({ - cwd: cwd, - cmd: cmd, - env: bunEnv, - stdout: "pipe", - stderr: "pipe", - }); - const stdoutval = stdout.toString() - for (let r of result instanceof Array ? 
result : [result]) { - expect(stdoutval).toMatch(r); - } - // expect(stderr.toString()).toBeEmpty(); - expect(exitCode).toBe(0); -} - -function runInCwdFailure(cwd: string, pkgname: string, result: RegExp) { - const { exitCode, stdout, stderr } = spawnSync({ - cwd: cwd, - cmd: [bunExe(), "run", "-F", pkgname, "present"], - env: bunEnv, - stdout: "pipe", - stderr: "pipe", - }); - expect(stdout.toString()).toBeEmpty(); - expect(stderr.toString()).toMatch(result); - expect(exitCode).toBe(1); -} - -describe("bun", () => { - test("resolve 'a' from root", () => { - runInCwdSuccess(cwd_root, "a", /1234/); - }); - test("resolve 'b' from root", () => { - runInCwdSuccess(cwd_root, "b", /4321/); - }); - test("resolve 'a' from middle", () => { - runInCwdSuccess(cwd_packages, "a", /1234/); - }); - test("resolve 'b' from middle", () => { - runInCwdSuccess(cwd_packages, "b", /4321/); - }); - test("resolve 'a' from self", () => { - runInCwdSuccess(cwd_a, "a", /1234/); - }); - test("resolve 'b' from self", () => { - runInCwdSuccess(cwd_b, "b", /4321/); - }); - test("resolve 'a' from other", () => { - runInCwdSuccess(cwd_b, "a", /1234/); - }); - test("resolve 'b' from other", () => { - runInCwdSuccess(cwd_a, "b", /4321/); - }); - test("resolve 'a' and 'b' from 'a'", () => { - runInCwdSuccess(cwd_a, ["a", "b"], [/1234/, /4321/]); - }); - test("resolve 'a' and 'b' from 'b'", () => { - runInCwdSuccess(cwd_a, ["a", "b"], [/1234/, /4321/]); - }); - - test("should error with missing workspace", () => { - runInCwdFailure(cwd_root, "notpresent", /filter/); - }); -}); From 4adbf68c7cfbd3bed1d9a8079ccce17daae89675 Mon Sep 17 00:00:00 2001 From: Georgijs Vilums Date: Tue, 16 Jan 2024 20:37:52 -0800 Subject: [PATCH 13/74] support filter even without workspace setup --- src/cli.zig | 144 ++++++++++++++++++++++++++-------------------------- 1 file changed, 72 insertions(+), 72 deletions(-) diff --git a/src/cli.zig b/src/cli.zig index 97972adb6db54..89c175212c3c2 100644 --- a/src/cli.zig +++ 
b/src/cli.zig @@ -373,10 +373,18 @@ pub const Arguments = struct { return try loadConfig(allocator, args.option("--config"), ctx, comptime cmd); } - fn findWorkspaceRoot(allocator: std.mem.Allocator, log: *logger.Log, workspace_map: *Package.WorkspaceMap, workdir_: []const u8) !void { + fn findWorkspaceMembers(allocator: std.mem.Allocator, log: *logger.Log, workspace_map: *Package.WorkspaceMap, workdir_: []const u8) !void { + bun.JSAst.Expr.Data.Store.create(default_allocator); + bun.JSAst.Stmt.Data.Store.create(default_allocator); + + defer { + bun.JSAst.Expr.Data.Store.reset(); + bun.JSAst.Stmt.Data.Store.reset(); + } + var workdir = workdir_; - while (true) : (workdir = std.fs.path.dirname(workdir) orelse return error.MissingPackageJSON) { + while (true) : (workdir = std.fs.path.dirname(workdir) orelse break) { const parent_trimmed = strings.withoutTrailingSlash(workdir); var buf2: [bun.MAX_PATH_BYTES + 1]u8 = undefined; @memcpy(buf2[0..parent_trimmed.len], parent_trimmed); @@ -424,25 +432,66 @@ pub const Arguments = struct { return; } - // var walker = Glob.BunGlobWalker{}; - // var arena = std.heap.ArenaAllocator.init(allocator); - // try walker.init(&walker, &arena, "**/package.json", true, true, false, true); - // defer walker.deinit(true); + // if we were not able to find a workspace root, try globbing for package.json files + + var walker = Glob.BunGlobWalker{}; + var arena = std.heap.ArenaAllocator.init(allocator); + const walker_init_res = try walker.init(&arena, "**/package.json", true, true, false, true, true); + switch (walker_init_res) { + .err => |err| { + Output.prettyErrorln("Error: {}", .{err}); + return; + }, + else => {}, + } + defer walker.deinit(true); + + var iter = Glob.BunGlobWalker.Iterator{ .walker = &walker }; + const iter_init_res = try iter.init(); + switch (iter_init_res) { + .err => |err| { + Output.prettyErrorln("Error: {}", .{err}); + return; + }, + else => {}, + } + defer iter.deinit(); + + while (true) { + const next = try 
iter.next(); + const path = switch (next) { + .err => |err| { + Output.prettyErrorln("Error: {}", .{err}); + continue; + }, + .result => |path| path orelse break, + }; + // const path = next.result orelse break; - // var iter = Glob.BunGlobWalker.Iterator{ .walker = &walker }; - // _ = try iter.init(); + const json_file = std.fs.cwd().openFile( + path, + .{ .mode = .read_only }, + ) catch { + continue; + }; + defer json_file.close(); - // while (switch (try iter.next()) { - // .err => |err| return .{ .err = err }, - // .result => |matched_path| matched_path, - // }) |path| { - // // try workspace_map.insert() - // try this.matchedPaths.append(this.arena.allocator(), BunString.fromBytes(path)); - // } + const json_stat_size = try json_file.getEndPos(); + const json_buf = try allocator.alloc(u8, json_stat_size + 64); + defer allocator.free(json_buf); - // if we were not able to find a workspace root, try globbing for package.json files + const json_len = try json_file.preadAll(json_buf, 0); + const json_source = logger.Source.initPathString(path, json_buf[0..json_len]); - return error.MissingPackageJSON; + var parser = try json_parser.PackageJSONVersionChecker.init(allocator, &json_source, log); + _ = try parser.parseExpr(); + if (!parser.has_found_name) { + continue; + } + const entry = Package.WorkspaceMap.Entry{ .name = try allocator.dupe(u8, parser.found_name), .version = null, .name_loc = logger.Loc.Empty }; + const dirpath = std.fs.path.dirname(path) orelse continue; + try workspace_map.insert(try allocator.dupe(u8, dirpath), entry); + } } pub fn parse(allocator: std.mem.Allocator, ctx: *Command.Context, comptime cmd: Command.Tag) !Api.TransformOptions { @@ -508,13 +557,11 @@ pub const Arguments = struct { // if (load_lockfile == .not_found) { // find the paths of all projects that match this filter - bun.JSAst.Expr.Data.Store.create(default_allocator); - bun.JSAst.Stmt.Data.Store.create(default_allocator); var wsmap = Package.WorkspaceMap.init(allocator); 
defer wsmap.deinit(); // find the root package.json of the workspace and load the child packages into workspace map - findWorkspaceRoot(allocator, ctx.log, &wsmap, cwd) catch |err| { + findWorkspaceMembers(allocator, ctx.log, &wsmap, cwd) catch |err| { if (comptime Environment.allow_assert) { if (@errorReturnTrace()) |trace| { std.debug.print("Error: {s}\n{}\n", .{ @errorName(err), trace }); @@ -526,56 +573,6 @@ pub const Arguments = struct { }; var matched_paths = std.ArrayListUnmanaged([]u8){}; - // const PkgInfo = struct { - // // borrowed - // path: []const u8, - // // owned - // name: []const u8, - // }; - - // var packages = std.ArrayList(PkgInfo).init(allocator); - // defer { - // for (packages.items) |package| { - // allocator.free(package.name); - // } - // packages.deinit(); - // } - - // bun.JSAst.Expr.Data.Store.create(allocator); - // bun.JSAst.Stmt.Data.Store.create(allocator); - - // for each path in the workspace map, load the name from package.json - // for (wsmap.keys(), wsmap.values()) |path, entry| { - // // std.debug.print("path: {s}\n, name: {s}\n", .{ path, entry.name }); - // var local_buf: [bun.MAX_PATH_BYTES + 1]u8 = undefined; - // const pkgstring = "/package.json"; - // @memcpy(local_buf[0..path.len], path); - // local_buf[path.len..local_buf.len][0..pkgstring.len].* = pkgstring.*; - // local_buf[path.len + pkgstring.len] = 0; - // const json_path = local_buf[0 .. path.len + pkgstring.len]; - - // const json_file = std.fs.cwd().openFileZ( - // local_buf[0 .. 
path.len + pkgstring.len :0].ptr, - // .{ .mode = .read_only }, - // ) catch { - // continue; - // }; - // defer json_file.close(); - - // const json_stat_size = try json_file.getEndPos(); - // const json_buf = try allocator.alloc(u8, json_stat_size + 64); - // defer allocator.free(json_buf); - - // const json_len = try json_file.preadAll(json_buf, 0); - // const json_source = logger.Source.initPathString(json_path, json_buf[0..json_len]); - - // var parser = try json_parser.PackageJSONVersionChecker.init(allocator, &json_source, ctx.log); - // _ = try parser.parseExpr(); - // if (!parser.has_found_name) { - // continue; - // } - // try packages.append(PkgInfo{ .path = path, .name = try allocator.dupe(u8, parser.found_name) }); - // } var pattern_stack = std.heap.stackFallback(4096, bun.default_allocator); var pattern = std.ArrayList(u32).init(pattern_stack.get()); @@ -605,8 +602,11 @@ pub const Arguments = struct { } } } - // bun.JSAst.Expr.Data.Store.reset(); - // bun.JSAst.Stmt.Data.Store.reset(); + + if (matched_paths.items.len == 0) { + Output.prettyErrorln("error: No packages matched the filter", .{}); + Global.exit(1); + } ctx.workspace_paths = matched_paths.items; } From 44ea630b402e8bc41fd9636b894f91be9cce6e5a Mon Sep 17 00:00:00 2001 From: Georgijs Vilums Date: Wed, 17 Jan 2024 11:48:51 -0800 Subject: [PATCH 14/74] move filter arg handling to separate source file --- src/cli.zig | 190 ++-------------------------------------- src/cli/filter_arg.zig | 183 ++++++++++++++++++++++++++++++++++++++ src/cli/run_command.zig | 24 ++++- 3 files changed, 208 insertions(+), 189 deletions(-) create mode 100644 src/cli/filter_arg.zig diff --git a/src/cli.zig b/src/cli.zig index 89c175212c3c2..31fa270f618f3 100644 --- a/src/cli.zig +++ b/src/cli.zig @@ -373,127 +373,6 @@ pub const Arguments = struct { return try loadConfig(allocator, args.option("--config"), ctx, comptime cmd); } - fn findWorkspaceMembers(allocator: std.mem.Allocator, log: *logger.Log, workspace_map: 
*Package.WorkspaceMap, workdir_: []const u8) !void { - bun.JSAst.Expr.Data.Store.create(default_allocator); - bun.JSAst.Stmt.Data.Store.create(default_allocator); - - defer { - bun.JSAst.Expr.Data.Store.reset(); - bun.JSAst.Stmt.Data.Store.reset(); - } - - var workdir = workdir_; - - while (true) : (workdir = std.fs.path.dirname(workdir) orelse break) { - const parent_trimmed = strings.withoutTrailingSlash(workdir); - var buf2: [bun.MAX_PATH_BYTES + 1]u8 = undefined; - @memcpy(buf2[0..parent_trimmed.len], parent_trimmed); - buf2[parent_trimmed.len..buf2.len][0.."/package.json".len].* = "/package.json".*; - buf2[parent_trimmed.len + "/package.json".len] = 0; - const json_path = buf2[0 .. parent_trimmed.len + "/package.json".len]; - log.msgs.clearRetainingCapacity(); - log.errors = 0; - log.warnings = 0; - - const json_file = std.fs.cwd().openFileZ( - buf2[0 .. parent_trimmed.len + "/package.json".len :0].ptr, - .{ .mode = .read_only }, - ) catch continue; - defer json_file.close(); - - const json_stat_size = try json_file.getEndPos(); - const json_buf = try allocator.alloc(u8, json_stat_size + 64); - defer allocator.free(json_buf); - const json_len = try json_file.preadAll(json_buf, 0); - const json_source = logger.Source.initPathString(json_path, json_buf[0..json_len]); - const json = try json_parser.ParseJSONUTF8(&json_source, log, allocator); - - const prop = json.asProperty("workspaces") orelse continue; - - const json_array = switch (prop.expr.data) { - .e_array => |arr| arr, - .e_object => |obj| if (obj.get("packages")) |packages| switch (packages.data) { - .e_array => |arr| arr, - else => break, - } else break, - else => break, - }; - _ = Package.processWorkspaceNamesArray( - workspace_map, - allocator, - log, - json_array, - &json_source, - prop.loc, - null, - ) catch |err| { - return err; - }; - return; - } - - // if we were not able to find a workspace root, try globbing for package.json files - - var walker = Glob.BunGlobWalker{}; - var arena = 
std.heap.ArenaAllocator.init(allocator); - const walker_init_res = try walker.init(&arena, "**/package.json", true, true, false, true, true); - switch (walker_init_res) { - .err => |err| { - Output.prettyErrorln("Error: {}", .{err}); - return; - }, - else => {}, - } - defer walker.deinit(true); - - var iter = Glob.BunGlobWalker.Iterator{ .walker = &walker }; - const iter_init_res = try iter.init(); - switch (iter_init_res) { - .err => |err| { - Output.prettyErrorln("Error: {}", .{err}); - return; - }, - else => {}, - } - defer iter.deinit(); - - while (true) { - const next = try iter.next(); - const path = switch (next) { - .err => |err| { - Output.prettyErrorln("Error: {}", .{err}); - continue; - }, - .result => |path| path orelse break, - }; - // const path = next.result orelse break; - - const json_file = std.fs.cwd().openFile( - path, - .{ .mode = .read_only }, - ) catch { - continue; - }; - defer json_file.close(); - - const json_stat_size = try json_file.getEndPos(); - const json_buf = try allocator.alloc(u8, json_stat_size + 64); - defer allocator.free(json_buf); - - const json_len = try json_file.preadAll(json_buf, 0); - const json_source = logger.Source.initPathString(path, json_buf[0..json_len]); - - var parser = try json_parser.PackageJSONVersionChecker.init(allocator, &json_source, log); - _ = try parser.parseExpr(); - if (!parser.has_found_name) { - continue; - } - const entry = Package.WorkspaceMap.Entry{ .name = try allocator.dupe(u8, parser.found_name), .version = null, .name_loc = logger.Loc.Empty }; - const dirpath = std.fs.path.dirname(path) orelse continue; - try workspace_map.insert(try allocator.dupe(u8, dirpath), entry); - } - } - pub fn parse(allocator: std.mem.Allocator, ctx: *Command.Context, comptime cmd: Command.Tag) !Api.TransformOptions { var diag = clap.Diagnostic{}; const params_to_parse = comptime cmd.params(); @@ -548,68 +427,7 @@ pub const Arguments = struct { cwd = try bun.getcwd(&cwd_buf); } - const filters = 
args.options("--filter"); - if (filters.len > 0) { - ctx.has_filter = true; - // TODO in the future we can try loading the lockfile to get the workspace information more quickly - // var manager = try PackageManager.init(ctx, PackageManager.Subcommand.pm); - // const load_lockfile = manager.lockfile.loadFromDisk(ctx.allocator, ctx.log, "bun.lockb"); - // if (load_lockfile == .not_found) { - - // find the paths of all projects that match this filter - - var wsmap = Package.WorkspaceMap.init(allocator); - defer wsmap.deinit(); - // find the root package.json of the workspace and load the child packages into workspace map - findWorkspaceMembers(allocator, ctx.log, &wsmap, cwd) catch |err| { - if (comptime Environment.allow_assert) { - if (@errorReturnTrace()) |trace| { - std.debug.print("Error: {s}\n{}\n", .{ @errorName(err), trace }); - } - } - Output.err(err, "Failed to find workspace root in {s}", .{cwd}); - ctx.log.printForLogLevelColorsRuntime(Output.errorWriter(), Output.enable_ansi_colors) catch {}; - Global.exit(1); - }; - - var matched_paths = std.ArrayListUnmanaged([]u8){}; - - var pattern_stack = std.heap.stackFallback(4096, bun.default_allocator); - var pattern = std.ArrayList(u32).init(pattern_stack.get()); - defer pattern.deinit(); - - // check each pattern against each package name - for (filters) |pattern_utf8_| { - var pattern_utf8 = pattern_utf8_; - var path_buf: [bun.MAX_PATH_BYTES]u8 = undefined; - - const is_file_pattern = pattern_utf8.len > 0 and pattern_utf8[0] == '.'; - if (is_file_pattern) { - const parts = [_]string{pattern_utf8}; - pattern_utf8 = bun.path.joinAbsStringBuf(cwd, &path_buf, &parts, .auto); - } - - pattern.clearRetainingCapacity(); - var codepointer_iter = strings.UnsignedCodepointIterator.init(pattern_utf8); - var cursor = strings.UnsignedCodepointIterator.Cursor{}; - while (codepointer_iter.next(&cursor)) { - try pattern.append(cursor.c); - } - for (wsmap.keys(), wsmap.values()) |path, entry| { - const target = if 
(is_file_pattern) path else entry.name; - if (Glob.matchImpl(pattern.items, target)) { - try matched_paths.append(allocator, try allocator.dupe(u8, path)); - } - } - } - - if (matched_paths.items.len == 0) { - Output.prettyErrorln("error: No packages matched the filter", .{}); - Global.exit(1); - } - - ctx.workspace_paths = matched_paths.items; - } + ctx.filters = args.options("--filter"); if (cmd == .TestCommand) { if (args.option("--timeout")) |timeout_ms| { @@ -1281,8 +1099,7 @@ pub const Command = struct { bundler_options: BundlerOptions = BundlerOptions{}, runtime_options: RuntimeOptions = RuntimeOptions{}, - workspace_paths: [][]const u8 = &[_][]const u8{}, - has_filter: bool = false, + filters: []const []const u8 = &[_][]const u8{}, preloads: []const string = &[_]string{}, has_loaded_global_config: bool = false, @@ -1914,6 +1731,9 @@ pub const Command = struct { } if (ctx.positionals.len > 0 and extension.len == 0) { + if (ctx.filters.len > 0) { + Output.prettyln("warn: Filters are ignored for auto command", .{}); + } if (try RunCommand.exec(ctx, true, false)) { return; } diff --git a/src/cli/filter_arg.zig b/src/cli/filter_arg.zig new file mode 100644 index 0000000000000..e1da6da1bd24e --- /dev/null +++ b/src/cli/filter_arg.zig @@ -0,0 +1,183 @@ +const std = @import("std"); +const root = @import("root"); +const bun = root.bun; +const string = bun.string; +const Output = bun.Output; +const Global = bun.Global; +const strings = bun.strings; +const json_parser = bun.JSON; +const Glob = @import("../glob.zig"); + +const Package = @import("../install/lockfile.zig").Package; + +fn findWorkspaceMembers(allocator: std.mem.Allocator, log: *bun.logger.Log, workspace_map: *Package.WorkspaceMap, workdir_: []const u8) !void { + bun.JSAst.Expr.Data.Store.create(bun.default_allocator); + bun.JSAst.Stmt.Data.Store.create(bun.default_allocator); + + defer { + bun.JSAst.Expr.Data.Store.reset(); + bun.JSAst.Stmt.Data.Store.reset(); + } + + var workdir = workdir_; + + while 
(true) : (workdir = std.fs.path.dirname(workdir) orelse break) { + const parent_trimmed = strings.withoutTrailingSlash(workdir); + var buf2: [bun.MAX_PATH_BYTES + 1]u8 = undefined; + @memcpy(buf2[0..parent_trimmed.len], parent_trimmed); + buf2[parent_trimmed.len..buf2.len][0.."/package.json".len].* = "/package.json".*; + buf2[parent_trimmed.len + "/package.json".len] = 0; + const json_path = buf2[0 .. parent_trimmed.len + "/package.json".len]; + log.msgs.clearRetainingCapacity(); + log.errors = 0; + log.warnings = 0; + + const json_file = std.fs.cwd().openFileZ( + buf2[0 .. parent_trimmed.len + "/package.json".len :0].ptr, + .{ .mode = .read_only }, + ) catch continue; + defer json_file.close(); + + const json_stat_size = try json_file.getEndPos(); + const json_buf = try allocator.alloc(u8, json_stat_size + 64); + defer allocator.free(json_buf); + const json_len = try json_file.preadAll(json_buf, 0); + const json_source = bun.logger.Source.initPathString(json_path, json_buf[0..json_len]); + const json = try json_parser.ParseJSONUTF8(&json_source, log, allocator); + + const prop = json.asProperty("workspaces") orelse continue; + + const json_array = switch (prop.expr.data) { + .e_array => |arr| arr, + .e_object => |obj| if (obj.get("packages")) |packages| switch (packages.data) { + .e_array => |arr| arr, + else => break, + } else break, + else => break, + }; + _ = Package.processWorkspaceNamesArray( + workspace_map, + allocator, + log, + json_array, + &json_source, + prop.loc, + null, + ) catch |err| { + return err; + }; + return; + } + + // if we were not able to find a workspace root, try globbing for package.json files + + var walker = Glob.BunGlobWalker{}; + var arena = std.heap.ArenaAllocator.init(allocator); + const walker_init_res = try walker.init(&arena, "**/package.json", true, true, false, true, true); + switch (walker_init_res) { + .err => |err| { + Output.prettyErrorln("Error: {}", .{err}); + return; + }, + else => {}, + } + defer walker.deinit(true); + 
+ var iter = Glob.BunGlobWalker.Iterator{ .walker = &walker }; + const iter_init_res = try iter.init(); + switch (iter_init_res) { + .err => |err| { + Output.prettyErrorln("Error: {}", .{err}); + return; + }, + else => {}, + } + defer iter.deinit(); + + while (true) { + const next = try iter.next(); + const path = switch (next) { + .err => |err| { + Output.prettyErrorln("Error: {}", .{err}); + continue; + }, + .result => |path| path orelse break, + }; + + const json_file = std.fs.cwd().openFile( + path, + .{ .mode = .read_only }, + ) catch { + continue; + }; + defer json_file.close(); + + const json_stat_size = try json_file.getEndPos(); + const json_buf = try allocator.alloc(u8, json_stat_size + 64); + defer allocator.free(json_buf); + + const json_len = try json_file.preadAll(json_buf, 0); + const json_source = bun.logger.Source.initPathString(path, json_buf[0..json_len]); + + var parser = try json_parser.PackageJSONVersionChecker.init(allocator, &json_source, log); + _ = try parser.parseExpr(); + if (!parser.has_found_name) { + continue; + } + const entry = Package.WorkspaceMap.Entry{ .name = try allocator.dupe(u8, parser.found_name), .version = null, .name_loc = bun.logger.Loc.Empty }; + const dirpath = std.fs.path.dirname(path) orelse continue; + try workspace_map.insert(try allocator.dupe(u8, dirpath), entry); + } +} + +pub fn getFilteredPackages(ctx: bun.CLI.Command.Context, cwd: []const u8, paths: *std.ArrayList([]u8)) !void { + // TODO in the future we can try loading the lockfile to get the workspace information more quickly + // var manager = try PackageManager.init(ctx, PackageManager.Subcommand.pm); + // const load_lockfile = manager.lockfile.loadFromDisk(ctx.allocator, ctx.log, "bun.lockb"); + // if (load_lockfile == .not_found) { + + // find the paths of all projects that match this filter + + var wsmap = Package.WorkspaceMap.init(ctx.allocator); + defer wsmap.deinit(); + // find the root package.json of the workspace and load the child packages into 
workspace map + findWorkspaceMembers(ctx.allocator, ctx.log, &wsmap, cwd) catch |err| { + if (comptime bun.Environment.allow_assert) { + if (@errorReturnTrace()) |trace| { + std.debug.print("Error: {s}\n{}\n", .{ @errorName(err), trace }); + } + } + Output.err(err, "Failed to find workspace root in {s}", .{cwd}); + ctx.log.printForLogLevelColorsRuntime(Output.errorWriter(), Output.enable_ansi_colors) catch {}; + Global.exit(1); + }; + + var pattern_stack = std.heap.stackFallback(4096, bun.default_allocator); + var pattern = std.ArrayList(u32).init(pattern_stack.get()); + defer pattern.deinit(); + + // check each pattern against each package name + for (ctx.filters) |pattern_utf8_| { + var pattern_utf8 = pattern_utf8_; + var path_buf: [bun.MAX_PATH_BYTES]u8 = undefined; + + const is_file_pattern = pattern_utf8.len > 0 and pattern_utf8[0] == '.'; + if (is_file_pattern) { + const parts = [_]string{pattern_utf8}; + pattern_utf8 = bun.path.joinAbsStringBuf(cwd, &path_buf, &parts, .auto); + } + + pattern.clearRetainingCapacity(); + var codepointer_iter = strings.UnsignedCodepointIterator.init(pattern_utf8); + var cursor = strings.UnsignedCodepointIterator.Cursor{}; + while (codepointer_iter.next(&cursor)) { + try pattern.append(cursor.c); + } + for (wsmap.keys(), wsmap.values()) |path, entry| { + const target = if (is_file_pattern) path else entry.name; + if (Glob.matchImpl(pattern.items, target)) { + try paths.append(try ctx.allocator.dupe(u8, path)); + } + } + } +} diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig index f7cf467fd2557..a8b8c1b43ad8c 100644 --- a/src/cli/run_command.zig +++ b/src/cli/run_command.zig @@ -52,6 +52,7 @@ const PosixSpawn = bun.posix.spawn; const PackageManager = @import("../install/install.zig").PackageManager; const Lockfile = @import("../install/lockfile.zig"); +const FilterArg = @import("filter_arg.zig"); const LifecycleScriptSubprocess = bun.install.LifecycleScriptSubprocess; @@ -1009,11 +1010,12 @@ pub const RunCommand = 
struct { } pub fn execAll(ctx: Command.Context, comptime bin_dirs_only: bool) !void { - // if there are no workspace paths specified, run in the current directory - if (!ctx.has_filter) { + // without filters just behave like normal exec + if (ctx.filters.len == 0) { _ = try exec(ctx, bin_dirs_only, true); return; } + const fsinstance = try bun.fs.FileSystem.init(null); const olddir = fsinstance.top_level_dir; defer { @@ -1024,9 +1026,23 @@ pub const RunCommand = struct { Global.crash(); }; } + var workspace_paths = std.ArrayList([]u8).init(ctx.allocator); + defer { + for (workspace_paths.items) |path| { + ctx.allocator.free(path); + } + workspace_paths.deinit(); + } + try FilterArg.getFilteredPackages(ctx, olddir, &workspace_paths); + + if (workspace_paths.items.len == 0) { + Output.prettyErrorln("error: No packages matched the filter", .{}); + Global.exit(1); + } + var ok = true; - for (ctx.workspace_paths) |path| { - Output.prettyErrorln("In {s}:", .{path}); + for (workspace_paths.items) |path| { + Output.prettyErrorln("In {s}:", .{path}); std.os.chdir(path) catch |err| { Output.prettyErrorln("error: Failed to change directory to {s} due to error {s}", .{ path, @errorName(err) }); continue; From 9d171188647fb0f157a0a4d4679dd15ee35534c0 Mon Sep 17 00:00:00 2001 From: Georgijs Vilums Date: Wed, 17 Jan 2024 15:16:19 -0800 Subject: [PATCH 15/74] use bun.sys.chdir, match root package for scripts --- src/cli.zig | 11 ++++++---- src/cli/filter_arg.zig | 24 +++++++++++++++------ src/cli/run_command.zig | 22 ++++++++++++------- test/cli/run/filter-workspace.test.ts | 31 +++++++++++++-------------- 4 files changed, 54 insertions(+), 34 deletions(-) diff --git a/src/cli.zig b/src/cli.zig index 31fa270f618f3..aaa8478c1c6cd 100644 --- a/src/cli.zig +++ b/src/cli.zig @@ -417,10 +417,13 @@ pub const Arguments = struct { Output.prettyErrorln("error resolving --cwd: {s}", .{@errorName(err)}); Global.exit(1); }; - std.os.chdir(out) catch |err| { - Output.prettyErrorln("error 
setting --cwd: {s}", .{@errorName(err)}); - Global.exit(1); - }; + switch (bun.sys.chdir(out)) { + .err => |err| { + Output.prettyErrorln("error setting --cwd to {s} due to error {}", .{ out, err }); + Global.crash(); + }, + .result => {}, + } cwd = try allocator.dupe(u8, out); ctx.args.cwd_override = cwd; } else { diff --git a/src/cli/filter_arg.zig b/src/cli/filter_arg.zig index e1da6da1bd24e..26c9ee1cbee09 100644 --- a/src/cli/filter_arg.zig +++ b/src/cli/filter_arg.zig @@ -23,17 +23,18 @@ fn findWorkspaceMembers(allocator: std.mem.Allocator, log: *bun.logger.Log, work while (true) : (workdir = std.fs.path.dirname(workdir) orelse break) { const parent_trimmed = strings.withoutTrailingSlash(workdir); - var buf2: [bun.MAX_PATH_BYTES + 1]u8 = undefined; - @memcpy(buf2[0..parent_trimmed.len], parent_trimmed); - buf2[parent_trimmed.len..buf2.len][0.."/package.json".len].* = "/package.json".*; - buf2[parent_trimmed.len + "/package.json".len] = 0; - const json_path = buf2[0 .. parent_trimmed.len + "/package.json".len]; + var name_buf: bun.PathBuffer = undefined; + @memcpy(name_buf[0..parent_trimmed.len], parent_trimmed); + name_buf[parent_trimmed.len..name_buf.len][0.."/package.json".len].* = "/package.json".*; + name_buf[parent_trimmed.len + "/package.json".len] = 0; + const json_path = name_buf[0 .. parent_trimmed.len + "/package.json".len]; + log.msgs.clearRetainingCapacity(); log.errors = 0; log.warnings = 0; const json_file = std.fs.cwd().openFileZ( - buf2[0 .. parent_trimmed.len + "/package.json".len :0].ptr, + name_buf[0 .. 
parent_trimmed.len + "/package.json".len :0].ptr, .{ .mode = .read_only }, ) catch continue; defer json_file.close(); @@ -55,6 +56,12 @@ fn findWorkspaceMembers(allocator: std.mem.Allocator, log: *bun.logger.Log, work } else break, else => break, }; + const name_prop = json.asProperty("name") orelse continue; + const name = switch (name_prop.expr.data) { + .e_string => |n| n.data, + else => break, + }; + _ = Package.processWorkspaceNamesArray( workspace_map, allocator, @@ -66,6 +73,11 @@ fn findWorkspaceMembers(allocator: std.mem.Allocator, log: *bun.logger.Log, work ) catch |err| { return err; }; + + // add the root package to the workspace map, too + const entry = Package.WorkspaceMap.Entry{ .name = try allocator.dupe(u8, name), .version = null, .name_loc = bun.logger.Loc.Empty }; + try workspace_map.insert(try allocator.dupe(u8, parent_trimmed), entry); + return; } diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig index a8b8c1b43ad8c..31a916c4f0cf9 100644 --- a/src/cli/run_command.zig +++ b/src/cli/run_command.zig @@ -1021,10 +1021,13 @@ pub const RunCommand = struct { defer { // change back to the original directory once we're done fsinstance.top_level_dir = olddir; - std.os.chdir(olddir) catch |err| { - Output.prettyErrorln("error: Failed to change directory to {s} due to error {s}", .{ olddir, @errorName(err) }); - Global.crash(); - }; + switch (bun.sys.chdir(olddir)) { + .err => |err| { + Output.prettyErrorln("error: Failed to change directory to {s} due to error {}", .{ olddir, err }); + Global.crash(); + }, + .result => {}, + } } var workspace_paths = std.ArrayList([]u8).init(ctx.allocator); defer { @@ -1043,10 +1046,13 @@ pub const RunCommand = struct { var ok = true; for (workspace_paths.items) |path| { Output.prettyErrorln("In {s}:", .{path}); - std.os.chdir(path) catch |err| { - Output.prettyErrorln("error: Failed to change directory to {s} due to error {s}", .{ path, @errorName(err) }); - continue; - }; + switch (bun.sys.chdir(path)) { + 
.err => |err| { + Output.prettyErrorln("error: Failed to change directory to {s} due to error {}", .{ path, err }); + Global.crash(); + }, + .result => {}, + } fsinstance.top_level_dir = path; const res = exec(ctx, bin_dirs_only, true) catch |err| { Output.prettyErrorln("error: Failed to run {s} due to error {s}", .{ path, @errorName(err) }); diff --git a/test/cli/run/filter-workspace.test.ts b/test/cli/run/filter-workspace.test.ts index e97f73de05f66..9b6bbdd6fc314 100644 --- a/test/cli/run/filter-workspace.test.ts +++ b/test/cli/run/filter-workspace.test.ts @@ -10,7 +10,7 @@ let cwd_root = tempDirWithFiles("testworkspace", { "package.json": JSON.stringify({ "name": "pkga", "scripts": { - "present": "echo 1234", + "present": "echo scripta", }, }), }, @@ -18,7 +18,7 @@ let cwd_root = tempDirWithFiles("testworkspace", { "package.json": JSON.stringify({ "name": "pkgb", "scripts": { - "present": "echo 4321", + "present": "echo scriptb", }, }), }, @@ -26,13 +26,16 @@ let cwd_root = tempDirWithFiles("testworkspace", { "package.json": JSON.stringify({ "name": "pkgc", "scripts": { - "present": "echo 5678", + "present": "echo scriptc", }, }), }, }, "package.json": JSON.stringify({ "name": "ws", + "scripts": { + "present": "echo rootscript", + }, "workspaces": ["packages/pkga", "packages/pkgb", "packages/dirname"], }), }); @@ -57,9 +60,6 @@ function runInCwdSuccess( cmd.push("--filter", pattern); } cmd.push("present"); - console.log(cmd); - console.log(cwd); - console.log(cwd_root); const { exitCode, stdout, stderr } = spawnSync({ cwd: cwd, cmd: cmd, @@ -68,7 +68,6 @@ function runInCwdSuccess( stderr: "pipe", }); const stdoutval = stdout.toString(); - console.log(stdoutval, stderr.toString()); for (let r of target_pattern instanceof Array ? 
target_pattern : [target_pattern]) { expect(stdoutval).toMatch(r); } @@ -99,15 +98,15 @@ describe("bun", () => { let packages = [ { name: "pkga", - output: /1234/, + output: /scripta/, }, { name: "pkgb", - output: /4321/, + output: /scriptb/, }, { name: "pkgc", - output: /5678/, + output: /scriptc/, }, ]; @@ -122,21 +121,21 @@ describe("bun", () => { for (let d of dirs) { test(`resolve '*' from ${d}`, () => { - runInCwdSuccess(d, "*", [/1234/, /4321/, /5678/]); + runInCwdSuccess(d, "*", [/scripta/, /scriptb/, /scriptc/, /rootscript/]); }); test(`resolve all from ${d}`, () => { - runInCwdSuccess(d, names, [/1234/, /4321/, /5678/]); + runInCwdSuccess(d, names, [/scripta/, /scriptb/, /scriptc/]); }); } test("resolve all with glob", () => { - runInCwdSuccess(cwd_root, "./packages/*", [/1234/, /4321/, /5678/]); + runInCwdSuccess(cwd_root, "./packages/*", [/scripta/, /scriptb/, /scriptc/]); }); - test("resolve all with recursie glob", () => { - runInCwdSuccess(cwd_root, "./**", [/1234/, /4321/, /5678/]); + test("resolve all with recursive glob", () => { + runInCwdSuccess(cwd_root, "./**", [/scripta/, /scriptb/, /scriptc/]); }); test("resolve 'pkga' and 'pkgb' but not 'pkgc' with targeted glob", () => { - runInCwdSuccess(cwd_root, "./packages/pkg*", [/1234/, /4321/], /5678/); + runInCwdSuccess(cwd_root, "./packages/pkg*", [/scripta/, /scriptb/], /scriptc/); }); test("should error with missing script", () => { From e68e7b0969219be9515e1af75fca8bb10b1d41da Mon Sep 17 00:00:00 2001 From: Georgijs Vilums Date: Wed, 17 Jan 2024 16:31:56 -0800 Subject: [PATCH 16/74] fix exit code handling --- src/cli.zig | 8 +++-- src/cli/run_command.zig | 72 +++++++++++++++++++++++++++-------------- 2 files changed, 54 insertions(+), 26 deletions(-) diff --git a/src/cli.zig b/src/cli.zig index aaa8478c1c6cd..684324df43ae7 100644 --- a/src/cli.zig +++ b/src/cli.zig @@ -1737,8 +1737,12 @@ pub const Command = struct { if (ctx.filters.len > 0) { Output.prettyln("warn: Filters are ignored for auto 
command", .{}); } - if (try RunCommand.exec(ctx, true, false)) { - return; + switch (try RunCommand.exec(ctx, true, false)) { + .failure => {}, + .ok => return, + .code => |code| { + Global.exit(code); + }, } Output.prettyErrorln("error: Script not found \"{s}\"", .{ diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig index 31a916c4f0cf9..d4f6a98e25100 100644 --- a/src/cli/run_command.zig +++ b/src/cli/run_command.zig @@ -384,7 +384,7 @@ pub const RunCommand = struct { env: *DotEnv.Loader, passthrough: []const string, original_script_for_bun_run: ?[]const u8, - ) !bool { + ) !ExecResult { var argv_ = [_]string{executable}; var argv: []const string = &argv_; @@ -417,13 +417,13 @@ pub const RunCommand = struct { if (@errorReturnTrace()) |trace| { std.debug.dumpStackTrace(trace.*); } - return false; + return ExecResult.failure; } } } } Output.prettyErrorln("error: Failed to run \"{s}\" due to error {s}", .{ basenameOrBun(executable), @errorName(err) }); - return false; + return ExecResult.failure; }; switch (result) { .Exited => |code| { @@ -452,7 +452,7 @@ pub const RunCommand = struct { Output.errGeneric("\"{s}\" exited with code {d}", .{ basenameOrBun(executable), code }); } } - return false; + return ExecResult.withCode(code); }, .Signal, .Stopped => |sig| { // forward the signal to the shell / parent process @@ -462,13 +462,14 @@ pub const RunCommand = struct { } else if (!silent) { std.debug.panic("\"{s}\" stopped by signal code 0, which isn't supposed to be possible", .{executable}); } + // here we exit immediately, so we don't need to return a code Global.exit(128 + @as(u8, @as(u7, @truncate(sig)))); }, .Unknown => |sig| { if (!silent) { Output.errGeneric("\"{s}\" stopped with unknown state {d}", .{ basenameOrBun(executable), sig }); } - return false; + return ExecResult.failure; }, } @@ -1009,10 +1010,35 @@ pub const RunCommand = struct { Output.flush(); } + const ExecResult = union(enum) { + code: u8, + failure, + ok, + + pub fn withCode(code: u8) 
ExecResult { + return ExecResult{ .code = code }; + } + + // const ok = ExecResult{.ok = void}; + // const failure = ExecResult.failure; + + pub fn notFailure(self: ExecResult) bool { + return switch (self) { + .ok => true, + .failure => false, + .code => |code| code == 0, + }; + } + }; + pub fn execAll(ctx: Command.Context, comptime bin_dirs_only: bool) !void { // without filters just behave like normal exec if (ctx.filters.len == 0) { - _ = try exec(ctx, bin_dirs_only, true); + switch (try exec(ctx, bin_dirs_only, true)) { + .ok => return, + .failure => Global.exit(1), + .code => |code| Global.exit(code), + } return; } @@ -1058,7 +1084,7 @@ pub const RunCommand = struct { Output.prettyErrorln("error: Failed to run {s} due to error {s}", .{ path, @errorName(err) }); continue; }; - ok = ok and res; + ok = ok and res.notFailure(); // flush outputs to ensure that stdout and stderr are in the correct order for each of the paths Output.flush(); } @@ -1067,7 +1093,7 @@ pub const RunCommand = struct { } } - pub fn exec(ctx_: Command.Context, comptime bin_dirs_only: bool, comptime log_errors: bool) !bool { + pub fn exec(ctx_: Command.Context, comptime bin_dirs_only: bool, comptime log_errors: bool) !ExecResult { var ctx = ctx_; // Step 1. 
Figure out what we're trying to run @@ -1103,9 +1129,9 @@ pub const RunCommand = struct { if (@errorReturnTrace()) |trace| { std.debug.dumpStackTrace(trace.*); } - return false; + return ExecResult.failure; }; - return true; + return ExecResult.ok; } if (log_errors or force_using_bun) { @@ -1171,7 +1197,7 @@ pub const RunCommand = struct { const shebang_size = file.pread(&shebang_buf, 0) catch |err| { if (!ctx.debug.silent) Output.prettyErrorln("error: Failed to read file {s} due to error {s}", .{ file_path, @errorName(err) }); - return false; + return ExecResult.failure; }; var shebang: string = shebang_buf[0..shebang_size]; @@ -1203,10 +1229,10 @@ pub const RunCommand = struct { if (@errorReturnTrace()) |trace| { std.debug.dumpStackTrace(trace.*); } - return false; + return ExecResult.failure; }; - return true; + return ExecResult.ok; } } } @@ -1224,7 +1250,7 @@ pub const RunCommand = struct { 0 => { // naked "bun run" RunCommand.printHelp(package_json); - return true; + return ExecResult.ok; }, else => { if (scripts.get(script_name_to_search)) |script_content| { @@ -1242,7 +1268,7 @@ pub const RunCommand = struct { &.{}, ctx.debug.silent, )) { - return false; + return ExecResult.failure; } } @@ -1254,7 +1280,7 @@ pub const RunCommand = struct { this_bundler.env, passthrough, ctx.debug.silent, - )) return false; + )) return ExecResult.failure; temp_script_buffer[0.."post".len].* = "post".*; @@ -1268,11 +1294,11 @@ pub const RunCommand = struct { &.{}, ctx.debug.silent, )) { - return false; + return ExecResult.failure; } } - return true; + return ExecResult.ok; } else if ((script_name_to_search.len > 1 and script_name_to_search[0] == '/') or (script_name_to_search.len > 2 and script_name_to_search[0] == '.' 
and script_name_to_search[1] == '/')) { @@ -1290,7 +1316,7 @@ pub const RunCommand = struct { if (@errorReturnTrace()) |trace| { std.debug.dumpStackTrace(trace.*); } - return false; + return ExecResult.failure; }; } }, @@ -1301,10 +1327,8 @@ pub const RunCommand = struct { if (script_name_to_search.len == 0) { if (comptime log_errors) { Output.prettyError("No \"scripts\" in package.json found.\n", .{}); - return false; } - - return false; + return ExecResult.failure; } const PATH = this_bundler.env.map.get("PATH") orelse ""; @@ -1348,14 +1372,14 @@ pub const RunCommand = struct { } if (ctx.runtime_options.if_present) { - return true; + return ExecResult.ok; } if (comptime log_errors) { Output.prettyError("error: Script not found \"{s}\"\n", .{script_name_to_search}); } - return false; + return ExecResult.failure; } pub fn execAsIfNode(ctx: Command.Context) !void { From af5728cfc76905336771ea5cb8cfdd3185f2df85 Mon Sep 17 00:00:00 2001 From: Georgijs Vilums Date: Wed, 17 Jan 2024 19:19:22 -0800 Subject: [PATCH 17/74] ignore node_modules and directories starting with . 
in --filter --- src/cli/filter_arg.zig | 21 +++++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/src/cli/filter_arg.zig b/src/cli/filter_arg.zig index 26c9ee1cbee09..d334560d0ec8c 100644 --- a/src/cli/filter_arg.zig +++ b/src/cli/filter_arg.zig @@ -10,6 +10,23 @@ const Glob = @import("../glob.zig"); const Package = @import("../install/lockfile.zig").Package; +fn globIgnoreFn(val: []const u8) bool { + if (val.len == 0) { + return false; + } + // skip hidden directories + if (val[0] == '.') { + return true; + } + // skip ndoe_modules + if (strings.eql(val, "node_modules")) { + return true; + } + return false; +} + +const GlobWalker = Glob.GlobWalker_(globIgnoreFn); + fn findWorkspaceMembers(allocator: std.mem.Allocator, log: *bun.logger.Log, workspace_map: *Package.WorkspaceMap, workdir_: []const u8) !void { bun.JSAst.Expr.Data.Store.create(bun.default_allocator); bun.JSAst.Stmt.Data.Store.create(bun.default_allocator); @@ -83,7 +100,7 @@ fn findWorkspaceMembers(allocator: std.mem.Allocator, log: *bun.logger.Log, work // if we were not able to find a workspace root, try globbing for package.json files - var walker = Glob.BunGlobWalker{}; + var walker = GlobWalker{}; var arena = std.heap.ArenaAllocator.init(allocator); const walker_init_res = try walker.init(&arena, "**/package.json", true, true, false, true, true); switch (walker_init_res) { @@ -95,7 +112,7 @@ fn findWorkspaceMembers(allocator: std.mem.Allocator, log: *bun.logger.Log, work } defer walker.deinit(true); - var iter = Glob.BunGlobWalker.Iterator{ .walker = &walker }; + var iter = GlobWalker.Iterator{ .walker = &walker }; const iter_init_res = try iter.init(); switch (iter_init_res) { .err => |err| { From ad182558629201eb623f2bf5ca2ab32142e97399 Mon Sep 17 00:00:00 2001 From: Georgijs Vilums Date: Thu, 18 Jan 2024 00:38:53 -0800 Subject: [PATCH 18/74] progress converting --filter to use iterators --- src/cli/filter_arg.zig | 436 +++++++++++++++++++++++++++++----------- 
src/cli/run_command.zig | 81 +++++--- 2 files changed, 375 insertions(+), 142 deletions(-) diff --git a/src/cli/filter_arg.zig b/src/cli/filter_arg.zig index d334560d0ec8c..fa3f40d1717c1 100644 --- a/src/cli/filter_arg.zig +++ b/src/cli/filter_arg.zig @@ -18,8 +18,8 @@ fn globIgnoreFn(val: []const u8) bool { if (val[0] == '.') { return true; } - // skip ndoe_modules - if (strings.eql(val, "node_modules")) { + // skip node_modules + if (strings.eqlComptime(val, "node_modules")) { return true; } return false; @@ -27,7 +27,7 @@ fn globIgnoreFn(val: []const u8) bool { const GlobWalker = Glob.GlobWalker_(globIgnoreFn); -fn findWorkspaceMembers(allocator: std.mem.Allocator, log: *bun.logger.Log, workspace_map: *Package.WorkspaceMap, workdir_: []const u8) !void { +pub fn getGlobPatterns(allocator: std.mem.Allocator, log: *bun.logger.Log, out_patterns: *std.ArrayList([]u8), workdir_: []const u8) !void { bun.JSAst.Expr.Data.Store.create(bun.default_allocator); bun.JSAst.Stmt.Data.Store.create(bun.default_allocator); @@ -73,140 +73,352 @@ fn findWorkspaceMembers(allocator: std.mem.Allocator, log: *bun.logger.Log, work } else break, else => break, }; - const name_prop = json.asProperty("name") orelse continue; - const name = switch (name_prop.expr.data) { - .e_string => |n| n.data, - else => break, - }; - _ = Package.processWorkspaceNamesArray( - workspace_map, - allocator, - log, - json_array, - &json_source, - prop.loc, - null, - ) catch |err| { - return err; - }; - - // add the root package to the workspace map, too - const entry = Package.WorkspaceMap.Entry{ .name = try allocator.dupe(u8, name), .version = null, .name_loc = bun.logger.Loc.Empty }; - try workspace_map.insert(try allocator.dupe(u8, parent_trimmed), entry); + for (json_array.slice()) |expr| { + switch (expr.data) { + .e_string => |pattern_expr| { + // /basepath/pattern/package.json + // const size = parent_trimmed.len + 1 + pattern_expr.data.len + "/package.json".len; + const size = pattern_expr.data.len + 
"/package.json".len; + var pattern = try allocator.alloc(u8, size); + @memcpy(pattern[0..pattern_expr.data.len], pattern_expr.data); + @memcpy(pattern[pattern_expr.data.len..size], "/package.json"); + // @memcpy(pattern[0..parent_trimmed.len], parent_trimmed); + // pattern[parent_trimmed.len] = '/'; + // @memcpy(pattern[parent_trimmed.len + 1 .. parent_trimmed.len + 1 + pattern_expr.data.len], pattern_expr.data); + // @memcpy(pattern[parent_trimmed.len + 1 + pattern_expr.data.len .. size], "/package.json"); + try out_patterns.append(pattern); + }, + else => { + // TODO log error and fail + Global.crash(); + }, + } + } + std.os.chdir(parent_trimmed) catch unreachable; return; + + // const name = switch (name_prop.expr.data) { + // .e_string => |n| n.data, + // else => break, + // }; + + // _ = Package.processWorkspaceNamesArray( + // workspace_map, + // allocator, + // log, + // json_array, + // &json_source, + // prop.loc, + // null, + // ) catch |err| { + // return err; + // }; + + // // add the root package to the workspace map, too + // const entry = Package.WorkspaceMap.Entry{ .name = try allocator.dupe(u8, name), .version = null, .name_loc = bun.logger.Loc.Empty }; + // try workspace_map.insert(try allocator.dupe(u8, parent_trimmed), entry); } + try out_patterns.append(try allocator.dupe(u8, "**/package.json")); + // if we were not able to find a workspace root, try globbing for package.json files - var walker = GlobWalker{}; - var arena = std.heap.ArenaAllocator.init(allocator); - const walker_init_res = try walker.init(&arena, "**/package.json", true, true, false, true, true); - switch (walker_init_res) { - .err => |err| { - Output.prettyErrorln("Error: {}", .{err}); - return; - }, - else => {}, - } - defer walker.deinit(true); + // var walker = GlobWalker{}; + // var arena = std.heap.ArenaAllocator.init(allocator); + // const walker_init_res = try walker.init(&arena, "**/package.json", true, true, false, true, true); + // switch (walker_init_res) { + // 
.err => |err| { + // Output.prettyErrorln("Error: {}", .{err}); + // return; + // }, + // else => {}, + // } + // defer walker.deinit(true); - var iter = GlobWalker.Iterator{ .walker = &walker }; - const iter_init_res = try iter.init(); - switch (iter_init_res) { - .err => |err| { - Output.prettyErrorln("Error: {}", .{err}); - return; - }, - else => {}, - } - defer iter.deinit(); + // var iter = GlobWalker.Iterator{ .walker = &walker }; + // const iter_init_res = try iter.init(); + // switch (iter_init_res) { + // .err => |err| { + // Output.prettyErrorln("Error: {}", .{err}); + // return; + // }, + // else => {}, + // } + // defer iter.deinit(); - while (true) { - const next = try iter.next(); - const path = switch (next) { - .err => |err| { - Output.prettyErrorln("Error: {}", .{err}); - continue; - }, - .result => |path| path orelse break, - }; + // while (true) { + // const next = try iter.next(); + // const path = switch (next) { + // .err => |err| { + // Output.prettyErrorln("Error: {}", .{err}); + // continue; + // }, + // .result => |path| path orelse break, + // }; + + // const json_file = std.fs.cwd().openFile( + // path, + // .{ .mode = .read_only }, + // ) catch { + // continue; + // }; + // defer json_file.close(); + + // const json_stat_size = try json_file.getEndPos(); + // const json_buf = try allocator.alloc(u8, json_stat_size + 64); + // defer allocator.free(json_buf); + + // const json_len = try json_file.preadAll(json_buf, 0); + // const json_source = bun.logger.Source.initPathString(path, json_buf[0..json_len]); - const json_file = std.fs.cwd().openFile( + // var parser = try json_parser.PackageJSONVersionChecker.init(allocator, &json_source, log); + // _ = try parser.parseExpr(); + // if (!parser.has_found_name) { + // continue; + // } + // const entry = Package.WorkspaceMap.Entry{ .name = try allocator.dupe(u8, parser.found_name), .version = null, .name_loc = bun.logger.Loc.Empty }; + // const dirpath = std.fs.path.dirname(path) orelse 
continue; + // try workspace_map.insert(try allocator.dupe(u8, dirpath), entry); + // } +} + +// pub fn getFilteredPackages(ctx: bun.CLI.Command.Context, cwd: []const u8, paths: *std.ArrayList([]u8)) !void { +// // TODO in the future we can try loading the lockfile to get the workspace information more quickly +// // var manager = try PackageManager.init(ctx, PackageManager.Subcommand.pm); +// // const load_lockfile = manager.lockfile.loadFromDisk(ctx.allocator, ctx.log, "bun.lockb"); +// // if (load_lockfile == .not_found) { + +// // find the paths of all projects that match this filter + +// var wsmap = Package.WorkspaceMap.init(ctx.allocator); +// defer wsmap.deinit(); +// // find the root package.json of the workspace and load the child packages into workspace map +// findWorkspaceMembers(ctx.allocator, ctx.log, &wsmap, cwd) catch |err| { +// if (comptime bun.Environment.allow_assert) { +// if (@errorReturnTrace()) |trace| { +// std.debug.print("Error: {s}\n{}\n", .{ @errorName(err), trace }); +// } +// } +// Output.err(err, "Failed to find workspace root in {s}", .{cwd}); +// ctx.log.printForLogLevelColorsRuntime(Output.errorWriter(), Output.enable_ansi_colors) catch {}; +// Global.exit(1); +// }; + +// var pattern_stack = std.heap.stackFallback(4096, bun.default_allocator); +// var pattern = std.ArrayList(u32).init(pattern_stack.get()); +// defer pattern.deinit(); + +// // check each pattern against each package name +// for (ctx.filters) |pattern_utf8_| { +// var pattern_utf8 = pattern_utf8_; +// var path_buf: [bun.MAX_PATH_BYTES]u8 = undefined; + +// const is_file_pattern = pattern_utf8.len > 0 and pattern_utf8[0] == '.'; +// if (is_file_pattern) { +// const parts = [_]string{pattern_utf8}; +// pattern_utf8 = bun.path.joinAbsStringBuf(cwd, &path_buf, &parts, .auto); +// } + +// pattern.clearRetainingCapacity(); +// var codepointer_iter = strings.UnsignedCodepointIterator.init(pattern_utf8); +// var cursor = strings.UnsignedCodepointIterator.Cursor{}; +// 
while (codepointer_iter.next(&cursor)) { +// try pattern.append(cursor.c); +// } +// for (wsmap.keys(), wsmap.values()) |path, entry| { +// const target = if (is_file_pattern) path else entry.name; +// if (Glob.matchImpl(pattern.items, target)) { +// try paths.append(try ctx.allocator.dupe(u8, path)); +// } +// } +// } +// } + +pub const FilterSet = struct { + allocator: std.mem.Allocator, + filters: []Pattern, + has_name_filters: bool = false, + + const Pattern = struct { + codepoints: []u32, + kind: enum { + name, path, - .{ .mode = .read_only }, - ) catch { - continue; - }; - defer json_file.close(); + }, + // negate: bool = false, + }; - const json_stat_size = try json_file.getEndPos(); - const json_buf = try allocator.alloc(u8, json_stat_size + 64); - defer allocator.free(json_buf); + pub fn init(allocator: std.mem.Allocator, filters: []const []const u8, cwd: []const u8) !FilterSet { + var buf: bun.PathBuffer = undefined; + // TODO fixed buffer allocator with fallback? + var self = FilterSet{ .allocator = allocator, .filters = try allocator.alloc(Pattern, filters.len) }; + for (0.., filters) |idx, filter_utf8_| { + var filter_utf8 = filter_utf8_; + const is_path = filter_utf8.len > 0 and filter_utf8[0] == '.'; + if (is_path) { + const parts = [_]string{filter_utf8}; + filter_utf8 = bun.path.joinAbsStringBuf(cwd, &buf, &parts, .auto); + } + var filter_utf32 = std.ArrayListUnmanaged(u32){}; + var codepointer_iter = strings.UnsignedCodepointIterator.init(filter_utf8); + var cursor = strings.UnsignedCodepointIterator.Cursor{}; + while (codepointer_iter.next(&cursor)) { + try filter_utf32.append(self.allocator, cursor.c); + } + self.has_name_filters = self.has_name_filters or !is_path; + self.filters[idx] = Pattern{ + .codepoints = filter_utf32.items, + .kind = if (is_path) .path else .name, + }; + } + return self; + } - const json_len = try json_file.preadAll(json_buf, 0); - const json_source = bun.logger.Source.initPathString(path, json_buf[0..json_len]); + pub 
fn deinit(self: *FilterSet) void { + for (self.filters) |filter| { + // TODO is this free correct? we're freeing only part of the array + self.allocator.free(filter.codepoints); + } + self.allocator.free(self.filters); + } - var parser = try json_parser.PackageJSONVersionChecker.init(allocator, &json_source, log); - _ = try parser.parseExpr(); - if (!parser.has_found_name) { - continue; + pub fn matchesPath(self: *FilterSet, path: []const u8) bool { + for (self.filters) |filter| { + if (Glob.matchImpl(filter.codepoints, path)) { + return true; + } } - const entry = Package.WorkspaceMap.Entry{ .name = try allocator.dupe(u8, parser.found_name), .version = null, .name_loc = bun.logger.Loc.Empty }; - const dirpath = std.fs.path.dirname(path) orelse continue; - try workspace_map.insert(try allocator.dupe(u8, dirpath), entry); + return false; } -} -pub fn getFilteredPackages(ctx: bun.CLI.Command.Context, cwd: []const u8, paths: *std.ArrayList([]u8)) !void { - // TODO in the future we can try loading the lockfile to get the workspace information more quickly - // var manager = try PackageManager.init(ctx, PackageManager.Subcommand.pm); - // const load_lockfile = manager.lockfile.loadFromDisk(ctx.allocator, ctx.log, "bun.lockb"); - // if (load_lockfile == .not_found) { - - // find the paths of all projects that match this filter - - var wsmap = Package.WorkspaceMap.init(ctx.allocator); - defer wsmap.deinit(); - // find the root package.json of the workspace and load the child packages into workspace map - findWorkspaceMembers(ctx.allocator, ctx.log, &wsmap, cwd) catch |err| { - if (comptime bun.Environment.allow_assert) { - if (@errorReturnTrace()) |trace| { - std.debug.print("Error: {s}\n{}\n", .{ @errorName(err), trace }); + pub fn matchesPathName(self: *FilterSet, path: []const u8, name: []const u8) bool { + for (self.filters) |filter| { + const target = switch (filter.kind) { + .name => name, + .path => path, + }; + if (Glob.matchImpl(filter.codepoints, target)) { + 
return true; } } - Output.err(err, "Failed to find workspace root in {s}", .{cwd}); - ctx.log.printForLogLevelColorsRuntime(Output.errorWriter(), Output.enable_ansi_colors) catch {}; - Global.exit(1); - }; + return false; + } +}; + +pub fn getPackageName(allocator: std.mem.Allocator, log: *bun.logger.Log, path: []const u8) !?[]u8 { + const json_file = try std.fs.cwd().openFile( + path, + .{ .mode = .read_only }, + ); + defer json_file.close(); + + const json_stat_size = try json_file.getEndPos(); + const json_buf = try allocator.alloc(u8, json_stat_size + 64); + defer allocator.free(json_buf); + + const json_len = try json_file.preadAll(json_buf, 0); + const json_source = bun.logger.Source.initPathString(path, json_buf[0..json_len]); - var pattern_stack = std.heap.stackFallback(4096, bun.default_allocator); - var pattern = std.ArrayList(u32).init(pattern_stack.get()); - defer pattern.deinit(); + var parser = try json_parser.PackageJSONVersionChecker.init(allocator, &json_source, log); + _ = try parser.parseExpr(); + if (!parser.has_found_name) { + return null; + } + return try allocator.dupe(u8, parser.found_name); +} + +pub const PackageFilterIterator = struct { + patterns: []const []const u8, + pattern_idx: usize = 0, - // check each pattern against each package name - for (ctx.filters) |pattern_utf8_| { - var pattern_utf8 = pattern_utf8_; - var path_buf: [bun.MAX_PATH_BYTES]u8 = undefined; + walker: GlobWalker = undefined, + iter: GlobWalker.Iterator = undefined, + valid: bool = false, - const is_file_pattern = pattern_utf8.len > 0 and pattern_utf8[0] == '.'; - if (is_file_pattern) { - const parts = [_]string{pattern_utf8}; - pattern_utf8 = bun.path.joinAbsStringBuf(cwd, &path_buf, &parts, .auto); + arena: std.heap.ArenaAllocator, + allocator: std.mem.Allocator, + + pub fn init(allocator: std.mem.Allocator, patterns: []const []const u8) !PackageFilterIterator { + return PackageFilterIterator{ + .patterns = patterns, + .allocator = allocator, + .arena = 
std.heap.ArenaAllocator.init(allocator), + }; + } + + pub fn deinit(self: *PackageFilterIterator) void { + if (self.valid) { + self.deinitWalker(); + } + self.arena.deinit(); + } + + fn walkerNext(self: *PackageFilterIterator) !?[]const u8 { + while (true) { + switch (try self.iter.next()) { + .err => |err| { + Output.prettyErrorln("Error: {}", .{err}); + continue; + }, + .result => |path| { + return path; + }, + } } + } - pattern.clearRetainingCapacity(); - var codepointer_iter = strings.UnsignedCodepointIterator.init(pattern_utf8); - var cursor = strings.UnsignedCodepointIterator.Cursor{}; - while (codepointer_iter.next(&cursor)) { - try pattern.append(cursor.c); + fn initWalker(self: *PackageFilterIterator) !void { + const pattern = self.patterns[self.pattern_idx]; + var arena = self.arena; + const walker_init_res = try self.walker.init(&arena, pattern, true, true, false, true, true); + switch (walker_init_res) { + .err => |err| { + // TODO + // return err; + _ = err; + Global.crash(); + }, + else => {}, } - for (wsmap.keys(), wsmap.values()) |path, entry| { - const target = if (is_file_pattern) path else entry.name; - if (Glob.matchImpl(pattern.items, target)) { - try paths.append(try ctx.allocator.dupe(u8, path)); + self.iter = GlobWalker.Iterator{ .walker = &self.walker }; + + const iter_init_res = try self.iter.init(); + switch (iter_init_res) { + .err => |err| { + // TODO + // return err; + _ = err; + Global.crash(); + }, + else => {}, + } + } + + fn deinitWalker(self: *PackageFilterIterator) void { + self.walker.deinit(false); + self.iter.deinit(); + _ = self.arena.reset(std.heap.ArenaAllocator.ResetMode.retain_capacity); + } + + pub fn next(self: *PackageFilterIterator) !?[]const u8 { + while (true) { + if (!self.valid) { + if (self.pattern_idx < self.patterns.len) { + try self.initWalker(); + self.valid = true; + } else { + return null; + } + } + if (try self.walkerNext()) |path| { + return path; + } else { + self.valid = false; + self.pattern_idx += 1; 
+ self.deinitWalker(); } } } -} +}; diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig index 7caf2ff2b2f98..e4e82f9aa7540 100644 --- a/src/cli/run_command.zig +++ b/src/cli/run_command.zig @@ -1055,42 +1055,63 @@ pub const RunCommand = struct { .result => {}, } } - var workspace_paths = std.ArrayList([]u8).init(ctx.allocator); + + var filter_instance = try FilterArg.FilterSet.init(ctx.allocator, ctx.filters, olddir); + defer filter_instance.deinit(); + + var patterns = std.ArrayList([]u8).init(ctx.allocator); defer { - for (workspace_paths.items) |path| { + for (patterns.items) |path| { ctx.allocator.free(path); } - workspace_paths.deinit(); - } - try FilterArg.getFilteredPackages(ctx, olddir, &workspace_paths); - - if (workspace_paths.items.len == 0) { - Output.prettyErrorln("error: No packages matched the filter", .{}); - Global.exit(1); + patterns.deinit(); } - - var ok = true; - for (workspace_paths.items) |path| { - Output.prettyErrorln("In {s}:", .{path}); - switch (bun.sys.chdir(path)) { - .err => |err| { - Output.prettyErrorln("error: Failed to change directory to {s} due to error {}", .{ path, err }); - Global.crash(); - }, - .result => {}, + // try FilterArg.getFilteredPackages(ctx, olddir, &workspace_paths); + try FilterArg.getGlobPatterns(ctx.allocator, ctx.log, &patterns, olddir); + + var package_json_iter = try FilterArg.PackageFilterIterator.init(ctx.allocator, patterns.items); + defer package_json_iter.deinit(); + while (try package_json_iter.next()) |package_json_path| { + const dirpath = std.fs.path.dirname(package_json_path) orelse Global.crash(); + const path = strings.withoutTrailingSlash(dirpath); + std.debug.print("package_json_path: {s}\n", .{package_json_path}); + if (filter_instance.has_name_filters) { + // TODO + Global.crash(); + // const name = try FilterArg.getPackageName(ctx.allocator, ctx.log, package_json_path); + } else { + const matches = filter_instance.matchesPath(path); + std.debug.print("{s} match result: {}\n", .{ 
path, matches }); } - fsinstance.top_level_dir = path; - const res = exec(ctx, bin_dirs_only, true) catch |err| { - Output.prettyErrorln("error: Failed to run {s} due to error {s}", .{ path, @errorName(err) }); - continue; - }; - ok = ok and res.notFailure(); - // flush outputs to ensure that stdout and stderr are in the correct order for each of the paths - Output.flush(); - } - if (!ok) { - Global.exit(1); } + + // if (workspace_paths.items.len == 0) { + // Output.prettyErrorln("error: No packages matched the filter", .{}); + // Global.exit(1); + // } + + // var ok = true; + // for (workspace_paths.items) |path| { + // Output.prettyErrorln("In {s}:", .{path}); + // switch (bun.sys.chdir(path)) { + // .err => |err| { + // Output.prettyErrorln("error: Failed to change directory to {s} due to error {}", .{ path, err }); + // Global.crash(); + // }, + // .result => {}, + // } + // fsinstance.top_level_dir = path; + // const res = exec(ctx, bin_dirs_only, true) catch |err| { + // Output.prettyErrorln("error: Failed to run {s} due to error {s}", .{ path, @errorName(err) }); + // continue; + // }; + // ok = ok and res.notFailure(); + // // flush outputs to ensure that stdout and stderr are in the correct order for each of the paths + // Output.flush(); + // } + // if (!ok) { + // Global.exit(1); + // } } pub fn exec(ctx_: Command.Context, comptime bin_dirs_only: bool, comptime log_errors: bool) !ExecResult { From 5b19f985788cf336088a6b5b7135060c4e1b4510 Mon Sep 17 00:00:00 2001 From: Georgijs Vilums Date: Thu, 18 Jan 2024 15:42:35 -0800 Subject: [PATCH 19/74] convert filtering to use iterators --- src/cli/filter_arg.zig | 16 ++++-- src/cli/run_command.zig | 117 +++++++++++++++++++++++++++------------- 2 files changed, 92 insertions(+), 41 deletions(-) diff --git a/src/cli/filter_arg.zig b/src/cli/filter_arg.zig index fa3f40d1717c1..82e0b955525e1 100644 --- a/src/cli/filter_arg.zig +++ b/src/cli/filter_arg.zig @@ -27,7 +27,7 @@ fn globIgnoreFn(val: []const u8) bool { 
const GlobWalker = Glob.GlobWalker_(globIgnoreFn); -pub fn getGlobPatterns(allocator: std.mem.Allocator, log: *bun.logger.Log, out_patterns: *std.ArrayList([]u8), workdir_: []const u8) !void { +pub fn getCandidatePackagePatterns(allocator: std.mem.Allocator, log: *bun.logger.Log, out_patterns: *std.ArrayList([]u8), workdir_: []const u8, root_buf: *bun.PathBuffer) ![]const u8 { bun.JSAst.Expr.Data.Store.create(bun.default_allocator); bun.JSAst.Stmt.Data.Store.create(bun.default_allocator); @@ -96,8 +96,8 @@ pub fn getGlobPatterns(allocator: std.mem.Allocator, log: *bun.logger.Log, out_p }, } } - std.os.chdir(parent_trimmed) catch unreachable; - return; + @memcpy(root_buf[0..parent_trimmed.len], parent_trimmed); + return root_buf[0..parent_trimmed.len]; // const name = switch (name_prop.expr.data) { // .e_string => |n| n.data, @@ -122,6 +122,9 @@ pub fn getGlobPatterns(allocator: std.mem.Allocator, log: *bun.logger.Log, out_p } try out_patterns.append(try allocator.dupe(u8, "**/package.json")); + const root_dir = strings.withoutTrailingSlash(workdir_); + @memcpy(root_buf[0..root_dir.len], root_dir); + return root_buf[0..root_dir.len]; // if we were not able to find a workspace root, try globbing for package.json files @@ -332,6 +335,7 @@ pub fn getPackageName(allocator: std.mem.Allocator, log: *bun.logger.Log, path: pub const PackageFilterIterator = struct { patterns: []const []const u8, pattern_idx: usize = 0, + root_dir: []const u8, walker: GlobWalker = undefined, iter: GlobWalker.Iterator = undefined, @@ -340,11 +344,12 @@ pub const PackageFilterIterator = struct { arena: std.heap.ArenaAllocator, allocator: std.mem.Allocator, - pub fn init(allocator: std.mem.Allocator, patterns: []const []const u8) !PackageFilterIterator { + pub fn init(allocator: std.mem.Allocator, patterns: []const []const u8, root_dir: []const u8) !PackageFilterIterator { return PackageFilterIterator{ .patterns = patterns, .allocator = allocator, .arena = 
std.heap.ArenaAllocator.init(allocator), + .root_dir = root_dir, }; } @@ -372,7 +377,8 @@ pub const PackageFilterIterator = struct { fn initWalker(self: *PackageFilterIterator) !void { const pattern = self.patterns[self.pattern_idx]; var arena = self.arena; - const walker_init_res = try self.walker.init(&arena, pattern, true, true, false, true, true); + const cwd = try arena.allocator().dupe(u8, self.root_dir); + const walker_init_res = try self.walker.initWithCwd(&arena, pattern, cwd, true, true, false, true, true); switch (walker_init_res) { .err => |err| { // TODO diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig index e4e82f9aa7540..8fafe6674b16b 100644 --- a/src/cli/run_command.zig +++ b/src/cli/run_command.zig @@ -13,6 +13,7 @@ const std = @import("std"); const uws = bun.uws; const JSC = bun.JSC; const WaiterThread = JSC.Subprocess.WaiterThread; +const Fs = @import("../fs.zig"); const lex = bun.js_lexer; const logger = bun.logger; @@ -1056,6 +1057,18 @@ pub const RunCommand = struct { } } + // const root_dir = try Fs.FileSystem.instance.fs.readDirectory( + // Fs.FileSystem.instance.top_level_dir, + // null, + // 0, + // true, + // ); + // switch (root_dir.*) { + // .entries => |e| { + // _ = e; + // }, + // } + var filter_instance = try FilterArg.FilterSet.init(ctx.allocator, ctx.filters, olddir); defer filter_instance.deinit(); @@ -1066,52 +1079,84 @@ pub const RunCommand = struct { } patterns.deinit(); } - // try FilterArg.getFilteredPackages(ctx, olddir, &workspace_paths); - try FilterArg.getGlobPatterns(ctx.allocator, ctx.log, &patterns, olddir); - var package_json_iter = try FilterArg.PackageFilterIterator.init(ctx.allocator, patterns.items); + var root_buf: bun.PathBuffer = undefined; + const resolve_root = try FilterArg.getCandidatePackagePatterns(ctx.allocator, ctx.log, &patterns, olddir, &root_buf); + + for (patterns.items) |path| { + std.debug.print("pattern: {s}\n", .{path}); + } + + var package_json_iter = try 
FilterArg.PackageFilterIterator.init(ctx.allocator, patterns.items, resolve_root); defer package_json_iter.deinit(); + + var arena = std.heap.ArenaAllocator.init(ctx.allocator); + var arena_alloc = arena.allocator(); + + var ok = true; + var any_match = false; while (try package_json_iter.next()) |package_json_path| { const dirpath = std.fs.path.dirname(package_json_path) orelse Global.crash(); const path = strings.withoutTrailingSlash(dirpath); std.debug.print("package_json_path: {s}\n", .{package_json_path}); - if (filter_instance.has_name_filters) { - // TODO - Global.crash(); - // const name = try FilterArg.getPackageName(ctx.allocator, ctx.log, package_json_path); - } else { - const matches = filter_instance.matchesPath(path); - std.debug.print("{s} match result: {}\n", .{ path, matches }); + const matches = matches: { + if (filter_instance.has_name_filters) { + // TODO load name from package.json + + const json_file = try std.fs.cwd().openFile( + package_json_path, + .{ .mode = .read_only }, + ); + defer json_file.close(); + + const json_stat_size = try json_file.getEndPos(); + const json_buf = try arena_alloc.alloc(u8, json_stat_size + 64); + defer _ = arena.reset(std.heap.ArenaAllocator.ResetMode.retain_capacity); + + const json_len = try json_file.preadAll(json_buf, 0); + const json_source = bun.logger.Source.initPathString(path, json_buf[0..json_len]); + + var parser = try json_parser.PackageJSONVersionChecker.init(arena_alloc, &json_source, ctx.log); + _ = try parser.parseExpr(); + if (!parser.has_found_name) { + // TODO warn of malformed package + continue; + } + break :matches filter_instance.matchesPathName(path, parser.found_name); + } else { + break :matches filter_instance.matchesPath(path); + } + }; + + std.debug.print("matches: {}\n", .{matches}); + if (!matches) continue; + any_match = true; + Output.prettyErrorln("In {s}:", .{path}); + // flush outputs to ensure that stdout and stderr are in the correct order for each of the paths + 
Output.flush(); + switch (bun.sys.chdir(path)) { + .err => |err| { + Output.prettyErrorln("error: Failed to change directory to {s} due to error {}", .{ path, err }); + Global.crash(); + }, + .result => {}, } + fsinstance.top_level_dir = path; + const res = exec(ctx, bin_dirs_only, true) catch |err| { + Output.prettyErrorln("error: Failed to run {s} due to error {s}", .{ path, @errorName(err) }); + continue; + }; + ok = ok and res.notFailure(); } - // if (workspace_paths.items.len == 0) { - // Output.prettyErrorln("error: No packages matched the filter", .{}); - // Global.exit(1); - // } + if (!any_match) { + Output.prettyErrorln("error: No packages matched the filter", .{}); + Global.exit(1); + } - // var ok = true; - // for (workspace_paths.items) |path| { - // Output.prettyErrorln("In {s}:", .{path}); - // switch (bun.sys.chdir(path)) { - // .err => |err| { - // Output.prettyErrorln("error: Failed to change directory to {s} due to error {}", .{ path, err }); - // Global.crash(); - // }, - // .result => {}, - // } - // fsinstance.top_level_dir = path; - // const res = exec(ctx, bin_dirs_only, true) catch |err| { - // Output.prettyErrorln("error: Failed to run {s} due to error {s}", .{ path, @errorName(err) }); - // continue; - // }; - // ok = ok and res.notFailure(); - // // flush outputs to ensure that stdout and stderr are in the correct order for each of the paths - // Output.flush(); - // } - // if (!ok) { - // Global.exit(1); - // } + if (!ok) { + Global.exit(1); + } } pub fn exec(ctx_: Command.Context, comptime bin_dirs_only: bool, comptime log_errors: bool) !ExecResult { From a4d1b1797f30b9ef8932ba48280556fe42ca429c Mon Sep 17 00:00:00 2001 From: Georgijs Vilums Date: Thu, 18 Jan 2024 15:59:41 -0800 Subject: [PATCH 20/74] cleanup --- src/cli/filter_arg.zig | 135 +---------------------------------------- 1 file changed, 2 insertions(+), 133 deletions(-) diff --git a/src/cli/filter_arg.zig b/src/cli/filter_arg.zig index 82e0b955525e1..6a2792bb43958 100644 
--- a/src/cli/filter_arg.zig +++ b/src/cli/filter_arg.zig @@ -98,147 +98,15 @@ pub fn getCandidatePackagePatterns(allocator: std.mem.Allocator, log: *bun.logge } @memcpy(root_buf[0..parent_trimmed.len], parent_trimmed); return root_buf[0..parent_trimmed.len]; - - // const name = switch (name_prop.expr.data) { - // .e_string => |n| n.data, - // else => break, - // }; - - // _ = Package.processWorkspaceNamesArray( - // workspace_map, - // allocator, - // log, - // json_array, - // &json_source, - // prop.loc, - // null, - // ) catch |err| { - // return err; - // }; - - // // add the root package to the workspace map, too - // const entry = Package.WorkspaceMap.Entry{ .name = try allocator.dupe(u8, name), .version = null, .name_loc = bun.logger.Loc.Empty }; - // try workspace_map.insert(try allocator.dupe(u8, parent_trimmed), entry); } + // if we were not able to find a workspace root, we simply glob for all package.json files try out_patterns.append(try allocator.dupe(u8, "**/package.json")); const root_dir = strings.withoutTrailingSlash(workdir_); @memcpy(root_buf[0..root_dir.len], root_dir); return root_buf[0..root_dir.len]; - - // if we were not able to find a workspace root, try globbing for package.json files - - // var walker = GlobWalker{}; - // var arena = std.heap.ArenaAllocator.init(allocator); - // const walker_init_res = try walker.init(&arena, "**/package.json", true, true, false, true, true); - // switch (walker_init_res) { - // .err => |err| { - // Output.prettyErrorln("Error: {}", .{err}); - // return; - // }, - // else => {}, - // } - // defer walker.deinit(true); - - // var iter = GlobWalker.Iterator{ .walker = &walker }; - // const iter_init_res = try iter.init(); - // switch (iter_init_res) { - // .err => |err| { - // Output.prettyErrorln("Error: {}", .{err}); - // return; - // }, - // else => {}, - // } - // defer iter.deinit(); - - // while (true) { - // const next = try iter.next(); - // const path = switch (next) { - // .err => |err| { - // 
Output.prettyErrorln("Error: {}", .{err}); - // continue; - // }, - // .result => |path| path orelse break, - // }; - - // const json_file = std.fs.cwd().openFile( - // path, - // .{ .mode = .read_only }, - // ) catch { - // continue; - // }; - // defer json_file.close(); - - // const json_stat_size = try json_file.getEndPos(); - // const json_buf = try allocator.alloc(u8, json_stat_size + 64); - // defer allocator.free(json_buf); - - // const json_len = try json_file.preadAll(json_buf, 0); - // const json_source = bun.logger.Source.initPathString(path, json_buf[0..json_len]); - - // var parser = try json_parser.PackageJSONVersionChecker.init(allocator, &json_source, log); - // _ = try parser.parseExpr(); - // if (!parser.has_found_name) { - // continue; - // } - // const entry = Package.WorkspaceMap.Entry{ .name = try allocator.dupe(u8, parser.found_name), .version = null, .name_loc = bun.logger.Loc.Empty }; - // const dirpath = std.fs.path.dirname(path) orelse continue; - // try workspace_map.insert(try allocator.dupe(u8, dirpath), entry); - // } } -// pub fn getFilteredPackages(ctx: bun.CLI.Command.Context, cwd: []const u8, paths: *std.ArrayList([]u8)) !void { -// // TODO in the future we can try loading the lockfile to get the workspace information more quickly -// // var manager = try PackageManager.init(ctx, PackageManager.Subcommand.pm); -// // const load_lockfile = manager.lockfile.loadFromDisk(ctx.allocator, ctx.log, "bun.lockb"); -// // if (load_lockfile == .not_found) { - -// // find the paths of all projects that match this filter - -// var wsmap = Package.WorkspaceMap.init(ctx.allocator); -// defer wsmap.deinit(); -// // find the root package.json of the workspace and load the child packages into workspace map -// findWorkspaceMembers(ctx.allocator, ctx.log, &wsmap, cwd) catch |err| { -// if (comptime bun.Environment.allow_assert) { -// if (@errorReturnTrace()) |trace| { -// std.debug.print("Error: {s}\n{}\n", .{ @errorName(err), trace }); -// } -// } 
-// Output.err(err, "Failed to find workspace root in {s}", .{cwd}); -// ctx.log.printForLogLevelColorsRuntime(Output.errorWriter(), Output.enable_ansi_colors) catch {}; -// Global.exit(1); -// }; - -// var pattern_stack = std.heap.stackFallback(4096, bun.default_allocator); -// var pattern = std.ArrayList(u32).init(pattern_stack.get()); -// defer pattern.deinit(); - -// // check each pattern against each package name -// for (ctx.filters) |pattern_utf8_| { -// var pattern_utf8 = pattern_utf8_; -// var path_buf: [bun.MAX_PATH_BYTES]u8 = undefined; - -// const is_file_pattern = pattern_utf8.len > 0 and pattern_utf8[0] == '.'; -// if (is_file_pattern) { -// const parts = [_]string{pattern_utf8}; -// pattern_utf8 = bun.path.joinAbsStringBuf(cwd, &path_buf, &parts, .auto); -// } - -// pattern.clearRetainingCapacity(); -// var codepointer_iter = strings.UnsignedCodepointIterator.init(pattern_utf8); -// var cursor = strings.UnsignedCodepointIterator.Cursor{}; -// while (codepointer_iter.next(&cursor)) { -// try pattern.append(cursor.c); -// } -// for (wsmap.keys(), wsmap.values()) |path, entry| { -// const target = if (is_file_pattern) path else entry.name; -// if (Glob.matchImpl(pattern.items, target)) { -// try paths.append(try ctx.allocator.dupe(u8, path)); -// } -// } -// } -// } - pub const FilterSet = struct { allocator: std.mem.Allocator, filters: []Pattern, @@ -341,6 +209,7 @@ pub const PackageFilterIterator = struct { iter: GlobWalker.Iterator = undefined, valid: bool = false, + // TODO check if keeping the arena allocator around is sound - GlobWalker copies the allocator, so it might get out of sync or leak memory arena: std.heap.ArenaAllocator, allocator: std.mem.Allocator, From 024d86ec9e5c5feba48d09984970b5677a52b2f5 Mon Sep 17 00:00:00 2001 From: Georgijs Vilums Date: Thu, 18 Jan 2024 18:29:48 -0800 Subject: [PATCH 21/74] implement DirEntry access method for glob (currently crashing) --- src/cli/filter_arg.zig | 2 +- src/cli/run_command.zig | 29 ++++--- 
src/glob.zig | 182 +++++++++++++++++++++++++++++++++++----- 3 files changed, 182 insertions(+), 31 deletions(-) diff --git a/src/cli/filter_arg.zig b/src/cli/filter_arg.zig index 6a2792bb43958..9c6467c868458 100644 --- a/src/cli/filter_arg.zig +++ b/src/cli/filter_arg.zig @@ -25,7 +25,7 @@ fn globIgnoreFn(val: []const u8) bool { return false; } -const GlobWalker = Glob.GlobWalker_(globIgnoreFn); +const GlobWalker = Glob.GlobWalker_(globIgnoreFn, Glob.DirEntryAccessor); pub fn getCandidatePackagePatterns(allocator: std.mem.Allocator, log: *bun.logger.Log, out_patterns: *std.ArrayList([]u8), workdir_: []const u8, root_buf: *bun.PathBuffer) ![]const u8 { bun.JSAst.Expr.Data.Store.create(bun.default_allocator); diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig index 8fafe6674b16b..8edae1af38842 100644 --- a/src/cli/run_command.zig +++ b/src/cli/run_command.zig @@ -1057,17 +1057,22 @@ pub const RunCommand = struct { } } - // const root_dir = try Fs.FileSystem.instance.fs.readDirectory( - // Fs.FileSystem.instance.top_level_dir, - // null, - // 0, - // true, - // ); - // switch (root_dir.*) { - // .entries => |e| { - // _ = e; - // }, - // } + const root_dir = try Fs.FileSystem.instance.fs.readDirectory( + Fs.FileSystem.instance.top_level_dir, + null, + 0, + true, + ); + switch (root_dir.*) { + .entries => |e| { + // _ = e.data.get("package.json"); + _ = e; + }, + .err => |err| { + _ = err; + Global.crash(); + }, + } var filter_instance = try FilterArg.FilterSet.init(ctx.allocator, ctx.filters, olddir); defer filter_instance.deinit(); @@ -1142,6 +1147,8 @@ pub const RunCommand = struct { .result => {}, } fsinstance.top_level_dir = path; + // TODO is this necessary? which assignment is correct here? 
+ fsinstance.fs.cwd = path; const res = exec(ctx, bin_dirs_only, true) catch |err| { Output.prettyErrorln("error: Failed to run {s} due to error {s}", .{ path, @errorName(err) }); continue; diff --git a/src/glob.zig b/src/glob.zig index da8b0ae32329f..9d25eb781b169 100644 --- a/src/glob.zig +++ b/src/glob.zig @@ -109,7 +109,7 @@ const CursorState = struct { } }; -pub const BunGlobWalker = GlobWalker_(null); +pub const BunGlobWalker = GlobWalker_(null, SyscallAccessor); fn dummyFilterTrue(val: []const u8) bool { _ = val; @@ -121,8 +121,152 @@ fn dummyFilterFalse(val: []const u8) bool { return false; } +pub const SyscallAccessor = struct { + const Handle = struct { + value: bun.FileDescriptor, + + const zero = Handle{ .value = bun.FileDescriptor.zero }; + + pub fn isZero(this: Handle) bool { + return this.value == bun.FileDescriptor.zero; + } + + pub fn eql(this: Handle, other: Handle) bool { + return this.value == other.value; + } + }; + + const DirIter = struct { + value: DirIterator.WrappedIterator, + + pub inline fn next(self: *DirIter) Maybe(?DirIterator.IteratorResult) { + return self.value.next(); + } + + pub inline fn iterate(dir: Handle) DirIter { + return .{ .value = DirIterator.WrappedIterator.init(dir.value.asDir()) }; + } + }; + + pub fn open(path: [:0]const u8) Maybe(Handle) { + return switch (Syscall.open(path, std.os.O.DIRECTORY | std.os.O.RDONLY, 0)) { + .err => |err| .{ .err = err }, + .result => |fd| .{ .result = Handle{ .value = fd } }, + }; + } + + pub fn openat(handle: Handle, path: [:0]const u8) Maybe(Handle) { + return switch (Syscall.openat(handle.value, path, std.os.O.DIRECTORY | std.os.O.RDONLY, 0)) { + .err => |err| .{ .err = err }, + .result => |fd| .{ .result = Handle{ .value = fd } }, + }; + } + + pub fn close(handle: Handle) ?Syscall.Error { + return Syscall.close(handle.value); + } + + pub fn getcwd(path_buf: *[bun.MAX_PATH_BYTES]u8) Maybe([]const u8) { + return Syscall.getcwd(path_buf); + } +}; + +pub const DirEntryAccessor = struct 
{ + const FS = bun.fs.FileSystem; + const Handle = struct { + value: ?*FS.DirEntry, + + const zero = Handle{ .value = null }; + + pub fn isZero(this: Handle) bool { + return this.value == null; + } + + pub fn eql(this: Handle, other: Handle) bool { + // TODO this might not be quite right, we're comparing pointers, not the underlying directory + return this.value == other.value; + } + }; + + // const IterType = @TypeOf((Handle{ .value = null }).value.?.data.iterator()); + + const DirIter = struct { + value: ?FS.DirEntry.EntryMap.Iterator, + + const IterResult = struct { + name: NameWrapper, + kind: std.fs.File.Kind, + + const NameWrapper = struct { + value: []const u8, + + pub fn slice(this: NameWrapper) []const u8 { + return this.value; + } + }; + }; + + pub inline fn next(self: *DirIter) Maybe(?IterResult) { + var value = self.value orelse return .{ .result = null }; + const nextval = value.next() orelse return .{ .result = null }; + const name = nextval.key_ptr.*; + const kind = nextval.value_ptr.*.kind(&FS.instance.fs, true); + const kind2 = switch (kind) { + .file => std.fs.File.Kind.file, + .dir => std.fs.File.Kind.directory, + }; + return .{ + .result = .{ + .name = IterResult.NameWrapper{ .value = name }, + .kind = kind2, + }, + }; + } + + pub inline fn iterate(dir: Handle) DirIter { + const entry = dir.value orelse return DirIter{ .value = null }; + return .{ .value = entry.data.iterator() }; + } + }; + + pub fn open(path: [:0]const u8) Maybe(Handle) { + return openat(Handle.zero, path); + } + + pub fn openat(handle: Handle, path_: [:0]const u8) Maybe(Handle) { + var path: []const u8 = path_; + var buf: bun.PathBuffer = undefined; + if (handle.value) |entry| { + path = bun.path.joinStringBuf(&buf, [_][]const u8{ entry.dir, path_ }, .auto); + } + // TODO handle errors correctly + const res = FS.instance.fs.readDirectory(path, null, 0, true) catch unreachable; + switch (res.*) { + .entries => |entry| { + return .{ .result = Handle{ .value = entry } }; + }, + 
.err => |err| { + // actually report the error if it's not a not found error + _ = err; + return .{ .err = Syscall.Error.fromCode(bun.C.E.NOTDIR, Syscall.Tag.open) }; + }, + } + } + + pub fn close(handle: Handle) ?Syscall.Error { + // TODO is this a noop? + _ = handle; + return null; + } + + pub fn getcwd(path_buf: *[bun.MAX_PATH_BYTES]u8) Maybe([]const u8) { + @memcpy(path_buf, bun.fs.FileSystem.instance.fs.cwd); + } +}; + pub fn GlobWalker_( comptime ignore_filter_fn: ?*const fn ([]const u8) bool, + comptime Accessor: type, ) type { const is_ignored: *const fn ([]const u8) bool = if (comptime ignore_filter_fn) |func| func else dummyFilterFalse; @@ -165,8 +309,8 @@ pub fn GlobWalker_( directory: Directory, const Directory = struct { - fd: bun.FileDescriptor, - iter: DirIterator.WrappedIterator, + fd: Accessor.Handle, + iter: Accessor.DirIter, path: [bun.MAX_PATH_BYTES]u8, dir_path: [:0]const u8, @@ -183,7 +327,7 @@ pub fn GlobWalker_( pub const Iterator = struct { walker: *GlobWalker, iter_state: IterState = .get_next, - cwd_fd: bun.FileDescriptor = .zero, + cwd_fd: Accessor.Handle = Accessor.Handle.zero, empty_dir_path: [0:0]u8 = [0:0]u8{}, /// This is to make sure in debug/tests that we are closing file descriptors /// We should only have max 2 open at a time. One for the cwd, and one for the @@ -195,7 +339,7 @@ pub fn GlobWalker_( const root_path = this.walker.cwd; @memcpy(path_buf[0..root_path.len], root_path[0..root_path.len]); path_buf[root_path.len] = 0; - const cwd_fd = switch (Syscall.open(@ptrCast(path_buf[0 .. root_path.len + 1]), std.os.O.DIRECTORY | std.os.O.RDONLY, 0)) { + const cwd_fd = switch (Accessor.open(@ptrCast(path_buf[0 .. root_path.len + 1]))) { .err => |err| return .{ .err = this.walker.handleSysErrWithPath(err, @ptrCast(path_buf[0 .. 
root_path.len + 1])) }, .result => |fd| fd, }; @@ -238,14 +382,14 @@ pub fn GlobWalker_( } pub fn closeCwdFd(this: *Iterator) void { - if (this.cwd_fd == .zero) return; - _ = Syscall.close(this.cwd_fd); + if (this.cwd_fd.isZero()) return; + _ = Accessor.close(this.cwd_fd); if (bun.Environment.allow_assert) this.fds_open -= 1; } - pub fn closeDisallowingCwd(this: *Iterator, fd: bun.FileDescriptor) void { - if (fd == this.cwd_fd) return; - _ = Syscall.close(fd); + pub fn closeDisallowingCwd(this: *Iterator, fd: Accessor.Handle) void { + if (fd.eql(this.cwd_fd)) return; + _ = Accessor.close(fd); if (bun.Environment.allow_assert) this.fds_open -= 1; } @@ -263,7 +407,7 @@ pub fn GlobWalker_( comptime root: bool, ) !Maybe(void) { this.iter_state = .{ .directory = .{ - .fd = .zero, + .fd = Accessor.Handle.zero, .iter = undefined, .path = undefined, .dir_path = undefined, @@ -298,10 +442,10 @@ pub fn GlobWalker_( this.iter_state.directory.is_last = component_idx == this.walker.patternComponents.items.len - 1; this.iter_state.directory.at_cwd = false; - const fd: bun.FileDescriptor = fd: { + const fd: Accessor.Handle = fd: { if (work_item.fd) |fd| break :fd fd; if (comptime root) { - if (had_dot_dot) break :fd switch (Syscall.openat(this.cwd_fd, dir_path, std.os.O.DIRECTORY | std.os.O.RDONLY, 0)) { + if (had_dot_dot) break :fd switch (Accessor.openat(this.cwd_fd, dir_path)) { .err => |err| return .{ .err = this.walker.handleSysErrWithPath(err, dir_path), }, @@ -315,7 +459,7 @@ pub fn GlobWalker_( break :fd this.cwd_fd; } - break :fd switch (Syscall.openat(this.cwd_fd, dir_path, std.os.O.DIRECTORY | std.os.O.RDONLY, 0)) { + break :fd switch (Accessor.openat(this.cwd_fd, dir_path)) { .err => |err| return .{ .err = this.walker.handleSysErrWithPath(err, dir_path), }, @@ -327,7 +471,7 @@ pub fn GlobWalker_( }; this.iter_state.directory.fd = fd; - const iterator = DirIterator.iterate(fd.asDir(), .u8); + const iterator = Accessor.DirIter.iterate(fd); this.iter_state.directory.iter 
= iterator; this.iter_state.directory.iter_closed = false; @@ -363,7 +507,7 @@ pub fn GlobWalker_( const is_last = component_idx == this.walker.patternComponents.items.len - 1; this.iter_state = .get_next; - const maybe_dir_fd: ?bun.FileDescriptor = switch (Syscall.openat(this.cwd_fd, symlink_full_path_z, std.os.O.DIRECTORY | std.os.O.RDONLY, 0)) { + const maybe_dir_fd: ?Accessor.Handle = switch (Accessor.openat(this.cwd_fd, symlink_full_path_z)) { .err => |err| brk: { if (@as(usize, @intCast(err.errno)) == @as(usize, @intFromEnum(bun.C.E.NOTDIR))) { break :brk null; @@ -530,7 +674,7 @@ pub fn GlobWalker_( idx: u32, kind: Kind, entry_start: u32 = 0, - fd: ?bun.FileDescriptor = null, + fd: ?Accessor.Handle = null, const Kind = enum { directory, @@ -545,7 +689,7 @@ pub fn GlobWalker_( }; } - fn newWithFd(path: []const u8, idx: u32, kind: Kind, fd: bun.FileDescriptor) WorkItem { + fn newWithFd(path: []const u8, idx: u32, kind: Kind, fd: Accessor.Handle) WorkItem { return .{ .path = path, .idx = idx, @@ -609,7 +753,7 @@ pub fn GlobWalker_( ) !Maybe(void) { errdefer arena.deinit(); var cwd: []const u8 = undefined; - switch (Syscall.getcwd(&this.pathBuf)) { + switch (Accessor.getcwd(&this.pathBuf)) { .err => |err| { return .{ .err = err }; }, From 9819a13e5ea2eb69c806fd2d4416230a09f0637d Mon Sep 17 00:00:00 2001 From: Georgijs Vilums Date: Fri, 19 Jan 2024 13:24:20 -0800 Subject: [PATCH 22/74] cleanup and fixes --- src/cli/run_command.zig | 6 --- src/glob.zig | 89 +++++++++++++++++++++++------------------ 2 files changed, 51 insertions(+), 44 deletions(-) diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig index 8edae1af38842..55c657ada5a16 100644 --- a/src/cli/run_command.zig +++ b/src/cli/run_command.zig @@ -1088,10 +1088,6 @@ pub const RunCommand = struct { var root_buf: bun.PathBuffer = undefined; const resolve_root = try FilterArg.getCandidatePackagePatterns(ctx.allocator, ctx.log, &patterns, olddir, &root_buf); - for (patterns.items) |path| { - 
std.debug.print("pattern: {s}\n", .{path}); - } - var package_json_iter = try FilterArg.PackageFilterIterator.init(ctx.allocator, patterns.items, resolve_root); defer package_json_iter.deinit(); @@ -1103,7 +1099,6 @@ pub const RunCommand = struct { while (try package_json_iter.next()) |package_json_path| { const dirpath = std.fs.path.dirname(package_json_path) orelse Global.crash(); const path = strings.withoutTrailingSlash(dirpath); - std.debug.print("package_json_path: {s}\n", .{package_json_path}); const matches = matches: { if (filter_instance.has_name_filters) { // TODO load name from package.json @@ -1133,7 +1128,6 @@ pub const RunCommand = struct { } }; - std.debug.print("matches: {}\n", .{matches}); if (!matches) continue; any_match = true; Output.prettyErrorln("In {s}:", .{path}); diff --git a/src/glob.zig b/src/glob.zig index 9d25eb781b169..4d2dc8e7b6c15 100644 --- a/src/glob.zig +++ b/src/glob.zig @@ -122,6 +122,8 @@ fn dummyFilterFalse(val: []const u8) bool { } pub const SyscallAccessor = struct { + const count_fds = true; + const Handle = struct { value: bun.FileDescriptor, @@ -148,14 +150,14 @@ pub const SyscallAccessor = struct { } }; - pub fn open(path: [:0]const u8) Maybe(Handle) { + pub fn open(path: [:0]const u8) !Maybe(Handle) { return switch (Syscall.open(path, std.os.O.DIRECTORY | std.os.O.RDONLY, 0)) { .err => |err| .{ .err = err }, .result => |fd| .{ .result = Handle{ .value = fd } }, }; } - pub fn openat(handle: Handle, path: [:0]const u8) Maybe(Handle) { + pub fn openat(handle: Handle, path: [:0]const u8) !Maybe(Handle) { return switch (Syscall.openat(handle.value, path, std.os.O.DIRECTORY | std.os.O.RDONLY, 0)) { .err => |err| .{ .err = err }, .result => |fd| .{ .result = Handle{ .value = fd } }, @@ -173,6 +175,9 @@ pub const SyscallAccessor = struct { pub const DirEntryAccessor = struct { const FS = bun.fs.FileSystem; + + const count_fds = false; + const Handle = struct { value: ?*FS.DirEntry, @@ -184,12 +189,12 @@ pub const 
DirEntryAccessor = struct { pub fn eql(this: Handle, other: Handle) bool { // TODO this might not be quite right, we're comparing pointers, not the underlying directory + // On the other hand, DirEntries are only ever created once (per generation), so this should be fine? + // Realistically, as closing the handle is a no-op, this should be fine either way. return this.value == other.value; } }; - // const IterType = @TypeOf((Handle{ .value = null }).value.?.data.iterator()); - const DirIter = struct { value: ?FS.DirEntry.EntryMap.Iterator, @@ -207,20 +212,23 @@ pub const DirEntryAccessor = struct { }; pub inline fn next(self: *DirIter) Maybe(?IterResult) { - var value = self.value orelse return .{ .result = null }; - const nextval = value.next() orelse return .{ .result = null }; - const name = nextval.key_ptr.*; - const kind = nextval.value_ptr.*.kind(&FS.instance.fs, true); - const kind2 = switch (kind) { - .file => std.fs.File.Kind.file, - .dir => std.fs.File.Kind.directory, - }; - return .{ - .result = .{ - .name = IterResult.NameWrapper{ .value = name }, - .kind = kind2, - }, - }; + if (self.value) |*value| { + const nextval = value.next() orelse return .{ .result = null }; + const name = nextval.key_ptr.*; + const kind = nextval.value_ptr.*.kind(&FS.instance.fs, true); + const fskind = switch (kind) { + .file => std.fs.File.Kind.file, + .dir => std.fs.File.Kind.directory, + }; + return .{ + .result = .{ + .name = IterResult.NameWrapper{ .value = name }, + .kind = fskind, + }, + }; + } else { + return .{ .result = null }; + } } pub inline fn iterate(dir: Handle) DirIter { @@ -229,31 +237,35 @@ pub const DirEntryAccessor = struct { } }; - pub fn open(path: [:0]const u8) Maybe(Handle) { + pub fn open(path: [:0]const u8) !Maybe(Handle) { return openat(Handle.zero, path); } - pub fn openat(handle: Handle, path_: [:0]const u8) Maybe(Handle) { + pub fn openat(handle: Handle, path_: [:0]const u8) !Maybe(Handle) { var path: []const u8 = path_; var buf: bun.PathBuffer 
= undefined; - if (handle.value) |entry| { - path = bun.path.joinStringBuf(&buf, [_][]const u8{ entry.dir, path_ }, .auto); + if (path.len > 0 and path[0] != '/') { + if (handle.value) |entry| { + path = bun.path.joinStringBuf(&buf, [_][]const u8{ entry.dir, path }, .auto); + } } - // TODO handle errors correctly - const res = FS.instance.fs.readDirectory(path, null, 0, true) catch unreachable; + // TODO do we want to propagate ENOTDIR through the 'Maybe' to match the SyscallAccessor? + // The glob implementation specifically checks for this error when dealing with symlinks + // return .{ .err = Syscall.Error.fromCode(bun.C.E.NOTDIR, Syscall.Tag.open) }; + const res = FS.instance.fs.readDirectory(path, null, 0, false) catch |err| { + return err; + }; switch (res.*) { .entries => |entry| { return .{ .result = Handle{ .value = entry } }; }, .err => |err| { - // actually report the error if it's not a not found error - _ = err; - return .{ .err = Syscall.Error.fromCode(bun.C.E.NOTDIR, Syscall.Tag.open) }; + return err.original_err; }, } } - pub fn close(handle: Handle) ?Syscall.Error { + pub inline fn close(handle: Handle) ?Syscall.Error { // TODO is this a noop? _ = handle; return null; @@ -269,6 +281,7 @@ pub fn GlobWalker_( comptime Accessor: type, ) type { const is_ignored: *const fn ([]const u8) bool = if (comptime ignore_filter_fn) |func| func else dummyFilterFalse; + const count_fds = Accessor.count_fds and bun.Environment.allow_assert; return struct { const GlobWalker = @This(); @@ -332,19 +345,19 @@ pub fn GlobWalker_( /// This is to make sure in debug/tests that we are closing file descriptors /// We should only have max 2 open at a time. One for the cwd, and one for the /// directory being iterated on. 
- fds_open: if (bun.Environment.allow_assert) usize else u0 = 0, + fds_open: if (count_fds) usize else u0 = 0, pub fn init(this: *Iterator) !Maybe(void) { var path_buf: *[bun.MAX_PATH_BYTES]u8 = &this.walker.pathBuf; const root_path = this.walker.cwd; @memcpy(path_buf[0..root_path.len], root_path[0..root_path.len]); path_buf[root_path.len] = 0; - const cwd_fd = switch (Accessor.open(@ptrCast(path_buf[0 .. root_path.len + 1]))) { + const cwd_fd = switch (try Accessor.open(path_buf[0..root_path.len :0])) { .err => |err| return .{ .err = this.walker.handleSysErrWithPath(err, @ptrCast(path_buf[0 .. root_path.len + 1])) }, .result => |fd| fd, }; - if (bun.Environment.allow_assert) { + if (comptime count_fds) { this.fds_open += 1; } @@ -376,7 +389,7 @@ pub fn GlobWalker_( } } - if (bun.Environment.allow_assert) { + if (comptime count_fds) { std.debug.assert(this.fds_open == 0); } } @@ -384,17 +397,17 @@ pub fn GlobWalker_( pub fn closeCwdFd(this: *Iterator) void { if (this.cwd_fd.isZero()) return; _ = Accessor.close(this.cwd_fd); - if (bun.Environment.allow_assert) this.fds_open -= 1; + if (comptime count_fds) this.fds_open -= 1; } pub fn closeDisallowingCwd(this: *Iterator, fd: Accessor.Handle) void { if (fd.eql(this.cwd_fd)) return; _ = Accessor.close(fd); - if (bun.Environment.allow_assert) this.fds_open -= 1; + if (comptime count_fds) this.fds_open -= 1; } pub fn bumpOpenFds(this: *Iterator) void { - if (bun.Environment.allow_assert) { + if (comptime count_fds) { this.fds_open += 1; // If this is over 2 then this means that there is a bug in the iterator code std.debug.assert(this.fds_open <= 2); @@ -445,7 +458,7 @@ pub fn GlobWalker_( const fd: Accessor.Handle = fd: { if (work_item.fd) |fd| break :fd fd; if (comptime root) { - if (had_dot_dot) break :fd switch (Accessor.openat(this.cwd_fd, dir_path)) { + if (had_dot_dot) break :fd switch (try Accessor.openat(this.cwd_fd, dir_path)) { .err => |err| return .{ .err = this.walker.handleSysErrWithPath(err, dir_path), }, 
@@ -459,7 +472,7 @@ pub fn GlobWalker_( break :fd this.cwd_fd; } - break :fd switch (Accessor.openat(this.cwd_fd, dir_path)) { + break :fd switch (try Accessor.openat(this.cwd_fd, dir_path)) { .err => |err| return .{ .err = this.walker.handleSysErrWithPath(err, dir_path), }, @@ -507,7 +520,7 @@ pub fn GlobWalker_( const is_last = component_idx == this.walker.patternComponents.items.len - 1; this.iter_state = .get_next; - const maybe_dir_fd: ?Accessor.Handle = switch (Accessor.openat(this.cwd_fd, symlink_full_path_z)) { + const maybe_dir_fd: ?Accessor.Handle = switch (try Accessor.openat(this.cwd_fd, symlink_full_path_z)) { .err => |err| brk: { if (@as(usize, @intCast(err.errno)) == @as(usize, @intFromEnum(bun.C.E.NOTDIR))) { break :brk null; From c8c21bfdcb887113d2f8d9307e2a20cb39145176 Mon Sep 17 00:00:00 2001 From: Georgijs Vilums Date: Fri, 19 Jan 2024 15:59:18 -0800 Subject: [PATCH 23/74] run js files in subprocess when filter flag passed --- src/cli.zig | 2 +- src/cli/run_command.zig | 58 +++++++++++++++++++++++++++++++++++------ 2 files changed, 51 insertions(+), 9 deletions(-) diff --git a/src/cli.zig b/src/cli.zig index eacc0e924de98..cc7ce12618015 100644 --- a/src/cli.zig +++ b/src/cli.zig @@ -1737,7 +1737,7 @@ pub const Command = struct { if (ctx.filters.len > 0) { Output.prettyln("warn: Filters are ignored for auto command", .{}); } - switch (try RunCommand.exec(ctx, true, false)) { + switch (try RunCommand.exec(ctx, true, false, false)) { .failure => {}, .ok => return, .code => |code| { diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig index 55c657ada5a16..608d7af510bd9 100644 --- a/src/cli/run_command.zig +++ b/src/cli/run_command.zig @@ -477,6 +477,47 @@ pub const RunCommand = struct { return true; } + pub fn bootMaybeSubprocess(ctx_: Command.Context, entry_path: string, comptime subprocess: bool) !ExecResult { + if (comptime subprocess) { + const pid = std.c.fork(); + if (pid == 0) { + // child + Run.boot(ctx_, entry_path) catch { + 
Global.exit(1); + }; + Global.exit(0); + } else { + //p arent + var cstatus: c_int = 0; + switch (std.c.getErrno(std.c.waitpid(pid, &cstatus, 0))) { + .SUCCESS => { + const status: u32 = @intCast(cstatus); + const os = std.os; + if (os.W.IFEXITED(status)) { + const code = os.W.EXITSTATUS(status); + return ExecResult.withCode(@intCast(code)); + // Term{ .Exited = os.W.EXITSTATUS(status) }; + } else if (os.W.IFSIGNALED(status)) { + const code = os.W.TERMSIG(status); + Global.exit(128 + @as(u8, @as(u7, @truncate(code)))); + } else if (os.W.IFSTOPPED(status)) { + const code = os.W.STOPSIG(status); + Global.exit(128 + @as(u8, @as(u7, @truncate(code)))); + } else { + // shouldn't be possible, but in case we get an invalid status code just ignore it + return ExecResult.ok; + } + }, + // if the waitpid call failed, we can't get the exit code + else => return ExecResult.failure, + } + } + } else { + try Run.boot(ctx_, entry_path); + return ExecResult.ok; + } + } + pub fn ls(ctx: Command.Context) !void { const args = ctx.args; @@ -1035,7 +1076,7 @@ pub const RunCommand = struct { pub fn execAll(ctx: Command.Context, comptime bin_dirs_only: bool) !void { // without filters just behave like normal exec if (ctx.filters.len == 0) { - switch (try exec(ctx, bin_dirs_only, true)) { + switch (try exec(ctx, bin_dirs_only, true, false)) { .ok => return, .failure => Global.exit(1), .code => |code| Global.exit(code), @@ -1143,7 +1184,7 @@ pub const RunCommand = struct { fsinstance.top_level_dir = path; // TODO is this necessary? which assignment is correct here? 
fsinstance.fs.cwd = path; - const res = exec(ctx, bin_dirs_only, true) catch |err| { + const res = exec(ctx, bin_dirs_only, true, true) catch |err| { Output.prettyErrorln("error: Failed to run {s} due to error {s}", .{ path, @errorName(err) }); continue; }; @@ -1160,7 +1201,7 @@ pub const RunCommand = struct { } } - pub fn exec(ctx_: Command.Context, comptime bin_dirs_only: bool, comptime log_errors: bool) !ExecResult { + pub fn exec(ctx_: Command.Context, comptime bin_dirs_only: bool, comptime log_errors: bool, comptime subprocess: bool) !ExecResult { var ctx = ctx_; // Step 1. Figure out what we're trying to run @@ -1182,7 +1223,7 @@ pub const RunCommand = struct { if ((script_name_to_search.len == 1 and script_name_to_search[0] == '.') or (script_name_to_search.len == 2 and @as(u16, @bitCast(script_name_to_search[0..2].*)) == @as(u16, @bitCast([_]u8{ '.', '/' })))) { - Run.boot(ctx, ".") catch |err| { + return bootMaybeSubprocess(ctx, ".", subprocess) catch |err| { if (Output.enable_ansi_colors) { ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true) catch {}; } else { @@ -1198,7 +1239,7 @@ pub const RunCommand = struct { } return ExecResult.failure; }; - return ExecResult.ok; + // return ExecResult.ok; } if (log_errors or force_using_bun) { @@ -1282,7 +1323,8 @@ pub const RunCommand = struct { Global.configureAllocator(.{ .long_running = true }); const out_path = ctx.allocator.dupe(u8, file_path) catch unreachable; - Run.boot(ctx, out_path) catch |err| { + + return bootMaybeSubprocess(ctx, out_path, subprocess) catch |err| { if (Output.enable_ansi_colors) { ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true) catch {}; } else { @@ -1299,7 +1341,7 @@ pub const RunCommand = struct { return ExecResult.failure; }; - return ExecResult.ok; + // return ExecResult.ok; } } } @@ -1369,7 +1411,7 @@ pub const RunCommand = struct { } else if ((script_name_to_search.len > 1 and script_name_to_search[0] == '/') or 
(script_name_to_search.len > 2 and script_name_to_search[0] == '.' and script_name_to_search[1] == '/')) { - Run.boot(ctx, ctx.allocator.dupe(u8, script_name_to_search) catch unreachable) catch |err| { + return bootMaybeSubprocess(ctx, try ctx.allocator.dupe(u8, script_name_to_search), subprocess) catch |err| { if (Output.enable_ansi_colors) { ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), true) catch {}; } else { From 25577632a805f49c084ac061ceda201e30bba26c Mon Sep 17 00:00:00 2001 From: Georgijs Vilums Date: Fri, 19 Jan 2024 16:20:19 -0800 Subject: [PATCH 24/74] clean up dead code --- src/cli/run_command.zig | 20 -------------------- 1 file changed, 20 deletions(-) diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig index 608d7af510bd9..87e8db3ef8ee2 100644 --- a/src/cli/run_command.zig +++ b/src/cli/run_command.zig @@ -1061,9 +1061,6 @@ pub const RunCommand = struct { return ExecResult{ .code = code }; } - // const ok = ExecResult{.ok = void}; - // const failure = ExecResult.failure; - pub fn notFailure(self: ExecResult) bool { return switch (self) { .ok => true, @@ -1098,23 +1095,6 @@ pub const RunCommand = struct { } } - const root_dir = try Fs.FileSystem.instance.fs.readDirectory( - Fs.FileSystem.instance.top_level_dir, - null, - 0, - true, - ); - switch (root_dir.*) { - .entries => |e| { - // _ = e.data.get("package.json"); - _ = e; - }, - .err => |err| { - _ = err; - Global.crash(); - }, - } - var filter_instance = try FilterArg.FilterSet.init(ctx.allocator, ctx.filters, olddir); defer filter_instance.deinit(); From 5e467cc7ab420a8085092798c7b025de663fbae5 Mon Sep 17 00:00:00 2001 From: Georgijs Vilums Date: Fri, 19 Jan 2024 16:48:46 -0800 Subject: [PATCH 25/74] fix fd leak in run_command.zig --- src/cli/run_command.zig | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig index 87e8db3ef8ee2..0f2ecbeaddcd5 100644 --- a/src/cli/run_command.zig +++ 
b/src/cli/run_command.zig @@ -487,7 +487,7 @@ pub const RunCommand = struct { }; Global.exit(0); } else { - //p arent + // parent var cstatus: c_int = 0; switch (std.c.getErrno(std.c.waitpid(pid, &cstatus, 0))) { .SUCCESS => { @@ -1265,6 +1265,7 @@ pub const RunCommand = struct { }; const file = file_ catch break :possibly_open_with_bun_js; + defer file.close(); if (!force_using_bun) { // Due to preload, we don't know if they intend to run @@ -1320,8 +1321,6 @@ pub const RunCommand = struct { } return ExecResult.failure; }; - - // return ExecResult.ok; } } } From 1a3fbc194390ecf78818d4dfe99d43db2e8e7ea7 Mon Sep 17 00:00:00 2001 From: "autofix-ci[bot]" <114827586+autofix-ci[bot]@users.noreply.github.com> Date: Sat, 20 Jan 2024 02:10:19 +0000 Subject: [PATCH 26/74] [autofix.ci] apply automated fixes --- src/glob.zig | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/glob.zig b/src/glob.zig index 1724e88ef2e2d..3b1e7524dc2e9 100644 --- a/src/glob.zig +++ b/src/glob.zig @@ -109,7 +109,6 @@ const CursorState = struct { } }; - pub const BunGlobWalker = GlobWalker_(null, SyscallAccessor, false); fn dummyFilterTrue(val: []const u8) bool { @@ -283,7 +282,7 @@ pub fn GlobWalker_( comptime sentinel: bool, ) type { const is_ignored: *const fn ([]const u8) bool = if (comptime ignore_filter_fn) |func| func else dummyFilterFalse; - + const count_fds = Accessor.count_fds and bun.Environment.allow_assert; const stdJoin = comptime if (!sentinel) std.fs.path.join else std.fs.path.joinZ; From 9a7ba61a81505b87083ee85cb13dd9a90ad38d81 Mon Sep 17 00:00:00 2001 From: Georgijs Vilums Date: Fri, 19 Jan 2024 18:18:20 -0800 Subject: [PATCH 27/74] fix issues after merge --- src/bun_js.zig | 2 +- src/cli/filter_arg.zig | 2 +- src/deps/tinycc | 2 +- src/shell/interpreter.zig | 2 +- test/cli/run/filter-workspace.test.ts | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/bun_js.zig b/src/bun_js.zig index 9a6163ca66fab..3fa83ff0ba461 100644 --- 
a/src/bun_js.zig +++ b/src/bun_js.zig @@ -147,7 +147,7 @@ pub const Run = struct { ); try bundle.runEnvLoader(); const mini = JSC.MiniEventLoop.initGlobal(bundle.env); - mini.top_level_dir = ctx.args.absolute_working_dir orelse ""; + mini.top_level_dir = ctx.args.cwd_override orelse ""; try bun.shell.InterpreterMini.initAndRunFromFile(mini, entry_path); return; } diff --git a/src/cli/filter_arg.zig b/src/cli/filter_arg.zig index 9c6467c868458..10bf105b4d37d 100644 --- a/src/cli/filter_arg.zig +++ b/src/cli/filter_arg.zig @@ -25,7 +25,7 @@ fn globIgnoreFn(val: []const u8) bool { return false; } -const GlobWalker = Glob.GlobWalker_(globIgnoreFn, Glob.DirEntryAccessor); +const GlobWalker = Glob.GlobWalker_(globIgnoreFn, Glob.DirEntryAccessor, false); pub fn getCandidatePackagePatterns(allocator: std.mem.Allocator, log: *bun.logger.Log, out_patterns: *std.ArrayList([]u8), workdir_: []const u8, root_buf: *bun.PathBuffer) ![]const u8 { bun.JSAst.Expr.Data.Store.create(bun.default_allocator); diff --git a/src/deps/tinycc b/src/deps/tinycc index ab631362d8393..2d3ad9e0d3219 160000 --- a/src/deps/tinycc +++ b/src/deps/tinycc @@ -1 +1 @@ -Subproject commit ab631362d839333660a265d3084d8ff060b96753 +Subproject commit 2d3ad9e0d32194ad7fd867b66ebe218dcc8cb5cd diff --git a/src/shell/interpreter.zig b/src/shell/interpreter.zig index 09f27a75388e8..50cdd2c348350 100644 --- a/src/shell/interpreter.zig +++ b/src/shell/interpreter.zig @@ -48,7 +48,7 @@ const Token = shell.Token; const ShellError = shell.ShellError; const ast = shell.AST; -const GlobWalker = @import("../glob.zig").GlobWalker_(null, true); +const GlobWalker = Glob.GlobWalker_(null, Glob.SyscallAccessor, true); pub const SUBSHELL_TODO_ERROR = "Subshells are not implemented, please open GitHub issue."; const stdin_no = 0; diff --git a/test/cli/run/filter-workspace.test.ts b/test/cli/run/filter-workspace.test.ts index 9b6bbdd6fc314..b60e510ab408f 100644 --- a/test/cli/run/filter-workspace.test.ts +++ 
b/test/cli/run/filter-workspace.test.ts @@ -121,7 +121,7 @@ describe("bun", () => { for (let d of dirs) { test(`resolve '*' from ${d}`, () => { - runInCwdSuccess(d, "*", [/scripta/, /scriptb/, /scriptc/, /rootscript/]); + runInCwdSuccess(d, "*", [/scripta/, /scriptb/, /scriptc/]); }); test(`resolve all from ${d}`, () => { runInCwdSuccess(d, names, [/scripta/, /scriptb/, /scriptc/]); From 63124c3e0374cffafd3ad523553ca69a4f335844 Mon Sep 17 00:00:00 2001 From: Georgijs Vilums Date: Sat, 20 Jan 2024 14:27:06 -0800 Subject: [PATCH 28/74] use posix-spawn in runBinary, fix resource PATH variable resource leak --- src/cli/run_command.zig | 130 +++++++++++++++++++++++++--------------- 1 file changed, 82 insertions(+), 48 deletions(-) diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig index 9d98a111fff77..d38861290a906 100644 --- a/src/cli/run_command.zig +++ b/src/cli/run_command.zig @@ -370,7 +370,6 @@ pub const RunCommand = struct { /// When printing error messages from 'bun run', attribute bun overridden node.js to bun /// This prevents '"node" exited with ...' when it was actually bun. 
- /// As of writing this is only used for 'runBinary' fn basenameOrBun(str: []const u8) []const u8 { if (strings.eqlComptime(str, bun_node_dir ++ "/node")) { return "bun"; @@ -386,46 +385,93 @@ pub const RunCommand = struct { passthrough: []const string, original_script_for_bun_run: ?[]const u8, ) !ExecResult { - var argv_ = [_]string{executable}; - var argv: []const string = &argv_; - - if (passthrough.len > 0) { - var array_list = std.ArrayList(string).init(ctx.allocator); - try array_list.append(executable); - try array_list.appendSlice(passthrough); - argv = try array_list.toOwnedSlice(); + var arena = std.heap.ArenaAllocator.init(ctx.allocator); + defer arena.deinit(); + const alloc = arena.allocator(); + + const argc = passthrough.len + 1; + var argv = try alloc.allocSentinel(?[*:0]const u8, argc, null); + var name = try alloc.allocSentinel(u8, executable.len, 0); + @memcpy(name[0..executable.len], executable); + argv[0] = name; //name[0..executable.len :0]; + + for (1.., passthrough) |i, p| { + var arg = try alloc.allocSentinel(u8, p.len, 0); + @memcpy(arg[0..p.len], p); + argv[i] = arg; } - var child_process = std.ChildProcess.init(argv, ctx.allocator); + const envc = env.map.map.count(); + var envp = try alloc.allocSentinel(?[*:0]const u8, envc, null); + var offset: usize = 0; + var iter = env.map.map.iterator(); + while (iter.next()) |entry| : (offset += 1) { + const len = entry.key_ptr.len + 1 + entry.value_ptr.value.len; + var envvar = try alloc.allocSentinel(u8, len, 0); + @memcpy(envvar[0..entry.key_ptr.len], entry.key_ptr.*); + envvar[entry.key_ptr.len] = '='; + @memcpy(envvar[entry.key_ptr.len + 1 .. 
len], entry.value_ptr.value); + envp[offset] = envvar; + } - var buf_map = try env.map.cloneToEnvMap(ctx.allocator); - child_process.cwd = cwd; - child_process.env_map = &buf_map; - child_process.stderr_behavior = .Inherit; - child_process.stdin_behavior = .Inherit; - child_process.stdout_behavior = .Inherit; const silent = ctx.debug.silent; - const result = child_process.spawnAndWait() catch |err| { - if (err == error.AccessDenied) { + var actions = try PosixSpawn.Actions.init(); + try actions.inherit(bun.posix.STDIN_FD); + try actions.inherit(bun.posix.STDOUT_FD); + try actions.inherit(bun.posix.STDERR_FD); + try actions.chdir(cwd); + + var execpath_buf: bun.PathBuffer = undefined; + @memcpy(execpath_buf[0..executable.len], executable); + execpath_buf[executable.len] = 0; + const execpath = execpath_buf[0..executable.len :0]; + + const childpid = switch (PosixSpawn.spawnZ(execpath, actions, null, argv, envp)) { + .result => |pid| pid, + .err => |err| { if (comptime Environment.isPosix) { - var stat = std.mem.zeroes(std.c.Stat); - const rc = bun.C.stat(executable[0.. :0].ptr, &stat); - if (rc == 0) { - if (std.os.S.ISDIR(stat.mode)) { - if (!silent) - Output.prettyErrorln("error: Failed to run directory \"{s}\"\n", .{executable}); - if (@errorReturnTrace()) |trace| { - std.debug.dumpStackTrace(trace.*); + const EACCES: u16 = @intCast(@intFromEnum(bun.C.E.ACCES)); + if (@as(u16, err.errno) == EACCES) { + var stat = std.mem.zeroes(std.c.Stat); + const rc = bun.C.stat(executable[0.. 
:0].ptr, &stat); + if (rc == 0) { + if (std.os.S.ISDIR(stat.mode)) { + if (!silent) + Output.prettyErrorln("error: Failed to run directory \"{s}\"\n", .{executable}); + if (@errorReturnTrace()) |trace| { + std.debug.dumpStackTrace(trace.*); + } + return ExecResult.failure; } - return ExecResult.failure; } } } - } - Output.prettyErrorln("error: Failed to run \"{s}\" due to error {s}", .{ basenameOrBun(executable), @errorName(err) }); - return ExecResult.failure; + Output.prettyErrorln("error: Failed to run \"{s}\" due to error: {}", .{ basenameOrBun(executable), err }); + return ExecResult.failure; + }, }; + + const status = switch (PosixSpawn.waitpid(childpid, 0)) { + .result => |res| res.status, + .err => |err| { + Output.prettyErrorln("error: Failed to run \"{s}\" due to error: {}", .{ basenameOrBun(executable), err }); + return ExecResult.failure; + }, + }; + + const os = std.os; + const Term = std.ChildProcess.Term; + const result = + if (os.W.IFEXITED(status)) + Term{ .Exited = os.W.EXITSTATUS(status) } + else if (os.W.IFSIGNALED(status)) + Term{ .Signal = os.W.TERMSIG(status) } + else if (os.W.IFSTOPPED(status)) + Term{ .Stopped = os.W.STOPSIG(status) } + else + Term{ .Unknown = status }; + switch (result) { .Exited => |code| { if (!silent) { @@ -473,8 +519,6 @@ pub const RunCommand = struct { return ExecResult.failure; }, } - - return true; } pub fn bootMaybeSubprocess(ctx_: Command.Context, entry_path: string, comptime subprocess: bool) !ExecResult { @@ -1334,6 +1378,12 @@ pub const RunCommand = struct { const root_dir_info = try configureEnvForRun(ctx, &this_bundler, null, log_errors); try configurePathForRun(ctx, root_dir_info, &this_bundler, &ORIGINAL_PATH, root_dir_info.abs_path, force_using_bun); this_bundler.env.map.put("npm_lifecycle_event", script_name_to_search) catch unreachable; + defer { + this_bundler.env.map.put("PATH", ORIGINAL_PATH) catch |err| { + Output.prettyErrorln("error: Failed to restore PATH due to error {s}", .{@errorName(err)}); + 
Global.crash(); + }; + } if (script_name_to_search.len == 0) { // naked "bun run" @@ -1430,22 +1480,6 @@ pub const RunCommand = struct { if (path_for_which.len > 0) { if (which(&path_buf, path_for_which, this_bundler.fs.top_level_dir, script_name_to_search)) |destination| { - // var file = std.fs.openFileAbsoluteZ(destination, .{ .mode = .read_only }) catch |err| { - // if (!log_errors) return false; - - // Output.prettyErrorln("error: {s} opening file: \"{s}\"", .{ err, std.mem.span(destination) }); - // Output.flush(); - // return err; - // }; - // // var outbuf = bun.getFdPath(file.handle, &path_buf2) catch |err| { - // // if (!log_errors) return false; - // // Output.prettyErrorln("error: {s} resolving file: \"{s}\"", .{ err, std.mem.span(destination) }); - // // Output.flush(); - // // return err; - // // }; - - // // file.close(); - const out = bun.asByteSlice(destination); return try runBinary( ctx, From 9af23adb39d3455bcfde8870cb887b3fb66aad91 Mon Sep 17 00:00:00 2001 From: Georgijs Vilums Date: Sat, 20 Jan 2024 15:10:51 -0800 Subject: [PATCH 29/74] move filter argument to runtime category --- src/cli.zig | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/cli.zig b/src/cli.zig index 9425a1890e230..6f4c3f036cbd7 100644 --- a/src/cli.zig +++ b/src/cli.zig @@ -151,9 +151,6 @@ pub const Arguments = struct { const base_params_ = [_]ParamType{ clap.parseParam("--env-file ... Load environment variables from the specified file(s)") catch unreachable, clap.parseParam("--cwd Absolute path to resolve files & entry points from. This just changes the process' cwd.") catch unreachable, - // clap.parseParam("-w, --workspace Perform the command on the specified workspace member package") catch unreachable, - clap.parseParam("--filter ... 
Perform the command on all workspace member packages that match the pattern") catch unreachable, - // clap.parseParam("--fail-if-no-match Fail if no packages match the filter") catch unreachable, clap.parseParam("-c, --config ? Specify path to Bun config file. Default $cwd/bunfig.toml") catch unreachable, clap.parseParam("-h, --help Display this menu and exit") catch unreachable, clap.parseParam("...") catch unreachable, @@ -200,6 +197,7 @@ pub const Arguments = struct { const auto_params = auto_only_params ++ runtime_params_ ++ transpiler_params_ ++ base_params_; const run_only_params = [_]ParamType{ + clap.parseParam("--filter ... Run the script or executable in each workspace package matching the filter pattern") catch unreachable, clap.parseParam("--silent Don't print the script command") catch unreachable, clap.parseParam("-b, --bun Force a script or package to use Bun's runtime instead of Node.js (via symlinking node)") catch unreachable, }; @@ -430,7 +428,9 @@ pub const Arguments = struct { cwd = try bun.getcwd(&cwd_buf); } - ctx.filters = args.options("--filter"); + if (cmd == .RunCommand) { + ctx.filters = args.options("--filter"); + } if (cmd == .TestCommand) { if (args.option("--timeout")) |timeout_ms| { From 26a720defe3a0962f8b1062734b221b838613d4a Mon Sep 17 00:00:00 2001 From: Georgijs Vilums Date: Sun, 21 Jan 2024 11:41:30 -0800 Subject: [PATCH 30/74] fix test harness --- test/harness.ts | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/test/harness.ts b/test/harness.ts index df7b74e7970b9..13763af4608bf 100644 --- a/test/harness.ts +++ b/test/harness.ts @@ -97,12 +97,17 @@ export function tempDirWithFiles(basename: string, files: DirectoryTree): string function makeTree(base: string, tree: DirectoryTree) { for (const [name, contents] of Object.entries(tree)) { + let joined = path.join(base, name); + if (name.includes("/")) { + let dirname = path.dirname(name); + fs.mkdirSync(path.join(base, dirname), { recursive: true }); 
+ } if (typeof contents === "object") { - fs.mkdirSync(path.join(base, name), { recursive: true }); - makeTree(path.join(base, name), contents); + fs.mkdirSync(joined); + makeTree(joined, contents); continue; } - fs.writeFileSync(path.join(base, name), contents); + fs.writeFileSync(joined, contents); } } From 342687367d2b09b94b78cb08ebed475760a649a7 Mon Sep 17 00:00:00 2001 From: Georgijs Vilums Date: Mon, 22 Jan 2024 10:00:31 -0800 Subject: [PATCH 31/74] add js and binary tests to filter-workspace --- test/cli/run/filter-workspace.test.ts | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/test/cli/run/filter-workspace.test.ts b/test/cli/run/filter-workspace.test.ts index b60e510ab408f..7874600c906c8 100644 --- a/test/cli/run/filter-workspace.test.ts +++ b/test/cli/run/filter-workspace.test.ts @@ -7,6 +7,7 @@ import { join } from "path"; let cwd_root = tempDirWithFiles("testworkspace", { "packages": { "pkga": { + "index.js": "console.log('pkga');", "package.json": JSON.stringify({ "name": "pkga", "scripts": { @@ -15,6 +16,7 @@ let cwd_root = tempDirWithFiles("testworkspace", { }), }, "pkgb": { + "index.js": "console.log('pkgb');", "package.json": JSON.stringify({ "name": "pkgb", "scripts": { @@ -23,6 +25,7 @@ let cwd_root = tempDirWithFiles("testworkspace", { }), }, "dirname": { + "index.js": "console.log('pkgc');", "package.json": JSON.stringify({ "name": "pkgc", "scripts": { @@ -50,6 +53,7 @@ function runInCwdSuccess( pattern: string | string[], target_pattern: RegExp | RegExp[], antipattern?: RegExp | RegExp[], + command: string[] = ["present"], ) { let cmd = [bunExe(), "run"]; if (pattern instanceof Array) { @@ -59,7 +63,9 @@ function runInCwdSuccess( } else { cmd.push("--filter", pattern); } - cmd.push("present"); + for (let c of command) { + cmd.push(c); + } const { exitCode, stdout, stderr } = spawnSync({ cwd: cwd, cmd: cmd, @@ -138,6 +144,14 @@ describe("bun", () => { runInCwdSuccess(cwd_root, "./packages/pkg*", [/scripta/, 
/scriptb/], /scriptc/); }); + test("resolve and run all js scripts", () => { + runInCwdSuccess(cwd_root, "*", [/pkga/, /pkgb/, /pkgc/], [], ["index.js"]); + }); + + test("run binaries in package directories", () => { + runInCwdSuccess(cwd_root, "*", [/pkga/, /pkgb/, /dirname/], [], ["pwd"]); + }); + test("should error with missing script", () => { runInCwdFailure(cwd_root, "*", "notpresent", /found/); }); From a363f48643409d76e575b7d9c93ca8c597dcc9de Mon Sep 17 00:00:00 2001 From: "autofix-ci[bot]" <114827586+autofix-ci[bot]@users.noreply.github.com> Date: Mon, 22 Jan 2024 18:01:37 +0000 Subject: [PATCH 32/74] [autofix.ci] apply automated fixes --- bench/snippets/shell-spawn.mjs | 37 ++++++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) create mode 100644 bench/snippets/shell-spawn.mjs diff --git a/bench/snippets/shell-spawn.mjs b/bench/snippets/shell-spawn.mjs new file mode 100644 index 0000000000000..aa4da66eeb0fd --- /dev/null +++ b/bench/snippets/shell-spawn.mjs @@ -0,0 +1,37 @@ +import { $ as zx } from "zx"; +import { $ as execa$ } from "execa"; +import { bench, run, group } from "./runner.mjs"; + +const execa = execa$({ stdio: "ignore", cwd: import.meta.dirname }); + +group("echo hi", () => { + if (typeof Bun !== "undefined") + bench("$`echo hi`", async () => { + await Bun.$`echo hi`.quiet(); + }); + + bench("execa`echo hi`", async () => { + await execa`echo hi`; + }); + + bench("zx`echo hi`", async () => { + await zx`echo hi`.quiet(); + }); +}); + +group("ls .", () => { + if (typeof Bun !== "undefined") + bench("$`ls .`", async () => { + await Bun.$`ls .`.quiet(); + }); + + bench("execa`ls .`", async () => { + await execa`ls .`; + }); + + bench("zx`ls .`", async () => { + await zx`ls .`.quiet(); + }); +}); + +await run(); From 5b91ca381ff4a589dd24d476826074264bf50309 Mon Sep 17 00:00:00 2001 From: Georgijs Vilums Date: Thu, 4 Apr 2024 13:13:31 -0700 Subject: [PATCH 33/74] fix compile after merge --- src/bun_js.zig | 2 +- src/cli/run_command.zig 
| 192 ++++++++++++++++++++++++++++++++++++++-- src/glob.zig | 14 ++- 3 files changed, 190 insertions(+), 18 deletions(-) diff --git a/src/bun_js.zig b/src/bun_js.zig index ec76be444d220..a98af5d5a7e99 100644 --- a/src/bun_js.zig +++ b/src/bun_js.zig @@ -135,7 +135,7 @@ pub const Run = struct { ); try bundle.runEnvLoader(false); const mini = JSC.MiniEventLoop.initGlobal(bundle.env); - mini.top_level_dir = ctx.args.absolute_working_dir orelse ""; + mini.top_level_dir = ctx.args.cwd_override orelse ""; return bun.shell.Interpreter.initAndRunFromFile(ctx, mini, entry_path); } diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig index bbd2e894781d7..821258143ae02 100644 --- a/src/cli/run_command.zig +++ b/src/cli/run_command.zig @@ -1281,12 +1281,183 @@ pub const RunCommand = struct { Output.flush(); } + pub fn bootMaybeSubprocess(ctx_: Command.Context, entry_path: string, comptime subprocess: bool) !ExecResult { + if (comptime subprocess) { + const pid = std.c.fork(); + if (pid == 0) { + // child + Run.boot(ctx_, entry_path) catch { + Global.exit(1); + }; + Global.exit(0); + } else { + // parent + var cstatus: c_int = 0; + switch (std.c.getErrno(std.c.waitpid(pid, &cstatus, 0))) { + .SUCCESS => { + const status: u32 = @intCast(cstatus); + const os = std.os; + if (os.W.IFEXITED(status)) { + const code = os.W.EXITSTATUS(status); + return ExecResult.withCode(@intCast(code)); + // Term{ .Exited = os.W.EXITSTATUS(status) }; + } else if (os.W.IFSIGNALED(status)) { + const code = os.W.TERMSIG(status); + Global.exit(128 + @as(u8, @as(u7, @truncate(code)))); + } else if (os.W.IFSTOPPED(status)) { + const code = os.W.STOPSIG(status); + Global.exit(128 + @as(u8, @as(u7, @truncate(code)))); + } else { + // shouldn't be possible, but in case we get an invalid status code just ignore it + return ExecResult.ok; + } + }, + // if the waitpid call failed, we can't get the exit code + else => return ExecResult.failure, + } + } + } else { + try Run.boot(ctx_, entry_path); + 
return ExecResult.ok; + } + } + + const ExecResult = union(enum) { + code: u8, + failure, + ok, + + pub fn withCode(code: u8) ExecResult { + return ExecResult{ .code = code }; + } + + pub fn notFailure(self: ExecResult) bool { + return switch (self) { + .ok => true, + .failure => false, + .code => |code| code == 0, + }; + } + }; + + pub fn execAll(ctx: Command.Context, comptime bin_dirs_only: bool) !void { + // without filters just behave like normal exec + if (ctx.filters.len == 0) { + switch (try exec(ctx, bin_dirs_only, true, false)) { + .ok => return, + .failure => Global.exit(1), + .code => |code| Global.exit(code), + } + return; + } + + const fsinstance = try bun.fs.FileSystem.init(null); + const olddir = fsinstance.top_level_dir; + defer { + // change back to the original directory once we're done + fsinstance.top_level_dir = olddir; + switch (bun.sys.chdir(olddir)) { + .err => |err| { + Output.prettyErrorln("error: Failed to change directory to {s} due to error {}", .{ olddir, err }); + Global.crash(); + }, + .result => {}, + } + } + + var filter_instance = try FilterArg.FilterSet.init(ctx.allocator, ctx.filters, olddir); + defer filter_instance.deinit(); + + var patterns = std.ArrayList([]u8).init(ctx.allocator); + defer { + for (patterns.items) |path| { + ctx.allocator.free(path); + } + patterns.deinit(); + } + + var root_buf: bun.PathBuffer = undefined; + const resolve_root = try FilterArg.getCandidatePackagePatterns(ctx.allocator, ctx.log, &patterns, olddir, &root_buf); + + var package_json_iter = try FilterArg.PackageFilterIterator.init(ctx.allocator, patterns.items, resolve_root); + defer package_json_iter.deinit(); + + var arena = std.heap.ArenaAllocator.init(ctx.allocator); + var arena_alloc = arena.allocator(); + + var ok = true; + var any_match = false; + while (try package_json_iter.next()) |package_json_path| { + const dirpath = std.fs.path.dirname(package_json_path) orelse Global.crash(); + const path = strings.withoutTrailingSlash(dirpath); + 
const matches = matches: { + if (filter_instance.has_name_filters) { + // TODO load name from package.json + + const json_file = try std.fs.cwd().openFile( + package_json_path, + .{ .mode = .read_only }, + ); + defer json_file.close(); + + const json_stat_size = try json_file.getEndPos(); + const json_buf = try arena_alloc.alloc(u8, json_stat_size + 64); + defer _ = arena.reset(std.heap.ArenaAllocator.ResetMode.retain_capacity); + + const json_len = try json_file.preadAll(json_buf, 0); + const json_source = bun.logger.Source.initPathString(path, json_buf[0..json_len]); + + var parser = try json_parser.PackageJSONVersionChecker.init(arena_alloc, &json_source, ctx.log); + _ = try parser.parseExpr(); + if (!parser.has_found_name) { + // TODO warn of malformed package + continue; + } + break :matches filter_instance.matchesPathName(path, parser.found_name); + } else { + break :matches filter_instance.matchesPath(path); + } + }; + + if (!matches) continue; + any_match = true; + Output.prettyErrorln("In {s}:", .{path}); + // flush outputs to ensure that stdout and stderr are in the correct order for each of the paths + Output.flush(); + switch (bun.sys.chdir(path)) { + .err => |err| { + Output.prettyErrorln("error: Failed to change directory to {s} due to error {}", .{ path, err }); + Global.crash(); + }, + .result => {}, + } + fsinstance.top_level_dir = path; + + // TODO is this necessary? which assignment is correct here? 
+ fsinstance.fs.cwd = path; + const res = exec(ctx, bin_dirs_only, true, true) catch |err| { + Output.prettyErrorln("error: Failed to run {s} due to error {s}", .{ path, @errorName(err) }); + continue; + }; + ok = ok and res.notFailure(); + } + + if (!any_match) { + Output.prettyErrorln("error: No packages matched the filter", .{}); + Global.exit(1); + } + + if (!ok) { + Global.exit(1); + } + } + pub fn exec( ctx_: Command.Context, comptime bin_dirs_only: bool, comptime log_errors: bool, comptime did_try_open_with_bun_js: bool, - ) !bool { + ) !ExecResult { var ctx = ctx_; // Step 1. Figure out what we're trying to run @@ -1320,7 +1491,7 @@ pub const RunCommand = struct { } return ExecResult.failure; }; - // return ExecResult.ok; + return ExecResult.ok; } if (!did_try_open_with_bun_js and (log_errors or force_using_bun)) { @@ -1482,7 +1653,7 @@ pub const RunCommand = struct { passthrough, ctx.debug.silent, ctx.debug.use_system_shell, - )) return false; + )) return ExecResult.failure; temp_script_buffer[0.."post".len].* = "post".*; @@ -1502,7 +1673,7 @@ pub const RunCommand = struct { } } - return true; + return ExecResult.ok; } } } @@ -1521,7 +1692,8 @@ pub const RunCommand = struct { if (@errorReturnTrace()) |trace| { std.debug.dumpStackTrace(trace.*); } - Global.exit(1); + // Global.exit(1); + return ExecResult.failure; }; } @@ -1531,7 +1703,7 @@ pub const RunCommand = struct { var list = std.ArrayList(u8).init(stack_fallback.get()); errdefer list.deinit(); - std.io.getStdIn().reader().readAllArrayList(&list, 1024 * 1024 * 1024) catch return false; + std.io.getStdIn().reader().readAllArrayList(&list, 1024 * 1024 * 1024) catch return ExecResult.failure; ctx.runtime_options.eval.script = list.items; const trigger = bun.pathLiteral("/[stdin]"); @@ -1540,7 +1712,7 @@ pub const RunCommand = struct { @memcpy(entry_point_buf[cwd.len..][0..trigger.len], trigger); const entry_path = entry_point_buf[0 .. 
cwd.len + trigger.len]; - Run.boot(ctx, ctx.allocator.dupe(u8, entry_path) catch return false) catch |err| { + Run.boot(ctx, ctx.allocator.dupe(u8, entry_path) catch return ExecResult.failure) catch |err| { ctx.log.printForLogLevel(Output.errorWriter()) catch {}; Output.prettyErrorln("error: Failed to run {s} due to error {s}", .{ @@ -1550,9 +1722,11 @@ pub const RunCommand = struct { if (@errorReturnTrace()) |trace| { std.debug.dumpStackTrace(trace.*); } - Global.exit(1); + // Global.exit(1); + return ExecResult.failure; }; - return true; + // return true; + return ExecResult.ok; } if (Environment.isWindows and bun.FeatureFlags.windows_bunx_fast_path) try_bunx_file: { diff --git a/src/glob.zig b/src/glob.zig index 327cc4bcd4a2b..aa395cbfc2aa7 100644 --- a/src/glob.zig +++ b/src/glob.zig @@ -364,9 +364,7 @@ pub fn GlobWalker_( const root_path = this.walker.cwd; @memcpy(path_buf[0..root_path.len], root_path[0..root_path.len]); path_buf[root_path.len] = 0; - const root_path_z = path_buf[0..root_path.len :0]; - const cwd_fd = switch (Syscall.open(root_path_z, std.os.O.DIRECTORY | std.os.O.RDONLY, 0)) { - .err => |err| return .{ .err = this.walker.handleSysErrWithPath(err, root_path_z) }, + // const root_path_z = path_buf[0..root_path.len :0]; const cwd_fd = switch (try Accessor.open(path_buf[0..root_path.len :0])) { .err => |err| return .{ .err = this.walker.handleSysErrWithPath(err, @ptrCast(path_buf[0 .. 
root_path.len + 1])) }, .result => |fd| fd, @@ -415,10 +413,10 @@ pub fn GlobWalker_( if (comptime count_fds) this.fds_open -= 1; } - pub fn closeDisallowingCwd(this: *Iterator, fd: bun.FileDescriptor) void { - if (fd == this.cwd_fd or fd == bun.invalid_fd) return; - _ = Syscall.close(fd); - if (bun.Environment.allow_assert) this.fds_open -= 1; + pub fn closeDisallowingCwd(this: *Iterator, fd: Accessor.Handle) void { + if (fd.isZero() or fd.eql(this.cwd_fd)) return; + _ = Accessor.close(fd); + if (comptime count_fds) this.fds_open -= 1; } pub fn bumpOpenFds(this: *Iterator) void { @@ -470,7 +468,7 @@ pub fn GlobWalker_( this.iter_state.directory.next_pattern = if (component_idx + 1 < this.walker.patternComponents.items.len) &this.walker.patternComponents.items[component_idx + 1] else null; this.iter_state.directory.is_last = component_idx == this.walker.patternComponents.items.len - 1; this.iter_state.directory.at_cwd = false; - this.iter_state.directory.fd = bun.invalid_fd; + this.iter_state.directory.fd = Accessor.Handle.zero; const fd: Accessor.Handle = fd: { if (work_item.fd) |fd| break :fd fd; From 63d5cd67d04b64baf3d4b82d2d3866c5082382e4 Mon Sep 17 00:00:00 2001 From: "autofix-ci[bot]" <114827586+autofix-ci[bot]@users.noreply.github.com> Date: Thu, 4 Apr 2024 20:14:52 +0000 Subject: [PATCH 34/74] [autofix.ci] apply automated fixes --- src/cli/run_command.zig | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig index 821258143ae02..480bc14038fe0 100644 --- a/src/cli/run_command.zig +++ b/src/cli/run_command.zig @@ -1320,7 +1320,7 @@ pub const RunCommand = struct { try Run.boot(ctx_, entry_path); return ExecResult.ok; } - } + } const ExecResult = union(enum) { code: u8, From 127a59926aee04d60a53680ceda38685d303cf19 Mon Sep 17 00:00:00 2001 From: Georgijs Vilums Date: Thu, 4 Apr 2024 13:27:24 -0700 Subject: [PATCH 35/74] clean up filter-workspace test --- test/cli/run/filter-workspace.test.ts | 36 
+++++++++++++-------------- 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/test/cli/run/filter-workspace.test.ts b/test/cli/run/filter-workspace.test.ts index 7874600c906c8..067eba9748593 100644 --- a/test/cli/run/filter-workspace.test.ts +++ b/test/cli/run/filter-workspace.test.ts @@ -4,8 +4,8 @@ import { spawnSync } from "bun"; import { bunEnv, bunExe, tempDirWithFiles } from "harness"; import { join } from "path"; -let cwd_root = tempDirWithFiles("testworkspace", { - "packages": { +const cwd_root = tempDirWithFiles("testworkspace", { +"packages": { "pkga": { "index.js": "console.log('pkga');", "package.json": JSON.stringify({ @@ -43,10 +43,10 @@ let cwd_root = tempDirWithFiles("testworkspace", { }), }); -let cwd_packages = join(cwd_root, "packages"); -let cwd_a = join(cwd_packages, "pkga"); -let cwd_b = join(cwd_packages, "pkgb"); -let cwd_c = join(cwd_packages, "dirname"); +const cwd_packages = join(cwd_root, "packages"); +const cwd_a = join(cwd_packages, "pkga"); +const cwd_b = join(cwd_packages, "pkgb"); +const cwd_c = join(cwd_packages, "dirname"); function runInCwdSuccess( cwd: string, @@ -55,15 +55,15 @@ function runInCwdSuccess( antipattern?: RegExp | RegExp[], command: string[] = ["present"], ) { - let cmd = [bunExe(), "run"]; - if (pattern instanceof Array) { - for (let p of pattern) { + const cmd = [bunExe(), "run"]; + if (Array.isArray(pattern)) { + for (const p of pattern) { cmd.push("--filter", p); } } else { cmd.push("--filter", pattern); } - for (let c of command) { + for (const c of command) { cmd.push(c); } const { exitCode, stdout, stderr } = spawnSync({ @@ -74,11 +74,11 @@ function runInCwdSuccess( stderr: "pipe", }); const stdoutval = stdout.toString(); - for (let r of target_pattern instanceof Array ? target_pattern : [target_pattern]) { + for (const r of Array.isArray(target_pattern) ? 
target_pattern : [target_pattern]) { expect(stdoutval).toMatch(r); } if (antipattern !== undefined) { - for (let r of antipattern instanceof Array ? antipattern : [antipattern]) { + for (const r of Array.isArray(antipattern) ? antipattern : [antipattern]) { expect(stdoutval).not.toMatch(r); } } @@ -100,8 +100,8 @@ function runInCwdFailure(cwd: string, pkgname: string, scriptname: string, resul } describe("bun", () => { - let dirs = [cwd_root, cwd_packages, cwd_a, cwd_b, cwd_c]; - let packages = [ + const dirs = [cwd_root, cwd_packages, cwd_a, cwd_b, cwd_c]; + const packages = [ { name: "pkga", output: /scripta/, @@ -116,16 +116,16 @@ describe("bun", () => { }, ]; - let names = packages.map(p => p.name); - for (let d of dirs) { - for (let { name, output } of packages) { + const names = packages.map(p => p.name); + for (const d of dirs) { + for (const { name, output } of packages) { test(`resolve ${name} from ${d}`, () => { runInCwdSuccess(d, name, output); }); } } - for (let d of dirs) { + for (const d of dirs) { test(`resolve '*' from ${d}`, () => { runInCwdSuccess(d, "*", [/scripta/, /scriptb/, /scriptc/]); }); From c39e61d11311e5b3aee5b728c39ef18027c59fdd Mon Sep 17 00:00:00 2001 From: "autofix-ci[bot]" <114827586+autofix-ci[bot]@users.noreply.github.com> Date: Thu, 4 Apr 2024 20:28:30 +0000 Subject: [PATCH 36/74] [autofix.ci] apply automated fixes --- test/cli/run/filter-workspace.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/cli/run/filter-workspace.test.ts b/test/cli/run/filter-workspace.test.ts index 067eba9748593..69427f9de67a0 100644 --- a/test/cli/run/filter-workspace.test.ts +++ b/test/cli/run/filter-workspace.test.ts @@ -5,7 +5,7 @@ import { bunEnv, bunExe, tempDirWithFiles } from "harness"; import { join } from "path"; const cwd_root = tempDirWithFiles("testworkspace", { -"packages": { + "packages": { "pkga": { "index.js": "console.log('pkga');", "package.json": JSON.stringify({ From 
f1db7ad9561ffaa86bf6b05565cb1014772db48a Mon Sep 17 00:00:00 2001 From: Georgijs Vilums Date: Thu, 4 Apr 2024 14:49:43 -0700 Subject: [PATCH 37/74] fixes to running binaries --- src/cli.zig | 2 +- src/cli/bunx_command.zig | 20 ++--- src/cli/run_command.zig | 118 +++++++++----------------- test/cli/run/filter-workspace.test.ts | 6 +- 4 files changed, 54 insertions(+), 92 deletions(-) diff --git a/src/cli.zig b/src/cli.zig index 8e92d47d01aa7..1fef66e670440 100644 --- a/src/cli.zig +++ b/src/cli.zig @@ -1829,7 +1829,7 @@ pub const Command = struct { .failure => {}, .ok => return, .code => |code| { - Global.exit(code); + Global.exitWide(code); }, } diff --git a/src/cli/bunx_command.zig b/src/cli/bunx_command.zig index f8efa60d4be26..cfefba3cadb64 100644 --- a/src/cli/bunx_command.zig +++ b/src/cli/bunx_command.zig @@ -516,7 +516,7 @@ pub const BunxCommand = struct { } } - try Run.runBinary( + const res = try Run.runBinary( ctx, try this_bundler.fs.dirname_store.append(@TypeOf(out), out), destination, @@ -525,8 +525,7 @@ pub const BunxCommand = struct { passthrough, null, ); - // runBinary is noreturn - @compileError("unreachable"); + Global.exit(if (res.notFailure()) 0 else 1); } // 2. 
The "bin" is possibly not the same as the package name, so we load the package.json to figure out what "bin" to use @@ -554,7 +553,7 @@ pub const BunxCommand = struct { absolute_in_cache_dir, )) |destination| { const out = bun.asByteSlice(destination); - try Run.runBinary( + const res = try Run.runBinary( ctx, try this_bundler.fs.dirname_store.append(@TypeOf(out), out), destination, @@ -563,8 +562,7 @@ pub const BunxCommand = struct { passthrough, null, ); - // runBinary is noreturn - @compileError("unreachable"); + Global.exit(if (res.notFailure()) 0 else 1); } } } else |err| { @@ -674,7 +672,7 @@ pub const BunxCommand = struct { absolute_in_cache_dir, )) |destination| { const out = bun.asByteSlice(destination); - try Run.runBinary( + const res = try Run.runBinary( ctx, try this_bundler.fs.dirname_store.append(@TypeOf(out), out), destination, @@ -683,8 +681,7 @@ pub const BunxCommand = struct { passthrough, null, ); - // runBinary is noreturn - @compileError("unreachable"); + Global.exit(if (res.notFailure()) 0 else 1); } // 2. 
The "bin" is possibly not the same as the package name, so we load the package.json to figure out what "bin" to use @@ -699,7 +696,7 @@ pub const BunxCommand = struct { absolute_in_cache_dir, )) |destination| { const out = bun.asByteSlice(destination); - try Run.runBinary( + const res = try Run.runBinary( ctx, try this_bundler.fs.dirname_store.append(@TypeOf(out), out), destination, @@ -708,8 +705,7 @@ pub const BunxCommand = struct { passthrough, null, ); - // runBinary is noreturn - @compileError("unreachable"); + Global.exit(if (res.notFailure()) 0 else 1); } } } else |_| {} diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig index 480bc14038fe0..671ab95ce5a74 100644 --- a/src/cli/run_command.zig +++ b/src/cli/run_command.zig @@ -277,7 +277,7 @@ pub const RunCommand = struct { passthrough: []const string, silent: bool, use_system_shell: bool, - ) !bool { + ) !ExecResult { const shell_bin = findShell(env.get("PATH") orelse "", cwd) orelse return error.MissingShell; const script = original_script; @@ -323,8 +323,7 @@ pub const RunCommand = struct { if (!silent) { Output.prettyErrorln("error: Failed to run script {s} due to error {s}", .{ name, @errorName(err) }); } - - Global.exit(1); + return ExecResult.failure; }; if (code > 0) { @@ -333,10 +332,10 @@ pub const RunCommand = struct { Output.flush(); } - Global.exitWide(code); + return ExecResult.withCode(code); } - return true; + return ExecResult.ok; } const argv = [_]string{ @@ -350,13 +349,23 @@ pub const RunCommand = struct { Output.flush(); } + const envp = try env.map.createNullDelimitedEnvMap(allocator); + defer { + for (envp) |e| { + if (e) |ptr| { + allocator.free(bun.span(ptr)); + } + } + allocator.free(envp); + } + const spawn_result = switch ((bun.spawnSync(&.{ .argv = &argv, .argv0 = shell_bin.ptr, // TODO: remember to free this when we add --filter or --concurrent // in the meantime we don't need to free it. 
- .envp = try env.map.createNullDelimitedEnvMap(bun.default_allocator), + .envp = envp, .cwd = cwd, .stderr = .inherit, @@ -372,7 +381,7 @@ pub const RunCommand = struct { } Output.flush(); - return true; + return ExecResult.failure; })) { .err => |err| { if (!silent) { @@ -380,7 +389,7 @@ pub const RunCommand = struct { } Output.flush(); - return true; + return ExecResult.failure; }, .result => |result| result, }; @@ -400,7 +409,7 @@ pub const RunCommand = struct { Output.flush(); } - Global.exit(exit_code.code); + return ExecResult.withCode(exit_code.code); } }, @@ -419,13 +428,13 @@ pub const RunCommand = struct { } Output.flush(); - return true; + return ExecResult.failure; }, else => {}, } - return true; + return ExecResult.ok; } /// When printing error messages from 'bun run', attribute bun overridden node.js to bun @@ -453,7 +462,7 @@ pub const RunCommand = struct { env: *DotEnv.Loader, passthrough: []const string, original_script_for_bun_run: ?[]const u8, - ) !noreturn { + ) !ExecResult { // Attempt to find a ".bunx" file on disk, and run it, skipping the // wrapper exe. 
we build the full exe path even though we could do // a relative lookup, because in the case we do find it, we have to @@ -474,7 +483,7 @@ pub const RunCommand = struct { BunXFastPath.tryLaunch(ctx, wpath, env, passthrough); } - try runBinaryWithoutBunxPath( + return try runBinaryWithoutBunxPath( ctx, executable, executableZ, @@ -485,15 +494,13 @@ pub const RunCommand = struct { ); } - fn runBinaryGenericError(executable: []const u8, silent: bool, err: bun.sys.Error) noreturn { + fn runBinaryGenericError(executable: []const u8, silent: bool, err: bun.sys.Error) void { if (!silent) { Output.prettyErrorln("error: Failed to run \"{s}\" due to:\n{}", .{ basenameOrBun(executable), err.withPath(executable) }); if (@errorReturnTrace()) |trace| { std.debug.dumpStackTrace(trace.*); } } - - Global.exit(1); } fn runBinaryWithoutBunxPath( @@ -504,7 +511,7 @@ pub const RunCommand = struct { env: *DotEnv.Loader, passthrough: []const string, original_script_for_bun_run: ?[]const u8, - ) !noreturn { + ) !ExecResult { var argv_ = [_]string{executable}; var argv: []const string = &argv_; @@ -563,19 +570,21 @@ pub const RunCommand = struct { } } } - Global.exit(1); + return ExecResult.failure; }; switch (spawn_result) { .err => |err| { // an error occurred while spawning the process runBinaryGenericError(executable, silent, err); + return ExecResult.failure; }, .result => |result| { switch (result.status) { // An error occurred after the process was spawned. 
.err => |err| { runBinaryGenericError(executable, silent, err); + return ExecResult.failure; }, .signaled => |signal| { @@ -648,7 +657,7 @@ pub const RunCommand = struct { } } - Global.exit(code); + return ExecResult.withCode(code); }, .running => @panic("Unexpected state: process is running"), } @@ -1281,53 +1290,12 @@ pub const RunCommand = struct { Output.flush(); } - pub fn bootMaybeSubprocess(ctx_: Command.Context, entry_path: string, comptime subprocess: bool) !ExecResult { - if (comptime subprocess) { - const pid = std.c.fork(); - if (pid == 0) { - // child - Run.boot(ctx_, entry_path) catch { - Global.exit(1); - }; - Global.exit(0); - } else { - // parent - var cstatus: c_int = 0; - switch (std.c.getErrno(std.c.waitpid(pid, &cstatus, 0))) { - .SUCCESS => { - const status: u32 = @intCast(cstatus); - const os = std.os; - if (os.W.IFEXITED(status)) { - const code = os.W.EXITSTATUS(status); - return ExecResult.withCode(@intCast(code)); - // Term{ .Exited = os.W.EXITSTATUS(status) }; - } else if (os.W.IFSIGNALED(status)) { - const code = os.W.TERMSIG(status); - Global.exit(128 + @as(u8, @as(u7, @truncate(code)))); - } else if (os.W.IFSTOPPED(status)) { - const code = os.W.STOPSIG(status); - Global.exit(128 + @as(u8, @as(u7, @truncate(code)))); - } else { - // shouldn't be possible, but in case we get an invalid status code just ignore it - return ExecResult.ok; - } - }, - // if the waitpid call failed, we can't get the exit code - else => return ExecResult.failure, - } - } - } else { - try Run.boot(ctx_, entry_path); - return ExecResult.ok; - } - } - - const ExecResult = union(enum) { - code: u8, + pub const ExecResult = union(enum) { + code: u32, failure, ok, - pub fn withCode(code: u8) ExecResult { + pub fn withCode(code: u32) ExecResult { return ExecResult{ .code = code }; } @@ -1346,7 +1314,7 @@ pub const RunCommand = struct { switch (try exec(ctx, bin_dirs_only, true, false)) { .ok => return, .failure => Global.exit(1), - .code => |code| Global.exit(code), 
+ .code => |code| Global.exitWide(code), } return; } @@ -1410,7 +1378,7 @@ pub const RunCommand = struct { var parser = try json_parser.PackageJSONVersionChecker.init(arena_alloc, &json_source, ctx.log); _ = try parser.parseExpr(); if (!parser.has_found_name) { - // TODO warn of malformed package + Output.warn("Failed to find package name in {s}\n", .{package_json_path}); continue; } break :matches filter_instance.matchesPathName(path, parser.found_name); @@ -1628,7 +1596,7 @@ pub const RunCommand = struct { defer ctx.allocator.free(temp_script_buffer); if (scripts.get(temp_script_buffer[1..])) |prescript| { - if (!try runPackageScriptForeground( + const res = try runPackageScriptForeground( &ctx, ctx.allocator, prescript, @@ -1638,12 +1606,11 @@ pub const RunCommand = struct { &.{}, ctx.debug.silent, ctx.debug.use_system_shell, - )) { - return ExecResult.failure; - } + ); + if (!res.notFailure()) return ExecResult.failure; } - if (!try runPackageScriptForeground( + const res2 = try runPackageScriptForeground( &ctx, ctx.allocator, script_content, @@ -1653,12 +1620,13 @@ pub const RunCommand = struct { passthrough, ctx.debug.silent, ctx.debug.use_system_shell, - )) return ExecResult.failure; + ); + if (!res2.notFailure()) return ExecResult.failure; temp_script_buffer[0.."post".len].* = "post".*; if (scripts.get(temp_script_buffer)) |postscript| { - if (!try runPackageScriptForeground( + const res3 = try runPackageScriptForeground( &ctx, ctx.allocator, postscript, @@ -1668,9 +1636,8 @@ pub const RunCommand = struct { &.{}, ctx.debug.silent, ctx.debug.use_system_shell, - )) { - return ExecResult.failure; - } + ); + if (!res3.notFailure()) return ExecResult.failure; } return ExecResult.ok; @@ -1692,7 +1659,6 @@ pub const RunCommand = struct { if (@errorReturnTrace()) |trace| { std.debug.dumpStackTrace(trace.*); } - // Global.exit(1); return ExecResult.failure; }; } diff --git a/test/cli/run/filter-workspace.test.ts b/test/cli/run/filter-workspace.test.ts index 
69427f9de67a0..dd36b63c8d18a 100644 --- a/test/cli/run/filter-workspace.test.ts +++ b/test/cli/run/filter-workspace.test.ts @@ -1,4 +1,3 @@ -// @known-failing-on-windows: 1 failing import { describe, test, expect, beforeAll } from "bun:test"; import { spawnSync } from "bun"; import { bunEnv, bunExe, tempDirWithFiles } from "harness"; @@ -144,8 +143,9 @@ describe("bun", () => { runInCwdSuccess(cwd_root, "./packages/pkg*", [/scripta/, /scriptb/], /scriptc/); }); - test("resolve and run all js scripts", () => { - runInCwdSuccess(cwd_root, "*", [/pkga/, /pkgb/, /pkgc/], [], ["index.js"]); + test.todo("resolve and run all js scripts", () => { + console.log(cwd_root); + runInCwdSuccess(cwd_root, "*", [/pkga/, /pkgb/, /pkgc/], [], ["./index.js"]); }); test("run binaries in package directories", () => { From 7d122b3d156e0845b4bccff821bebbdfefce9527 Mon Sep 17 00:00:00 2001 From: Georgijs Vilums Date: Thu, 4 Apr 2024 16:08:22 -0700 Subject: [PATCH 38/74] fix actually setting cwd_override --- src/cli.zig | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/cli.zig b/src/cli.zig index 1fef66e670440..6b30662ba7db4 100644 --- a/src/cli.zig +++ b/src/cli.zig @@ -435,8 +435,9 @@ pub const Arguments = struct { break :brk try allocator.dupe(u8, out); }; } else { - cwd = try bun.getcwd(&cwd_buf); + cwd = try allocator.dupe(u8, try bun.getcwd(&cwd_buf)); } + ctx.args.cwd_override = cwd; if (cmd == .RunCommand) { ctx.filters = args.options("--filter"); From ca4c96da1bb4378a0ca95a2bfbe94a0ebffdddb5 Mon Sep 17 00:00:00 2001 From: Georgijs Vilums Date: Thu, 4 Apr 2024 17:13:52 -0700 Subject: [PATCH 39/74] windows fixes --- src/cli/filter_arg.zig | 3 +++ src/glob.zig | 3 ++- test/cli/run/filter-workspace.test.ts | 2 +- 3 files changed, 6 insertions(+), 2 deletions(-) diff --git a/src/cli/filter_arg.zig b/src/cli/filter_arg.zig index 10bf105b4d37d..0bd5143140f86 100644 --- a/src/cli/filter_arg.zig +++ b/src/cli/filter_arg.zig @@ -136,6 +136,9 @@ pub const FilterSet = 
struct { var codepointer_iter = strings.UnsignedCodepointIterator.init(filter_utf8); var cursor = strings.UnsignedCodepointIterator.Cursor{}; while (codepointer_iter.next(&cursor)) { + if (cursor.c == @as(u32, '\\')) { + try filter_utf32.append(self.allocator, cursor.c); + } try filter_utf32.append(self.allocator, cursor.c); } self.has_name_filters = self.has_name_filters or !is_path; diff --git a/src/glob.zig b/src/glob.zig index aa395cbfc2aa7..98ba1c56682be 100644 --- a/src/glob.zig +++ b/src/glob.zig @@ -248,7 +248,8 @@ pub const DirEntryAccessor = struct { pub fn openat(handle: Handle, path_: [:0]const u8) !Maybe(Handle) { var path: []const u8 = path_; var buf: bun.PathBuffer = undefined; - if (path.len > 0 and path[0] != '/') { + + if (!bun.path.Platform.auto.isAbsolute(path)) { if (handle.value) |entry| { path = bun.path.joinStringBuf(&buf, [_][]const u8{ entry.dir, path }, .auto); } diff --git a/test/cli/run/filter-workspace.test.ts b/test/cli/run/filter-workspace.test.ts index dd36b63c8d18a..f7d356da0ddb0 100644 --- a/test/cli/run/filter-workspace.test.ts +++ b/test/cli/run/filter-workspace.test.ts @@ -149,7 +149,7 @@ describe("bun", () => { }); test("run binaries in package directories", () => { - runInCwdSuccess(cwd_root, "*", [/pkga/, /pkgb/, /dirname/], [], ["pwd"]); + runInCwdSuccess(cwd_root, "*", [/pkga/, /pkgb/, /dirname/], [], ["bun", "-e", "console.log(process.cwd())"]); }); test("should error with missing script", () => { From 4faf32d219333010c1a5ebabdd725e3a7487dc4a Mon Sep 17 00:00:00 2001 From: Georgijs Vilums Date: Fri, 5 Apr 2024 10:44:59 -0700 Subject: [PATCH 40/74] address some review comments --- src/cli.zig | 9 ++------- src/cli/bunx_command.zig | 8 ++++---- src/cli/filter_arg.zig | 3 ++- src/cli/run_command.zig | 31 ++++++++++++++++++------------- src/glob.zig | 13 ------------- 5 files changed, 26 insertions(+), 38 deletions(-) diff --git a/src/cli.zig b/src/cli.zig index 6b30662ba7db4..b12d71b53b607 100644 --- a/src/cli.zig +++ 
b/src/cli.zig @@ -1826,13 +1826,8 @@ pub const Command = struct { if (ctx.filters.len > 0) { Output.prettyln("warn: Filters are ignored for auto command", .{}); } - switch (try RunCommand.exec(ctx, true, false, false)) { - .failure => {}, - .ok => return, - .code => |code| { - Global.exitWide(code); - }, - } + const res = try RunCommand.exec(ctx, true, false, false); + res.exit(); Output.prettyErrorln("error: Script not found \"{s}\"", .{ ctx.positionals[0], diff --git a/src/cli/bunx_command.zig b/src/cli/bunx_command.zig index cfefba3cadb64..547793d61c1eb 100644 --- a/src/cli/bunx_command.zig +++ b/src/cli/bunx_command.zig @@ -525,7 +525,7 @@ pub const BunxCommand = struct { passthrough, null, ); - Global.exit(if (res.notFailure()) 0 else 1); + res.exit(); } // 2. The "bin" is possibly not the same as the package name, so we load the package.json to figure out what "bin" to use @@ -562,7 +562,7 @@ pub const BunxCommand = struct { passthrough, null, ); - Global.exit(if (res.notFailure()) 0 else 1); + res.exit(); } } } else |err| { @@ -681,7 +681,7 @@ pub const BunxCommand = struct { passthrough, null, ); - Global.exit(if (res.notFailure()) 0 else 1); + res.exit(); } // 2. 
The "bin" is possibly not the same as the package name, so we load the package.json to figure out what "bin" to use @@ -705,7 +705,7 @@ pub const BunxCommand = struct { passthrough, null, ); - Global.exit(if (res.notFailure()) 0 else 1); + res.exit(); } } } else |_| {} diff --git a/src/cli/filter_arg.zig b/src/cli/filter_arg.zig index 0bd5143140f86..bf42b04e6c1ea 100644 --- a/src/cli/filter_arg.zig +++ b/src/cli/filter_arg.zig @@ -92,7 +92,8 @@ pub fn getCandidatePackagePatterns(allocator: std.mem.Allocator, log: *bun.logge }, else => { // TODO log error and fail - Global.crash(); + Output.prettyErrorln("error: Failed to parse \"workspaces\" property: all items must be strings", .{}); + Global.exit(1); }, } } diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig index 671ab95ce5a74..e1ae504e94c64 100644 --- a/src/cli/run_command.zig +++ b/src/cli/run_command.zig @@ -363,8 +363,6 @@ pub const RunCommand = struct { .argv = &argv, .argv0 = shell_bin.ptr, - // TODO: remember to free this when we add --filter or --concurrent - // in the meantime we don't need to free it. 
.envp = envp, .cwd = cwd, @@ -1292,6 +1290,7 @@ pub const RunCommand = struct { pub const ExecResult = union(enum) { code: u32, + signal: u32, failure, ok, @@ -1299,24 +1298,30 @@ pub const RunCommand = struct { return ExecResult{ .code = code }; } - pub fn notFailure(self: ExecResult) bool { + pub fn isOk(self: ExecResult) bool { return switch (self) { .ok => true, .failure => false, .code => |code| code == 0, + .signal => false, }; } + + pub fn exit(self: ExecResult) noreturn { + switch (self) { + .ok => Global.exit(0), + .failure => Global.exit(1), + .code => |code| Global.exitWide(code), + .signal => |code| Global.raiseIgnoringPanicHandler(code), + } + } }; pub fn execAll(ctx: Command.Context, comptime bin_dirs_only: bool) !void { // without filters just behave like normal exec if (ctx.filters.len == 0) { - switch (try exec(ctx, bin_dirs_only, true, false)) { - .ok => return, - .failure => Global.exit(1), - .code => |code| Global.exitWide(code), - } - return; + const res = try exec(ctx, bin_dirs_only, true, false); + res.exit(); } const fsinstance = try bun.fs.FileSystem.init(null); @@ -1407,7 +1412,7 @@ pub const RunCommand = struct { Output.prettyErrorln("error: Failed to run {s} due to error {s}", .{ path, @errorName(err) }); continue; }; - ok = ok and res.notFailure(); + ok = ok and res.isOk(); } if (!any_match) { @@ -1607,7 +1612,7 @@ pub const RunCommand = struct { ctx.debug.silent, ctx.debug.use_system_shell, ); - if (!res.notFailure()) return ExecResult.failure; + if (!res.isOk()) return ExecResult.failure; } const res2 = try runPackageScriptForeground( @@ -1621,7 +1626,7 @@ pub const RunCommand = struct { ctx.debug.silent, ctx.debug.use_system_shell, ); - if (!res2.notFailure()) return ExecResult.failure; + if (!res2.isOk()) return ExecResult.failure; temp_script_buffer[0.."post".len].* = "post".*; @@ -1637,7 +1642,7 @@ pub const RunCommand = struct { ctx.debug.silent, ctx.debug.use_system_shell, ); - if (!res3.notFailure()) return ExecResult.failure; 
+ if (!res3.isOk()) return ExecResult.failure; } return ExecResult.ok; diff --git a/src/glob.zig b/src/glob.zig index 98ba1c56682be..418ed73914283 100644 --- a/src/glob.zig +++ b/src/glob.zig @@ -783,19 +783,6 @@ pub fn GlobWalker_( error_on_broken_symlinks: bool, only_files: bool, ) !Maybe(void) { - errdefer arena.deinit(); - var cwd: []const u8 = undefined; - switch (Accessor.getcwd(&this.pathBuf)) { - .err => |err| { - return .{ .err = err }; - }, - .result => |result| { - const copiedCwd = try arena.allocator().alloc(u8, result.len); - @memcpy(copiedCwd, result); - cwd = copiedCwd; - }, - } - return try this.initWithCwd( arena, pattern, From 7e7e55dc6883e142dd20915e50ed0d77febeb780 Mon Sep 17 00:00:00 2001 From: Georgijs Vilums Date: Fri, 5 Apr 2024 13:08:10 -0700 Subject: [PATCH 41/74] handle malformed JSON --- src/cli/filter_arg.zig | 29 ++++++++++----------------- src/cli/run_command.zig | 5 ++++- src/json_parser.zig | 3 --- test/cli/run/filter-workspace.test.ts | 12 ++++++++++- 4 files changed, 26 insertions(+), 23 deletions(-) diff --git a/src/cli/filter_arg.zig b/src/cli/filter_arg.zig index bf42b04e6c1ea..b942db8ad8f1a 100644 --- a/src/cli/filter_arg.zig +++ b/src/cli/filter_arg.zig @@ -50,17 +50,17 @@ pub fn getCandidatePackagePatterns(allocator: std.mem.Allocator, log: *bun.logge log.errors = 0; log.warnings = 0; - const json_file = std.fs.cwd().openFileZ( - name_buf[0 .. 
parent_trimmed.len + "/package.json".len :0].ptr, - .{ .mode = .read_only }, - ) catch continue; - defer json_file.close(); - - const json_stat_size = try json_file.getEndPos(); - const json_buf = try allocator.alloc(u8, json_stat_size + 64); - defer allocator.free(json_buf); - const json_len = try json_file.preadAll(json_buf, 0); - const json_source = bun.logger.Source.initPathString(json_path, json_buf[0..json_len]); + const json_source = switch (bun.sys.File.toSource(json_path, allocator)) { + .err => |err| { + switch (err.getErrno()) { + .NOENT, .ACCES, .PERM => continue, + else => |errno| return bun.errnoToZigErr(errno), + } + }, + .result => |source| source, + }; + defer allocator.free(json_source.contents); + const json = try json_parser.ParseJSONUTF8(&json_source, log, allocator); const prop = json.asProperty("workspaces") orelse continue; @@ -77,21 +77,14 @@ pub fn getCandidatePackagePatterns(allocator: std.mem.Allocator, log: *bun.logge for (json_array.slice()) |expr| { switch (expr.data) { .e_string => |pattern_expr| { - // /basepath/pattern/package.json - // const size = parent_trimmed.len + 1 + pattern_expr.data.len + "/package.json".len; const size = pattern_expr.data.len + "/package.json".len; var pattern = try allocator.alloc(u8, size); @memcpy(pattern[0..pattern_expr.data.len], pattern_expr.data); @memcpy(pattern[pattern_expr.data.len..size], "/package.json"); - // @memcpy(pattern[0..parent_trimmed.len], parent_trimmed); - // pattern[parent_trimmed.len] = '/'; - // @memcpy(pattern[parent_trimmed.len + 1 .. parent_trimmed.len + 1 + pattern_expr.data.len], pattern_expr.data); - // @memcpy(pattern[parent_trimmed.len + 1 + pattern_expr.data.len .. 
size], "/package.json"); try out_patterns.append(pattern); }, else => { - // TODO log error and fail Output.prettyErrorln("error: Failed to parse \"workspaces\" property: all items must be strings", .{}); Global.exit(1); }, diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig index e1ae504e94c64..c0a2e05c07d6f 100644 --- a/src/cli/run_command.zig +++ b/src/cli/run_command.zig @@ -1381,7 +1381,10 @@ pub const RunCommand = struct { const json_source = bun.logger.Source.initPathString(path, json_buf[0..json_len]); var parser = try json_parser.PackageJSONVersionChecker.init(arena_alloc, &json_source, ctx.log); - _ = try parser.parseExpr(); + _ = parser.parseExpr() catch { + Output.warn("Failed to parse package.json in {s}\n", .{package_json_path}); + continue; + }; if (!parser.has_found_name) { Output.warn("Failed to find package name in {s}\n", .{package_json_path}); continue; diff --git a/src/json_parser.zig b/src/json_parser.zig index 4ce9a12aaf5bd..e1786e718022c 100644 --- a/src/json_parser.zig +++ b/src/json_parser.zig @@ -506,9 +506,6 @@ pub const PackageJSONVersionChecker = struct { }, else => { try p.lexer.unexpected(); - if (comptime Environment.isDebug) { - @breakpoint(); - } return error.ParserError; }, } diff --git a/test/cli/run/filter-workspace.test.ts b/test/cli/run/filter-workspace.test.ts index f7d356da0ddb0..12e72b1f39a55 100644 --- a/test/cli/run/filter-workspace.test.ts +++ b/test/cli/run/filter-workspace.test.ts @@ -32,13 +32,23 @@ const cwd_root = tempDirWithFiles("testworkspace", { }, }), }, + "malformed1": { + "package.json": JSON.stringify({ + "scripts": { + "present": "echo malformed1", + } + }) + }, + "malformed2": { + "package.json": "asdfsadfas", + } }, "package.json": JSON.stringify({ "name": "ws", "scripts": { "present": "echo rootscript", }, - "workspaces": ["packages/pkga", "packages/pkgb", "packages/dirname"], + "workspaces": ["packages/pkga", "packages/pkgb", "packages/dirname", "packages/malformed1", "packages/malformed2"], 
}), }); From 101a893bc25e69e1bdcd70c6b65a35b0733d95a9 Mon Sep 17 00:00:00 2001 From: Georgijs Vilums Date: Fri, 5 Apr 2024 13:18:32 -0700 Subject: [PATCH 42/74] add various tests --- test/cli/run/filter-workspace.test.ts | 353 +++++++++++++++----------- 1 file changed, 210 insertions(+), 143 deletions(-) diff --git a/test/cli/run/filter-workspace.test.ts b/test/cli/run/filter-workspace.test.ts index 12e72b1f39a55..453da81085fc6 100644 --- a/test/cli/run/filter-workspace.test.ts +++ b/test/cli/run/filter-workspace.test.ts @@ -4,52 +4,62 @@ import { bunEnv, bunExe, tempDirWithFiles } from "harness"; import { join } from "path"; const cwd_root = tempDirWithFiles("testworkspace", { - "packages": { - "pkga": { - "index.js": "console.log('pkga');", - "package.json": JSON.stringify({ - "name": "pkga", - "scripts": { - "present": "echo scripta", - }, - }), - }, - "pkgb": { - "index.js": "console.log('pkgb');", - "package.json": JSON.stringify({ - "name": "pkgb", - "scripts": { - "present": "echo scriptb", - }, - }), - }, - "dirname": { - "index.js": "console.log('pkgc');", - "package.json": JSON.stringify({ - "name": "pkgc", - "scripts": { - "present": "echo scriptc", - }, - }), - }, - "malformed1": { - "package.json": JSON.stringify({ - "scripts": { - "present": "echo malformed1", - } - }) - }, - "malformed2": { - "package.json": "asdfsadfas", - } - }, - "package.json": JSON.stringify({ - "name": "ws", - "scripts": { - "present": "echo rootscript", - }, - "workspaces": ["packages/pkga", "packages/pkgb", "packages/dirname", "packages/malformed1", "packages/malformed2"], - }), + packages: { + pkga: { + "index.js": "console.log('pkga');", + "package.json": JSON.stringify({ + name: "pkga", + scripts: { + present: "echo scripta", + }, + }), + }, + pkgb: { + "index.js": "console.log('pkgb');", + "package.json": JSON.stringify({ + name: "pkgb", + scripts: { + present: "echo scriptb", + }, + }), + }, + dirname: { + "index.js": "console.log('pkgc');", + "package.json": 
JSON.stringify({ + name: "pkgc", + scripts: { + present: "echo scriptc", + }, + }), + }, + malformed1: { + "package.json": JSON.stringify({ + scripts: { + present: "echo malformed1", + }, + }), + }, + malformed2: { + "package.json": "asdfsadfas", + }, + missing: { + foo: "bar", + }, + }, + "package.json": JSON.stringify({ + name: "ws", + scripts: { + present: "echo rootscript", + }, + workspaces: [ + "packages/pkga", + "packages/pkgb", + "packages/dirname", + "packages/malformed1", + "packages/malformed2", + "packages/missing", + ], + }), }); const cwd_packages = join(cwd_root, "packages"); @@ -57,112 +67,169 @@ const cwd_a = join(cwd_packages, "pkga"); const cwd_b = join(cwd_packages, "pkgb"); const cwd_c = join(cwd_packages, "dirname"); -function runInCwdSuccess( - cwd: string, - pattern: string | string[], - target_pattern: RegExp | RegExp[], - antipattern?: RegExp | RegExp[], - command: string[] = ["present"], -) { - const cmd = [bunExe(), "run"]; - if (Array.isArray(pattern)) { - for (const p of pattern) { - cmd.push("--filter", p); - } - } else { - cmd.push("--filter", pattern); - } - for (const c of command) { - cmd.push(c); - } - const { exitCode, stdout, stderr } = spawnSync({ - cwd: cwd, - cmd: cmd, - env: bunEnv, - stdout: "pipe", - stderr: "pipe", - }); - const stdoutval = stdout.toString(); - for (const r of Array.isArray(target_pattern) ? target_pattern : [target_pattern]) { - expect(stdoutval).toMatch(r); - } - if (antipattern !== undefined) { - for (const r of Array.isArray(antipattern) ? 
antipattern : [antipattern]) { - expect(stdoutval).not.toMatch(r); - } - } - // expect(stderr.toString()).toBeEmpty(); - expect(exitCode).toBe(0); +function runInCwdSuccess({ + cwd, + pattern, + target_pattern, + antipattern, + command = ["present"], +}: { + cwd: string; + pattern: string | string[]; + target_pattern: RegExp | RegExp[]; + antipattern?: RegExp | RegExp[]; + command?: string[]; +}) { + const cmd = [bunExe(), "run"]; + if (Array.isArray(pattern)) { + for (const p of pattern) { + cmd.push("--filter", p); + } + } else { + cmd.push("--filter", pattern); + } + for (const c of command) { + cmd.push(c); + } + const { exitCode, stdout, stderr } = spawnSync({ + cwd: cwd, + cmd: cmd, + env: bunEnv, + stdout: "pipe", + stderr: "pipe", + }); + const stdoutval = stdout.toString(); + for (const r of Array.isArray(target_pattern) + ? target_pattern + : [target_pattern]) { + expect(stdoutval).toMatch(r); + } + if (antipattern !== undefined) { + for (const r of Array.isArray(antipattern) ? 
antipattern : [antipattern]) { + expect(stdoutval).not.toMatch(r); + } + } + // expect(stderr.toString()).toBeEmpty(); + expect(exitCode).toBe(0); } -function runInCwdFailure(cwd: string, pkgname: string, scriptname: string, result: RegExp) { - const { exitCode, stdout, stderr } = spawnSync({ - cwd: cwd, - cmd: [bunExe(), "run", "--filter", pkgname, scriptname], - env: bunEnv, - stdout: "pipe", - stderr: "pipe", - }); - expect(stdout.toString()).toBeEmpty(); - expect(stderr.toString()).toMatch(result); - expect(exitCode).toBe(1); +function runInCwdFailure( + cwd: string, + pkgname: string, + scriptname: string, + result: RegExp, +) { + const { exitCode, stdout, stderr } = spawnSync({ + cwd: cwd, + cmd: [bunExe(), "run", "--filter", pkgname, scriptname], + env: bunEnv, + stdout: "pipe", + stderr: "pipe", + }); + expect(stdout.toString()).toBeEmpty(); + expect(stderr.toString()).toMatch(result); + expect(exitCode).toBe(1); } describe("bun", () => { - const dirs = [cwd_root, cwd_packages, cwd_a, cwd_b, cwd_c]; - const packages = [ - { - name: "pkga", - output: /scripta/, - }, - { - name: "pkgb", - output: /scriptb/, - }, - { - name: "pkgc", - output: /scriptc/, - }, - ]; + const dirs = [cwd_root, cwd_packages, cwd_a, cwd_b, cwd_c]; + const packages = [ + { + name: "pkga", + output: /scripta/, + }, + { + name: "pkgb", + output: /scriptb/, + }, + { + name: "pkgc", + output: /scriptc/, + }, + ]; - const names = packages.map(p => p.name); - for (const d of dirs) { - for (const { name, output } of packages) { - test(`resolve ${name} from ${d}`, () => { - runInCwdSuccess(d, name, output); - }); - } - } + const names = packages.map((p) => p.name); + for (const d of dirs) { + for (const { name, output } of packages) { + test(`resolve ${name} from ${d}`, () => { + runInCwdSuccess({ cwd: d, pattern: name, target_pattern: output }); + }); + } + } - for (const d of dirs) { - test(`resolve '*' from ${d}`, () => { - runInCwdSuccess(d, "*", [/scripta/, /scriptb/, /scriptc/]); - }); 
- test(`resolve all from ${d}`, () => { - runInCwdSuccess(d, names, [/scripta/, /scriptb/, /scriptc/]); - }); - } + for (const d of dirs) { + test(`resolve '*' from ${d}`, () => { + runInCwdSuccess({ + cwd: d, + pattern: "*", + target_pattern: [/scripta/, /scriptb/, /scriptc/], + }); + }); + test(`resolve all from ${d}`, () => { + runInCwdSuccess({ + cwd: d, + pattern: names, + target_pattern: [/scripta/, /scriptb/, /scriptc/], + }); + }); + } - test("resolve all with glob", () => { - runInCwdSuccess(cwd_root, "./packages/*", [/scripta/, /scriptb/, /scriptc/]); - }); - test("resolve all with recursive glob", () => { - runInCwdSuccess(cwd_root, "./**", [/scripta/, /scriptb/, /scriptc/]); - }); - test("resolve 'pkga' and 'pkgb' but not 'pkgc' with targeted glob", () => { - runInCwdSuccess(cwd_root, "./packages/pkg*", [/scripta/, /scriptb/], /scriptc/); - }); + test("resolve all with glob", () => { + runInCwdSuccess({ + cwd: cwd_root, + pattern: "./packages/*", + target_pattern: [/scripta/, /scriptb/, /scriptc/, /malformed1/], + }); + }); + test("resolve all with recursive glob", () => { + runInCwdSuccess({ + cwd: cwd_root, + pattern: "./**", + target_pattern: [/scripta/, /scriptb/, /scriptc/, /malformed1/], + }); + }); + test("resolve 'pkga' and 'pkgb' but not 'pkgc' with targeted glob", () => { + runInCwdSuccess({ + cwd: cwd_root, + pattern: "./packages/pkg*", + target_pattern: [/scripta/, /scriptb/], + antipattern: /scriptc/, + }); + }); + test("resolve package with missing name", () => { + runInCwdSuccess({ + cwd: cwd_root, + pattern: "./packages/malformed1", + target_pattern: [/malformed1/], + antipattern: [/scripta/, /scriptb/, /scriptc/], + }); + }); - test.todo("resolve and run all js scripts", () => { - console.log(cwd_root); - runInCwdSuccess(cwd_root, "*", [/pkga/, /pkgb/, /pkgc/], [], ["./index.js"]); - }); + test.todo("resolve and run all js scripts", () => { + console.log(cwd_root); + runInCwdSuccess({ + cwd: cwd_root, + pattern: "*", + target_pattern: 
[/pkga/, /pkgb/, /pkgc/], + antipattern: [], + command: ["./index.js"], + }); + }); - test("run binaries in package directories", () => { - runInCwdSuccess(cwd_root, "*", [/pkga/, /pkgb/, /dirname/], [], ["bun", "-e", "console.log(process.cwd())"]); - }); + test("run binaries in package directories", () => { + runInCwdSuccess({ + cwd: cwd_root, + pattern: "*", + target_pattern: [/pkga/, /pkgb/, /dirname/], + antipattern: [], + command: ["bun", "-e", "console.log(process.cwd())"], + }); + }); - test("should error with missing script", () => { - runInCwdFailure(cwd_root, "*", "notpresent", /found/); - }); + test("should error with missing script", () => { + runInCwdFailure(cwd_root, "*", "notpresent", /found/); + }); + test("should warn about malformed package.json", () => { + runInCwdFailure(cwd_root, "*", "x", /Failed to parse package.json/); + }); }); From b4300bb0292b47c65d1e12718a6bac03bd900984 Mon Sep 17 00:00:00 2001 From: "autofix-ci[bot]" <114827586+autofix-ci[bot]@users.noreply.github.com> Date: Fri, 5 Apr 2024 20:20:06 +0000 Subject: [PATCH 43/74] [autofix.ci] apply automated fixes --- test/cli/run/filter-workspace.test.ts | 407 +++++++++++++------------- 1 file changed, 200 insertions(+), 207 deletions(-) diff --git a/test/cli/run/filter-workspace.test.ts b/test/cli/run/filter-workspace.test.ts index 453da81085fc6..4142e9038f026 100644 --- a/test/cli/run/filter-workspace.test.ts +++ b/test/cli/run/filter-workspace.test.ts @@ -4,62 +4,62 @@ import { bunEnv, bunExe, tempDirWithFiles } from "harness"; import { join } from "path"; const cwd_root = tempDirWithFiles("testworkspace", { - packages: { - pkga: { - "index.js": "console.log('pkga');", - "package.json": JSON.stringify({ - name: "pkga", - scripts: { - present: "echo scripta", - }, - }), - }, - pkgb: { - "index.js": "console.log('pkgb');", - "package.json": JSON.stringify({ - name: "pkgb", - scripts: { - present: "echo scriptb", - }, - }), - }, - dirname: { - "index.js": "console.log('pkgc');", - 
"package.json": JSON.stringify({ - name: "pkgc", - scripts: { - present: "echo scriptc", - }, - }), - }, - malformed1: { - "package.json": JSON.stringify({ - scripts: { - present: "echo malformed1", - }, - }), - }, - malformed2: { - "package.json": "asdfsadfas", - }, - missing: { - foo: "bar", - }, - }, - "package.json": JSON.stringify({ - name: "ws", - scripts: { - present: "echo rootscript", - }, - workspaces: [ - "packages/pkga", - "packages/pkgb", - "packages/dirname", - "packages/malformed1", - "packages/malformed2", - "packages/missing", - ], - }), + packages: { + pkga: { + "index.js": "console.log('pkga');", + "package.json": JSON.stringify({ + name: "pkga", + scripts: { + present: "echo scripta", + }, + }), + }, + pkgb: { + "index.js": "console.log('pkgb');", + "package.json": JSON.stringify({ + name: "pkgb", + scripts: { + present: "echo scriptb", + }, + }), + }, + dirname: { + "index.js": "console.log('pkgc');", + "package.json": JSON.stringify({ + name: "pkgc", + scripts: { + present: "echo scriptc", + }, + }), + }, + malformed1: { + "package.json": JSON.stringify({ + scripts: { + present: "echo malformed1", + }, + }), + }, + malformed2: { + "package.json": "asdfsadfas", + }, + missing: { + foo: "bar", + }, + }, + "package.json": JSON.stringify({ + name: "ws", + scripts: { + present: "echo rootscript", + }, + workspaces: [ + "packages/pkga", + "packages/pkgb", + "packages/dirname", + "packages/malformed1", + "packages/malformed2", + "packages/missing", + ], + }), }); const cwd_packages = join(cwd_root, "packages"); @@ -68,168 +68,161 @@ const cwd_b = join(cwd_packages, "pkgb"); const cwd_c = join(cwd_packages, "dirname"); function runInCwdSuccess({ - cwd, - pattern, - target_pattern, - antipattern, - command = ["present"], + cwd, + pattern, + target_pattern, + antipattern, + command = ["present"], }: { - cwd: string; - pattern: string | string[]; - target_pattern: RegExp | RegExp[]; - antipattern?: RegExp | RegExp[]; - command?: string[]; + cwd: string; 
+ pattern: string | string[]; + target_pattern: RegExp | RegExp[]; + antipattern?: RegExp | RegExp[]; + command?: string[]; }) { - const cmd = [bunExe(), "run"]; - if (Array.isArray(pattern)) { - for (const p of pattern) { - cmd.push("--filter", p); - } - } else { - cmd.push("--filter", pattern); - } - for (const c of command) { - cmd.push(c); - } - const { exitCode, stdout, stderr } = spawnSync({ - cwd: cwd, - cmd: cmd, - env: bunEnv, - stdout: "pipe", - stderr: "pipe", - }); - const stdoutval = stdout.toString(); - for (const r of Array.isArray(target_pattern) - ? target_pattern - : [target_pattern]) { - expect(stdoutval).toMatch(r); - } - if (antipattern !== undefined) { - for (const r of Array.isArray(antipattern) ? antipattern : [antipattern]) { - expect(stdoutval).not.toMatch(r); - } - } - // expect(stderr.toString()).toBeEmpty(); - expect(exitCode).toBe(0); + const cmd = [bunExe(), "run"]; + if (Array.isArray(pattern)) { + for (const p of pattern) { + cmd.push("--filter", p); + } + } else { + cmd.push("--filter", pattern); + } + for (const c of command) { + cmd.push(c); + } + const { exitCode, stdout, stderr } = spawnSync({ + cwd: cwd, + cmd: cmd, + env: bunEnv, + stdout: "pipe", + stderr: "pipe", + }); + const stdoutval = stdout.toString(); + for (const r of Array.isArray(target_pattern) ? target_pattern : [target_pattern]) { + expect(stdoutval).toMatch(r); + } + if (antipattern !== undefined) { + for (const r of Array.isArray(antipattern) ? 
antipattern : [antipattern]) { + expect(stdoutval).not.toMatch(r); + } + } + // expect(stderr.toString()).toBeEmpty(); + expect(exitCode).toBe(0); } -function runInCwdFailure( - cwd: string, - pkgname: string, - scriptname: string, - result: RegExp, -) { - const { exitCode, stdout, stderr } = spawnSync({ - cwd: cwd, - cmd: [bunExe(), "run", "--filter", pkgname, scriptname], - env: bunEnv, - stdout: "pipe", - stderr: "pipe", - }); - expect(stdout.toString()).toBeEmpty(); - expect(stderr.toString()).toMatch(result); - expect(exitCode).toBe(1); +function runInCwdFailure(cwd: string, pkgname: string, scriptname: string, result: RegExp) { + const { exitCode, stdout, stderr } = spawnSync({ + cwd: cwd, + cmd: [bunExe(), "run", "--filter", pkgname, scriptname], + env: bunEnv, + stdout: "pipe", + stderr: "pipe", + }); + expect(stdout.toString()).toBeEmpty(); + expect(stderr.toString()).toMatch(result); + expect(exitCode).toBe(1); } describe("bun", () => { - const dirs = [cwd_root, cwd_packages, cwd_a, cwd_b, cwd_c]; - const packages = [ - { - name: "pkga", - output: /scripta/, - }, - { - name: "pkgb", - output: /scriptb/, - }, - { - name: "pkgc", - output: /scriptc/, - }, - ]; + const dirs = [cwd_root, cwd_packages, cwd_a, cwd_b, cwd_c]; + const packages = [ + { + name: "pkga", + output: /scripta/, + }, + { + name: "pkgb", + output: /scriptb/, + }, + { + name: "pkgc", + output: /scriptc/, + }, + ]; - const names = packages.map((p) => p.name); - for (const d of dirs) { - for (const { name, output } of packages) { - test(`resolve ${name} from ${d}`, () => { - runInCwdSuccess({ cwd: d, pattern: name, target_pattern: output }); - }); - } - } + const names = packages.map(p => p.name); + for (const d of dirs) { + for (const { name, output } of packages) { + test(`resolve ${name} from ${d}`, () => { + runInCwdSuccess({ cwd: d, pattern: name, target_pattern: output }); + }); + } + } - for (const d of dirs) { - test(`resolve '*' from ${d}`, () => { - runInCwdSuccess({ - cwd: d, - 
pattern: "*", - target_pattern: [/scripta/, /scriptb/, /scriptc/], - }); - }); - test(`resolve all from ${d}`, () => { - runInCwdSuccess({ - cwd: d, - pattern: names, - target_pattern: [/scripta/, /scriptb/, /scriptc/], - }); - }); - } + for (const d of dirs) { + test(`resolve '*' from ${d}`, () => { + runInCwdSuccess({ + cwd: d, + pattern: "*", + target_pattern: [/scripta/, /scriptb/, /scriptc/], + }); + }); + test(`resolve all from ${d}`, () => { + runInCwdSuccess({ + cwd: d, + pattern: names, + target_pattern: [/scripta/, /scriptb/, /scriptc/], + }); + }); + } - test("resolve all with glob", () => { - runInCwdSuccess({ - cwd: cwd_root, - pattern: "./packages/*", - target_pattern: [/scripta/, /scriptb/, /scriptc/, /malformed1/], - }); - }); - test("resolve all with recursive glob", () => { - runInCwdSuccess({ - cwd: cwd_root, - pattern: "./**", - target_pattern: [/scripta/, /scriptb/, /scriptc/, /malformed1/], - }); - }); - test("resolve 'pkga' and 'pkgb' but not 'pkgc' with targeted glob", () => { - runInCwdSuccess({ - cwd: cwd_root, - pattern: "./packages/pkg*", - target_pattern: [/scripta/, /scriptb/], - antipattern: /scriptc/, - }); - }); - test("resolve package with missing name", () => { - runInCwdSuccess({ - cwd: cwd_root, - pattern: "./packages/malformed1", - target_pattern: [/malformed1/], - antipattern: [/scripta/, /scriptb/, /scriptc/], - }); - }); + test("resolve all with glob", () => { + runInCwdSuccess({ + cwd: cwd_root, + pattern: "./packages/*", + target_pattern: [/scripta/, /scriptb/, /scriptc/, /malformed1/], + }); + }); + test("resolve all with recursive glob", () => { + runInCwdSuccess({ + cwd: cwd_root, + pattern: "./**", + target_pattern: [/scripta/, /scriptb/, /scriptc/, /malformed1/], + }); + }); + test("resolve 'pkga' and 'pkgb' but not 'pkgc' with targeted glob", () => { + runInCwdSuccess({ + cwd: cwd_root, + pattern: "./packages/pkg*", + target_pattern: [/scripta/, /scriptb/], + antipattern: /scriptc/, + }); + }); + test("resolve 
package with missing name", () => { + runInCwdSuccess({ + cwd: cwd_root, + pattern: "./packages/malformed1", + target_pattern: [/malformed1/], + antipattern: [/scripta/, /scriptb/, /scriptc/], + }); + }); - test.todo("resolve and run all js scripts", () => { - console.log(cwd_root); - runInCwdSuccess({ - cwd: cwd_root, - pattern: "*", - target_pattern: [/pkga/, /pkgb/, /pkgc/], - antipattern: [], - command: ["./index.js"], - }); - }); + test.todo("resolve and run all js scripts", () => { + console.log(cwd_root); + runInCwdSuccess({ + cwd: cwd_root, + pattern: "*", + target_pattern: [/pkga/, /pkgb/, /pkgc/], + antipattern: [], + command: ["./index.js"], + }); + }); - test("run binaries in package directories", () => { - runInCwdSuccess({ - cwd: cwd_root, - pattern: "*", - target_pattern: [/pkga/, /pkgb/, /dirname/], - antipattern: [], - command: ["bun", "-e", "console.log(process.cwd())"], - }); - }); + test("run binaries in package directories", () => { + runInCwdSuccess({ + cwd: cwd_root, + pattern: "*", + target_pattern: [/pkga/, /pkgb/, /dirname/], + antipattern: [], + command: ["bun", "-e", "console.log(process.cwd())"], + }); + }); - test("should error with missing script", () => { - runInCwdFailure(cwd_root, "*", "notpresent", /found/); - }); - test("should warn about malformed package.json", () => { - runInCwdFailure(cwd_root, "*", "x", /Failed to parse package.json/); - }); + test("should error with missing script", () => { + runInCwdFailure(cwd_root, "*", "notpresent", /found/); + }); + test("should warn about malformed package.json", () => { + runInCwdFailure(cwd_root, "*", "x", /Failed to parse package.json/); + }); }); From 08af77a0d3d4482d00d2dd403d2aecc9f7405b6e Mon Sep 17 00:00:00 2001 From: Georgijs Vilums Date: Fri, 5 Apr 2024 14:00:05 -0700 Subject: [PATCH 44/74] update docs for filter --- docs/cli/run.md | 18 ++++++++++++++++++ docs/install/workspaces.md | 1 + 2 files changed, 19 insertions(+) diff --git a/docs/cli/run.md b/docs/cli/run.md index 
effe8d831aa9e..19589c610404e 100644 --- a/docs/cli/run.md +++ b/docs/cli/run.md @@ -151,6 +151,24 @@ By default, Bun respects this shebang and executes the script with `node`. Howev $ bun run --bun vite ``` +### Filtering + +When in monorepos containing multiple packages, you can use the `--filter` argument to execute scripts in many packages at once. + +Use `bun run --filter