Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(cli): --filter flag #8185

Merged
merged 84 commits into from Apr 12, 2024
Merged
Show file tree
Hide file tree
Changes from 14 commits
Commits
Show all changes
84 commits
Select commit Hold shift + click to select a range
ad920cb
Skeleton code for `bun run --workspace`
Jarred-Sumner Jan 13, 2024
6be006e
Update run_command.zig
Jarred-Sumner Jan 13, 2024
a734de8
implement directory traversal to find workspace root
gvilums Jan 14, 2024
c83507b
finish --workspace implementation
gvilums Jan 15, 2024
1868ad3
clean up changes in run_command.zig
gvilums Jan 15, 2024
496bac0
add workspace tests, update harness to handle nested dirs
gvilums Jan 15, 2024
1b659b3
[autofix.ci] apply automated fixes
autofix-ci[bot] Jan 15, 2024
2f33677
basic filtering
gvilums Jan 16, 2024
ea8ea8d
[autofix.ci] apply automated fixes
autofix-ci[bot] Jan 16, 2024
d8d0ffb
working filter without patterns
gvilums Jan 16, 2024
0c9c555
update tests, filter mostly working
gvilums Jan 17, 2024
f74aad9
simplify package name parsing, commit tests
gvilums Jan 17, 2024
4adbf68
support filter even without workspace setup
gvilums Jan 17, 2024
44ea630
move filter arg handling to separate source file
gvilums Jan 17, 2024
9d17118
use bun.sys.chdir, match root package for scripts
gvilums Jan 17, 2024
e68e7b0
fix exit code handling
gvilums Jan 18, 2024
5cb2d58
Merge branch 'main' into georgijs/bun-run--workspace
Jarred-Sumner Jan 18, 2024
af5728c
ignore node_modules and directories starting with . in --filter
gvilums Jan 18, 2024
ad18255
progress converting --filter to use iterators
gvilums Jan 18, 2024
5b19f98
convert filtering to use iterators
gvilums Jan 18, 2024
a4d1b17
cleanup
gvilums Jan 18, 2024
024d86e
implement DirEntry access method for glob (currently crashing)
gvilums Jan 19, 2024
9819a13
cleanup and fixes
gvilums Jan 19, 2024
c8c21bf
run js files in subprocess when filter flag passed
gvilums Jan 19, 2024
2557763
clean up dead code
gvilums Jan 20, 2024
5e467cc
fix fd leak in run_command.zig
gvilums Jan 20, 2024
7f01fae
Merge branch 'main' into georgijs/bun-run--workspace
gvilums Jan 20, 2024
1a3fbc1
[autofix.ci] apply automated fixes
autofix-ci[bot] Jan 20, 2024
9a7ba61
fix issues after merge
gvilums Jan 20, 2024
63124c3
use posix-spawn in runBinary, fix resource PATH variable resource leak
gvilums Jan 20, 2024
9af23ad
move filter argument to runtime category
gvilums Jan 20, 2024
8ab8941
Merge branch 'main' into georgijs/bun-run--workspace
Jarred-Sumner Jan 21, 2024
26a720d
fix test harness
gvilums Jan 21, 2024
3426873
add js and binary tests to filter-workspace
gvilums Jan 22, 2024
a363f48
[autofix.ci] apply automated fixes
autofix-ci[bot] Jan 22, 2024
5637a7d
Merge remote-tracking branch 'origin/main' into georgijs/bun-run--wor…
gvilums Jan 22, 2024
2c48289
Merge remote-tracking branch 'origin/main' into georgijs/bun-run--wor…
gvilums Jan 22, 2024
03037de
Merge branch 'main' into georgijs/bun-run--workspace
gvilums Apr 4, 2024
5b91ca3
fix compile after merge
gvilums Apr 4, 2024
63d5cd6
[autofix.ci] apply automated fixes
autofix-ci[bot] Apr 4, 2024
127a599
clean up filter-workspace test
gvilums Apr 4, 2024
c39e61d
[autofix.ci] apply automated fixes
autofix-ci[bot] Apr 4, 2024
f1db7ad
fixes to running binaries
gvilums Apr 4, 2024
7d122b3
fix actually setting cwd_override
gvilums Apr 4, 2024
ca4c96d
windows fixes
gvilums Apr 5, 2024
4faf32d
address some review comments
gvilums Apr 5, 2024
7e7e55d
handle malformed JSON
gvilums Apr 5, 2024
101a893
add various tests
gvilums Apr 5, 2024
b4300bb
[autofix.ci] apply automated fixes
autofix-ci[bot] Apr 5, 2024
08af77a
update docs for filter
gvilums Apr 5, 2024
a3f6998
[autofix.ci] apply automated fixes
autofix-ci[bot] Apr 5, 2024
a26eca6
reset tinycc commit
gvilums Apr 5, 2024
494fa3d
filtered run prototype
gvilums Apr 8, 2024
a9b9ce0
make pretty
gvilums Apr 8, 2024
d8f76f7
implement abort handler (not working)
gvilums Apr 8, 2024
8e17407
make prettier
gvilums Apr 8, 2024
75864c7
prep for windows
gvilums Apr 9, 2024
000d513
windows path and printing fixes
gvilums Apr 9, 2024
78814e6
implement log-style output (not tui)
gvilums Apr 9, 2024
b8abffd
fix issues when logging to file
gvilums Apr 9, 2024
3f40cc6
Merge remote-tracking branch 'origin/main' into georgijs/bun-run--wor…
gvilums Apr 9, 2024
22dfc45
revert a bunch of unnecessary changes
gvilums Apr 9, 2024
ac3ad16
cleanup
gvilums Apr 9, 2024
da3485f
implement dependency order execution
gvilums Apr 9, 2024
6ba7eb3
detect circular dependencies, fix cancel hang
gvilums Apr 10, 2024
defd08f
Fix `$PATH`
Jarred-Sumner Apr 10, 2024
7d6e443
ignore dep order on loop, stream on linux, sort pkgs
gvilums Apr 10, 2024
f781e51
support pre and post scripts
gvilums Apr 10, 2024
4ddddb9
add more filter tests, print elapsed time
gvilums Apr 10, 2024
a5fcbfa
enable 'bun --filter' without run
gvilums Apr 10, 2024
4046074
Merge remote-tracking branch 'origin/main' into georgijs/bun-run--wor…
gvilums Apr 10, 2024
e0726a3
fix harness after merge
gvilums Apr 10, 2024
473a2d4
[autofix.ci] apply automated fixes
autofix-ci[bot] Apr 10, 2024
ccfebfb
print number of scripts we're waiting for
gvilums Apr 10, 2024
f7e54d8
update docs, fix windows build
gvilums Apr 11, 2024
c178624
fix tests on windows
gvilums Apr 11, 2024
22846af
[autofix.ci] apply automated fixes
autofix-ci[bot] Apr 11, 2024
c9ff770
fix uninitialized memory
gvilums Apr 11, 2024
5083cf6
use terminal synchronized update sequences
gvilums Apr 11, 2024
beb3d24
Merge branch 'main' into georgijs/bun-run--workspace
Jarred-Sumner Apr 12, 2024
32bfdb1
Merge branch 'main' into georgijs/bun-run--workspace
Jarred-Sumner Apr 12, 2024
66b8a2d
Add skip list
Jarred-Sumner Apr 12, 2024
9c0f1d6
Preallocate
Jarred-Sumner Apr 12, 2024
63c4f73
Use current bun in tests
Jarred-Sumner Apr 12, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
2 changes: 2 additions & 0 deletions CMakeLists.txt
Expand Up @@ -5,6 +5,8 @@ cmake_policy(SET CMP0067 NEW)
set(Bun_VERSION "1.0.23")
set(WEBKIT_TAG b4de09f41b83e9e5c0e43ef414f1aee5968b6f7c)

set(CMAKE_EXPORT_COMPILE_COMMANDS ON)

set(BUN_WORKDIR "${CMAKE_CURRENT_BINARY_DIR}")
message(STATUS "Configuring Bun ${Bun_VERSION} in ${BUN_WORKDIR}")

Expand Down
10 changes: 5 additions & 5 deletions src/api/schema.zig
Expand Up @@ -1689,8 +1689,8 @@ pub const Api = struct {
/// origin
origin: ?[]const u8 = null,

/// absolute_working_dir
absolute_working_dir: ?[]const u8 = null,
/// cwd_override
cwd_override: ?[]const u8 = null,

/// define
define: ?StringMap = null,
Expand Down Expand Up @@ -1774,7 +1774,7 @@ pub const Api = struct {
this.origin = try reader.readValue([]const u8);
},
5 => {
this.absolute_working_dir = try reader.readValue([]const u8);
this.cwd_override = try reader.readValue([]const u8);
},
6 => {
this.define = try reader.readValue(StringMap);
Expand Down Expand Up @@ -1861,9 +1861,9 @@ pub const Api = struct {
try writer.writeFieldID(4);
try writer.writeValue(@TypeOf(origin), origin);
}
if (this.absolute_working_dir) |absolute_working_dir| {
if (this.cwd_override) |cwd_override| {
try writer.writeFieldID(5);
try writer.writeValue(@TypeOf(absolute_working_dir), absolute_working_dir);
try writer.writeValue(@TypeOf(cwd_override), cwd_override);
}
if (this.define) |define| {
try writer.writeFieldID(6);
Expand Down
2 changes: 1 addition & 1 deletion src/bundler.zig
Expand Up @@ -388,7 +388,7 @@ pub const Bundler = struct {
js_ast.Expr.Data.Store.create(allocator);
js_ast.Stmt.Data.Store.create(allocator);
const fs = try Fs.FileSystem.init(
opts.absolute_working_dir,
opts.cwd_override,
);
const bundle_options = try options.BundleOptions.fromApi(
allocator,
Expand Down
2 changes: 1 addition & 1 deletion src/bundler/bundle_v2.zig
Expand Up @@ -1605,7 +1605,7 @@ pub const BundleV2 = struct {
.define = if (config.define.count() > 0) config.define.toAPI() else null,
.entry_points = config.entry_points.keys(),
.target = config.target.toAPI(),
.absolute_working_dir = if (config.dir.list.items.len > 0) config.dir.toOwnedSliceLeaky() else null,
.cwd_override = if (config.dir.list.items.len > 0) config.dir.toOwnedSliceLeaky() else null,
.inject = &.{},
.external = config.external.keys(),
.main_fields = &.{},
Expand Down
57 changes: 35 additions & 22 deletions src/cli.zig
Expand Up @@ -20,6 +20,9 @@ const js_printer = bun.js_printer;
const js_ast = bun.JSAst;
const linker = @import("linker.zig");
const RegularExpression = bun.RegularExpression;
const Glob = @import("glob.zig");

const Package = @import("install/lockfile.zig").Package;

const sync = @import("./sync.zig");
const Api = @import("api/schema.zig").Api;
Expand Down Expand Up @@ -148,6 +151,9 @@ pub const Arguments = struct {
const base_params_ = [_]ParamType{
clap.parseParam("--env-file <STR>... Load environment variables from the specified file(s)") catch unreachable,
clap.parseParam("--cwd <STR> Absolute path to resolve files & entry points from. This just changes the process' cwd.") catch unreachable,
// clap.parseParam("-w, --workspace <STR> Perform the command on the specified workspace member package") catch unreachable,
clap.parseParam("--filter <STR>... Perform the command on all workspace member packages that match the pattern") catch unreachable,
// clap.parseParam("--fail-if-no-match Fail if no packages match the filter") catch unreachable,
clap.parseParam("-c, --config <PATH>? Specify path to Bun config file. Default <d>$cwd<r>/bunfig.toml") catch unreachable,
clap.parseParam("-h, --help Display this menu and exit") catch unreachable,
clap.parseParam("<POS>...") catch unreachable,
Expand Down Expand Up @@ -336,16 +342,17 @@ pub const Arguments = struct {
config_buf[config_path_.len] = 0;
config_path = config_buf[0..config_path_.len :0];
} else {
if (ctx.args.absolute_working_dir == null) {
var secondbuf: [bun.MAX_PATH_BYTES]u8 = undefined;
const cwd = bun.getcwd(&secondbuf) catch return;

ctx.args.absolute_working_dir = try allocator.dupe(u8, cwd);
var cwd_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
var cwd: []const u8 = undefined;
if (ctx.args.cwd_override) |cwd_val| {
cwd = cwd_val;
} else {
cwd = try bun.getcwd(&cwd_buf);
}

var parts = [_]string{ ctx.args.absolute_working_dir.?, config_path_ };
var parts = [_]string{ cwd, config_path_ };
config_path_ = resolve_path.joinAbsStringBuf(
ctx.args.absolute_working_dir.?,
cwd,
&config_buf,
&parts,
.auto,
Expand Down Expand Up @@ -402,20 +409,26 @@ pub const Arguments = struct {
}
}

var cwd: []u8 = undefined;
var cwd_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
var cwd: []const u8 = undefined;
if (args.option("--cwd")) |cwd_| {
cwd = brk: {
var outbuf: [bun.MAX_PATH_BYTES]u8 = undefined;
const out = std.os.realpath(cwd_, &outbuf) catch |err| {
Output.prettyErrorln("error resolving --cwd: {s}", .{@errorName(err)});
Global.exit(1);
};
break :brk try allocator.dupe(u8, out);
var outbuf: [bun.MAX_PATH_BYTES]u8 = undefined;
const out = std.os.realpath(cwd_, &outbuf) catch |err| {
Output.prettyErrorln("error resolving --cwd: {s}", .{@errorName(err)});
Global.exit(1);
};
std.os.chdir(out) catch |err| {
gvilums marked this conversation as resolved.
Show resolved Hide resolved
Output.prettyErrorln("error setting --cwd: {s}", .{@errorName(err)});
Global.exit(1);
};
cwd = try allocator.dupe(u8, out);
ctx.args.cwd_override = cwd;
} else {
cwd = try bun.getcwdAlloc(allocator);
cwd = try bun.getcwd(&cwd_buf);
}

ctx.filters = args.options("--filter");

if (cmd == .TestCommand) {
if (args.option("--timeout")) |timeout_ms| {
if (timeout_ms.len > 0) {
Expand Down Expand Up @@ -472,7 +485,6 @@ pub const Arguments = struct {
ctx.test_options.only = args.flag("--only");
}

ctx.args.absolute_working_dir = cwd;
ctx.positionals = args.positionals();

if (comptime Command.Tag.loads_config.get(cmd)) {
Expand Down Expand Up @@ -1087,6 +1099,8 @@ pub const Command = struct {
bundler_options: BundlerOptions = BundlerOptions{},
runtime_options: RuntimeOptions = RuntimeOptions{},

filters: []const []const u8 = &[_][]const u8{},

preloads: []const string = &[_]string{},
has_loaded_global_config: bool = false,

Expand Down Expand Up @@ -1608,11 +1622,7 @@ pub const Command = struct {
const ctx = try Command.Context.create(allocator, log, .RunCommand);

if (ctx.positionals.len > 0) {
if (try RunCommand.exec(ctx, false, true)) {
return;
}

Global.exit(1);
try RunCommand.execAll(ctx, false);
}
},
.RunAsNodeCommand => {
Expand Down Expand Up @@ -1721,6 +1731,9 @@ pub const Command = struct {
}

if (ctx.positionals.len > 0 and extension.len == 0) {
if (ctx.filters.len > 0) {
Output.prettyln("<r><yellow>warn<r>: Filters are ignored for auto command", .{});
}
if (try RunCommand.exec(ctx, true, false)) {
return;
}
Expand Down
183 changes: 183 additions & 0 deletions src/cli/filter_arg.zig
@@ -0,0 +1,183 @@
const std = @import("std");
const root = @import("root");
const bun = root.bun;
const string = bun.string;
const Output = bun.Output;
const Global = bun.Global;
const strings = bun.strings;
const json_parser = bun.JSON;
const Glob = @import("../glob.zig");

const Package = @import("../install/lockfile.zig").Package;

fn findWorkspaceMembers(allocator: std.mem.Allocator, log: *bun.logger.Log, workspace_map: *Package.WorkspaceMap, workdir_: []const u8) !void {
gvilums marked this conversation as resolved.
Show resolved Hide resolved
bun.JSAst.Expr.Data.Store.create(bun.default_allocator);
bun.JSAst.Stmt.Data.Store.create(bun.default_allocator);

defer {
bun.JSAst.Expr.Data.Store.reset();
bun.JSAst.Stmt.Data.Store.reset();
}

var workdir = workdir_;

while (true) : (workdir = std.fs.path.dirname(workdir) orelse break) {
const parent_trimmed = strings.withoutTrailingSlash(workdir);
var buf2: [bun.MAX_PATH_BYTES + 1]u8 = undefined;
dylan-conway marked this conversation as resolved.
Show resolved Hide resolved
@memcpy(buf2[0..parent_trimmed.len], parent_trimmed);
buf2[parent_trimmed.len..buf2.len][0.."/package.json".len].* = "/package.json".*;
buf2[parent_trimmed.len + "/package.json".len] = 0;
const json_path = buf2[0 .. parent_trimmed.len + "/package.json".len];
log.msgs.clearRetainingCapacity();
log.errors = 0;
log.warnings = 0;

const json_file = std.fs.cwd().openFileZ(
buf2[0 .. parent_trimmed.len + "/package.json".len :0].ptr,
.{ .mode = .read_only },
) catch continue;
defer json_file.close();

const json_stat_size = try json_file.getEndPos();
const json_buf = try allocator.alloc(u8, json_stat_size + 64);
defer allocator.free(json_buf);
const json_len = try json_file.preadAll(json_buf, 0);
const json_source = bun.logger.Source.initPathString(json_path, json_buf[0..json_len]);
const json = try json_parser.ParseJSONUTF8(&json_source, log, allocator);
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We are parsing the package.json files multiple times here since it may happen again later

I'm hesitant to block the PR on this but we should figure out a way to cache this to avoid the overhead

Also, we can simplify this code a bit:

bun.sys.File.toSource(json_path, allocator);


const prop = json.asProperty("workspaces") orelse continue;

const json_array = switch (prop.expr.data) {
.e_array => |arr| arr,
.e_object => |obj| if (obj.get("packages")) |packages| switch (packages.data) {
.e_array => |arr| arr,
else => break,
} else break,
else => break,
};
_ = Package.processWorkspaceNamesArray(
workspace_map,
allocator,
log,
json_array,
&json_source,
prop.loc,
null,
) catch |err| {
return err;
};
return;
}

// if we were not able to find a workspace root, try globbing for package.json files

var walker = Glob.BunGlobWalker{};
gvilums marked this conversation as resolved.
Show resolved Hide resolved
var arena = std.heap.ArenaAllocator.init(allocator);
const walker_init_res = try walker.init(&arena, "**/package.json", true, true, false, true, true);
switch (walker_init_res) {
.err => |err| {
Output.prettyErrorln("Error: {}", .{err});
return;
},
else => {},
}
defer walker.deinit(true);

var iter = Glob.BunGlobWalker.Iterator{ .walker = &walker };
const iter_init_res = try iter.init();
switch (iter_init_res) {
.err => |err| {
Output.prettyErrorln("Error: {}", .{err});
return;
},
else => {},
}
defer iter.deinit();

while (true) {
const next = try iter.next();
const path = switch (next) {
.err => |err| {
Output.prettyErrorln("Error: {}", .{err});
continue;
},
.result => |path| path orelse break,
};

const json_file = std.fs.cwd().openFile(
path,
.{ .mode = .read_only },
) catch {
continue;
};
defer json_file.close();

const json_stat_size = try json_file.getEndPos();
const json_buf = try allocator.alloc(u8, json_stat_size + 64);
defer allocator.free(json_buf);

const json_len = try json_file.preadAll(json_buf, 0);
const json_source = bun.logger.Source.initPathString(path, json_buf[0..json_len]);

var parser = try json_parser.PackageJSONVersionChecker.init(allocator, &json_source, log);
_ = try parser.parseExpr();
if (!parser.has_found_name) {
continue;
}
const entry = Package.WorkspaceMap.Entry{ .name = try allocator.dupe(u8, parser.found_name), .version = null, .name_loc = bun.logger.Loc.Empty };
const dirpath = std.fs.path.dirname(path) orelse continue;
try workspace_map.insert(try allocator.dupe(u8, dirpath), entry);
gvilums marked this conversation as resolved.
Show resolved Hide resolved
}
}

// Resolve the `--filter` patterns in `ctx.filters` against the workspace
// discovered from `cwd`, appending the directory path of every matching
// package to `paths` (each path duplicated with `ctx.allocator`).
// Patterns starting with '.' are treated as file-path patterns (matched
// against the package directory, made absolute relative to `cwd`);
// all other patterns are matched against the package name.
pub fn getFilteredPackages(ctx: bun.CLI.Command.Context, cwd: []const u8, paths: *std.ArrayList([]u8)) !void {
// TODO in the future we can try loading the lockfile to get the workspace information more quickly
// var manager = try PackageManager.init(ctx, PackageManager.Subcommand.pm);
// const load_lockfile = manager.lockfile.loadFromDisk(ctx.allocator, ctx.log, "bun.lockb");
// if (load_lockfile == .not_found) {

// find the paths of all projects that match this filter

var wsmap = Package.WorkspaceMap.init(ctx.allocator);
defer wsmap.deinit();
// find the root package.json of the workspace and load the child packages into workspace map
findWorkspaceMembers(ctx.allocator, ctx.log, &wsmap, cwd) catch |err| {
// Workspace discovery is fatal for filtering: report (with a debug
// trace in assert builds), flush log diagnostics, and exit.
if (comptime bun.Environment.allow_assert) {
if (@errorReturnTrace()) |trace| {
std.debug.print("Error: {s}\n{}\n", .{ @errorName(err), trace });
}
}
Output.err(err, "Failed to find workspace root in {s}", .{cwd});
ctx.log.printForLogLevelColorsRuntime(Output.errorWriter(), Output.enable_ansi_colors) catch {};
Global.exit(1);
};

// Reusable codepoint buffer for the current pattern; stack-allocated for
// typical pattern sizes, falling back to the heap for very long ones.
var pattern_stack = std.heap.stackFallback(4096, bun.default_allocator);
var pattern = std.ArrayList(u32).init(pattern_stack.get());
defer pattern.deinit();

// check each pattern against each package name
for (ctx.filters) |pattern_utf8_| {
var pattern_utf8 = pattern_utf8_;
var path_buf: [bun.MAX_PATH_BYTES]u8 = undefined;

// A leading '.' marks a path filter; resolve it to an absolute path.
const is_file_pattern = pattern_utf8.len > 0 and pattern_utf8[0] == '.';
if (is_file_pattern) {
const parts = [_]string{pattern_utf8};
pattern_utf8 = bun.path.joinAbsStringBuf(cwd, &path_buf, &parts, .auto);
}

// Decode the UTF-8 pattern into codepoints for the glob matcher.
pattern.clearRetainingCapacity();
var codepointer_iter = strings.UnsignedCodepointIterator.init(pattern_utf8);
var cursor = strings.UnsignedCodepointIterator.Cursor{};
while (codepointer_iter.next(&cursor)) {
try pattern.append(cursor.c);
}
// Match against the package path for file patterns, the name otherwise.
for (wsmap.keys(), wsmap.values()) |path, entry| {
const target = if (is_file_pattern) path else entry.name;
if (Glob.matchImpl(pattern.items, target)) {
try paths.append(try ctx.allocator.dupe(u8, path));
}
}
}
}