Merge pull request #20269 from ziglang/fix-progress-race

This commit is contained in:
Andrew Kelley 2024-06-12 13:20:14 -04:00 committed by GitHub
commit 44f4abf380
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 20 additions and 16 deletions

View File

@@ -734,7 +734,7 @@ const Serialized = struct {
     const Buffer = struct {
         parents: [node_storage_buffer_len]Node.Parent,
         storage: [node_storage_buffer_len]Node.Storage,
-        map: [node_storage_buffer_len]Node.Index,
+        map: [node_storage_buffer_len]Node.OptionalIndex,
         parents_copy: [node_storage_buffer_len]Node.Parent,
         storage_copy: [node_storage_buffer_len]Node.Storage,
@@ -753,9 +753,11 @@ fn serialize(serialized_buffer: *Serialized.Buffer) Serialized {
     // Iterate all of the nodes and construct a serializable copy of the state that can be examined
     // without atomics.
     const end_index = @atomicLoad(u32, &global_progress.node_end_index, .monotonic);
-    const node_parents = global_progress.node_parents[0..end_index];
-    const node_storage = global_progress.node_storage[0..end_index];
-    for (node_parents, node_storage, 0..) |*parent_ptr, *storage_ptr, i| {
+    for (
+        global_progress.node_parents[0..end_index],
+        global_progress.node_storage[0..end_index],
+        serialized_buffer.map[0..end_index],
+    ) |*parent_ptr, *storage_ptr, *map| {
         var begin_parent = @atomicLoad(Node.Parent, parent_ptr, .acquire);
         while (begin_parent != .unused) {
             const dest_storage = &serialized_buffer.storage[serialized_len];
@@ -766,12 +768,19 @@ fn serialize(serialized_buffer: *Serialized.Buffer) Serialized {
             if (begin_parent == end_parent) {
                 any_ipc = any_ipc or (dest_storage.getIpcFd() != null);
                 serialized_buffer.parents[serialized_len] = begin_parent;
-                serialized_buffer.map[i] = @enumFromInt(serialized_len);
+                map.* = @enumFromInt(serialized_len);
                 serialized_len += 1;
                 break;
             }
 
             begin_parent = end_parent;
+        } else {
+            // A node may be freed during the execution of this loop, causing
+            // there to be a parent reference to a nonexistent node. Without
+            // this assignment, this would lead to the map entry containing
+            // stale data. By assigning none, the child node with the bad
+            // parent pointer will be harmlessly omitted from the tree.
+            map.* = .none;
         }
     }
 

View File

@@ -1038,8 +1038,7 @@ pub fn main() anyerror!void {
     var zig_src_dir = try fs.cwd().openDir(zig_src_root, .{});
     defer zig_src_dir.close();
 
-    var progress = std.Progress{};
-    const root_progress = progress.start("", llvm_targets.len);
+    const root_progress = std.Progress.start(.{ .estimated_total_items = llvm_targets.len });
     defer root_progress.end();
 
     if (builtin.single_threaded) {
@@ -1074,7 +1073,7 @@ const Job = struct {
     llvm_tblgen_exe: []const u8,
     llvm_src_root: []const u8,
     zig_src_dir: std.fs.Dir,
-    root_progress: *std.Progress.Node,
+    root_progress: std.Progress.Node,
     llvm_target: LlvmTarget,
 };
 
@@ -1085,12 +1084,10 @@ fn processOneTarget(job: Job) anyerror!void {
     defer arena_state.deinit();
     const arena = arena_state.allocator();
 
-    var progress_node = job.root_progress.start(llvm_target.zig_name, 3);
-    progress_node.activate();
+    const progress_node = job.root_progress.start(llvm_target.zig_name, 3);
     defer progress_node.end();
 
-    var tblgen_progress = progress_node.start("invoke llvm-tblgen", 0);
-    tblgen_progress.activate();
+    const tblgen_progress = progress_node.start("invoke llvm-tblgen", 0);
 
     const child_args = [_][]const u8{
         job.llvm_tblgen_exe,
@@ -1127,16 +1124,14 @@ fn processOneTarget(job: Job) anyerror!void {
         },
     };
 
-    var json_parse_progress = progress_node.start("parse JSON", 0);
-    json_parse_progress.activate();
+    const json_parse_progress = progress_node.start("parse JSON", 0);
 
     const parsed = try json.parseFromSlice(json.Value, arena, json_text, .{});
     defer parsed.deinit();
     const root_map = &parsed.value.object;
     json_parse_progress.end();
 
-    var render_progress = progress_node.start("render zig code", 0);
-    render_progress.activate();
+    const render_progress = progress_node.start("render zig code", 0);
 
     var features_table = std.StringHashMap(Feature).init(arena);
     var all_features = std.ArrayList(Feature).init(arena);