Merge pull request #11300 from ziglang/stage2-debug-error-sets

Jakub Konka 2022-03-28 07:40:33 +02:00 committed by GitHub
commit b8cd56dc94
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
13 changed files with 562 additions and 429 deletions

View File

@ -386,18 +386,19 @@ fn dbgAdvancePCAndLine(self: *Emit, line: u32, column: u32) !void {
const delta_line = @intCast(i32, line) - @intCast(i32, self.prev_di_line);
const delta_pc: usize = self.code.items.len - self.prev_di_pc;
switch (self.debug_output) {
.dwarf => |dbg_out| {
.dwarf => |dw| {
// TODO Look into using the DWARF special opcodes to compress this data.
// It lets you emit single-byte opcodes that add different numbers to
// both the PC and the line number at the same time.
try dbg_out.dbg_line.ensureUnusedCapacity(11);
dbg_out.dbg_line.appendAssumeCapacity(DW.LNS.advance_pc);
leb128.writeULEB128(dbg_out.dbg_line.writer(), delta_pc) catch unreachable;
const dbg_line = dw.getDeclDebugLineBuffer();
try dbg_line.ensureUnusedCapacity(11);
dbg_line.appendAssumeCapacity(DW.LNS.advance_pc);
leb128.writeULEB128(dbg_line.writer(), delta_pc) catch unreachable;
if (delta_line != 0) {
dbg_out.dbg_line.appendAssumeCapacity(DW.LNS.advance_line);
leb128.writeILEB128(dbg_out.dbg_line.writer(), delta_line) catch unreachable;
dbg_line.appendAssumeCapacity(DW.LNS.advance_line);
leb128.writeILEB128(dbg_line.writer(), delta_line) catch unreachable;
}
dbg_out.dbg_line.appendAssumeCapacity(DW.LNS.copy);
dbg_line.appendAssumeCapacity(DW.LNS.copy);
self.prev_di_pc = self.code.items.len;
self.prev_di_line = line;
self.prev_di_column = column;
@ -586,8 +587,8 @@ fn mirDbgLine(emit: *Emit, inst: Mir.Inst.Index) !void {
fn mirDebugPrologueEnd(self: *Emit) !void {
switch (self.debug_output) {
.dwarf => |dbg_out| {
try dbg_out.dbg_line.append(DW.LNS.set_prologue_end);
.dwarf => |dw| {
try dw.getDeclDebugLineBuffer().append(DW.LNS.set_prologue_end);
try self.dbgAdvancePCAndLine(self.prev_di_line, self.prev_di_column);
},
.plan9 => {},
@ -597,8 +598,8 @@ fn mirDebugPrologueEnd(self: *Emit) !void {
fn mirDebugEpilogueBegin(self: *Emit) !void {
switch (self.debug_output) {
.dwarf => |dbg_out| {
try dbg_out.dbg_line.append(DW.LNS.set_epilogue_begin);
.dwarf => |dw| {
try dw.getDeclDebugLineBuffer().append(DW.LNS.set_epilogue_begin);
try self.dbgAdvancePCAndLine(self.prev_di_line, self.prev_di_column);
},
.plan9 => {},
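
For reference, a minimal standalone sketch (assuming only the standard library, with made-up delta values) of the byte sequence the refactored dbgAdvancePCAndLine builds through the new getDeclDebugLineBuffer() accessor: advance_pc and advance_line take LEB128 operands, and DW.LNS.copy emits the row.

// Standalone sketch, not part of this diff. Delta values are hypothetical.
const std = @import("std");
const DW = std.dwarf;
const leb128 = std.leb;

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    var dbg_line = std.ArrayList(u8).init(gpa.allocator());
    defer dbg_line.deinit();

    const delta_pc: usize = 12; // hypothetical code bytes since the last row
    const delta_line: i32 = 3; // hypothetical source lines advanced

    try dbg_line.append(DW.LNS.advance_pc);
    try leb128.writeULEB128(dbg_line.writer(), delta_pc);
    if (delta_line != 0) {
        try dbg_line.append(DW.LNS.advance_line);
        try leb128.writeILEB128(dbg_line.writer(), delta_line);
    }
    try dbg_line.append(DW.LNS.copy);

    std.debug.print("{any}\n", .{dbg_line.items});
}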

View File

@ -328,18 +328,19 @@ fn dbgAdvancePCAndLine(self: *Emit, line: u32, column: u32) !void {
const delta_line = @intCast(i32, line) - @intCast(i32, self.prev_di_line);
const delta_pc: usize = self.code.items.len - self.prev_di_pc;
switch (self.debug_output) {
.dwarf => |dbg_out| {
.dwarf => |dw| {
// TODO Look into using the DWARF special opcodes to compress this data.
// It lets you emit single-byte opcodes that add different numbers to
// both the PC and the line number at the same time.
try dbg_out.dbg_line.ensureUnusedCapacity(11);
dbg_out.dbg_line.appendAssumeCapacity(DW.LNS.advance_pc);
leb128.writeULEB128(dbg_out.dbg_line.writer(), delta_pc) catch unreachable;
const dbg_line = dw.getDeclDebugLineBuffer();
try dbg_line.ensureUnusedCapacity(11);
dbg_line.appendAssumeCapacity(DW.LNS.advance_pc);
leb128.writeULEB128(dbg_line.writer(), delta_pc) catch unreachable;
if (delta_line != 0) {
dbg_out.dbg_line.appendAssumeCapacity(DW.LNS.advance_line);
leb128.writeILEB128(dbg_out.dbg_line.writer(), delta_line) catch unreachable;
dbg_line.appendAssumeCapacity(DW.LNS.advance_line);
leb128.writeILEB128(dbg_line.writer(), delta_line) catch unreachable;
}
dbg_out.dbg_line.appendAssumeCapacity(DW.LNS.copy);
dbg_line.appendAssumeCapacity(DW.LNS.copy);
self.prev_di_pc = self.code.items.len;
self.prev_di_line = line;
self.prev_di_column = column;
@ -379,19 +380,17 @@ fn dbgAdvancePCAndLine(self: *Emit, line: u32, column: u32) !void {
/// after codegen for this symbol is done.
fn addDbgInfoTypeReloc(self: *Emit, ty: Type) !void {
switch (self.debug_output) {
.dwarf => |dbg_out| {
.dwarf => |dw| {
assert(ty.hasRuntimeBits());
const index = dbg_out.dbg_info.items.len;
try dbg_out.dbg_info.resize(index + 4); // DW.AT.type, DW.FORM.ref4
const gop = try dbg_out.dbg_info_type_relocs.getOrPutContext(self.bin_file.allocator, ty, .{ .target = self.target.* });
if (!gop.found_existing) {
gop.value_ptr.* = .{
.off = undefined,
.relocs = .{},
};
}
try gop.value_ptr.relocs.append(self.bin_file.allocator, @intCast(u32, index));
const dbg_info = dw.getDeclDebugInfoBuffer();
const index = dbg_info.items.len;
try dbg_info.resize(index + 4); // DW.AT.type, DW.FORM.ref4
const atom = switch (self.bin_file.tag) {
.elf => &self.function.mod_fn.owner_decl.link.elf.dbg_info_atom,
.macho => unreachable,
else => unreachable,
};
try dw.addTypeReloc(atom, ty, @intCast(u32, index), null);
},
.plan9 => {},
.none => {},
@ -409,16 +408,17 @@ fn genArgDbgInfo(self: *Emit, inst: Air.Inst.Index, arg_index: u32) !void {
switch (mcv) {
.register => |reg| {
switch (self.debug_output) {
.dwarf => |dbg_out| {
try dbg_out.dbg_info.ensureUnusedCapacity(3);
dbg_out.dbg_info.appendAssumeCapacity(link.File.Dwarf.abbrev_parameter);
dbg_out.dbg_info.appendSliceAssumeCapacity(&[2]u8{ // DW.AT.location, DW.FORM.exprloc
.dwarf => |dw| {
const dbg_info = dw.getDeclDebugInfoBuffer();
try dbg_info.ensureUnusedCapacity(3);
dbg_info.appendAssumeCapacity(link.File.Dwarf.abbrev_parameter);
dbg_info.appendSliceAssumeCapacity(&[2]u8{ // DW.AT.location, DW.FORM.exprloc
1, // ULEB128 dwarf expression length
reg.dwarfLocOp(),
});
try dbg_out.dbg_info.ensureUnusedCapacity(5 + name_with_null.len);
try dbg_info.ensureUnusedCapacity(5 + name_with_null.len);
try self.addDbgInfoTypeReloc(ty); // DW.AT.type, DW.FORM.ref4
dbg_out.dbg_info.appendSliceAssumeCapacity(name_with_null); // DW.AT.name, DW.FORM.string
dbg_info.appendSliceAssumeCapacity(name_with_null); // DW.AT.name, DW.FORM.string
},
.plan9 => {},
.none => {},
@ -428,7 +428,7 @@ fn genArgDbgInfo(self: *Emit, inst: Air.Inst.Index, arg_index: u32) !void {
.stack_argument_offset,
=> {
switch (self.debug_output) {
.dwarf => |dbg_out| {
.dwarf => |dw| {
const abi_size = math.cast(u32, ty.abiSize(self.target.*)) catch {
return self.fail("type '{}' too big to fit into stack frame", .{ty.fmt(target)});
};
@ -442,7 +442,8 @@ fn genArgDbgInfo(self: *Emit, inst: Air.Inst.Index, arg_index: u32) !void {
else => unreachable,
};
try dbg_out.dbg_info.append(link.File.Dwarf.abbrev_parameter);
const dbg_info = dw.getDeclDebugInfoBuffer();
try dbg_info.append(link.File.Dwarf.abbrev_parameter);
// Get length of the LEB128 stack offset
var counting_writer = std.io.countingWriter(std.io.null_writer);
@ -450,13 +451,13 @@ fn genArgDbgInfo(self: *Emit, inst: Air.Inst.Index, arg_index: u32) !void {
// DW.AT.location, DW.FORM.exprloc
// ULEB128 dwarf expression length
try leb128.writeULEB128(dbg_out.dbg_info.writer(), counting_writer.bytes_written + 1);
try dbg_out.dbg_info.append(DW.OP.breg11);
try leb128.writeILEB128(dbg_out.dbg_info.writer(), adjusted_stack_offset);
try leb128.writeULEB128(dbg_info.writer(), counting_writer.bytes_written + 1);
try dbg_info.append(DW.OP.breg11);
try leb128.writeILEB128(dbg_info.writer(), adjusted_stack_offset);
try dbg_out.dbg_info.ensureUnusedCapacity(5 + name_with_null.len);
try dbg_info.ensureUnusedCapacity(5 + name_with_null.len);
try self.addDbgInfoTypeReloc(ty); // DW.AT.type, DW.FORM.ref4
dbg_out.dbg_info.appendSliceAssumeCapacity(name_with_null); // DW.AT.name, DW.FORM.string
dbg_info.appendSliceAssumeCapacity(name_with_null); // DW.AT.name, DW.FORM.string
},
.plan9 => {},
.none => {},
@ -558,8 +559,8 @@ fn mirDbgLine(emit: *Emit, inst: Mir.Inst.Index) !void {
fn mirDebugPrologueEnd(emit: *Emit) !void {
switch (emit.debug_output) {
.dwarf => |dbg_out| {
try dbg_out.dbg_line.append(DW.LNS.set_prologue_end);
.dwarf => |dw| {
try dw.getDeclDebugLineBuffer().append(DW.LNS.set_prologue_end);
try emit.dbgAdvancePCAndLine(emit.prev_di_line, emit.prev_di_column);
},
.plan9 => {},
@ -569,8 +570,8 @@ fn mirDebugPrologueEnd(emit: *Emit) !void {
fn mirDebugEpilogueBegin(emit: *Emit) !void {
switch (emit.debug_output) {
.dwarf => |dbg_out| {
try dbg_out.dbg_line.append(DW.LNS.set_epilogue_begin);
.dwarf => |dw| {
try dw.getDeclDebugLineBuffer().append(DW.LNS.set_epilogue_begin);
try emit.dbgAdvancePCAndLine(emit.prev_di_line, emit.prev_di_column);
},
.plan9 => {},
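
The stack-argument path above measures the ILEB128-encoded offset with a counting writer before writing the ULEB128 expression length. A standalone sketch of that pattern, assuming only std and a hypothetical frame-relative offset:

// Standalone sketch of a DW.AT.location / DW.FORM.exprloc entry of the form
// "breg11 + offset": the ILEB128 length is measured first so the expression
// length can be written up front. The offset value is made up.
const std = @import("std");
const DW = std.dwarf;
const leb128 = std.leb;

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    var dbg_info = std.ArrayList(u8).init(gpa.allocator());
    defer dbg_info.deinit();

    const adjusted_stack_offset: i32 = -24; // hypothetical

    // Measure how many bytes the ILEB128-encoded offset needs.
    var counting_writer = std.io.countingWriter(std.io.null_writer);
    try leb128.writeILEB128(counting_writer.writer(), adjusted_stack_offset);

    // ULEB128 expression length: one opcode byte plus the encoded offset.
    try leb128.writeULEB128(dbg_info.writer(), counting_writer.bytes_written + 1);
    try dbg_info.append(DW.OP.breg11);
    try leb128.writeILEB128(dbg_info.writer(), adjusted_stack_offset);

    std.debug.print("{any}\n", .{dbg_info.items});
}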

View File

@ -745,21 +745,17 @@ fn ensureProcessDeathCapacity(self: *Self, additional_count: usize) !void {
/// after codegen for this symbol is done.
fn addDbgInfoTypeReloc(self: *Self, ty: Type) !void {
switch (self.debug_output) {
.dwarf => |dbg_out| {
.dwarf => |dw| {
assert(ty.hasRuntimeBits());
const index = dbg_out.dbg_info.items.len;
try dbg_out.dbg_info.resize(index + 4); // DW.AT.type, DW.FORM.ref4
const gop = try dbg_out.dbg_info_type_relocs.getOrPutContext(self.gpa, ty, .{
.target = self.target.*,
});
if (!gop.found_existing) {
gop.value_ptr.* = .{
.off = undefined,
.relocs = .{},
};
}
try gop.value_ptr.relocs.append(self.gpa, @intCast(u32, index));
const dbg_info = dw.getDeclDebugInfoBuffer();
const index = dbg_info.items.len;
try dbg_info.resize(index + 4); // DW.AT.type, DW.FORM.ref4
const atom = switch (self.bin_file.tag) {
.elf => &self.mod_fn.owner_decl.link.elf.dbg_info_atom,
.macho => unreachable,
else => unreachable,
};
try dw.addTypeReloc(atom, ty, @intCast(u32, index), null);
},
.plan9 => {},
.none => {},
@ -1573,16 +1569,17 @@ fn genArgDbgInfo(self: *Self, inst: Air.Inst.Index, mcv: MCValue, arg_index: u32
switch (mcv) {
.register => |reg| {
switch (self.debug_output) {
.dwarf => |dbg_out| {
try dbg_out.dbg_info.ensureUnusedCapacity(3);
dbg_out.dbg_info.appendAssumeCapacity(link.File.Dwarf.abbrev_parameter);
dbg_out.dbg_info.appendSliceAssumeCapacity(&[2]u8{ // DW.AT.location, DW.FORM.exprloc
.dwarf => |dw| {
const dbg_info = dw.getDeclDebugInfoBuffer();
try dbg_info.ensureUnusedCapacity(3);
dbg_info.appendAssumeCapacity(link.File.Dwarf.abbrev_parameter);
dbg_info.appendSliceAssumeCapacity(&[2]u8{ // DW.AT.location, DW.FORM.exprloc
1, // ULEB128 dwarf expression length
reg.dwarfLocOp(),
});
try dbg_out.dbg_info.ensureUnusedCapacity(5 + name_with_null.len);
try dbg_info.ensureUnusedCapacity(5 + name_with_null.len);
try self.addDbgInfoTypeReloc(ty); // DW.AT.type, DW.FORM.ref4
dbg_out.dbg_info.appendSliceAssumeCapacity(name_with_null); // DW.AT.name, DW.FORM.string
dbg_info.appendSliceAssumeCapacity(name_with_null); // DW.AT.name, DW.FORM.string
},
.plan9 => {},
.none => {},

View File

@ -89,18 +89,19 @@ fn dbgAdvancePCAndLine(self: *Emit, line: u32, column: u32) !void {
const delta_line = @intCast(i32, line) - @intCast(i32, self.prev_di_line);
const delta_pc: usize = self.code.items.len - self.prev_di_pc;
switch (self.debug_output) {
.dwarf => |dbg_out| {
.dwarf => |dw| {
// TODO Look into using the DWARF special opcodes to compress this data.
// It lets you emit single-byte opcodes that add different numbers to
// both the PC and the line number at the same time.
try dbg_out.dbg_line.ensureUnusedCapacity(11);
dbg_out.dbg_line.appendAssumeCapacity(DW.LNS.advance_pc);
leb128.writeULEB128(dbg_out.dbg_line.writer(), delta_pc) catch unreachable;
const dbg_line = dw.getDeclDebugLineBuffer();
try dbg_line.ensureUnusedCapacity(11);
dbg_line.appendAssumeCapacity(DW.LNS.advance_pc);
leb128.writeULEB128(dbg_line.writer(), delta_pc) catch unreachable;
if (delta_line != 0) {
dbg_out.dbg_line.appendAssumeCapacity(DW.LNS.advance_line);
leb128.writeILEB128(dbg_out.dbg_line.writer(), delta_line) catch unreachable;
dbg_line.appendAssumeCapacity(DW.LNS.advance_line);
leb128.writeILEB128(dbg_line.writer(), delta_line) catch unreachable;
}
dbg_out.dbg_line.appendAssumeCapacity(DW.LNS.copy);
dbg_line.appendAssumeCapacity(DW.LNS.copy);
self.prev_di_pc = self.code.items.len;
self.prev_di_line = line;
self.prev_di_column = column;
@ -182,8 +183,8 @@ fn mirDbgLine(emit: *Emit, inst: Mir.Inst.Index) !void {
fn mirDebugPrologueEnd(self: *Emit) !void {
switch (self.debug_output) {
.dwarf => |dbg_out| {
try dbg_out.dbg_line.append(DW.LNS.set_prologue_end);
.dwarf => |dw| {
try dw.getDeclDebugLineBuffer().append(DW.LNS.set_prologue_end);
try self.dbgAdvancePCAndLine(self.prev_di_line, self.prev_di_column);
},
.plan9 => {},
@ -193,8 +194,8 @@ fn mirDebugPrologueEnd(self: *Emit) !void {
fn mirDebugEpilogueBegin(self: *Emit) !void {
switch (self.debug_output) {
.dwarf => |dbg_out| {
try dbg_out.dbg_line.append(DW.LNS.set_epilogue_begin);
.dwarf => |dw| {
try dw.getDeclDebugLineBuffer().append(DW.LNS.set_epilogue_begin);
try self.dbgAdvancePCAndLine(self.prev_di_line, self.prev_di_column);
},
.plan9 => {},

View File

@ -350,6 +350,7 @@ pub fn generate(
var emit = Emit{
.mir = mir,
.bin_file = bin_file,
.function = &function,
.debug_output = debug_output,
.target = &bin_file.options.target,
.src_loc = src_loc,

View File

@ -16,6 +16,7 @@ const testing = std.testing;
const Air = @import("../../Air.zig");
const Allocator = mem.Allocator;
const CodeGen = @import("CodeGen.zig");
const DebugInfoOutput = @import("../../codegen.zig").DebugInfoOutput;
const DW = std.dwarf;
const Encoder = bits.Encoder;
@ -29,6 +30,7 @@ const Type = @import("../../type.zig").Type;
mir: Mir,
bin_file: *link.File,
function: *const CodeGen,
debug_output: DebugInfoOutput,
target: *const std.Target,
err_msg: ?*ErrorMsg = null,
@ -963,18 +965,19 @@ fn dbgAdvancePCAndLine(emit: *Emit, line: u32, column: u32) InnerError!void {
const delta_pc: usize = emit.code.items.len - emit.prev_di_pc;
log.debug(" (advance pc={d} and line={d})", .{ delta_line, delta_pc });
switch (emit.debug_output) {
.dwarf => |dbg_out| {
.dwarf => |dw| {
// TODO Look into using the DWARF special opcodes to compress this data.
// It lets you emit single-byte opcodes that add different numbers to
// both the PC and the line number at the same time.
try dbg_out.dbg_line.ensureUnusedCapacity(11);
dbg_out.dbg_line.appendAssumeCapacity(DW.LNS.advance_pc);
leb128.writeULEB128(dbg_out.dbg_line.writer(), delta_pc) catch unreachable;
const dbg_line = dw.getDeclDebugLineBuffer();
try dbg_line.ensureUnusedCapacity(11);
dbg_line.appendAssumeCapacity(DW.LNS.advance_pc);
leb128.writeULEB128(dbg_line.writer(), delta_pc) catch unreachable;
if (delta_line != 0) {
dbg_out.dbg_line.appendAssumeCapacity(DW.LNS.advance_line);
leb128.writeILEB128(dbg_out.dbg_line.writer(), delta_line) catch unreachable;
dbg_line.appendAssumeCapacity(DW.LNS.advance_line);
leb128.writeILEB128(dbg_line.writer(), delta_line) catch unreachable;
}
dbg_out.dbg_line.appendAssumeCapacity(DW.LNS.copy);
dbg_line.appendAssumeCapacity(DW.LNS.copy);
emit.prev_di_line = line;
emit.prev_di_column = column;
emit.prev_di_pc = emit.code.items.len;
@ -1022,8 +1025,8 @@ fn mirDbgPrologueEnd(emit: *Emit, inst: Mir.Inst.Index) InnerError!void {
const tag = emit.mir.instructions.items(.tag)[inst];
assert(tag == .dbg_prologue_end);
switch (emit.debug_output) {
.dwarf => |dbg_out| {
try dbg_out.dbg_line.append(DW.LNS.set_prologue_end);
.dwarf => |dw| {
try dw.getDeclDebugLineBuffer().append(DW.LNS.set_prologue_end);
log.debug("mirDbgPrologueEnd (line={d}, col={d})", .{ emit.prev_di_line, emit.prev_di_column });
try emit.dbgAdvancePCAndLine(emit.prev_di_line, emit.prev_di_column);
},
@ -1036,8 +1039,8 @@ fn mirDbgEpilogueBegin(emit: *Emit, inst: Mir.Inst.Index) InnerError!void {
const tag = emit.mir.instructions.items(.tag)[inst];
assert(tag == .dbg_epilogue_begin);
switch (emit.debug_output) {
.dwarf => |dbg_out| {
try dbg_out.dbg_line.append(DW.LNS.set_epilogue_begin);
.dwarf => |dw| {
try dw.getDeclDebugLineBuffer().append(DW.LNS.set_epilogue_begin);
log.debug("mirDbgEpilogueBegin (line={d}, col={d})", .{ emit.prev_di_line, emit.prev_di_column });
try emit.dbgAdvancePCAndLine(emit.prev_di_line, emit.prev_di_column);
},
@ -1063,16 +1066,17 @@ fn genArgDbgInfo(emit: *Emit, inst: Air.Inst.Index, mcv: MCValue, max_stack: u32
switch (mcv) {
.register => |reg| {
switch (emit.debug_output) {
.dwarf => |dbg_out| {
try dbg_out.dbg_info.ensureUnusedCapacity(3);
dbg_out.dbg_info.appendAssumeCapacity(link.File.Dwarf.abbrev_parameter);
dbg_out.dbg_info.appendSliceAssumeCapacity(&[2]u8{ // DW.AT.location, DW.FORM.exprloc
.dwarf => |dw| {
const dbg_info = dw.getDeclDebugInfoBuffer();
try dbg_info.ensureUnusedCapacity(3);
dbg_info.appendAssumeCapacity(link.File.Dwarf.abbrev_parameter);
dbg_info.appendSliceAssumeCapacity(&[2]u8{ // DW.AT.location, DW.FORM.exprloc
1, // ULEB128 dwarf expression length
reg.dwarfLocOp(),
});
try dbg_out.dbg_info.ensureUnusedCapacity(5 + name_with_null.len);
try dbg_info.ensureUnusedCapacity(5 + name_with_null.len);
try emit.addDbgInfoTypeReloc(ty); // DW.AT.type, DW.FORM.ref4
dbg_out.dbg_info.appendSliceAssumeCapacity(name_with_null); // DW.AT.name, DW.FORM.string
dbg_info.appendSliceAssumeCapacity(name_with_null); // DW.AT.name, DW.FORM.string
},
.plan9 => {},
.none => {},
@ -1080,25 +1084,26 @@ fn genArgDbgInfo(emit: *Emit, inst: Air.Inst.Index, mcv: MCValue, max_stack: u32
},
.stack_offset => |off| {
switch (emit.debug_output) {
.dwarf => |dbg_out| {
.dwarf => |dw| {
// we add here +16 like we do in airArg in CodeGen since we refer directly to
// rbp as the start of function frame minus 8 bytes for caller's rbp preserved in the
// prologue, and 8 bytes for return address.
// TODO we need to make this more generic if we don't use rbp as the frame pointer
// for example when -fomit-frame-pointer is set.
const disp = @intCast(i32, max_stack) - off + 16;
try dbg_out.dbg_info.ensureUnusedCapacity(8);
dbg_out.dbg_info.appendAssumeCapacity(link.File.Dwarf.abbrev_parameter);
const fixup = dbg_out.dbg_info.items.len;
dbg_out.dbg_info.appendSliceAssumeCapacity(&[2]u8{ // DW.AT.location, DW.FORM.exprloc
const dbg_info = dw.getDeclDebugInfoBuffer();
try dbg_info.ensureUnusedCapacity(8);
dbg_info.appendAssumeCapacity(link.File.Dwarf.abbrev_parameter);
const fixup = dbg_info.items.len;
dbg_info.appendSliceAssumeCapacity(&[2]u8{ // DW.AT.location, DW.FORM.exprloc
1, // we will backpatch it after we encode the displacement in LEB128
DW.OP.breg6, // .rbp TODO handle -fomit-frame-pointer
});
leb128.writeILEB128(dbg_out.dbg_info.writer(), disp) catch unreachable;
dbg_out.dbg_info.items[fixup] += @intCast(u8, dbg_out.dbg_info.items.len - fixup - 2);
try dbg_out.dbg_info.ensureUnusedCapacity(5 + name_with_null.len);
leb128.writeILEB128(dbg_info.writer(), disp) catch unreachable;
dbg_info.items[fixup] += @intCast(u8, dbg_info.items.len - fixup - 2);
try dbg_info.ensureUnusedCapacity(5 + name_with_null.len);
try emit.addDbgInfoTypeReloc(ty); // DW.AT.type, DW.FORM.ref4
dbg_out.dbg_info.appendSliceAssumeCapacity(name_with_null); // DW.AT.name, DW.FORM.string
dbg_info.appendSliceAssumeCapacity(name_with_null); // DW.AT.name, DW.FORM.string
},
.plan9 => {},
@ -1113,21 +1118,17 @@ fn genArgDbgInfo(emit: *Emit, inst: Air.Inst.Index, mcv: MCValue, max_stack: u32
/// after codegen for this symbol is done.
fn addDbgInfoTypeReloc(emit: *Emit, ty: Type) !void {
switch (emit.debug_output) {
.dwarf => |dbg_out| {
.dwarf => |dw| {
assert(ty.hasRuntimeBits());
const index = dbg_out.dbg_info.items.len;
try dbg_out.dbg_info.resize(index + 4); // DW.AT.type, DW.FORM.ref4
const gop = try dbg_out.dbg_info_type_relocs.getOrPutContext(emit.bin_file.allocator, ty, .{
.target = emit.target.*,
});
if (!gop.found_existing) {
gop.value_ptr.* = .{
.off = undefined,
.relocs = .{},
};
}
try gop.value_ptr.relocs.append(emit.bin_file.allocator, @intCast(u32, index));
const dbg_info = dw.getDeclDebugInfoBuffer();
const index = dbg_info.items.len;
try dbg_info.resize(index + 4); // DW.AT.type, DW.FORM.ref4
const atom = switch (emit.bin_file.tag) {
.elf => &emit.function.mod_fn.owner_decl.link.elf.dbg_info_atom,
.macho => &emit.function.mod_fn.owner_decl.link.macho.dbg_info_atom,
else => unreachable,
};
try dw.addTypeReloc(atom, ty, @intCast(u32, index), null);
},
.plan9 => {},
.none => {},
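
addDbgInfoTypeReloc now only reserves the four DW.FORM.ref4 bytes and registers a relocation with the Dwarf module via addTypeReloc. A standalone sketch of the reserve-then-patch arithmetic that commitDeclState later applies, with hypothetical offsets:

// Standalone sketch of a DW.FORM.ref4 relocation: reserve 4 bytes at emit time,
// then patch in atom_off + type_off + addend once the type's offset within the
// .debug_info atom is known. All offsets below are made up.
const std = @import("std");

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    var dbg_info = std.ArrayList(u8).init(gpa.allocator());
    defer dbg_info.deinit();

    // Emit time: reserve the DW.AT.type / DW.FORM.ref4 cell and remember where it is.
    const reloc_offset = dbg_info.items.len;
    try dbg_info.resize(reloc_offset + 4);

    // Commit time: the referenced type's entry landed type_off bytes into an atom
    // placed at atom_off within .debug_info.
    const atom_off: u32 = 0x200;
    const type_off: u32 = 0x30;
    const addend: u32 = 0;
    std.mem.writeInt(
        u32,
        dbg_info.items[reloc_offset..][0..4],
        atom_off + type_off + addend,
        .Little,
    );

    std.debug.print("{any}\n", .{dbg_info.items});
}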

View File

@ -1,26 +1,25 @@
const std = @import("std");
const build_options = @import("build_options");
const builtin = @import("builtin");
const assert = std.debug.assert;
const leb128 = std.leb;
const link = @import("link.zig");
const log = std.log.scoped(.codegen);
const mem = std.mem;
const math = std.math;
const assert = std.debug.assert;
const trace = @import("tracy.zig").trace;
const Air = @import("Air.zig");
const Zir = @import("Zir.zig");
const Liveness = @import("Liveness.zig");
const Type = @import("type.zig").Type;
const Value = @import("value.zig").Value;
const TypedValue = @import("TypedValue.zig");
const link = @import("link.zig");
const Module = @import("Module.zig");
const Allocator = mem.Allocator;
const Compilation = @import("Compilation.zig");
const ErrorMsg = Module.ErrorMsg;
const Liveness = @import("Liveness.zig");
const Module = @import("Module.zig");
const Target = std.Target;
const Allocator = mem.Allocator;
const trace = @import("tracy.zig").trace;
const DW = std.dwarf;
const leb128 = std.leb;
const log = std.log.scoped(.codegen);
const build_options = @import("build_options");
const RegisterManager = @import("register_manager.zig").RegisterManager;
const Type = @import("type.zig").Type;
const TypedValue = @import("TypedValue.zig");
const Value = @import("value.zig").Value;
const Zir = @import("Zir.zig");
pub const FnResult = union(enum) {
/// The `code` parameter passed to `generateSymbol` has the value appended.
@ -43,11 +42,7 @@ pub const GenerateSymbolError = error{
};
pub const DebugInfoOutput = union(enum) {
dwarf: struct {
dbg_line: *std.ArrayList(u8),
dbg_info: *std.ArrayList(u8),
dbg_info_type_relocs: *link.File.DbgInfoTypeRelocsTable,
},
dwarf: *link.File.Dwarf,
/// the plan9 debuginfo output is a bytecode with 4 opcodes
/// assume all numbers/variables are bytes
/// 0 w x y z -> interpret w x y z as a big-endian i32, and add it to the line offset
@ -573,7 +568,6 @@ pub fn generateSymbol(
return Result{ .appended = {} };
},
.Union => {
// TODO generate debug info for unions
const union_obj = typed_value.val.castTag(.@"union").?.data;
const layout = typed_value.ty.unionGetLayout(target);
@ -695,7 +689,6 @@ pub fn generateSymbol(
return Result{ .appended = {} };
},
.ErrorUnion => {
// TODO generate debug info for error unions
const error_ty = typed_value.ty.errorUnionSet();
const payload_ty = typed_value.ty.errorUnionPayload();
const is_payload = typed_value.val.errorUnionIsPayload();
@ -749,7 +742,6 @@ pub fn generateSymbol(
return Result{ .appended = {} };
},
.ErrorSet => {
// TODO generate debug info for error sets
switch (typed_value.val.tag()) {
.@"error" => {
const name = typed_value.val.getError().?;

View File

@ -1,22 +1,22 @@
const std = @import("std");
const build_options = @import("build_options");
const builtin = @import("builtin");
const mem = std.mem;
const Allocator = std.mem.Allocator;
const fs = std.fs;
const log = std.log.scoped(.link);
const assert = std.debug.assert;
const Compilation = @import("Compilation.zig");
const Module = @import("Module.zig");
const fs = std.fs;
const mem = std.mem;
const log = std.log.scoped(.link);
const trace = @import("tracy.zig").trace;
const wasi_libc = @import("wasi_libc.zig");
const Air = @import("Air.zig");
const Allocator = std.mem.Allocator;
const Cache = @import("Cache.zig");
const Compilation = @import("Compilation.zig");
const LibCInstallation = @import("libc_installation.zig").LibCInstallation;
const Liveness = @import("Liveness.zig");
const Module = @import("Module.zig");
const Package = @import("Package.zig");
const Type = @import("type.zig").Type;
const Cache = @import("Cache.zig");
const build_options = @import("build_options");
const LibCInstallation = @import("libc_installation.zig").LibCInstallation;
const wasi_libc = @import("wasi_libc.zig");
const Air = @import("Air.zig");
const Liveness = @import("Liveness.zig");
const TypedValue = @import("TypedValue.zig");
pub const SystemLib = struct {
@ -245,24 +245,6 @@ pub const File = struct {
nvptx: void,
};
/// For DWARF .debug_info.
pub const DbgInfoTypeRelocsTable = std.ArrayHashMapUnmanaged(
Type,
DbgInfoTypeReloc,
Type.HashContext32,
true,
);
/// For DWARF .debug_info.
pub const DbgInfoTypeReloc = struct {
/// Offset from `TextBlock.dbg_info_off` (the buffer that is local to a Decl).
/// This is where the .debug_info tag for the type is.
off: u32,
/// Offset from `TextBlock.dbg_info_off` (the buffer that is local to a Decl).
/// List of DW.AT.type / DW.FORM.ref4 that points to the type.
relocs: std.ArrayListUnmanaged(u32),
};
/// Attempts incremental linking, if the file already exists. If
/// incremental linking fails, falls back to truncating the file and
/// rewriting it. A malicious file is detected as incremental link failure

View File

@ -31,27 +31,89 @@ dbg_line_fn_free_list: std.AutoHashMapUnmanaged(*SrcFn, void) = .{},
dbg_line_fn_first: ?*SrcFn = null,
dbg_line_fn_last: ?*SrcFn = null,
/// A list of `TextBlock` whose corresponding .debug_info tags have surplus capacity.
/// This is the same concept as `text_block_free_list`; see those doc comments.
dbg_info_decl_free_list: std.AutoHashMapUnmanaged(*DebugInfoAtom, void) = .{},
dbg_info_decl_first: ?*DebugInfoAtom = null,
dbg_info_decl_last: ?*DebugInfoAtom = null,
/// A list of `Atom`s whose corresponding .debug_info tags have surplus capacity.
/// This is the same concept as `text_block_free_list`; see those doc comments.
atom_free_list: std.AutoHashMapUnmanaged(*Atom, void) = .{},
atom_first: ?*Atom = null,
atom_last: ?*Atom = null,
abbrev_table_offset: ?u64 = null,
/// TODO replace with InternArena
/// Table of debug symbol names.
strtab: std.ArrayListUnmanaged(u8) = .{},
pub const DebugInfoAtom = struct {
/// Lives only as long as the analysed Decl.
/// Allocated with `initDeclState`.
/// Freed with `commitDeclState`.
decl_state: ?DeclState = null,
/// List of atoms that are owned directly by the DWARF module.
/// TODO convert links in DebugInfoAtom into indices and make
/// sure every atom is owned by this module.
managed_atoms: std.ArrayListUnmanaged(*Atom) = .{},
global_abbrev_relocs: std.ArrayListUnmanaged(AbbrevRelocation) = .{},
pub const Atom = struct {
/// Previous/next linked list pointers.
/// This is the linked list node for this Decl's corresponding .debug_info tag.
prev: ?*DebugInfoAtom,
next: ?*DebugInfoAtom,
prev: ?*Atom,
next: ?*Atom,
/// Offset into .debug_info pointing to the tag for this Decl.
off: u32,
/// Size of the .debug_info tag for this Decl, not including padding.
len: u32,
};
/// Represents state of the analysed Decl.
/// Includes Decl's abbrev table of type Types, matching arena
/// and a set of relocations that will be resolved once this
/// Decl's inner Atom is assigned an offset within the DWARF section.
pub const DeclState = struct {
dbg_line: std.ArrayList(u8),
dbg_info: std.ArrayList(u8),
abbrev_type_arena: std.heap.ArenaAllocator,
abbrev_table: std.ArrayListUnmanaged(AbbrevEntry) = .{},
abbrev_resolver: std.HashMapUnmanaged(
Type,
u32,
Type.HashContext64,
std.hash_map.default_max_load_percentage,
) = .{},
abbrev_relocs: std.ArrayListUnmanaged(AbbrevRelocation) = .{},
fn init(gpa: Allocator) DeclState {
return .{
.dbg_line = std.ArrayList(u8).init(gpa),
.dbg_info = std.ArrayList(u8).init(gpa),
.abbrev_type_arena = std.heap.ArenaAllocator.init(gpa),
};
}
fn deinit(self: *DeclState, gpa: Allocator) void {
self.dbg_line.deinit();
self.dbg_info.deinit();
self.abbrev_type_arena.deinit();
self.abbrev_table.deinit(gpa);
self.abbrev_resolver.deinit(gpa);
self.abbrev_relocs.deinit(gpa);
}
};
pub const AbbrevEntry = struct {
atom: *const Atom,
@"type": Type,
offset: u32,
};
pub const AbbrevRelocation = struct {
target: u32,
atom: *const Atom,
offset: u32,
addend: u32,
};
pub const SrcFn = struct {
/// Offset from the beginning of the Debug Line Program header that contains this function.
off: u32,
@ -117,29 +179,32 @@ pub fn init(allocator: Allocator, tag: File.Tag, target: std.Target) Dwarf {
pub fn deinit(self: *Dwarf) void {
const gpa = self.allocator;
self.dbg_line_fn_free_list.deinit(gpa);
self.dbg_info_decl_free_list.deinit(gpa);
self.atom_free_list.deinit(gpa);
self.strtab.deinit(gpa);
self.global_abbrev_relocs.deinit(gpa);
for (self.managed_atoms.items) |atom| {
gpa.destroy(atom);
}
self.managed_atoms.deinit(gpa);
}
pub const DeclDebugBuffers = struct {
dbg_line_buffer: std.ArrayList(u8),
dbg_info_buffer: std.ArrayList(u8),
dbg_info_type_relocs: File.DbgInfoTypeRelocsTable,
};
pub fn initDeclDebugInfo(self: *Dwarf, decl: *Module.Decl) !DeclDebugBuffers {
/// Initializes Decl's state and its matching output buffers.
/// Call this before `commitDeclState`.
pub fn initDeclState(self: *Dwarf, decl: *Module.Decl) !void {
const tracy = trace(@src());
defer tracy.end();
const decl_name = try decl.getFullyQualifiedName(self.allocator);
defer self.allocator.free(decl_name);
log.debug("initDeclDebugInfo {s}{*}", .{ decl_name, decl });
log.debug("initDeclState {s}{*}", .{ decl_name, decl });
const gpa = self.allocator;
var dbg_line_buffer = std.ArrayList(u8).init(gpa);
var dbg_info_buffer = std.ArrayList(u8).init(gpa);
var dbg_info_type_relocs: File.DbgInfoTypeRelocsTable = .{};
assert(self.decl_state == null);
self.decl_state = DeclState.init(gpa);
const dbg_line_buffer = &self.decl_state.?.dbg_line;
const dbg_info_buffer = &self.decl_state.?.dbg_info;
assert(decl.has_tv);
@ -202,19 +267,17 @@ pub fn initDeclDebugInfo(self: *Dwarf, decl: *Module.Decl) !DeclDebugBuffers {
dbg_info_buffer.items.len += ptr_width_bytes; // DW.AT.low_pc, DW.FORM.addr
assert(self.getRelocDbgInfoSubprogramHighPC() == dbg_info_buffer.items.len);
dbg_info_buffer.items.len += 4; // DW.AT.high_pc, DW.FORM.data4
//
if (fn_ret_has_bits) {
const gop = try dbg_info_type_relocs.getOrPutContext(gpa, fn_ret_type, .{
.target = self.target,
});
if (!gop.found_existing) {
gop.value_ptr.* = .{
.off = undefined,
.relocs = .{},
};
}
try gop.value_ptr.relocs.append(gpa, @intCast(u32, dbg_info_buffer.items.len));
const atom = switch (self.tag) {
.elf => &decl.link.elf.dbg_info_atom,
.macho => &decl.link.macho.dbg_info_atom,
else => unreachable,
};
try self.addTypeReloc(atom, fn_ret_type, @intCast(u32, dbg_info_buffer.items.len), null);
dbg_info_buffer.items.len += 4; // DW.AT.type, DW.FORM.ref4
}
dbg_info_buffer.appendSliceAssumeCapacity(decl_name_with_null); // DW.AT.name, DW.FORM.string
},
@ -222,30 +285,28 @@ pub fn initDeclDebugInfo(self: *Dwarf, decl: *Module.Decl) !DeclDebugBuffers {
// TODO implement .debug_info for global variables
},
}
return DeclDebugBuffers{
.dbg_info_buffer = dbg_info_buffer,
.dbg_line_buffer = dbg_line_buffer,
.dbg_info_type_relocs = dbg_info_type_relocs,
};
}
pub fn commitDeclDebugInfo(
pub fn commitDeclState(
self: *Dwarf,
file: *File,
module: *Module,
decl: *Module.Decl,
sym_addr: u64,
sym_size: u64,
debug_buffers: *DeclDebugBuffers,
) !void {
const tracy = trace(@src());
defer tracy.end();
assert(self.decl_state != null); // Caller forgot to call `initDeclState`
defer {
self.decl_state.?.deinit(self.allocator);
self.decl_state = null;
}
const gpa = self.allocator;
var dbg_line_buffer = &debug_buffers.dbg_line_buffer;
var dbg_info_buffer = &debug_buffers.dbg_info_buffer;
var dbg_info_type_relocs = &debug_buffers.dbg_info_type_relocs;
var dbg_line_buffer = &self.decl_state.?.dbg_line;
var dbg_info_buffer = &self.decl_state.?.dbg_info;
const target_endian = self.target.cpu.arch.endian();
@ -443,68 +504,65 @@ pub fn commitDeclDebugInfo(
if (dbg_info_buffer.items.len == 0)
return;
// We need this for the duration of this function only so that for composite
// types such as []const u32, if the type *u32 is non-existent, we create
// it synthetically and store the backing bytes in this arena. After we are
// done with the relocations, we can safely deinit the entire memory slab.
// TODO currently, we do not store the relocations for future use, however,
// if that is the case, we should move memory management to a higher scope,
// such as linker scope, or whatnot.
var dbg_type_arena = std.heap.ArenaAllocator.init(gpa);
defer dbg_type_arena.deinit();
var nested_ref4_relocs = std.ArrayList(u32).init(gpa);
defer nested_ref4_relocs.deinit();
{
// Now we emit the .debug_info types of the Decl. These will count towards the size of
// the buffer, so we have to do it before computing the offset, and we can't perform the actual
// relocations yet.
var it: usize = 0;
while (it < dbg_info_type_relocs.count()) : (it += 1) {
const ty = dbg_info_type_relocs.keys()[it];
const value_ptr = dbg_info_type_relocs.getPtrContext(ty, .{
.target = self.target,
}).?;
value_ptr.off = @intCast(u32, dbg_info_buffer.items.len);
try self.addDbgInfoType(
dbg_type_arena.allocator(),
ty,
dbg_info_buffer,
dbg_info_type_relocs,
&nested_ref4_relocs,
);
}
}
const atom = switch (self.tag) {
.elf => &decl.link.elf.dbg_info_atom,
.macho => &decl.link.macho.dbg_info_atom,
else => unreachable,
};
try self.updateDeclDebugInfoAllocation(file, atom, @intCast(u32, dbg_info_buffer.items.len));
const decl_state = &self.decl_state.?;
{
// Now that we have the offset assigned we can finally perform type relocations.
for (dbg_info_type_relocs.values()) |value| {
for (value.relocs.items) |off| {
mem.writeInt(
u32,
dbg_info_buffer.items[off..][0..4],
atom.off + value.off,
target_endian,
);
}
// Now we emit the .debug_info types of the Decl. These will count towards the size of
// the buffer, so we have to do it before computing the offset, and we can't perform the actual
// relocations yet.
var sym_index: usize = 0;
while (sym_index < decl_state.abbrev_table.items.len) : (sym_index += 1) {
const symbol = &decl_state.abbrev_table.items[sym_index];
const ty = symbol.@"type";
const deferred: bool = blk: {
if (ty.isAnyError()) break :blk true;
switch (ty.tag()) {
.error_set_inferred => {
if (!ty.castTag(.error_set_inferred).?.data.is_resolved) break :blk true;
},
else => {},
}
break :blk false;
};
if (deferred) continue;
symbol.offset = @intCast(u32, dbg_info_buffer.items.len);
try self.addDbgInfoType(decl_state.abbrev_type_arena.allocator(), module, atom, ty, dbg_info_buffer);
}
// Offsets to positions with known a priori relative displacement values.
// Here, we just need to add the offset of the atom to the read value in the
// relocated cell.
// TODO Should probably generalise this with type relocs.
for (nested_ref4_relocs.items) |off| {
const addend = mem.readInt(u32, dbg_info_buffer.items[off..][0..4], target_endian);
}
try self.updateDeclDebugInfoAllocation(file, atom, @intCast(u32, dbg_info_buffer.items.len));
while (decl_state.abbrev_relocs.popOrNull()) |reloc| {
const symbol = decl_state.abbrev_table.items[reloc.target];
const ty = symbol.@"type";
const deferred: bool = blk: {
if (ty.isAnyError()) break :blk true;
switch (ty.tag()) {
.error_set_inferred => {
if (!ty.castTag(.error_set_inferred).?.data.is_resolved) break :blk true;
},
else => {},
}
break :blk false;
};
if (deferred) {
try self.global_abbrev_relocs.append(gpa, .{
.target = undefined,
.offset = reloc.offset,
.atom = reloc.atom,
.addend = reloc.addend,
});
} else {
mem.writeInt(
u32,
dbg_info_buffer.items[off..][0..4],
atom.off + addend,
dbg_info_buffer.items[reloc.offset..][0..@sizeOf(u32)],
symbol.atom.off + symbol.offset + reloc.addend,
target_endian,
);
}
@ -513,7 +571,7 @@ pub fn commitDeclDebugInfo(
try self.writeDeclDebugInfo(file, atom, dbg_info_buffer.items);
}
fn updateDeclDebugInfoAllocation(self: *Dwarf, file: *File, atom: *DebugInfoAtom, len: u32) !void {
fn updateDeclDebugInfoAllocation(self: *Dwarf, file: *File, atom: *Atom, len: u32) !void {
const tracy = trace(@src());
defer tracy.end();
@ -523,14 +581,14 @@ fn updateDeclDebugInfoAllocation(self: *Dwarf, file: *File, atom: *DebugInfoAtom
const gpa = self.allocator;
atom.len = len;
if (self.dbg_info_decl_last) |last| blk: {
if (self.atom_last) |last| blk: {
if (atom == last) break :blk;
if (atom.next) |next| {
// Update existing Decl - non-last item.
if (atom.off + atom.len + min_nop_size > next.off) {
// It grew too big, so we move it to a new location.
if (atom.prev) |prev| {
self.dbg_info_decl_free_list.put(gpa, prev, {}) catch {};
self.atom_free_list.put(gpa, prev, {}) catch {};
prev.next = atom.next;
}
next.prev = atom.prev;
@ -556,7 +614,7 @@ fn updateDeclDebugInfoAllocation(self: *Dwarf, file: *File, atom: *DebugInfoAtom
// TODO Look at the free list before appending at the end.
atom.prev = last;
last.next = atom;
self.dbg_info_decl_last = atom;
self.atom_last = atom;
atom.off = last.off + padToIdeal(last.len);
}
@ -565,20 +623,20 @@ fn updateDeclDebugInfoAllocation(self: *Dwarf, file: *File, atom: *DebugInfoAtom
// TODO Look at the free list before appending at the end.
atom.prev = last;
last.next = atom;
self.dbg_info_decl_last = atom;
self.atom_last = atom;
atom.off = last.off + padToIdeal(last.len);
}
} else {
// This is the first Decl of the .debug_info
self.dbg_info_decl_first = atom;
self.dbg_info_decl_last = atom;
self.atom_first = atom;
self.atom_last = atom;
atom.off = @intCast(u32, padToIdeal(self.dbgInfoHeaderBytes()));
}
}
fn writeDeclDebugInfo(self: *Dwarf, file: *File, atom: *DebugInfoAtom, dbg_info_buf: []const u8) !void {
fn writeDeclDebugInfo(self: *Dwarf, file: *File, atom: *Atom, dbg_info_buf: []const u8) !void {
const tracy = trace(@src());
defer tracy.end();
@ -587,7 +645,7 @@ fn writeDeclDebugInfo(self: *Dwarf, file: *File, atom: *DebugInfoAtom, dbg_info_
// probably need to edit that logic too.
const gpa = self.allocator;
const last_decl = self.dbg_info_decl_last.?;
const last_decl = self.atom_last.?;
// +1 for a trailing zero to end the children of the decl tag.
const needed_size = last_decl.off + last_decl.len + 1;
const prev_padding_size: u32 = if (atom.prev) |prev| atom.off - (prev.off + prev.len) else 0;
@ -712,13 +770,13 @@ pub fn updateDeclLineNumber(self: *Dwarf, file: *File, decl: *const Module.Decl)
}
}
pub fn freeAtom(self: *Dwarf, atom: *DebugInfoAtom) void {
if (self.dbg_info_decl_first == atom) {
self.dbg_info_decl_first = atom.next;
pub fn freeAtom(self: *Dwarf, atom: *Atom) void {
if (self.atom_first == atom) {
self.atom_first = atom.next;
}
if (self.dbg_info_decl_last == atom) {
if (self.atom_last == atom) {
// TODO shrink the .debug_info section size here
self.dbg_info_decl_last = atom.prev;
self.atom_last = atom.prev;
}
if (atom.prev) |prev| {
@ -771,14 +829,13 @@ pub fn freeDecl(self: *Dwarf, decl: *Module.Decl) void {
fn addDbgInfoType(
self: *Dwarf,
arena: Allocator,
module: *Module,
atom: *Atom,
ty: Type,
dbg_info_buffer: *std.ArrayList(u8),
dbg_info_type_relocs: *File.DbgInfoTypeRelocsTable,
nested_ref4_relocs: *std.ArrayList(u32),
) error{OutOfMemory}!void {
const target = self.target;
const target_endian = self.target.cpu.arch.endian();
var relocs = std.ArrayList(struct { ty: Type, reloc: u32 }).init(arena);
switch (ty.zigTypeTag()) {
.NoReturn => unreachable,
@ -837,7 +894,7 @@ fn addDbgInfoType(
// DW.AT.type, DW.FORM.ref4
var index = dbg_info_buffer.items.len;
try dbg_info_buffer.resize(index + 4);
try relocs.append(.{ .ty = Type.bool, .reloc = @intCast(u32, index) });
try self.addTypeReloc(atom, Type.bool, @intCast(u32, index), null);
// DW.AT.data_member_location, DW.FORM.sdata
try dbg_info_buffer.ensureUnusedCapacity(6);
dbg_info_buffer.appendAssumeCapacity(0);
@ -849,7 +906,7 @@ fn addDbgInfoType(
// DW.AT.type, DW.FORM.ref4
index = dbg_info_buffer.items.len;
try dbg_info_buffer.resize(index + 4);
try relocs.append(.{ .ty = payload_ty, .reloc = @intCast(u32, index) });
try self.addTypeReloc(atom, payload_ty, @intCast(u32, index), null);
// DW.AT.data_member_location, DW.FORM.sdata
const offset = abi_size - payload_ty.abiSize(target);
try leb128.writeULEB128(dbg_info_buffer.writer(), offset);
@ -878,7 +935,7 @@ fn addDbgInfoType(
try dbg_info_buffer.resize(index + 4);
var buf = try arena.create(Type.SlicePtrFieldTypeBuffer);
const ptr_ty = ty.slicePtrFieldType(buf);
try relocs.append(.{ .ty = ptr_ty, .reloc = @intCast(u32, index) });
try self.addTypeReloc(atom, ptr_ty, @intCast(u32, index), null);
// DW.AT.data_member_location, DW.FORM.sdata
try dbg_info_buffer.ensureUnusedCapacity(6);
dbg_info_buffer.appendAssumeCapacity(0);
@ -890,7 +947,7 @@ fn addDbgInfoType(
// DW.AT.type, DW.FORM.ref4
index = dbg_info_buffer.items.len;
try dbg_info_buffer.resize(index + 4);
try relocs.append(.{ .ty = Type.initTag(.usize), .reloc = @intCast(u32, index) });
try self.addTypeReloc(atom, Type.usize, @intCast(u32, index), null);
// DW.AT.data_member_location, DW.FORM.sdata
try dbg_info_buffer.ensureUnusedCapacity(2);
dbg_info_buffer.appendAssumeCapacity(@sizeOf(usize));
@ -902,7 +959,7 @@ fn addDbgInfoType(
// DW.AT.type, DW.FORM.ref4
const index = dbg_info_buffer.items.len;
try dbg_info_buffer.resize(index + 4);
try relocs.append(.{ .ty = ty.childType(), .reloc = @intCast(u32, index) });
try self.addTypeReloc(atom, ty.childType(), @intCast(u32, index), null);
}
},
.Struct => blk: {
@ -926,7 +983,7 @@ fn addDbgInfoType(
// DW.AT.type, DW.FORM.ref4
var index = dbg_info_buffer.items.len;
try dbg_info_buffer.resize(index + 4);
try relocs.append(.{ .ty = field, .reloc = @intCast(u32, index) });
try self.addTypeReloc(atom, field, @intCast(u32, index), null);
// DW.AT.data_member_location, DW.FORM.sdata
const field_off = ty.structFieldOffset(field_index, target);
try leb128.writeULEB128(dbg_info_buffer.writer(), field_off);
@ -957,7 +1014,7 @@ fn addDbgInfoType(
// DW.AT.type, DW.FORM.ref4
var index = dbg_info_buffer.items.len;
try dbg_info_buffer.resize(index + 4);
try relocs.append(.{ .ty = field.ty, .reloc = @intCast(u32, index) });
try self.addTypeReloc(atom, field.ty, @intCast(u32, index), null);
// DW.AT.data_member_location, DW.FORM.sdata
const field_off = ty.structFieldOffset(field_index, target);
try leb128.writeULEB128(dbg_info_buffer.writer(), field_off);
@ -1036,14 +1093,8 @@ fn addDbgInfoType(
dbg_info_buffer.appendAssumeCapacity(0);
// DW.AT.type, DW.FORM.ref4
const inner_union_index = dbg_info_buffer.items.len;
try dbg_info_buffer.ensureUnusedCapacity(4);
mem.writeInt(
u32,
dbg_info_buffer.addManyAsArrayAssumeCapacity(4),
@intCast(u32, inner_union_index + 5),
target_endian,
);
try nested_ref4_relocs.append(@intCast(u32, inner_union_index));
try dbg_info_buffer.resize(inner_union_index + 4);
try self.addTypeReloc(atom, ty, @intCast(u32, inner_union_index), 5);
// DW.AT.data_member_location, DW.FORM.sdata
try leb128.writeULEB128(dbg_info_buffer.writer(), payload_offset);
}
@ -1070,7 +1121,7 @@ fn addDbgInfoType(
// DW.AT.type, DW.FORM.ref4
const index = dbg_info_buffer.items.len;
try dbg_info_buffer.resize(index + 4);
try relocs.append(.{ .ty = field.ty, .reloc = @intCast(u32, index) });
try self.addTypeReloc(atom, field.ty, @intCast(u32, index), null);
// DW.AT.data_member_location, DW.FORM.sdata
try dbg_info_buffer.append(0);
}
@ -1087,7 +1138,7 @@ fn addDbgInfoType(
// DW.AT.type, DW.FORM.ref4
const index = dbg_info_buffer.items.len;
try dbg_info_buffer.resize(index + 4);
try relocs.append(.{ .ty = union_obj.tag_ty, .reloc = @intCast(u32, index) });
try self.addTypeReloc(atom, union_obj.tag_ty, @intCast(u32, index), null);
// DW.AT.data_member_location, DW.FORM.sdata
try leb128.writeULEB128(dbg_info_buffer.writer(), tag_offset);
@ -1095,24 +1146,91 @@ fn addDbgInfoType(
try dbg_info_buffer.append(0);
}
},
.ErrorSet => {
// DW.AT.enumeration_type
try dbg_info_buffer.append(abbrev_enum_type);
// DW.AT.byte_size, DW.FORM.sdata
const abi_size = ty.abiSize(target);
try leb128.writeULEB128(dbg_info_buffer.writer(), abi_size);
// DW.AT.name, DW.FORM.string
const name = try ty.nameAllocArena(arena, target);
try dbg_info_buffer.writer().print("{s}\x00", .{name});
// DW.AT.enumerator
const no_error = "(no error)";
try dbg_info_buffer.ensureUnusedCapacity(no_error.len + 2 + @sizeOf(u64));
dbg_info_buffer.appendAssumeCapacity(abbrev_enum_variant);
// DW.AT.name, DW.FORM.string
dbg_info_buffer.appendSliceAssumeCapacity(no_error);
dbg_info_buffer.appendAssumeCapacity(0);
// DW.AT.const_value, DW.FORM.data8
mem.writeInt(u64, dbg_info_buffer.addManyAsArrayAssumeCapacity(8), 0, target_endian);
const error_names = ty.errorSetNames();
for (error_names) |error_name| {
const kv = module.getErrorValue(error_name) catch unreachable;
// DW.AT.enumerator
try dbg_info_buffer.ensureUnusedCapacity(error_name.len + 2 + @sizeOf(u64));
dbg_info_buffer.appendAssumeCapacity(abbrev_enum_variant);
// DW.AT.name, DW.FORM.string
dbg_info_buffer.appendSliceAssumeCapacity(error_name);
dbg_info_buffer.appendAssumeCapacity(0);
// DW.AT.const_value, DW.FORM.data8
mem.writeInt(u64, dbg_info_buffer.addManyAsArrayAssumeCapacity(8), kv.value, target_endian);
}
// DW.AT.enumeration_type delimit children
try dbg_info_buffer.append(0);
},
.ErrorUnion => {
const error_ty = ty.errorUnionSet();
const payload_ty = ty.errorUnionPayload();
const abi_size = ty.abiSize(target);
const abi_align = ty.abiAlignment(target);
const payload_off = mem.alignForwardGeneric(u64, error_ty.abiSize(target), abi_align);
// DW.AT.structure_type
try dbg_info_buffer.append(abbrev_struct_type);
// DW.AT.byte_size, DW.FORM.sdata
try leb128.writeULEB128(dbg_info_buffer.writer(), abi_size);
// DW.AT.name, DW.FORM.string
const name = try ty.nameAllocArena(arena, target);
try dbg_info_buffer.writer().print("{s}\x00", .{name});
// DW.AT.member
try dbg_info_buffer.ensureUnusedCapacity(7);
dbg_info_buffer.appendAssumeCapacity(abbrev_struct_member);
// DW.AT.name, DW.FORM.string
dbg_info_buffer.appendSliceAssumeCapacity("value");
dbg_info_buffer.appendAssumeCapacity(0);
// DW.AT.type, DW.FORM.ref4
var index = dbg_info_buffer.items.len;
try dbg_info_buffer.resize(index + 4);
try self.addTypeReloc(atom, payload_ty, @intCast(u32, index), null);
// DW.AT.data_member_location, DW.FORM.sdata
try leb128.writeULEB128(dbg_info_buffer.writer(), payload_off);
// DW.AT.member
try dbg_info_buffer.ensureUnusedCapacity(5);
dbg_info_buffer.appendAssumeCapacity(abbrev_struct_member);
// DW.AT.name, DW.FORM.string
dbg_info_buffer.appendSliceAssumeCapacity("err");
dbg_info_buffer.appendAssumeCapacity(0);
// DW.AT.type, DW.FORM.ref4
index = dbg_info_buffer.items.len;
try dbg_info_buffer.resize(index + 4);
try self.addTypeReloc(atom, error_ty, @intCast(u32, index), null);
// DW.AT.data_member_location, DW.FORM.sdata
try dbg_info_buffer.append(0);
// DW.AT.structure_type delimit children
try dbg_info_buffer.append(0);
},
else => {
log.debug("TODO implement .debug_info for type '{}'", .{ty.fmtDebug()});
try dbg_info_buffer.append(abbrev_pad1);
},
}
for (relocs.items) |rel| {
const gop = try dbg_info_type_relocs.getOrPutContext(self.allocator, rel.ty, .{
.target = self.target,
});
if (!gop.found_existing) {
gop.value_ptr.* = .{
.off = undefined,
.relocs = .{},
};
}
try gop.value_ptr.relocs.append(self.allocator, rel.reloc);
}
}
pub fn writeDbgAbbrev(self: *Dwarf, file: *File) !void {
@ -1763,12 +1881,12 @@ pub fn writeDbgLineHeader(self: *Dwarf, file: *File, module: *Module) !void {
}
fn getDebugInfoOff(self: Dwarf) ?u32 {
const first = self.dbg_info_decl_first orelse return null;
const first = self.atom_first orelse return null;
return first.off;
}
fn getDebugInfoEnd(self: Dwarf) ?u32 {
const last = self.dbg_info_decl_last orelse return null;
const last = self.atom_last orelse return null;
return last.off + last.len;
}
@ -1833,3 +1951,115 @@ fn padToIdeal(actual_size: anytype) @TypeOf(actual_size) {
return std.math.add(@TypeOf(actual_size), actual_size, actual_size / ideal_factor) catch
std.math.maxInt(@TypeOf(actual_size));
}
pub fn addTypeReloc(self: *Dwarf, atom: *const Atom, ty: Type, offset: u32, addend: ?u32) !void {
const decl_state = &self.decl_state.?;
const gpa = self.allocator;
const resolv = decl_state.abbrev_resolver.getContext(ty, .{
.target = self.target,
}) orelse blk: {
const sym_index = @intCast(u32, decl_state.abbrev_table.items.len);
try decl_state.abbrev_table.append(gpa, .{
.atom = atom,
.@"type" = ty,
.offset = undefined,
});
log.debug("@{d}: {}", .{ sym_index, ty.fmtDebug() });
try decl_state.abbrev_resolver.putNoClobberContext(gpa, ty, sym_index, .{
.target = self.target,
});
break :blk decl_state.abbrev_resolver.getContext(ty, .{
.target = self.target,
}).?;
};
const add: u32 = addend orelse 0;
log.debug("{x}: @{d} + {x}", .{ offset, resolv, add });
try decl_state.abbrev_relocs.append(gpa, .{
.target = resolv,
.atom = atom,
.offset = offset,
.addend = add,
});
}
pub fn getDeclDebugLineBuffer(self: *Dwarf) *std.ArrayList(u8) {
return &self.decl_state.?.dbg_line;
}
pub fn getDeclDebugInfoBuffer(self: *Dwarf) *std.ArrayList(u8) {
return &self.decl_state.?.dbg_info;
}
pub fn flushModule(self: *Dwarf, file: *File, module: *Module) !void {
if (self.global_abbrev_relocs.items.len > 0) {
const gpa = self.allocator;
var arena_alloc = std.heap.ArenaAllocator.init(gpa);
defer arena_alloc.deinit();
const arena = arena_alloc.allocator();
const error_set = try arena.create(Module.ErrorSet);
const error_ty = try Type.Tag.error_set.create(arena, error_set);
var names = Module.ErrorSet.NameMap{};
try names.ensureUnusedCapacity(arena, module.global_error_set.count());
var it = module.global_error_set.keyIterator();
while (it.next()) |key| {
names.putAssumeCapacityNoClobber(key.*, {});
}
error_set.names = names;
const atom = try gpa.create(Atom);
errdefer gpa.destroy(atom);
atom.* = .{
.prev = null,
.next = null,
.off = 0,
.len = 0,
};
var dbg_info_buffer = std.ArrayList(u8).init(arena);
try self.addDbgInfoType(arena, module, atom, error_ty, &dbg_info_buffer);
try self.managed_atoms.append(gpa, atom);
try self.updateDeclDebugInfoAllocation(file, atom, @intCast(u32, dbg_info_buffer.items.len));
try self.writeDeclDebugInfo(file, atom, dbg_info_buffer.items);
const file_pos = blk: {
switch (self.tag) {
.elf => {
const elf_file = file.cast(File.Elf).?;
const debug_info_sect = &elf_file.sections.items[elf_file.debug_info_section_index.?];
break :blk debug_info_sect.sh_offset;
},
.macho => {
const macho_file = file.cast(File.MachO).?;
const d_sym = &macho_file.d_sym.?;
const dwarf_segment = &d_sym.load_commands.items[d_sym.dwarf_segment_cmd_index.?].segment;
const debug_info_sect = &dwarf_segment.sections.items[d_sym.debug_info_section_index.?];
break :blk debug_info_sect.offset;
},
else => unreachable,
}
};
var buf: [@sizeOf(u32)]u8 = undefined;
mem.writeInt(u32, &buf, atom.off, self.target.cpu.arch.endian());
while (self.global_abbrev_relocs.popOrNull()) |reloc| {
switch (self.tag) {
.elf => {
const elf_file = file.cast(File.Elf).?;
try elf_file.base.file.?.pwriteAll(&buf, file_pos + reloc.atom.off + reloc.offset);
},
.macho => {
const macho_file = file.cast(File.MachO).?;
const d_sym = &macho_file.d_sym.?;
try d_sym.file.pwriteAll(&buf, file_pos + reloc.atom.off + reloc.offset);
},
else => unreachable,
}
}
}
assert(self.decl_state == null);
}
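
The new .ErrorSet lowering above encodes an error set as a DWARF enumeration whose first enumerator, "(no error)", carries value 0, followed by one enumerator per error name. A standalone sketch of that byte layout, with hypothetical abbreviation codes and error values (the real ones come from the decl's abbrev table and module.getErrorValue):

// Standalone sketch of the .debug_info bytes the new .ErrorSet branch produces.
// Abbreviation codes and error values below are hypothetical.
const std = @import("std");

const abbrev_enum_type: u8 = 0x10; // hypothetical abbreviation code
const abbrev_enum_variant: u8 = 0x11; // hypothetical abbreviation code

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    var dbg_info = std.ArrayList(u8).init(gpa.allocator());
    defer dbg_info.deinit();
    const writer = dbg_info.writer();

    // DW.TAG.enumeration_type: byte size, then the type name.
    try dbg_info.append(abbrev_enum_type);
    try std.leb.writeULEB128(writer, @as(u64, 2)); // DW.AT.byte_size, DW.FORM.sdata
    try writer.print("{s}\x00", .{"error{OutOfMemory,FileNotFound}"}); // DW.AT.name, DW.FORM.string

    // One enumerator per value; 0 is reserved for "(no error)".
    const names = [_][]const u8{ "(no error)", "OutOfMemory", "FileNotFound" };
    const values = [_]u64{ 0, 1, 2 }; // hypothetical global error values
    for (names) |name, i| {
        try dbg_info.append(abbrev_enum_variant);
        try writer.print("{s}\x00", .{name}); // DW.AT.name, DW.FORM.string
        try writer.writeIntLittle(u64, values[i]); // DW.AT.const_value, DW.FORM.data8
    }
    try dbg_info.append(0); // delimit children of the enumeration_type

    std.debug.print("{d} bytes\n", .{dbg_info.items.len});
}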

View File

@ -207,7 +207,7 @@ pub const TextBlock = struct {
prev: ?*TextBlock,
next: ?*TextBlock,
dbg_info_atom: Dwarf.DebugInfoAtom,
dbg_info_atom: Dwarf.Atom,
pub const empty = TextBlock{
.local_sym_index = 0,
@ -958,6 +958,10 @@ pub fn flushModule(self: *Elf, comp: *Compilation) !void {
const target_endian = self.base.options.target.cpu.arch.endian();
const foreign_endian = target_endian != builtin.cpu.arch.endian();
if (self.dwarf) |*dw| {
try dw.flushModule(&self.base, module);
}
{
var it = self.relocs.iterator();
while (it.next()) |entry| {
@ -2228,13 +2232,6 @@ pub fn freeDecl(self: *Elf, decl: *Module.Decl) void {
}
}
fn deinitRelocs(gpa: Allocator, table: *File.DbgInfoTypeRelocsTable) void {
for (table.values()) |*value| {
value.relocs.deinit(gpa);
}
table.deinit(gpa);
}
fn getDeclPhdrIndex(self: *Elf, decl: *Module.Decl) !u16 {
const ty = decl.ty;
const zig_ty = ty.zigTypeTag();
@ -2342,26 +2339,13 @@ pub fn updateFunc(self: *Elf, module: *Module, func: *Module.Fn, air: Air, liven
const decl = func.owner_decl;
self.freeUnnamedConsts(decl);
var debug_buffers_buf: Dwarf.DeclDebugBuffers = undefined;
const debug_buffers = if (self.dwarf) |*dw| blk: {
debug_buffers_buf = try dw.initDeclDebugInfo(decl);
break :blk &debug_buffers_buf;
} else null;
defer {
if (debug_buffers) |dbg| {
dbg.dbg_line_buffer.deinit();
dbg.dbg_info_buffer.deinit();
deinitRelocs(self.base.allocator, &dbg.dbg_info_type_relocs);
}
if (self.dwarf) |*dw| {
try dw.initDeclState(decl);
}
const res = if (debug_buffers) |dbg|
const res = if (self.dwarf) |*dw|
try codegen.generateFunction(&self.base, decl.srcLoc(), func, air, liveness, &code_buffer, .{
.dwarf = .{
.dbg_line = &dbg.dbg_line_buffer,
.dbg_info = &dbg.dbg_info_buffer,
.dbg_info_type_relocs = &dbg.dbg_info_type_relocs,
},
.dwarf = dw,
})
else
try codegen.generateFunction(&self.base, decl.srcLoc(), func, air, liveness, &code_buffer, .none);
@ -2375,8 +2359,8 @@ pub fn updateFunc(self: *Elf, module: *Module, func: *Module.Fn, air: Air, liven
},
};
const local_sym = try self.updateDeclCode(decl, code, elf.STT_FUNC);
if (debug_buffers) |dbg| {
try self.dwarf.?.commitDeclDebugInfo(&self.base, module, decl, local_sym.st_value, local_sym.st_size, dbg);
if (self.dwarf) |*dw| {
try dw.commitDeclState(&self.base, module, decl, local_sym.st_value, local_sym.st_size);
}
// Since we updated the vaddr and the size, each corresponding export symbol also needs to be updated.
@ -2410,31 +2394,18 @@ pub fn updateDecl(self: *Elf, module: *Module, decl: *Module.Decl) !void {
var code_buffer = std.ArrayList(u8).init(self.base.allocator);
defer code_buffer.deinit();
var debug_buffers_buf: Dwarf.DeclDebugBuffers = undefined;
const debug_buffers = if (self.dwarf) |*dw| blk: {
debug_buffers_buf = try dw.initDeclDebugInfo(decl);
break :blk &debug_buffers_buf;
} else null;
defer {
if (debug_buffers) |dbg| {
dbg.dbg_line_buffer.deinit();
dbg.dbg_info_buffer.deinit();
deinitRelocs(self.base.allocator, &dbg.dbg_info_type_relocs);
}
if (self.dwarf) |*dw| {
try dw.initDeclState(decl);
}
// TODO implement .debug_info for global variables
const decl_val = if (decl.val.castTag(.variable)) |payload| payload.data.init else decl.val;
const res = if (debug_buffers) |dbg|
const res = if (self.dwarf) |*dw|
try codegen.generateSymbol(&self.base, decl.srcLoc(), .{
.ty = decl.ty,
.val = decl_val,
}, &code_buffer, .{
.dwarf = .{
.dbg_line = &dbg.dbg_line_buffer,
.dbg_info = &dbg.dbg_info_buffer,
.dbg_info_type_relocs = &dbg.dbg_info_type_relocs,
},
.dwarf = dw,
}, .{
.parent_atom_index = decl.link.elf.local_sym_index,
})
@ -2457,8 +2428,8 @@ pub fn updateDecl(self: *Elf, module: *Module, decl: *Module.Decl) !void {
};
const local_sym = try self.updateDeclCode(decl, code, elf.STT_OBJECT);
if (debug_buffers) |dbg| {
try self.dwarf.?.commitDeclDebugInfo(&self.base, module, decl, local_sym.st_value, local_sym.st_size, dbg);
if (self.dwarf) |*dw| {
try dw.commitDeclState(&self.base, module, decl, local_sym.st_value, local_sym.st_size);
}
// Since we updated the vaddr and the size, each corresponding export symbol also needs to be updated.

View File

@ -453,6 +453,12 @@ pub fn flushModule(self: *MachO, comp: *Compilation) !void {
const directory = self.base.options.emit.?.directory; // Just an alias to make it shorter to type.
const full_out_path = try directory.join(arena, &[_][]const u8{self.base.options.emit.?.sub_path});
if (self.d_sym) |*d_sym| {
if (self.base.options.module) |module| {
try d_sym.dwarf.flushModule(&self.base, module);
}
}
// If there is no Zig code to compile, then we should skip flushing the output file because it
// will not be part of the linker line anyway.
const module_obj_path: ?[]const u8 = if (self.base.options.module) |module| blk: {
@ -3670,32 +3676,17 @@ pub fn updateFunc(self: *MachO, module: *Module, func: *Module.Fn, air: Air, liv
var code_buffer = std.ArrayList(u8).init(self.base.allocator);
defer code_buffer.deinit();
var debug_buffers_buf: link.File.Dwarf.DeclDebugBuffers = undefined;
const debug_buffers = if (self.d_sym) |*d_sym| blk: {
debug_buffers_buf = try d_sym.initDeclDebugInfo(module, decl);
break :blk &debug_buffers_buf;
} else null;
defer {
if (debug_buffers) |dbg| {
dbg.dbg_line_buffer.deinit();
dbg.dbg_info_buffer.deinit();
for (dbg.dbg_info_type_relocs.values()) |*value| {
value.relocs.deinit(self.base.allocator);
}
dbg.dbg_info_type_relocs.deinit(self.base.allocator);
}
if (self.d_sym) |*d_sym| {
try d_sym.dwarf.initDeclState(decl);
}
const res = if (debug_buffers) |dbg|
const res = if (self.d_sym) |*d_sym|
try codegen.generateFunction(&self.base, decl.srcLoc(), func, air, liveness, &code_buffer, .{
.dwarf = .{
.dbg_line = &dbg.dbg_line_buffer,
.dbg_info = &dbg.dbg_info_buffer,
.dbg_info_type_relocs = &dbg.dbg_info_type_relocs,
},
.dwarf = &d_sym.dwarf,
})
else
try codegen.generateFunction(&self.base, decl.srcLoc(), func, air, liveness, &code_buffer, .none);
switch (res) {
.appended => {
try decl.link.macho.code.appendSlice(self.base.allocator, code_buffer.items);
@ -3707,12 +3698,10 @@ pub fn updateFunc(self: *MachO, module: *Module, func: *Module.Fn, air: Air, liv
},
}
_ = try self.placeDecl(decl, decl.link.macho.code.items.len);
const symbol = try self.placeDecl(decl, decl.link.macho.code.items.len);
if (debug_buffers) |db| {
if (self.d_sym) |*d_sym| {
try d_sym.commitDeclDebugInfo(module, decl, db);
}
if (self.d_sym) |*d_sym| {
try d_sym.dwarf.commitDeclState(&self.base, module, decl, symbol.n_value, decl.link.macho.size);
}
// Since we updated the vaddr and the size, each corresponding export symbol also
@ -3812,33 +3801,17 @@ pub fn updateDecl(self: *MachO, module: *Module, decl: *Module.Decl) !void {
var code_buffer = std.ArrayList(u8).init(self.base.allocator);
defer code_buffer.deinit();
var debug_buffers_buf: link.File.Dwarf.DeclDebugBuffers = undefined;
const debug_buffers = if (self.d_sym) |*d_sym| blk: {
debug_buffers_buf = try d_sym.initDeclDebugInfo(module, decl);
break :blk &debug_buffers_buf;
} else null;
defer {
if (debug_buffers) |dbg| {
dbg.dbg_line_buffer.deinit();
dbg.dbg_info_buffer.deinit();
for (dbg.dbg_info_type_relocs.values()) |*value| {
value.relocs.deinit(self.base.allocator);
}
dbg.dbg_info_type_relocs.deinit(self.base.allocator);
}
if (self.d_sym) |*d_sym| {
try d_sym.dwarf.initDeclState(decl);
}
const decl_val = if (decl.val.castTag(.variable)) |payload| payload.data.init else decl.val;
const res = if (debug_buffers) |dbg|
const res = if (self.d_sym) |*d_sym|
try codegen.generateSymbol(&self.base, decl.srcLoc(), .{
.ty = decl.ty,
.val = decl_val,
}, &code_buffer, .{
.dwarf = .{
.dbg_line = &dbg.dbg_line_buffer,
.dbg_info = &dbg.dbg_info_buffer,
.dbg_info_type_relocs = &dbg.dbg_info_type_relocs,
},
.dwarf = &d_sym.dwarf,
}, .{
.parent_atom_index = decl.link.macho.local_sym_index,
})
@ -3870,7 +3843,11 @@ pub fn updateDecl(self: *MachO, module: *Module, decl: *Module.Decl) !void {
},
}
};
_ = try self.placeDecl(decl, code.len);
const symbol = try self.placeDecl(decl, code.len);
if (self.d_sym) |*d_sym| {
try d_sym.dwarf.commitDeclState(&self.base, module, decl, symbol.n_value, decl.link.macho.size);
}
// Since we updated the vaddr and the size, each corresponding export symbol also
// needs to be updated.
@ -4084,8 +4061,9 @@ fn placeDecl(self: *MachO, decl: *Module.Decl, code_len: usize) !*macho.nlist_64
}
pub fn updateDeclLineNumber(self: *MachO, module: *Module, decl: *const Module.Decl) !void {
_ = module;
if (self.d_sym) |*d_sym| {
try d_sym.updateDeclLineNumber(module, decl);
try d_sym.dwarf.updateDeclLineNumber(&self.base, decl);
}
}

View File

@ -72,7 +72,7 @@ stab: ?Stab = null,
next: ?*Atom,
prev: ?*Atom,
dbg_info_atom: Dwarf.DebugInfoAtom,
dbg_info_atom: Dwarf.Atom,
dirty: bool = true,

View File

@ -645,25 +645,3 @@ fn writeStringTable(self: *DebugSymbols) !void {
self.load_commands_dirty = true;
}
pub fn updateDeclLineNumber(self: *DebugSymbols, module: *Module, decl: *const Module.Decl) !void {
_ = module;
return self.dwarf.updateDeclLineNumber(&self.base.base, decl);
}
/// Caller owns the returned memory.
pub fn initDeclDebugInfo(self: *DebugSymbols, module: *Module, decl: *Module.Decl) !Dwarf.DeclDebugBuffers {
_ = module;
return self.dwarf.initDeclDebugInfo(decl);
}
pub fn commitDeclDebugInfo(
self: *DebugSymbols,
module: *Module,
decl: *Module.Decl,
debug_buffers: *Dwarf.DeclDebugBuffers,
) !void {
const symbol = self.base.locals.items[decl.link.macho.local_sym_index];
const atom = &decl.link.macho;
return self.dwarf.commitDeclDebugInfo(&self.base.base, module, decl, symbol.n_value, atom.size, debug_buffers);
}