Mirror of https://github.com/ziglang/zig.git (synced 2024-11-15 00:26:57 +00:00)

commit 83d89a05b7 (parent 205421c311)

coff: compile and link simple exit program on arm64

* make image base target dependent
* fix relocs to imports
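For context: the "simple exit program" named in the title is not part of this diff. A minimal, assumed stand-in would look like this when cross-compiled for aarch64-windows with the self-hosted backend:

    // exit.zig -- hypothetical test program. It does no work of its own, so the
    // only external call left is the process-exit import made by the startup
    // code (e.g. ExitProcess on Windows), which is exactly the import-relocation
    // path this commit fixes for arm64.
    pub fn main() void {}

The changes below fall into two groups: making the default image base depend on the target (e.g. 0x140000000 for aarch64 executables, see getImageBase near the end), and teaching the aarch64 codegen, MIR emitter, and COFF linker about import-type relocations.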
@@ -36,7 +36,7 @@ pub const default_mode: ModeOverride = if (is_async) Mode.evented else .blocking

 fn getStdOutHandle() os.fd_t {
     if (builtin.os.tag == .windows) {
-        if (builtin.zig_backend == .stage2_x86_64) {
+        if (builtin.zig_backend == .stage2_x86_64 or builtin.zig_backend == .stage2_aarch64) {
             // TODO: this is just a temporary workaround until we advance x86 backend further along.
             return os.windows.GetStdHandle(os.windows.STD_OUTPUT_HANDLE) catch os.windows.INVALID_HANDLE_VALUE;
         }
@@ -62,7 +62,7 @@ pub fn getStdOut() File {

 fn getStdErrHandle() os.fd_t {
     if (builtin.os.tag == .windows) {
-        if (builtin.zig_backend == .stage2_x86_64) {
+        if (builtin.zig_backend == .stage2_x86_64 or builtin.zig_backend == .stage2_aarch64) {
             // TODO: this is just a temporary workaround until we advance x86 backend further along.
             return os.windows.GetStdHandle(os.windows.STD_ERROR_HANDLE) catch os.windows.INVALID_HANDLE_VALUE;
         }
@@ -88,7 +88,7 @@ pub fn getStdErr() File {

 fn getStdInHandle() os.fd_t {
     if (builtin.os.tag == .windows) {
-        if (builtin.zig_backend == .stage2_x86_64) {
+        if (builtin.zig_backend == .stage2_x86_64 or builtin.zig_backend == .stage2_aarch64) {
             // TODO: this is just a temporary workaround until we advance x86 backend further along.
             return os.windows.GetStdHandle(os.windows.STD_INPUT_HANDLE) catch os.windows.INVALID_HANDLE_VALUE;
         }
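The widened workaround above matters because the usual Windows path reads the standard handles out of the process parameters in the PEB; roughly (paraphrased from the surrounding function bodies, which are not part of these hunks):

    // Assumed context -- the non-workaround branch of getStdOutHandle():
    return os.windows.peb().ProcessParameters.hStdOutput;

Routing the young stage2_aarch64 backend (like stage2_x86_64 before it) through GetStdHandle avoids relying on that PEB field access for now.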
@@ -142,7 +142,8 @@ const MCValue = union(enum) {
     /// The value is in memory but requires a linker relocation fixup:
     /// * got - the value is referenced indirectly via GOT entry index (the linker emits a got-type reloc)
     /// * direct - the value is referenced directly via symbol index index (the linker emits a displacement reloc)
-    linker_load: struct { @"type": enum { got, direct }, sym_index: u32 },
+    /// * import - the value is referenced indirectly via import entry index (the linker emits an import-type reloc)
+    linker_load: struct { @"type": enum { got, direct, import }, sym_index: u32 },
     /// The value is one of the stack variables.
     ///
     /// If the type is a pointer, it means the pointer address is in
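A hedged sketch of how the new `import` variant is produced (it mirrors the airCall change further down; `sym_index` is assumed to come from `coff_file.getGlobalSymbol()`):

    // Illustration only: an extern callee tracked as an import. The linker
    // resolves it through an import-table entry rather than a GOT slot.
    const callee = MCValue{ .linker_load = .{
        .@"type" = .import,
        .sym_index = sym_index,
    } };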
@@ -3717,6 +3718,7 @@ fn store(self: *Self, ptr: MCValue, value: MCValue, ptr_ty: Type, value_ty: Type
             const tag: Mir.Inst.Tag = switch (load_struct.@"type") {
                 .got => .load_memory_ptr_got,
                 .direct => .load_memory_ptr_direct,
+                .import => unreachable,
             };
             const mod = self.bin_file.options.module.?;
             const owner_decl = mod.declPtr(self.mod_fn.owner_decl);
@@ -4086,16 +4088,17 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallOptions.
                         });
                     }
                     const sym_index = try coff_file.getGlobalSymbol(mem.sliceTo(decl_name, 0));
-                    _ = try self.addInst(.{
-                        .tag = .call_extern,
-                        .data = .{
-                            .relocation = .{
-                                .atom_index = mod.declPtr(self.mod_fn.owner_decl).link.coff.sym_index,
-                                .sym_index = sym_index,
-                            },
-                        },
-                    });
+                    try self.genSetReg(Type.initTag(.u64), .x30, .{
+                        .linker_load = .{
+                            .@"type" = .import,
+                            .sym_index = sym_index,
+                        },
+                    });
+                    // blr x30
+                    _ = try self.addInst(.{
+                        .tag = .blr,
+                        .data = .{ .reg = .x30 },
+                    });
                 } else {
                     return self.fail("TODO implement calling bitcasted functions", .{});
                 }
@@ -4119,8 +4122,6 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallOptions.
             } else {
                 return self.fail("TODO implement calling bitcasted functions", .{});
             }
-        } else if (self.bin_file.cast(link.File.Coff)) |_| {
-            return self.fail("TODO implement calling in COFF for {}", .{self.target.cpu.arch});
         } else unreachable;
     } else {
         assert(ty.zigTypeTag() == .Pointer);
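The net effect of the rewritten extern-call path: instead of a `call_extern` pseudo-instruction with a branch relocation, the import pointer is loaded into x30 (via the new `load_memory_import` MIR instruction, lowered to adrp + ldr in Emit.zig below) and the call becomes a plain register branch. Roughly, for an assumed import such as ExitProcess:

    // Sketch of the emitted aarch64 sequence; the page/pageoff displacements are
    // zero here and are fixed up later by the linker's import relocations:
    //
    //   adrp x30, <page of the import entry>        // import_page reloc
    //   ldr  x30, [x30, #<pageoff of import entry>] // import_pageoff reloc
    //   blr  x30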
@@ -5203,6 +5204,7 @@ fn genSetStack(self: *Self, ty: Type, stack_offset: u32, mcv: MCValue) InnerErro
             const tag: Mir.Inst.Tag = switch (load_struct.@"type") {
                 .got => .load_memory_ptr_got,
                 .direct => .load_memory_ptr_direct,
+                .import => unreachable,
             };
             const mod = self.bin_file.options.module.?;
             const owner_decl = mod.declPtr(self.mod_fn.owner_decl);
@@ -5316,6 +5318,7 @@ fn genSetReg(self: *Self, ty: Type, reg: Register, mcv: MCValue) InnerError!void
             const tag: Mir.Inst.Tag = switch (load_struct.@"type") {
                 .got => .load_memory_got,
                 .direct => .load_memory_direct,
+                .import => .load_memory_import,
             };
             const mod = self.bin_file.options.module.?;
             const owner_decl = mod.declPtr(self.mod_fn.owner_decl);
@@ -5509,6 +5512,7 @@ fn genSetStackArgument(self: *Self, ty: Type, stack_offset: u32, mcv: MCValue) I
             const tag: Mir.Inst.Tag = switch (load_struct.@"type") {
                 .got => .load_memory_ptr_got,
                 .direct => .load_memory_ptr_direct,
+                .import => unreachable,
             };
             const mod = self.bin_file.options.module.?;
             const owner_decl = mod.declPtr(self.mod_fn.owner_decl);
@@ -5835,7 +5839,13 @@ fn lowerDeclRef(self: *Self, tv: TypedValue, decl_index: Module.Decl.Index) Inne
             .sym_index = decl.link.macho.sym_index,
         } };
     } else if (self.bin_file.cast(link.File.Coff)) |_| {
-        return self.fail("TODO codegen COFF const Decl pointer", .{});
+        // Because COFF is PIE-always-on, we defer memory address resolution until
+        // the linker has enough info to perform relocations.
+        assert(decl.link.coff.sym_index != 0);
+        return MCValue{ .linker_load = .{
+            .@"type" = .got,
+            .sym_index = decl.link.coff.sym_index,
+        } };
     } else if (self.bin_file.cast(link.File.Plan9)) |p9| {
         try p9.seeDecl(decl_index);
         const got_addr = p9.bases.data + decl.link.plan9.got_index.? * ptr_bytes;
@@ -5859,7 +5869,10 @@ fn lowerUnnamedConst(self: *Self, tv: TypedValue) InnerError!MCValue {
             .sym_index = local_sym_index,
         } };
     } else if (self.bin_file.cast(link.File.Coff)) |_| {
-        return self.fail("TODO lower unnamed const in COFF", .{});
+        return MCValue{ .linker_load = .{
+            .@"type" = .direct,
+            .sym_index = local_sym_index,
+        } };
     } else if (self.bin_file.cast(link.File.Plan9)) |_| {
         return self.fail("TODO lower unnamed const in Plan9", .{});
     } else {
@@ -145,6 +145,7 @@ pub fn emitMir(

             .load_memory_got => try emit.mirLoadMemoryPie(inst),
             .load_memory_direct => try emit.mirLoadMemoryPie(inst),
+            .load_memory_import => try emit.mirLoadMemoryPie(inst),
             .load_memory_ptr_got => try emit.mirLoadMemoryPie(inst),
             .load_memory_ptr_direct => try emit.mirLoadMemoryPie(inst),

@@ -693,18 +694,8 @@ fn mirCallExtern(emit: *Emit, inst: Mir.Inst.Index) !void {
             .pcrel = true,
             .length = 2,
         });
-    } else if (emit.bin_file.cast(link.File.Coff)) |coff_file| {
-        // Add relocation to the decl.
-        const atom = coff_file.getAtomForSymbol(.{ .sym_index = relocation.atom_index, .file = null }).?;
-        const target = coff_file.getGlobalByIndex(relocation.sym_index);
-        try atom.addRelocation(coff_file, .{
-            .@"type" = .branch_26,
-            .target = target,
-            .offset = offset,
-            .addend = 0,
-            .pcrel = true,
-            .length = 2,
-        });
+    } else if (emit.bin_file.cast(link.File.Coff)) |_| {
+        unreachable; // Calling imports is handled via `.load_memory_import`
     } else {
         return emit.fail("Implement call_extern for linking backends != {{ COFF, MachO }}", .{});
     }
@@ -868,7 +859,9 @@ fn mirLoadMemoryPie(emit: *Emit, inst: Mir.Inst.Index) !void {
     try emit.writeInstruction(Instruction.adrp(reg.toX(), 0));

     switch (tag) {
-        .load_memory_got => {
+        .load_memory_got,
+        .load_memory_import,
+        => {
             // ldr reg, reg, offset
             try emit.writeInstruction(Instruction.ldr(
                 reg,
@@ -941,8 +934,17 @@ fn mirLoadMemoryPie(emit: *Emit, inst: Mir.Inst.Index) !void {
         });
     } else if (emit.bin_file.cast(link.File.Coff)) |coff_file| {
         const atom = coff_file.getAtomForSymbol(.{ .sym_index = data.atom_index, .file = null }).?;
+        const target = switch (tag) {
+            .load_memory_got,
+            .load_memory_ptr_got,
+            .load_memory_direct,
+            .load_memory_ptr_direct,
+            => link.File.Coff.SymbolWithLoc{ .sym_index = data.sym_index, .file = null },
+            .load_memory_import => coff_file.getGlobalByIndex(data.sym_index),
+            else => unreachable,
+        };
         try atom.addRelocation(coff_file, .{
-            .target = .{ .sym_index = data.sym_index, .file = null },
+            .target = target,
             .offset = offset,
             .addend = 0,
             .pcrel = true,
@@ -954,11 +956,12 @@ fn mirLoadMemoryPie(emit: *Emit, inst: Mir.Inst.Index) !void {
                 .load_memory_direct,
                 .load_memory_ptr_direct,
                 => .page,
+                .load_memory_import => .import_page,
                 else => unreachable,
             },
         });
         try atom.addRelocation(coff_file, .{
-            .target = .{ .sym_index = data.sym_index, .file = null },
+            .target = target,
             .offset = offset + 4,
             .addend = 0,
             .pcrel = false,
@@ -970,6 +973,7 @@ fn mirLoadMemoryPie(emit: *Emit, inst: Mir.Inst.Index) !void {
                 .load_memory_direct,
                 .load_memory_ptr_direct,
                 => .pageoff,
+                .load_memory_import => .import_pageoff,
                 else => unreachable,
             },
         });
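So one `load_memory_import` expands into the adrp/ldr pair and records two relocations against the imported global: `import_page` at the adrp (`offset`) and `import_pageoff` at the ldr (`offset + 4`). Flattened out, the two records look roughly like this (illustration only; `target` is the result of `coff_file.getGlobalByIndex(data.sym_index)` as in the hunk above):

    try atom.addRelocation(coff_file, .{
        .@"type" = .import_page, // patches the adrp page displacement
        .target = target,
        .offset = offset,
        .addend = 0,
        .pcrel = true,
        .length = 2,
    });
    try atom.addRelocation(coff_file, .{
        .@"type" = .import_pageoff, // patches the ldr's 12-bit offset
        .target = target,
        .offset = offset + 4,
        .addend = 0,
        .pcrel = false,
        .length = 2,
    });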
@@ -84,6 +84,10 @@ pub const Inst = struct {
         ///
         /// Payload is `LoadMemoryPie`
         load_memory_direct,
+        /// Loads the contents into a register
+        ///
+        /// Payload is `LoadMemoryPie`
+        load_memory_import,
         /// Loads the address into a register
         ///
         /// Payload is `LoadMemoryPie`
@@ -128,18 +128,28 @@ const Entry = struct {
 pub const Reloc = struct {
     @"type": enum {
         // x86, x86_64
+        /// RIP-relative displacement to a GOT pointer
         got,
+        /// RIP-relative displacement to an import pointer
+        import,

         // aarch64
         branch_26,
+        /// PC-relative distance to target page in GOT section
         got_page,
+        /// Offset to a GOT pointer relative to the start of a page in GOT section
         got_pageoff,
+        /// PC-relative distance to target page in a section (e.g., .rdata)
         page,
+        /// Offset to a pointer relative to the start of a page in a section (e.g., .rdata)
         pageoff,
+        /// PC-relative distance to target page in a import section
+        import_page,
+        /// Offset to a pointer relative to the start of a page in an import section (e.g., .rdata)
+        import_pageoff,

         // common
-        import,
-        direct, // as unsigned, TODO split into signed for x86
+        /// Absolute pointer value
+        direct,
     },
     target: SymbolWithLoc,
     offset: u32,
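Summarizing how these types pair up on aarch64 (derived from the Emit.zig hunks above; each PIE load is an adrp followed by an ldr/add, so each produces one page-type and one pageoff-type relocation):

    // got-indirect loads:    got_page    + got_pageoff
    // direct loads:          page        + pageoff
    // import-indirect loads: import_page + import_pageoff
    // branch_26 is still resolved by the COFF linker, but the extern-call path
    // no longer emits it -- calls go through load_memory_import instead.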
@@ -157,12 +167,14 @@ pub const Reloc = struct {
             => return coff_file.getGotAtomForSymbol(self.target),

             .direct,
             .branch_26,
             .page,
             .pageoff,
             => return coff_file.getAtomForSymbol(self.target),

-            .import => return coff_file.getImportAtomForSymbol(self.target),
+            .import,
+            .import_page,
+            .import_pageoff,
+            => return coff_file.getImportAtomForSymbol(self.target),
         }
     }
 };
@@ -172,8 +184,6 @@ const BaseRelocationTable = std.AutoHashMapUnmanaged(*Atom, std.ArrayListUnmanag
 const UnnamedConstTable = std.AutoHashMapUnmanaged(Module.Decl.Index, std.ArrayListUnmanaged(*Atom));

 const default_file_alignment: u16 = 0x200;
-const default_image_base_dll: u64 = 0x10000000;
-const default_image_base_exe: u64 = 0x400000;
 const default_size_of_stack_reserve: u32 = 0x1000000;
 const default_size_of_stack_commit: u32 = 0x1000;
 const default_size_of_heap_reserve: u32 = 0x100000;
@@ -891,6 +901,7 @@ fn resolveRelocs(self: *Coff, atom: *Atom) !void {
         const target_atom = reloc.getTargetAtom(self) orelse continue;
         const target_vaddr = target_atom.getSymbol(self).value;
         const target_vaddr_with_addend = target_vaddr + reloc.addend;
+        const image_base = self.getImageBase();

         log.debug(" ({x}: [() => 0x{x} ({s})) ({s}) (in file at 0x{x})", .{
             source_vaddr,
@@ -902,31 +913,23 @@ fn resolveRelocs(self: *Coff, atom: *Atom) !void {

         reloc.dirty = false;

-        var buffer: [@sizeOf(u32)]u8 = undefined;
-        switch (reloc.@"type") {
-            .branch_26,
-            .got_page,
-            .got_pageoff,
-            .page,
-            .pageoff,
-            => {
-                const amt = try self.base.file.?.preadAll(&buffer, file_offset + reloc.offset);
-                if (amt != buffer.len) return error.InputOutput;
+        switch (self.base.options.target.cpu.arch) {
+            .aarch64 => {
+                var buffer: [@sizeOf(u64)]u8 = undefined;
+                switch (reloc.length) {
+                    2 => {
+                        const amt = try self.base.file.?.preadAll(buffer[0..4], file_offset + reloc.offset);
+                        if (amt != 4) return error.InputOutput;
+                    },
+                    3 => {
+                        const amt = try self.base.file.?.preadAll(&buffer, file_offset + reloc.offset);
+                        if (amt != 8) return error.InputOutput;
+                    },
+                    else => unreachable,
+                }

                 switch (reloc.@"type") {
                     .branch_26 => {
                         const displacement = math.cast(i28, @intCast(i64, target_vaddr_with_addend) - @intCast(i64, source_vaddr)) orelse
                             unreachable; // TODO generate thunks
                         var inst = aarch64.Instruction{
                             .unconditional_branch_immediate = mem.bytesToValue(meta.TagPayload(
                                 aarch64.Instruction,
                                 aarch64.Instruction.unconditional_branch_immediate,
                             ), &buffer),
                         };
                         inst.unconditional_branch_immediate.imm26 = @truncate(u26, @bitCast(u28, displacement >> 2));
                         mem.writeIntLittle(u32, &buffer, inst.toU32());
                     },
-                    .got_page, .page => {
+                    .got_page, .import_page, .page => {
                         const source_page = @intCast(i32, source_vaddr >> 12);
                         const target_page = @intCast(i32, target_vaddr_with_addend >> 12);
                         const pages = @bitCast(u21, @intCast(i21, target_page - source_page));
@@ -934,31 +937,31 @@ fn resolveRelocs(self: *Coff, atom: *Atom) !void {
                             .pc_relative_address = mem.bytesToValue(meta.TagPayload(
                                 aarch64.Instruction,
                                 aarch64.Instruction.pc_relative_address,
-                            ), &buffer),
+                            ), buffer[0..4]),
                         };
                         inst.pc_relative_address.immhi = @truncate(u19, pages >> 2);
                         inst.pc_relative_address.immlo = @truncate(u2, pages);
-                        mem.writeIntLittle(u32, &buffer, inst.toU32());
+                        mem.writeIntLittle(u32, buffer[0..4], inst.toU32());
                     },
-                    .got_pageoff, .pageoff => {
+                    .got_pageoff, .import_pageoff, .pageoff => {
                         assert(!reloc.pcrel);

                         const narrowed = @truncate(u12, @intCast(u64, target_vaddr_with_addend));
-                        if (isArithmeticOp(&buffer)) {
+                        if (isArithmeticOp(buffer[0..4])) {
                             var inst = aarch64.Instruction{
                                 .add_subtract_immediate = mem.bytesToValue(meta.TagPayload(
                                     aarch64.Instruction,
                                     aarch64.Instruction.add_subtract_immediate,
-                                ), &buffer),
+                                ), buffer[0..4]),
                             };
                             inst.add_subtract_immediate.imm12 = narrowed;
-                            mem.writeIntLittle(u32, &buffer, inst.toU32());
+                            mem.writeIntLittle(u32, buffer[0..4], inst.toU32());
                         } else {
                             var inst = aarch64.Instruction{
                                 .load_store_register = mem.bytesToValue(meta.TagPayload(
                                     aarch64.Instruction,
                                     aarch64.Instruction.load_store_register,
-                                ), &buffer),
+                                ), buffer[0..4]),
                             };
                             const offset: u12 = blk: {
                                 if (inst.load_store_register.size == 0) {
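The page/pageoff arithmetic above is the standard ADRP split: the adrp gets the signed distance in 4 KiB pages between the instruction and the target, and the following ldr/add gets the low 12 bits of the target address. A self-contained sketch of the same computation on made-up addresses (assuming the 0x140000000 aarch64 image base introduced later in this commit):

    const std = @import("std");

    // Not linker code -- just the formulas from resolveRelocs() on example numbers.
    pub fn main() void {
        const source_vaddr: u64 = 0x140001008; // address of the adrp
        const target_vaddr_with_addend: u64 = 0x140002010; // resolved target

        const source_page = @intCast(i32, source_vaddr >> 12); // 0x140001
        const target_page = @intCast(i32, target_vaddr_with_addend >> 12); // 0x140002
        const pages = @bitCast(u21, @intCast(i21, target_page - source_page)); // 1
        const narrowed = @truncate(u12, target_vaddr_with_addend); // 0x010

        std.debug.print("pages=0x{x} pageoff=0x{x}\n", .{ pages, narrowed });
    }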
@@ -974,55 +977,73 @@ fn resolveRelocs(self: *Coff, atom: *Atom) !void {
                                 }
                             };
                             inst.load_store_register.offset = offset;
-                            mem.writeIntLittle(u32, &buffer, inst.toU32());
+                            mem.writeIntLittle(u32, buffer[0..4], inst.toU32());
                         }
                     },
+                    .direct => {
+                        assert(!reloc.pcrel);
+                        switch (reloc.length) {
+                            2 => mem.writeIntLittle(
+                                u32,
+                                buffer[0..4],
+                                @truncate(u32, target_vaddr_with_addend + image_base),
+                            ),
+                            3 => mem.writeIntLittle(u64, &buffer, target_vaddr_with_addend + image_base),
+                            else => unreachable,
+                        }
+                    },

-                    else => unreachable,
+                    .got => unreachable,
+                    .import => unreachable,
                 }

-                try self.base.file.?.pwriteAll(&buffer, file_offset + reloc.offset);
-
-                return;
+                switch (reloc.length) {
+                    2 => try self.base.file.?.pwriteAll(buffer[0..4], file_offset + reloc.offset),
+                    3 => try self.base.file.?.pwriteAll(&buffer, file_offset + reloc.offset),
+                    else => unreachable,
+                }
             },

-            else => {},
-        }
+            .x86_64, .i386 => {
+                switch (reloc.@"type") {
+                    .got_page => unreachable,
+                    .got_pageoff => unreachable,
+                    .page => unreachable,
+                    .pageoff => unreachable,
+                    .import_page => unreachable,
+                    .import_pageoff => unreachable,

-        switch (reloc.@"type") {
-            .branch_26 => unreachable,
-            .got_page => unreachable,
-            .got_pageoff => unreachable,
-            .page => unreachable,
-            .pageoff => unreachable,

-            .got, .import => {
-                assert(reloc.pcrel);
-                const disp = @intCast(i32, target_vaddr_with_addend) - @intCast(i32, source_vaddr) - 4;
-                try self.base.file.?.pwriteAll(mem.asBytes(&disp), file_offset + reloc.offset);
-            },
-            .direct => {
-                if (reloc.pcrel) {
-                    const disp = @intCast(i32, target_vaddr_with_addend) - @intCast(i32, source_vaddr) - 4;
-                    try self.base.file.?.pwriteAll(mem.asBytes(&disp), file_offset + reloc.offset);
-                } else switch (self.ptr_width) {
-                    .p32 => try self.base.file.?.pwriteAll(
-                        mem.asBytes(&@intCast(u32, target_vaddr_with_addend + default_image_base_exe)),
-                        file_offset + reloc.offset,
-                    ),
-                    .p64 => switch (reloc.length) {
-                        2 => try self.base.file.?.pwriteAll(
-                            mem.asBytes(&@truncate(u32, target_vaddr_with_addend + default_image_base_exe)),
-                            file_offset + reloc.offset,
-                        ),
-                        3 => try self.base.file.?.pwriteAll(
-                            mem.asBytes(&(target_vaddr_with_addend + default_image_base_exe)),
-                            file_offset + reloc.offset,
-                        ),
-                        else => unreachable,
-                    },
-                }
-            },
-        }
+                    .got, .import => {
+                        assert(reloc.pcrel);
+                        const disp = @intCast(i32, target_vaddr_with_addend) - @intCast(i32, source_vaddr) - 4;
+                        try self.base.file.?.pwriteAll(mem.asBytes(&disp), file_offset + reloc.offset);
+                    },
+                    .direct => {
+                        if (reloc.pcrel) {
+                            const disp = @intCast(i32, target_vaddr_with_addend) - @intCast(i32, source_vaddr) - 4;
+                            try self.base.file.?.pwriteAll(mem.asBytes(&disp), file_offset + reloc.offset);
+                        } else switch (self.ptr_width) {
+                            .p32 => try self.base.file.?.pwriteAll(
+                                mem.asBytes(&@intCast(u32, target_vaddr_with_addend + image_base)),
+                                file_offset + reloc.offset,
+                            ),
+                            .p64 => switch (reloc.length) {
+                                2 => try self.base.file.?.pwriteAll(
+                                    mem.asBytes(&@truncate(u32, target_vaddr_with_addend + image_base)),
+                                    file_offset + reloc.offset,
+                                ),
+                                3 => try self.base.file.?.pwriteAll(
+                                    mem.asBytes(&(target_vaddr_with_addend + image_base)),
+                                    file_offset + reloc.offset,
+                                ),
+                                else => unreachable,
+                            },
+                        }
+                    },
+                }
+            },
+
+            else => unreachable, // unhandled target architecture
+        }
     }
 }
@@ -1950,11 +1971,7 @@ fn writeHeader(self: *Coff) !void {
     const subsystem: coff.Subsystem = .WINDOWS_CUI;
     const size_of_image: u32 = self.getSizeOfImage();
     const size_of_headers: u32 = mem.alignForwardGeneric(u32, self.getSizeOfHeaders(), default_file_alignment);
-    const image_base = self.base.options.image_base_override orelse switch (self.base.options.output_mode) {
-        .Exe => default_image_base_exe,
-        .Lib => default_image_base_dll,
-        else => unreachable,
-    };
+    const image_base = self.getImageBase();

     const base_of_code = self.sections.get(self.text_section_index.?).header.virtual_address;
     const base_of_data = self.sections.get(self.data_section_index.?).header.virtual_address;
@@ -2161,6 +2178,19 @@ pub fn getEntryPoint(self: Coff) ?SymbolWithLoc {
     return self.globals.items[global_index];
 }

+pub fn getImageBase(self: Coff) u64 {
+    const image_base: u64 = self.base.options.image_base_override orelse switch (self.base.options.output_mode) {
+        .Exe => switch (self.base.options.target.cpu.arch) {
+            .aarch64 => 0x140000000,
+            .x86_64, .i386 => 0x400000,
+            else => unreachable, // unsupported target architecture
+        },
+        .Lib => 0x10000000,
+        else => unreachable,
+    };
+    return image_base;
+}
+
 /// Returns pointer-to-symbol described by `sym_loc` descriptor.
 pub fn getSymbolPtr(self: *Coff, sym_loc: SymbolWithLoc) *coff.Symbol {
     assert(sym_loc.file == null); // TODO linking object files
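With the image base now target-dependent, absolute (`direct`) relocations land at different addresses per architecture: a symbol at RVA 0x1000 in an aarch64 executable resolves to 0x140001000, while an x86_64 executable keeps the old 0x400000 base (0x401000). A standalone restatement of the selection logic (assumed sketch, not the linker code itself; the override corresponds to the linker's image_base_override option):

    const std = @import("std");

    fn defaultImageBase(arch: std.Target.Cpu.Arch, output_mode: std.builtin.OutputMode) u64 {
        return switch (output_mode) {
            .Exe => switch (arch) {
                .aarch64 => 0x140000000,
                .x86_64, .i386 => 0x400000,
                else => unreachable, // unsupported target architecture
            },
            .Lib => 0x10000000,
            else => unreachable,
        };
    }

    test "target-dependent image base" {
        try std.testing.expectEqual(@as(u64, 0x140000000), defaultImageBase(.aarch64, .Exe));
        try std.testing.expectEqual(@as(u64, 0x400000), defaultImageBase(.x86_64, .Exe));
    }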