Mirror of https://github.com/ziglang/zig.git
std.Target: Replace isARM() with isArmOrThumb() and rename it to isArm().
The old isARM() function was a portability trap. With the name it had, it seemed like the obviously correct function to use, but it didn't include Thumb. In the vast majority of cases where someone wants to ask "is the target Arm?", Thumb *should* be included. There are exactly 3 cases in the codebase where we do actually need to exclude Thumb, although one of those is in Aro and mirrors a check in Clang that is itself likely a bug. These rare cases can just add an extra isThumb() check.
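The call-site migration is mechanical. Below is a minimal sketch of the new pattern, assuming a Zig toolchain that already contains this change; the helper names are illustrative and not part of the diff:

    const std = @import("std");

    // isArm() now answers the common question "is this target Arm?", Thumb included.
    fn isAnyArm(arch: std.Target.Cpu.Arch) bool {
        return arch.isArm();
    }

    // The rare "Arm but not Thumb" case spells out the exclusion explicitly,
    // as the Aro toolchain change below does.
    fn isArmExcludingThumb(arch: std.Target.Cpu.Arch) bool {
        return arch.isArm() and !arch.isThumb();
    }

    test isAnyArm {
        try std.testing.expect(isAnyArm(.thumb)); // the old isARM() would have said false here
        try std.testing.expect(!isArmExcludingThumb(.thumbeb));
    }

Call sites that previously used isArmOrThumb() simply drop the wrapper and call isArm() directly.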
Parent: 4e843b4e2b
Commit: c9e67e71c1
lib/compiler/aro/aro/toolchains/Linux.zig (vendored)
@@ -40,7 +40,7 @@ fn buildExtraOpts(self: *Linux, tc: *const Toolchain) !void {
         self.extra_opts.appendAssumeCapacity("relro");
     }

-    if (target.cpu.arch.isARM() or target.cpu.arch.isAARCH64() or is_android) {
+    if ((target.cpu.arch.isArm() and !target.cpu.arch.isThumb()) or target.cpu.arch.isAARCH64() or is_android) {
         try self.extra_opts.ensureUnusedCapacity(gpa, 2);
         self.extra_opts.appendAssumeCapacity("-z");
         self.extra_opts.appendAssumeCapacity("max-page-size=4096");
@@ -10,7 +10,7 @@ pub const panic = common.panic;

 comptime {
     if (!builtin.is_test) {
-        if (arch.isArmOrThumb()) {
+        if (arch.isArm()) {
             @export(&__aeabi_unwind_cpp_pr0, .{ .name = "__aeabi_unwind_cpp_pr0", .linkage = common.linkage, .visibility = common.visibility });
             @export(&__aeabi_unwind_cpp_pr1, .{ .name = "__aeabi_unwind_cpp_pr1", .linkage = common.linkage, .visibility = common.visibility });
             @export(&__aeabi_unwind_cpp_pr2, .{ .name = "__aeabi_unwind_cpp_pr2", .linkage = common.linkage, .visibility = common.visibility });
@@ -206,7 +206,7 @@ fn __aeabi_drsub(a: f64, b: f64) callconv(.AAPCS) f64 {
 }

 test "__aeabi_frsub" {
-    if (!builtin.cpu.arch.isARM()) return error.SkipZigTest;
+    if (!builtin.cpu.arch.isArm() or builtin.cpu.arch.isThumb()) return error.SkipZigTest;
     const inf32 = std.math.inf(f32);
     const maxf32 = std.math.floatMax(f32);
     const frsub_data = [_][3]f32{
@@ -226,14 +226,13 @@ test "__aeabi_frsub" {
         [_]f32{ maxf32, -maxf32, -inf32 },
         [_]f32{ -maxf32, maxf32, inf32 },
     };
-    if (!builtin.cpu.arch.isARM()) return error.SkipZigTest;
     for (frsub_data) |data| {
         try std.testing.expectApproxEqAbs(data[2], __aeabi_frsub(data[0], data[1]), 0.001);
     }
 }

 test "__aeabi_drsub" {
-    if (!builtin.cpu.arch.isARM()) return error.SkipZigTest;
+    if (!builtin.cpu.arch.isArm() or builtin.cpu.arch.isThumb()) return error.SkipZigTest;
     const inf64 = std.math.inf(f64);
     const maxf64 = std.math.floatMax(f64);
     const frsub_data = [_][3]f64{
@@ -253,7 +252,6 @@ test "__aeabi_drsub" {
         [_]f64{ maxf64, -maxf64, -inf64 },
         [_]f64{ -maxf64, maxf64, inf64 },
     };
-    if (!builtin.cpu.arch.isARM()) return error.SkipZigTest;
     for (frsub_data) |data| {
         try std.testing.expectApproxEqAbs(data[2], __aeabi_drsub(data[0], data[1]), 0.000001);
     }
@@ -268,7 +268,7 @@ test "clzsi2" {
     try test__clzsi2(0xFE000000, 0);
     try test__clzsi2(0xFF000000, 0);
     // arm and thumb1 assume input a != 0
-    if (!builtin.cpu.arch.isArmOrThumb())
+    if (!builtin.cpu.arch.isArm())
         try test__clzsi2(0x00000000, 32);
     try test__clzsi2(0x00000001, 31);
     try test__clzsi2(0x00000002, 30);
@@ -29,7 +29,7 @@ pub const want_aeabi = switch (builtin.abi) {
     },
     else => false,
 };
-pub const want_mingw_arm_abi = builtin.cpu.arch.isArmOrThumb() and builtin.target.isMinGW();
+pub const want_mingw_arm_abi = builtin.cpu.arch.isArm() and builtin.target.isMinGW();

 pub const want_ppc_abi = builtin.cpu.arch.isPowerPC();

@@ -1379,17 +1379,11 @@ pub const Cpu = struct {
             };
         }

-        pub inline fn isARM(arch: Arch) bool {
+        /// Note that this includes Thumb.
+        pub inline fn isArm(arch: Arch) bool {
             return switch (arch) {
                 .arm, .armeb => true,
-                else => false,
-            };
-        }
-
-        pub inline fn isAARCH64(arch: Arch) bool {
-            return switch (arch) {
-                .aarch64, .aarch64_be => true,
-                else => false,
+                else => arch.isThumb(),
             };
         }

@@ -1400,8 +1394,11 @@ pub const Cpu = struct {
             };
         }

-        pub inline fn isArmOrThumb(arch: Arch) bool {
-            return arch.isARM() or arch.isThumb();
+        pub inline fn isAARCH64(arch: Arch) bool {
+            return switch (arch) {
+                .aarch64, .aarch64_be => true,
+                else => false,
+            };
         }

         pub inline fn isWasm(arch: Arch) bool {
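For clarity, here is the new predicate's behavior on the relevant architecture tags, written as a quick check. This is a sketch assuming the std.Target API shown above and is not part of the diff:

    const std = @import("std");
    const Arch = std.Target.Cpu.Arch;

    test "isArm includes Thumb, isAARCH64 is unchanged" {
        try std.testing.expect(Arch.arm.isArm());
        try std.testing.expect(Arch.armeb.isArm());
        try std.testing.expect(Arch.thumb.isArm()); // previously only isArmOrThumb() returned true here
        try std.testing.expect(Arch.thumbeb.isArm());
        try std.testing.expect(!Arch.aarch64.isArm());
        try std.testing.expect(Arch.aarch64.isAARCH64());
    }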
@@ -263,7 +263,7 @@ test gamma {
 }

 test "gamma.special" {
-    if (builtin.cpu.arch.isArmOrThumb() and builtin.target.floatAbi() == .soft) return error.SkipZigTest; // https://github.com/ziglang/zig/issues/21234
+    if (builtin.cpu.arch.isArm() and builtin.target.floatAbi() == .soft) return error.SkipZigTest; // https://github.com/ziglang/zig/issues/21234

     inline for (&.{ f32, f64 }) |T| {
         try expect(std.math.isNan(gamma(T, -std.math.nan(T))));
@@ -32,7 +32,7 @@ test isSignalNan {
     // TODO: Signalling NaN values get converted to quiet NaN values in
     // some cases where they shouldn't such that this can fail.
     // See https://github.com/ziglang/zig/issues/14366
-    if (!builtin.cpu.arch.isArmOrThumb() and
+    if (!builtin.cpu.arch.isArm() and
         !builtin.cpu.arch.isAARCH64() and
         !builtin.cpu.arch.isPowerPC() and
         builtin.zig_backend != .stage2_c)
@@ -506,7 +506,7 @@ fn getauxvalImpl(index: usize) callconv(.C) usize {
 const require_aligned_register_pair =
     builtin.cpu.arch.isPowerPC32() or
     builtin.cpu.arch.isMIPS32() or
-    builtin.cpu.arch.isArmOrThumb();
+    builtin.cpu.arch.isArm();

 // Split a 64bit value into a {LSB,MSB} pair.
 // The LE/BE variants specify the endianness to assume.
@@ -2331,7 +2331,7 @@ pub fn process_vm_writev(pid: pid_t, local: []const iovec_const, remote: []const
 }

 pub fn fadvise(fd: fd_t, offset: i64, len: i64, advice: usize) usize {
-    if (comptime native_arch.isArmOrThumb() or native_arch.isPowerPC32()) {
+    if (comptime native_arch.isArm() or native_arch.isPowerPC32()) {
         // These architectures reorder the arguments so that a register is not skipped to align the
         // register number that `offset` is passed in.

@@ -18,7 +18,7 @@ pub fn suggestVectorLengthForCpu(comptime T: type, comptime cpu: std.Target.Cpu)
         if (std.Target.x86.featureSetHasAny(cpu.features, .{ .prefer_256_bit, .avx2 }) and !std.Target.x86.featureSetHas(cpu.features, .prefer_128_bit)) break :blk 256;
         if (std.Target.x86.featureSetHas(cpu.features, .sse)) break :blk 128;
         if (std.Target.x86.featureSetHasAny(cpu.features, .{ .mmx, .@"3dnow" })) break :blk 64;
-    } else if (cpu.arch.isArmOrThumb()) {
+    } else if (cpu.arch.isArm()) {
         if (std.Target.arm.featureSetHas(cpu.features, .neon)) break :blk 128;
     } else if (cpu.arch.isAARCH64()) {
         // SVE allows up to 2048 bits in the specification, as of 2022 the most powerful machine has implemented 512-bit
@@ -495,7 +495,7 @@ fn posixCallMainAndExit(argc_argv_ptr: [*]usize) callconv(.C) noreturn {
     // ARMv6 targets (and earlier) have no support for TLS in hardware.
     // FIXME: Elide the check for targets >= ARMv7 when the target feature API
     // becomes less verbose (and more usable).
-    if (comptime native_arch.isArmOrThumb()) {
+    if (comptime native_arch.isArm()) {
         if (at_hwcap & std.os.linux.HWCAP.TLS == 0) {
             // FIXME: Make __aeabi_read_tp call the kernel helper kuser_get_tls
             // For the time being use a simple trap instead of a @panic call to
@@ -401,7 +401,7 @@ pub fn resolveTargetQuery(query: Target.Query) DetectError!Target {
     }

     // https://github.com/llvm/llvm-project/issues/105978
-    if (result.cpu.arch.isArmOrThumb() and result.floatAbi() == .soft) {
+    if (result.cpu.arch.isArm() and result.floatAbi() == .soft) {
         result.cpu.features.removeFeature(@intFromEnum(Target.arm.Feature.vfp2));
     }

@@ -5190,7 +5190,7 @@ fn asmInputNeedsLocal(f: *Function, constraint: []const u8, value: CValue) bool
     return switch (constraint[0]) {
         '{' => true,
         'i', 'r' => false,
-        'I' => !target.cpu.arch.isArmOrThumb(),
+        'I' => !target.cpu.arch.isArm(),
         else => switch (value) {
             .constant => |val| switch (dg.pt.zcu.intern_pool.indexToKey(val.toIntern())) {
                 .ptr => |ptr| if (ptr.byte_offset == 0) switch (ptr.base_addr) {
@@ -494,7 +494,7 @@ const DataLayoutBuilder = struct {
             if (idx != size) try writer.print(":{d}", .{idx});
         }
     }
-    if (self.target.cpu.arch.isArmOrThumb())
+    if (self.target.cpu.arch.isArm())
         try writer.writeAll("-Fi8") // for thumb interwork
     else if (self.target.cpu.arch == .powerpc64 and
         self.target.os.tag != .freebsd and
@@ -761,7 +761,7 @@ const DataLayoutBuilder = struct {
                 else => {},
             }
         },
-        .vector => if (self.target.cpu.arch.isArmOrThumb()) {
+        .vector => if (self.target.cpu.arch.isArm()) {
             switch (size) {
                 128 => abi = 64,
                 else => {},
@@ -827,7 +827,7 @@ const DataLayoutBuilder = struct {
             else => {},
         },
         .aggregate => if (self.target.os.tag == .uefi or self.target.os.tag == .windows or
-            self.target.cpu.arch.isArmOrThumb())
+            self.target.cpu.arch.isArm())
         {
             pref = @min(pref, self.target.ptrBitWidth());
         } else switch (self.target.cpu.arch) {
@@ -440,7 +440,7 @@ fn start_asm_path(comp: *Compilation, arena: Allocator, basename: []const u8) ![
                 try result.appendSlice("sparc" ++ s ++ "sparc32");
             }
         }
-    } else if (arch.isARM()) {
+    } else if (arch.isArm()) {
         try result.appendSlice("arm");
     } else if (arch.isMIPS()) {
         if (!mem.eql(u8, basename, "crti.S") and !mem.eql(u8, basename, "crtn.S")) {
@@ -604,7 +604,7 @@ fn add_include_dirs_arch(
             try args.append("-I");
             try args.append(try path.join(arena, &[_][]const u8{ dir, "x86" }));
         }
-    } else if (arch.isARM()) {
+    } else if (arch.isArm()) {
         if (opt_nptl) |nptl| {
             try args.append("-I");
             try args.append(try path.join(arena, &[_][]const u8{ dir, "arm", nptl }));
@@ -136,7 +136,7 @@ pub fn buildStaticLib(comp: *Compilation, prog_node: std.Progress.Node) BuildErr
     if (!comp.config.any_non_single_threaded) {
         try cflags.append("-D_LIBUNWIND_HAS_NO_THREADS");
     }
-    if (target.cpu.arch.isArmOrThumb() and target.abi.floatAbi() == .hard) {
+    if (target.cpu.arch.isArm() and target.abi.floatAbi() == .hard) {
         try cflags.append("-DCOMPILER_RT_ARMHF_TARGET");
     }
     try cflags.append("-Wno-bitwise-conditional-parentheses");
@@ -1865,12 +1865,10 @@ fn linkWithLLD(coff: *Coff, arena: Allocator, tid: Zcu.PerThread.Id, prog_node:
         try argv.append("-MACHINE:X86");
     } else if (target.cpu.arch == .x86_64) {
         try argv.append("-MACHINE:X64");
-    } else if (target.cpu.arch.isARM()) {
-        if (target.ptrBitWidth() == 32) {
-            try argv.append("-MACHINE:ARM");
-        } else {
-            try argv.append("-MACHINE:ARM64");
-        }
+    } else if (target.cpu.arch == .thumb) {
+        try argv.append("-MACHINE:ARM");
+    } else if (target.cpu.arch == .aarch64) {
+        try argv.append("-MACHINE:ARM64");
     }

     for (comp.force_undefined_symbols.keys()) |symbol| {
@@ -1823,7 +1823,7 @@ fn linkWithLLD(self: *Elf, arena: Allocator, tid: Zcu.PerThread.Id, prog_node: s
     }

     if (link_mode == .static) {
-        if (target.cpu.arch.isArmOrThumb()) {
+        if (target.cpu.arch.isArm()) {
             try argv.append("-Bstatic");
         } else {
             try argv.append("-static");
@@ -124,7 +124,7 @@ test "@floatFromInt(f80)" {
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
+    if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArm()) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf and builtin.target.ofmt != .macho) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

@@ -1362,7 +1362,7 @@ test "cast f16 to wider types" {
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
+    if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArm()) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf and builtin.target.ofmt != .macho) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

@@ -522,7 +522,7 @@ test "runtime 128 bit integer division" {
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
+    if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArm()) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf and builtin.target.ofmt != .macho) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

@@ -126,7 +126,7 @@ test "cmp f16" {
     if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf and builtin.target.ofmt != .macho) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
-    if (builtin.cpu.arch.isArmOrThumb() and builtin.target.floatAbi() == .soft) return error.SkipZigTest; // https://github.com/ziglang/zig/issues/21234
+    if (builtin.cpu.arch.isArm() and builtin.target.floatAbi() == .soft) return error.SkipZigTest; // https://github.com/ziglang/zig/issues/21234

     try testCmp(f16);
     try comptime testCmp(f16);
@@ -135,7 +135,7 @@ test "cmp f16" {
 test "cmp f32/f64" {
     if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf and builtin.target.ofmt != .macho) return error.SkipZigTest;
-    if (builtin.cpu.arch.isArmOrThumb() and builtin.target.floatAbi() == .soft) return error.SkipZigTest; // https://github.com/ziglang/zig/issues/21234
+    if (builtin.cpu.arch.isArm() and builtin.target.floatAbi() == .soft) return error.SkipZigTest; // https://github.com/ziglang/zig/issues/21234

     try testCmp(f32);
     try comptime testCmp(f32);
@@ -146,7 +146,7 @@ test "cmp f32/f64" {
 test "cmp f128" {
     if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_c and builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
+    if (builtin.zig_backend == .stage2_c and builtin.cpu.arch.isArm()) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf and builtin.target.ofmt != .macho) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
@@ -1009,7 +1009,7 @@ test "@abs f32/f64" {
 test "@abs f80/f128/c_longdouble" {
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_c and builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
+    if (builtin.zig_backend == .stage2_c and builtin.cpu.arch.isArm()) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf and builtin.target.ofmt != .macho) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
@@ -1134,7 +1134,7 @@ test "@floor f32/f64" {
 test "@floor f80/f128/c_longdouble" {
     if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_c and builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
+    if (builtin.zig_backend == .stage2_c and builtin.cpu.arch.isArm()) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf and builtin.target.ofmt != .macho) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
@@ -1232,7 +1232,7 @@ test "@ceil f32/f64" {
 test "@ceil f80/f128/c_longdouble" {
     if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_c and builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
+    if (builtin.zig_backend == .stage2_c and builtin.cpu.arch.isArm()) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf and builtin.target.ofmt != .macho) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
@@ -1330,7 +1330,7 @@ test "@trunc f32/f64" {
 test "@trunc f80/f128/c_longdouble" {
     if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
-    if (builtin.zig_backend == .stage2_c and builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
+    if (builtin.zig_backend == .stage2_c and builtin.cpu.arch.isArm()) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf and builtin.target.ofmt != .macho) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
@@ -780,7 +780,7 @@ test "128-bit multiplication" {
     if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf and builtin.target.ofmt != .macho) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
+    if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArm()) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

     {
@@ -1369,7 +1369,7 @@ test "remainder division" {
     if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf and builtin.target.ofmt != .macho) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
+    if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArm()) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

     if (builtin.zig_backend == .stage2_llvm and builtin.os.tag == .windows) {
@@ -1522,7 +1522,7 @@ test "@round f80" {
     if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
+    if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArm()) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf and builtin.target.ofmt != .macho) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

@@ -1535,7 +1535,7 @@ test "@round f128" {
     if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
+    if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArm()) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf and builtin.target.ofmt != .macho) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

@@ -1579,7 +1579,7 @@ test "NaN comparison" {
     if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf and builtin.target.ofmt != .macho) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
-    if (builtin.cpu.arch.isArmOrThumb() and builtin.target.floatAbi() == .soft) return error.SkipZigTest; // https://github.com/ziglang/zig/issues/21234
+    if (builtin.cpu.arch.isArm() and builtin.target.floatAbi() == .soft) return error.SkipZigTest; // https://github.com/ziglang/zig/issues/21234

     try testNanEqNan(f16);
     try testNanEqNan(f32);
@@ -1735,7 +1735,7 @@ test "runtime comparison to NaN is comptime-known" {
     if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf and builtin.target.ofmt != .macho) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
-    if (builtin.cpu.arch.isArmOrThumb() and builtin.target.floatAbi() == .soft) return error.SkipZigTest; // https://github.com/ziglang/zig/issues/21234
+    if (builtin.cpu.arch.isArm() and builtin.target.floatAbi() == .soft) return error.SkipZigTest; // https://github.com/ziglang/zig/issues/21234

     const S = struct {
         fn doTheTest(comptime F: type, x: F) void {
@@ -1766,7 +1766,7 @@ test "runtime int comparison to inf is comptime-known" {
     if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf and builtin.target.ofmt != .macho) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
-    if (builtin.cpu.arch.isArmOrThumb() and builtin.target.floatAbi() == .soft) return error.SkipZigTest; // https://github.com/ziglang/zig/issues/21234
+    if (builtin.cpu.arch.isArm() and builtin.target.floatAbi() == .soft) return error.SkipZigTest; // https://github.com/ziglang/zig/issues/21234

     const S = struct {
         fn doTheTest(comptime F: type, x: u32) void {
@@ -122,7 +122,7 @@ test "@min/max for floats" {
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
+    if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArm()) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

@@ -58,7 +58,7 @@ test "@mulAdd f80" {
     if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
+    if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArm()) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf and builtin.target.ofmt != .macho) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

@@ -79,7 +79,7 @@ test "@mulAdd f128" {
     if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
+    if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArm()) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_x86_64 and builtin.target.ofmt != .elf and builtin.target.ofmt != .macho) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

@@ -189,7 +189,7 @@ test "vector f80" {
     if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
+    if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArm()) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

     try comptime vector80();
@@ -216,7 +216,7 @@ test "vector f128" {
     if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
+    if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArm()) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

     try comptime vector128();
@@ -164,7 +164,7 @@ test "saturating multiplication <= 32 bits" {
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
+    if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArm()) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

     if (builtin.zig_backend == .stage2_llvm and builtin.cpu.arch == .wasm32) {
@@ -264,7 +264,7 @@ test "saturating multiplication" {
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
+    if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArm()) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

     if (builtin.zig_backend == .stage2_llvm and builtin.cpu.arch == .wasm32) {
@@ -419,7 +419,7 @@ test "packed struct 24bits" {
     if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
     if (builtin.cpu.arch == .wasm32) return error.SkipZigTest; // TODO
-    if (comptime builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest; // TODO
+    if (comptime builtin.cpu.arch.isArm()) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;

@@ -818,7 +818,7 @@ test "non-packed struct with u128 entry in union" {
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
+    if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArm()) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

     const U = union(enum) {
@@ -941,7 +941,7 @@ test "tuple assigned to variable" {

 test "comptime struct field" {
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
-    if (comptime builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest; // TODO
+    if (comptime builtin.cpu.arch.isArm()) return error.SkipZigTest; // TODO

     const T = struct {
         a: i32,
@@ -101,7 +101,7 @@ test "vector float operators" {
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
+    if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArm()) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO

     if (builtin.zig_backend == .stage2_llvm and builtin.cpu.arch == .aarch64) {
@@ -753,7 +753,7 @@ test "vector reduce operation" {
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_spirv64) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArmOrThumb()) return error.SkipZigTest;
+    if (builtin.zig_backend == .stage2_c and comptime builtin.cpu.arch.isArm()) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;
     if (builtin.cpu.arch.isMIPS64()) return error.SkipZigTest; // https://github.com/ziglang/zig/issues/21091

@@ -10,7 +10,7 @@ const builtin = @import("builtin");
 const print = std.debug.print;
 const expect = std.testing.expect;
 const expectEqual = std.testing.expectEqual;
-const have_i128 = builtin.cpu.arch != .x86 and !builtin.cpu.arch.isARM() and
+const have_i128 = builtin.cpu.arch != .x86 and !builtin.cpu.arch.isArm() and
     !builtin.cpu.arch.isMIPS() and !builtin.cpu.arch.isPowerPC32();

 const have_f128 = builtin.cpu.arch.isX86() and !builtin.os.tag.isDarwin();
@@ -181,7 +181,7 @@ extern fn c_cmultf(a: ComplexFloat, b: ComplexFloat) ComplexFloat;
 extern fn c_cmultd(a: ComplexDouble, b: ComplexDouble) ComplexDouble;

 const complex_abi_compatible = builtin.cpu.arch != .x86 and !builtin.cpu.arch.isMIPS() and
-    !builtin.cpu.arch.isARM() and !builtin.cpu.arch.isPowerPC32() and !builtin.cpu.arch.isRISCV();
+    !builtin.cpu.arch.isArm() and !builtin.cpu.arch.isPowerPC32() and !builtin.cpu.arch.isRISCV();

 test "C ABI complex float" {
     if (!complex_abi_compatible) return error.SkipZigTest;
@@ -5553,7 +5553,7 @@ extern fn c_f16_struct(f16_struct) f16_struct;
 test "f16 struct" {
     if (builtin.target.cpu.arch.isMIPS64()) return error.SkipZigTest;
     if (builtin.target.cpu.arch.isPowerPC32()) return error.SkipZigTest;
-    if (builtin.cpu.arch.isARM() and builtin.mode != .Debug) return error.SkipZigTest;
+    if (builtin.cpu.arch.isArm() and builtin.mode != .Debug) return error.SkipZigTest;

     const a = c_f16_struct(.{ .a = 12 });
     try expect(a.a == 34);