2020-08-08 05:35:15 +00:00
|
|
|
//! The standard memory allocation interface.
|
|
|
|
|
|
|
|
const std = @import("../std.zig");
|
|
|
|
const assert = std.debug.assert;
|
|
|
|
const math = std.math;
|
|
|
|
const mem = std.mem;
|
|
|
|
const Allocator = @This();
|
2021-11-06 00:54:35 +00:00
|
|
|
const builtin = @import("builtin");
|
2020-08-08 05:35:15 +00:00
|
|
|
|
|
|
|
/// The error set returned by all allocation functions in this interface.
pub const Error = error{OutOfMemory};

/// Integer type wide enough to hold the log2 of any valid `usize` alignment.
/// Alignments cross the vtable boundary as log2 values (see `VTable.alloc`).
pub const Log2Align = math.Log2Int(usize);

// The type erased pointer to the allocator implementation
ptr: *anyopaque,
// The function pointers providing this allocator's behavior; see `VTable`.
vtable: *const VTable,
|
|
|
|
|
|
|
|
/// The set of function pointers through which every operation on an
/// `Allocator` is dispatched. An allocator implementation provides these
/// three entry points; `ctx` receives the implementation's `ptr` field.
pub const VTable = struct {
    /// Attempt to allocate exactly `len` bytes aligned to `1 << ptr_align`.
    ///
    /// A `null` result indicates the allocation failed.
    ///
    /// `ret_addr` is optionally provided as the first return address of the
    /// allocation call stack. If the value is `0` it means no return address
    /// has been provided.
    alloc: *const fn (ctx: *anyopaque, len: usize, ptr_align: u8, ret_addr: usize) ?[*]u8,

    /// Attempt to expand or shrink memory in place. `buf.len` must equal the
    /// length requested from the most recent successful call to `alloc` or
    /// `resize`. `buf_align` must equal the same value that was passed as the
    /// `ptr_align` parameter to the original `alloc` call.
    ///
    /// A result of `true` indicates the resize was successful and the
    /// allocation now has the same address but a size of `new_len`. `false`
    /// indicates the resize could not be completed without moving the
    /// allocation to a different address.
    ///
    /// `new_len` must be greater than zero.
    ///
    /// `ret_addr` is optionally provided as the first return address of the
    /// allocation call stack. If the value is `0` it means no return address
    /// has been provided.
    resize: *const fn (ctx: *anyopaque, buf: []u8, buf_align: u8, new_len: usize, ret_addr: usize) bool,

    /// Free and invalidate a buffer.
    ///
    /// `buf.len` must equal the most recent length returned by `alloc` or
    /// given to a successful `resize` call.
    ///
    /// `buf_align` must equal the same value that was passed as the
    /// `ptr_align` parameter to the original `alloc` call.
    ///
    /// `ret_addr` is optionally provided as the first return address of the
    /// allocation call stack. If the value is `0` it means no return address
    /// has been provided.
    free: *const fn (ctx: *anyopaque, buf: []u8, buf_align: u8, ret_addr: usize) void,
};
|
2021-10-28 23:37:25 +00:00
|
|
|
|
2022-11-27 08:07:35 +00:00
|
|
|
/// A stub `VTable.resize` implementation for allocators that can never
/// grow or shrink an allocation in place: unconditionally reports failure.
pub fn noResize(
    self: *anyopaque,
    buf: []u8,
    log2_buf_align: u8,
    new_len: usize,
    ret_addr: usize,
) bool {
    // All parameters are deliberately unused; discard them explicitly.
    _ = ret_addr;
    _ = new_len;
    _ = log2_buf_align;
    _ = buf;
    _ = self;
    return false;
}
|
|
|
|
|
|
|
|
/// A stub `VTable.free` implementation for allocators that never reclaim
/// individual allocations: does nothing.
pub fn noFree(
    self: *anyopaque,
    buf: []u8,
    log2_buf_align: u8,
    ret_addr: usize,
) void {
    // All parameters are deliberately unused; discard them explicitly.
    _ = ret_addr;
    _ = log2_buf_align;
    _ = buf;
    _ = self;
}
|
|
|
|
|
2022-11-27 08:07:35 +00:00
|
|
|
/// This function is not intended to be called except from within the
/// implementation of an Allocator.
/// Dispatches an allocation request of `len` bytes, aligned to
/// `1 << ptr_align`, through the vtable; returns null on failure.
pub inline fn rawAlloc(self: Allocator, len: usize, ptr_align: u8, ret_addr: usize) ?[*]u8 {
    const alloc_fn = self.vtable.alloc;
    return alloc_fn(self.ptr, len, ptr_align, ret_addr);
}
|
|
|
|
|
2022-11-27 08:07:35 +00:00
|
|
|
/// This function is not intended to be called except from within the
/// implementation of an Allocator.
/// Dispatches an in-place resize request through the vtable; `true` means
/// the allocation now has length `new_len` at the same address.
pub inline fn rawResize(self: Allocator, buf: []u8, log2_buf_align: u8, new_len: usize, ret_addr: usize) bool {
    const resize_fn = self.vtable.resize;
    return resize_fn(self.ptr, buf, log2_buf_align, new_len, ret_addr);
}
|
|
|
|
|
2022-11-27 08:07:35 +00:00
|
|
|
/// This function is not intended to be called except from within the
/// implementation of an Allocator.
/// Dispatches a free request through the vtable; `buf` and
/// `log2_buf_align` must match the original allocation.
pub inline fn rawFree(self: Allocator, buf: []u8, log2_buf_align: u8, ret_addr: usize) void {
    const free_fn = self.vtable.free;
    return free_fn(self.ptr, buf, log2_buf_align, ret_addr);
}
|
|
|
|
|
2020-08-08 05:35:15 +00:00
|
|
|
/// Returns a pointer to undefined memory.
/// Call `destroy` with the result to free the memory.
pub fn create(self: Allocator, comptime T: type) Error!*T {
    if (@sizeOf(T) == 0) {
        // Zero-sized types need no backing allocation, but the returned
        // dangling pointer must still satisfy `T`'s alignment. The previous
        // `maxInt(usize)` sentinel is misaligned whenever `@alignOf(T) > 1`;
        // round it down, mirroring the `n == 0` case in
        // `allocAdvancedWithRetAddr`.
        const ptr = comptime std.mem.alignBackward(math.maxInt(usize), @alignOf(T));
        return @intToPtr(*T, ptr);
    }
    // Allocate a single naturally-aligned item and hand back its address.
    const slice = try self.allocAdvancedWithRetAddr(T, null, 1, @returnAddress());
    return &slice[0];
}
|
|
|
|
|
|
|
|
/// `ptr` should be the return value of `create`, or otherwise
/// have the same address and alignment property.
/// Works for both mutable and const single-item pointers.
pub fn destroy(self: Allocator, ptr: anytype) void {
    // Recover the pointee type and alignment from the pointer's type info.
    const info = @typeInfo(@TypeOf(ptr)).Pointer;
    const T = info.child;
    // Zero-sized types have no backing allocation (see `create`);
    // nothing to free.
    if (@sizeOf(T) == 0) return;
    // Strip constness by round-tripping through the address: `rawFree`
    // takes a mutable byte slice even when the caller holds a `*const T`.
    const non_const_ptr = @intToPtr([*]u8, @ptrToInt(ptr));
    self.rawFree(non_const_ptr[0..@sizeOf(T)], math.log2(info.alignment), @returnAddress());
}
|
|
|
|
|
|
|
|
/// Allocates a slice of `n` items of type `T`, each left `undefined`.
/// Depending on the Allocator implementation, it may be required to call
/// `free` once the memory is no longer needed, to avoid a resource leak;
/// when the implementation is unknown, correct code calls `free` when done.
///
/// For allocating a single item, see `create`.
pub fn alloc(self: Allocator, comptime T: type, n: usize) Error![]T {
    // Capture the caller's address here so allocation traces point at the
    // call site of `alloc` itself.
    const ret_addr = @returnAddress();
    return self.allocAdvancedWithRetAddr(T, null, n, ret_addr);
}
|
|
|
|
|
|
|
|
/// Allocates an array of `n` items of type `Elem`, honoring an optional
/// comptime alignment and an optional comptime sentinel value. The return
/// type depends on the chosen options; see `AllocWithOptionsPayload`.
/// Caller owns the returned memory; free with `free`.
pub fn allocWithOptions(
    self: Allocator,
    comptime Elem: type,
    n: usize,
    /// null means naturally aligned
    comptime optional_alignment: ?u29,
    comptime optional_sentinel: ?Elem,
) Error!AllocWithOptionsPayload(Elem, optional_alignment, optional_sentinel) {
    return self.allocWithOptionsRetAddr(Elem, n, optional_alignment, optional_sentinel, @returnAddress());
}
|
|
|
|
|
|
|
|
/// Like `allocWithOptions`, but the caller supplies `return_address` for
/// allocation call-site attribution (pass `@returnAddress()` or `0`).
pub fn allocWithOptionsRetAddr(
    self: Allocator,
    comptime Elem: type,
    n: usize,
    /// null means naturally aligned
    comptime optional_alignment: ?u29,
    comptime optional_sentinel: ?Elem,
    return_address: usize,
) Error!AllocWithOptionsPayload(Elem, optional_alignment, optional_sentinel) {
    if (optional_sentinel) |s| {
        // Reserve one extra slot, write the sentinel into it, and return a
        // sentinel-terminated view of the first `n` items.
        const buf = try self.allocAdvancedWithRetAddr(Elem, optional_alignment, n + 1, return_address);
        buf[n] = s;
        return buf[0..n :s];
    } else {
        return self.allocAdvancedWithRetAddr(Elem, optional_alignment, n, return_address);
    }
}
|
|
|
|
|
|
|
|
/// Computes the slice type returned by `allocWithOptions` and friends:
/// sentinel-terminated when `sentinel` is non-null, with the requested
/// alignment (or `Elem`'s natural alignment when `alignment` is null).
fn AllocWithOptionsPayload(comptime Elem: type, comptime alignment: ?u29, comptime sentinel: ?Elem) type {
    const a = alignment orelse @alignOf(Elem);
    return if (sentinel) |s|
        [:s]align(a) Elem
    else
        []align(a) Elem;
}
|
|
|
|
|
|
|
|
/// Allocates `n + 1` items of type `Elem`; the first `n` items are left
/// `undefined` and the last item is set to `sentinel`. Depending on the
/// Allocator implementation, it may be required to call `free` once the
/// memory is no longer needed, to avoid a resource leak; when the
/// implementation is unknown, correct code calls `free` when done.
///
/// For allocating a single item, see `create`.
pub fn allocSentinel(
    self: Allocator,
    comptime Elem: type,
    n: usize,
    comptime sentinel: Elem,
) Error![:sentinel]Elem {
    // Capture the caller's address so traces attribute the allocation to
    // the call site of `allocSentinel`.
    const ret_addr = @returnAddress();
    return self.allocWithOptionsRetAddr(Elem, n, null, sentinel, ret_addr);
}
|
|
|
|
|
|
|
|
/// Allocates an array of `n` items of type `T` with the requested comptime
/// alignment (or `T`'s natural alignment when `alignment` is null).
/// Caller owns the returned memory; free with `free`.
pub fn alignedAlloc(
    self: Allocator,
    comptime T: type,
    /// null means naturally aligned
    comptime alignment: ?u29,
    n: usize,
) Error![]align(alignment orelse @alignOf(T)) T {
    // Capture the caller's address so traces attribute the allocation to
    // the call site of `alignedAlloc`.
    const ret_addr = @returnAddress();
    return self.allocAdvancedWithRetAddr(T, alignment, n, ret_addr);
}
|
|
|
|
|
|
|
|
/// Allocates an array of `n` items of type `T` with the given comptime
/// alignment (or `T`'s natural alignment when `alignment` is null).
/// `return_address` is forwarded to the implementation for call-site
/// attribution; pass `@returnAddress()` or `0`.
pub fn allocAdvancedWithRetAddr(
    self: Allocator,
    comptime T: type,
    /// null means naturally aligned
    comptime alignment: ?u29,
    n: usize,
    return_address: usize,
) Error![]align(alignment orelse @alignOf(T)) T {
    // Canonicalize at comptime: an explicit alignment equal to the natural
    // alignment re-dispatches as `null`, so both spellings share one
    // instantiation of this function.
    const a = if (alignment) |a| blk: {
        if (a == @alignOf(T)) return allocAdvancedWithRetAddr(self, T, null, n, return_address);
        break :blk a;
    } else @alignOf(T);

    // The Zig Allocator interface is not intended to solve allocations beyond
    // the minimum OS page size. For these use cases, the caller must use OS
    // APIs directly.
    comptime assert(a <= mem.page_size);

    if (n == 0) {
        // Zero-length allocation: return a dangling pointer at the highest
        // address satisfying the requested alignment; no memory is reserved.
        const ptr = comptime std.mem.alignBackward(math.maxInt(usize), a);
        return @intToPtr([*]align(a) T, ptr)[0..0];
    }

    // Overflow in the total byte count is reported as OutOfMemory rather
    // than tripping a safety check.
    const byte_count = math.mul(usize, @sizeOf(T), n) catch return Error.OutOfMemory;
    const byte_ptr = self.rawAlloc(byte_count, log2a(a), return_address) orelse return Error.OutOfMemory;
    // TODO: https://github.com/ziglang/zig/issues/4298
    @memset(byte_ptr, undefined, byte_count);
    const byte_slice = byte_ptr[0..byte_count];
    return mem.bytesAsSlice(T, @alignCast(a, byte_slice));
}
|
|
|
|
|
2022-11-27 08:07:35 +00:00
|
|
|
/// Requests to modify the size of an allocation. It is guaranteed to not move
/// the pointer, however the allocator implementation may refuse the resize
/// request by returning `false`.
/// Resizing to zero frees the memory and always succeeds.
pub fn resize(self: Allocator, old_mem: anytype, new_n: usize) bool {
    const Slice = @typeInfo(@TypeOf(old_mem)).Pointer;
    const T = Slice.child;
    // NOTE: order matters — this check must precede the empty-slice check
    // below so that shrinking an already-empty slice to zero succeeds.
    if (new_n == 0) {
        self.free(old_mem);
        return true;
    }
    // An empty slice has no backing allocation that could grow in place.
    if (old_mem.len == 0) {
        return false;
    }
    const old_byte_slice = mem.sliceAsBytes(old_mem);
    // I would like to use saturating multiplication here, but LLVM cannot lower it
    // on WebAssembly: https://github.com/ziglang/zig/issues/9660
    //const new_byte_count = new_n *| @sizeOf(T);
    // Byte-count overflow means the resize cannot possibly succeed.
    const new_byte_count = math.mul(usize, @sizeOf(T), new_n) catch return false;
    return self.rawResize(old_byte_slice, log2a(Slice.alignment), new_byte_count, @returnAddress());
}
|
|
|
|
|
2022-11-27 08:07:35 +00:00
|
|
|
/// Requests a new size for an existing allocation; the new size may be
/// larger, smaller, or equal to the old one.
/// If `new_n` is 0, this is the same as `free` and it always succeeds.
pub fn realloc(self: Allocator, old_mem: anytype, new_n: usize) t: {
    const Slice = @typeInfo(@TypeOf(old_mem)).Pointer;
    break :t Error![]align(Slice.alignment) Slice.child;
} {
    // Capture the caller's address so traces attribute the reallocation to
    // the call site of `realloc`.
    const ret_addr = @returnAddress();
    return self.reallocAdvanced(old_mem, new_n, ret_addr);
}
|
|
|
|
|
2020-08-08 07:34:13 +00:00
|
|
|
/// Implementation of `realloc`: identical behavior, but the caller supplies
/// `return_address` for allocation call-site attribution (pass
/// `@returnAddress()` or `0`).
pub fn reallocAdvanced(
    self: Allocator,
    old_mem: anytype,
    new_n: usize,
    return_address: usize,
) t: {
    const Slice = @typeInfo(@TypeOf(old_mem)).Pointer;
    break :t Error![]align(Slice.alignment) Slice.child;
} {
    const Slice = @typeInfo(@TypeOf(old_mem)).Pointer;
    const T = Slice.child;
    // An empty old slice means there is nothing to copy or free: this is a
    // plain allocation.
    if (old_mem.len == 0) {
        return self.allocAdvancedWithRetAddr(T, Slice.alignment, new_n, return_address);
    }
    // Resizing to zero frees and returns an aligned dangling empty slice
    // (same trick as the `n == 0` case of `allocAdvancedWithRetAddr`).
    if (new_n == 0) {
        self.free(old_mem);
        const ptr = comptime std.mem.alignBackward(math.maxInt(usize), Slice.alignment);
        return @intToPtr([*]align(Slice.alignment) T, ptr)[0..0];
    }

    const old_byte_slice = mem.sliceAsBytes(old_mem);
    // Overflow in the new byte count is reported as OutOfMemory.
    const byte_count = math.mul(usize, @sizeOf(T), new_n) catch return Error.OutOfMemory;
    // Note: can't set shrunk memory to undefined as memory shouldn't be modified on realloc failure
    // First attempt an in-place resize, avoiding a copy entirely.
    if (mem.isAligned(@ptrToInt(old_byte_slice.ptr), Slice.alignment)) {
        if (self.rawResize(old_byte_slice, log2a(Slice.alignment), byte_count, return_address)) {
            return mem.bytesAsSlice(T, @alignCast(Slice.alignment, old_byte_slice.ptr[0..byte_count]));
        }
    }

    // In-place resize refused: allocate new memory, copy the overlapping
    // prefix, then poison and free the old block.
    const new_mem = self.rawAlloc(byte_count, log2a(Slice.alignment), return_address) orelse
        return error.OutOfMemory;
    @memcpy(new_mem, old_byte_slice.ptr, @min(byte_count, old_byte_slice.len));
    // TODO https://github.com/ziglang/zig/issues/4298
    @memset(old_byte_slice.ptr, undefined, old_byte_slice.len);
    self.rawFree(old_byte_slice, log2a(Slice.alignment), return_address);

    return mem.bytesAsSlice(T, @alignCast(Slice.alignment, new_mem[0..byte_count]));
}
|
|
|
|
|
|
|
|
/// Free an array allocated with `alloc`. To free a single item,
/// see `destroy`.
pub fn free(self: Allocator, memory: anytype) void {
    const Slice = @typeInfo(@TypeOf(memory)).Pointer;
    const bytes = mem.sliceAsBytes(memory);
    // Sentinel-terminated slices were allocated with one extra element past
    // `len`; include it so the whole allocation is released.
    const bytes_len = bytes.len + if (Slice.sentinel != null) @sizeOf(Slice.child) else 0;
    // Empty (possibly dangling) slices have no backing allocation.
    if (bytes_len == 0) return;
    // Strip constness via the address: `rawFree` takes mutable bytes even
    // when freeing a `[]const T`.
    const non_const_ptr = @intToPtr([*]u8, @ptrToInt(bytes.ptr));
    // TODO: https://github.com/ziglang/zig/issues/4298
    @memset(non_const_ptr, undefined, bytes_len);
    self.rawFree(non_const_ptr[0..bytes_len], log2a(Slice.alignment), @returnAddress());
}
|
|
|
|
|
|
|
|
/// Copies `m` to newly allocated memory. Caller owns the memory.
pub fn dupe(allocator: Allocator, comptime T: type, m: []const T) ![]T {
    const duplicate = try allocator.alloc(T, m.len);
    mem.copy(T, duplicate, m);
    return duplicate;
}
|
|
|
|
|
|
|
|
/// Copies `m` to newly allocated memory, with a null-terminated element. Caller owns the memory.
pub fn dupeZ(allocator: Allocator, comptime T: type, m: []const T) ![:0]T {
    // One extra slot holds the terminating zero.
    const duplicate = try allocator.alloc(T, m.len + 1);
    mem.copy(T, duplicate, m);
    duplicate[m.len] = 0;
    return duplicate[0..m.len :0];
}
|
2020-08-08 07:34:13 +00:00
|
|
|
|
2022-11-27 08:07:35 +00:00
|
|
|
/// TODO replace callsites with `@log2` after this proposal is implemented:
/// https://github.com/ziglang/zig/issues/13642
/// Log-base-2 of an integer, usable on both runtime ints and comptime_int.
inline fn log2a(x: anytype) switch (@typeInfo(@TypeOf(x))) {
    .Int => math.Log2Int(@TypeOf(x)),
    .ComptimeInt => comptime_int,
    else => @compileError("int please"),
} {
    return switch (@typeInfo(@TypeOf(x))) {
        .Int => math.log2_int(@TypeOf(x), x),
        .ComptimeInt => math.log2(x),
        else => @compileError("bad"),
    };
}
|