Mirror of https://github.com/ziglang/zig.git
Merge pull request #21682 from der-teufel-programming/remove-packedintarray

Remove PackedIntArray

Commit 3bf89f55c2
lib/compiler/aro/aro/Preprocessor.zig (vendored), 44 changed lines
@@ -389,7 +389,7 @@ fn preprocessExtra(pp: *Preprocessor, source: Source) MacroError!TokenWithExpans
 try pp.ensureTotalTokenCapacity(pp.tokens.len + estimated_token_count);

 var if_level: u8 = 0;
-var if_kind = std.PackedIntArray(u2, 256).init([1]u2{0} ** 256);
+var if_kind: [64]u8 = .{0} ** 64;
 const until_else = 0;
 const until_endif = 1;
 const until_endif_seen_else = 2;
@@ -430,12 +430,12 @@ fn preprocessExtra(pp: *Preprocessor, source: Source) MacroError!TokenWithExpans
 if_level = sum;

 if (try pp.expr(&tokenizer)) {
-if_kind.set(if_level, until_endif);
+std.mem.writePackedIntNative(u2, &if_kind, if_level * 2, until_endif);
 if (pp.verbose) {
 pp.verboseLog(directive, "entering then branch of #if", .{});
 }
 } else {
-if_kind.set(if_level, until_else);
+std.mem.writePackedIntNative(u2, &if_kind, if_level * 2, until_else);
 try pp.skip(&tokenizer, .until_else);
 if (pp.verbose) {
 pp.verboseLog(directive, "entering else branch of #if", .{});
@@ -451,12 +451,12 @@ fn preprocessExtra(pp: *Preprocessor, source: Source) MacroError!TokenWithExpans
 const macro_name = (try pp.expectMacroName(&tokenizer)) orelse continue;
 try pp.expectNl(&tokenizer);
 if (pp.defines.get(macro_name) != null) {
-if_kind.set(if_level, until_endif);
+std.mem.writePackedIntNative(u2, &if_kind, if_level * 2, until_endif);
 if (pp.verbose) {
 pp.verboseLog(directive, "entering then branch of #ifdef", .{});
 }
 } else {
-if_kind.set(if_level, until_else);
+std.mem.writePackedIntNative(u2, &if_kind, if_level * 2, until_else);
 try pp.skip(&tokenizer, .until_else);
 if (pp.verbose) {
 pp.verboseLog(directive, "entering else branch of #ifdef", .{});
@@ -472,9 +472,9 @@ fn preprocessExtra(pp: *Preprocessor, source: Source) MacroError!TokenWithExpans
 const macro_name = (try pp.expectMacroName(&tokenizer)) orelse continue;
 try pp.expectNl(&tokenizer);
 if (pp.defines.get(macro_name) == null) {
-if_kind.set(if_level, until_endif);
+std.mem.writePackedIntNative(u2, &if_kind, if_level * 2, until_endif);
 } else {
-if_kind.set(if_level, until_else);
+std.mem.writePackedIntNative(u2, &if_kind, if_level * 2, until_else);
 try pp.skip(&tokenizer, .until_else);
 }
 },
@@ -482,13 +482,13 @@ fn preprocessExtra(pp: *Preprocessor, source: Source) MacroError!TokenWithExpans
 if (if_level == 0) {
 try pp.err(directive, .elif_without_if);
 if_level += 1;
-if_kind.set(if_level, until_else);
+std.mem.writePackedIntNative(u2, &if_kind, if_level * 2, until_else);
 } else if (if_level == 1) {
 guard_name = null;
 }
-switch (if_kind.get(if_level)) {
+switch (std.mem.readPackedIntNative(u2, &if_kind, if_level * 2)) {
 until_else => if (try pp.expr(&tokenizer)) {
-if_kind.set(if_level, until_endif);
+std.mem.writePackedIntNative(u2, &if_kind, if_level * 2, until_endif);
 if (pp.verbose) {
 pp.verboseLog(directive, "entering then branch of #elif", .{});
 }
@@ -510,15 +510,15 @@ fn preprocessExtra(pp: *Preprocessor, source: Source) MacroError!TokenWithExpans
 if (if_level == 0) {
 try pp.err(directive, .elifdef_without_if);
 if_level += 1;
-if_kind.set(if_level, until_else);
+std.mem.writePackedIntNative(u2, &if_kind, if_level * 2, until_else);
 } else if (if_level == 1) {
 guard_name = null;
 }
-switch (if_kind.get(if_level)) {
+switch (std.mem.readPackedIntNative(u2, &if_kind, if_level * 2)) {
 until_else => {
 const macro_name = try pp.expectMacroName(&tokenizer);
 if (macro_name == null) {
-if_kind.set(if_level, until_else);
+std.mem.writePackedIntNative(u2, &if_kind, if_level * 2, until_else);
 try pp.skip(&tokenizer, .until_else);
 if (pp.verbose) {
 pp.verboseLog(directive, "entering else branch of #elifdef", .{});
@@ -526,12 +526,12 @@ fn preprocessExtra(pp: *Preprocessor, source: Source) MacroError!TokenWithExpans
 } else {
 try pp.expectNl(&tokenizer);
 if (pp.defines.get(macro_name.?) != null) {
-if_kind.set(if_level, until_endif);
+std.mem.writePackedIntNative(u2, &if_kind, if_level * 2, until_endif);
 if (pp.verbose) {
 pp.verboseLog(directive, "entering then branch of #elifdef", .{});
 }
 } else {
-if_kind.set(if_level, until_else);
+std.mem.writePackedIntNative(u2, &if_kind, if_level * 2, until_else);
 try pp.skip(&tokenizer, .until_else);
 if (pp.verbose) {
 pp.verboseLog(directive, "entering else branch of #elifdef", .{});
@@ -551,15 +551,15 @@ fn preprocessExtra(pp: *Preprocessor, source: Source) MacroError!TokenWithExpans
 if (if_level == 0) {
 try pp.err(directive, .elifdef_without_if);
 if_level += 1;
-if_kind.set(if_level, until_else);
+std.mem.writePackedIntNative(u2, &if_kind, if_level * 2, until_else);
 } else if (if_level == 1) {
 guard_name = null;
 }
-switch (if_kind.get(if_level)) {
+switch (std.mem.readPackedIntNative(u2, &if_kind, if_level * 2)) {
 until_else => {
 const macro_name = try pp.expectMacroName(&tokenizer);
 if (macro_name == null) {
-if_kind.set(if_level, until_else);
+std.mem.writePackedIntNative(u2, &if_kind, if_level * 2, until_else);
 try pp.skip(&tokenizer, .until_else);
 if (pp.verbose) {
 pp.verboseLog(directive, "entering else branch of #elifndef", .{});
@@ -567,12 +567,12 @@ fn preprocessExtra(pp: *Preprocessor, source: Source) MacroError!TokenWithExpans
 } else {
 try pp.expectNl(&tokenizer);
 if (pp.defines.get(macro_name.?) == null) {
-if_kind.set(if_level, until_endif);
+std.mem.writePackedIntNative(u2, &if_kind, if_level * 2, until_endif);
 if (pp.verbose) {
 pp.verboseLog(directive, "entering then branch of #elifndef", .{});
 }
 } else {
-if_kind.set(if_level, until_else);
+std.mem.writePackedIntNative(u2, &if_kind, if_level * 2, until_else);
 try pp.skip(&tokenizer, .until_else);
 if (pp.verbose) {
 pp.verboseLog(directive, "entering else branch of #elifndef", .{});
@@ -596,9 +596,9 @@ fn preprocessExtra(pp: *Preprocessor, source: Source) MacroError!TokenWithExpans
 } else if (if_level == 1) {
 guard_name = null;
 }
-switch (if_kind.get(if_level)) {
+switch (std.mem.readPackedIntNative(u2, &if_kind, if_level * 2)) {
 until_else => {
-if_kind.set(if_level, until_endif_seen_else);
+std.mem.writePackedIntNative(u2, &if_kind, if_level * 2, until_endif_seen_else);
 if (pp.verbose) {
 pp.verboseLog(directive, "#else branch here", .{});
 }
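The hunks above replace a std.PackedIntArray(u2, 256) with a plain [64]u8 buffer (256 two-bit entries fit in 64 bytes) addressed through std.mem.writePackedIntNative and std.mem.readPackedIntNative at bit offset if_level * 2. A minimal stand-alone sketch of that pattern follows; the buffer name and the indices are illustrative, not part of the patch:

    const std = @import("std");

    test "two-bit entries packed into a plain byte buffer" {
        // 256 entries * 2 bits = 512 bits = 64 bytes.
        var kinds: [64]u8 = .{0} ** 64;

        // Entry i lives at bit offset i * 2, in native bit order.
        std.mem.writePackedIntNative(u2, &kinds, 7 * 2, 3);
        try std.testing.expectEqual(@as(u2, 3), std.mem.readPackedIntNative(u2, &kinds, 7 * 2));
        // Neighbouring entries are untouched.
        try std.testing.expectEqual(@as(u2, 0), std.mem.readPackedIntNative(u2, &kinds, 8 * 2));
    }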
@@ -28,8 +28,6 @@ const PageStatus = enum(u1) {
 const FreeBlock = struct {
 data: []u128,

-const Io = std.packed_int_array.PackedIntIo(u1, .little);
-
 fn totalPages(self: FreeBlock) usize {
 return self.data.len * 128;
 }
@@ -39,15 +37,15 @@ const FreeBlock = struct {
 }

 fn getBit(self: FreeBlock, idx: usize) PageStatus {
-const bit_offset = 0;
-return @as(PageStatus, @enumFromInt(Io.get(mem.sliceAsBytes(self.data), idx, bit_offset)));
+const bit = mem.readPackedInt(u1, mem.sliceAsBytes(self.data), idx, .little);
+return @as(PageStatus, @enumFromInt(bit));
 }

 fn setBits(self: FreeBlock, start_idx: usize, len: usize, val: PageStatus) void {
-const bit_offset = 0;
 var i: usize = 0;
+const bytes = mem.sliceAsBytes(self.data);
 while (i < len) : (i += 1) {
-Io.set(mem.sliceAsBytes(self.data), start_idx + i, bit_offset, @intFromEnum(val));
+mem.writePackedInt(u1, bytes, start_idx + i, @intFromEnum(val), .little);
 }
 }

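This hunk drops the per-bit PackedIntIo(u1, .little) helper in favour of mem.readPackedInt and mem.writePackedInt over the same backing bytes. A small sketch of the same idea on a stand-alone buffer (the u128 backing and the bit index are illustrative, not taken from the allocator):

    const std = @import("std");

    test "u1 flags packed into a byte slice" {
        var backing = [_]u128{ 0, 0 };
        const data: []u128 = &backing;
        const bytes = std.mem.sliceAsBytes(data);

        // Bit 5 set to 1 in little-endian bit order; its neighbour stays 0.
        std.mem.writePackedInt(u1, bytes, 5, 1, .little);
        try std.testing.expectEqual(@as(u1, 1), std.mem.readPackedInt(u1, bytes, 5, .little));
        try std.testing.expectEqual(@as(u1, 0), std.mem.readPackedInt(u1, bytes, 6, .little));
    }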
@@ -795,7 +795,6 @@ pub const Mutable = struct {
 const endian_mask: usize = (@sizeOf(Limb) - 1) << 3;

 const bytes = std.mem.sliceAsBytes(r.limbs);
-var bits = std.packed_int_array.PackedIntSliceEndian(u1, .little).init(bytes, limbs_required * @bitSizeOf(Limb));

 var k: usize = 0;
 while (k < ((bit_count + 1) / 2)) : (k += 1) {
@@ -809,17 +808,17 @@ pub const Mutable = struct {
 rev_i ^= endian_mask;
 }

-const bit_i = bits.get(i);
-const bit_rev_i = bits.get(rev_i);
-bits.set(i, bit_rev_i);
-bits.set(rev_i, bit_i);
+const bit_i = std.mem.readPackedInt(u1, bytes, i, .little);
+const bit_rev_i = std.mem.readPackedInt(u1, bytes, rev_i, .little);
+std.mem.writePackedInt(u1, bytes, i, bit_rev_i, .little);
+std.mem.writePackedInt(u1, bytes, rev_i, bit_i, .little);
 }

 // Calculate signed-magnitude representation for output
 if (signedness == .signed) {
 const last_bit = switch (native_endian) {
-.little => bits.get(bit_count - 1),
-.big => bits.get((bit_count - 1) ^ endian_mask),
+.little => std.mem.readPackedInt(u1, bytes, bit_count - 1, .little),
+.big => std.mem.readPackedInt(u1, bytes, (bit_count - 1) ^ endian_mask, .little),
 };
 if (last_bit == 1) {
 r.bitNotWrap(r.toConst(), .unsigned, bit_count); // Bitwise NOT.
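The bit-reversal loop above now swaps individual bits with std.mem.readPackedInt and std.mem.writePackedInt instead of going through a PackedIntSliceEndian(u1, .little) view of the limb bytes. The swap itself reduces to the following pattern (the single-byte buffer and bit positions are chosen purely for illustration):

    const std = @import("std");

    test "swap two packed bits in place" {
        var bytes = [_]u8{0b0000_0001};

        const bit_a = std.mem.readPackedInt(u1, &bytes, 0, .little);
        const bit_b = std.mem.readPackedInt(u1, &bytes, 7, .little);
        std.mem.writePackedInt(u1, &bytes, 0, bit_b, .little);
        std.mem.writePackedInt(u1, &bytes, 7, bit_a, .little);

        // The low bit moved to the high position.
        try std.testing.expectEqual(@as(u8, 0b1000_0000), bytes[0]);
    }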
@@ -1,697 +0,0 @@
//! A set of array and slice types that bit-pack integer elements. A normal [12]u3
//! takes up 12 bytes of memory since u3's alignment is 1. PackedArray(u3, 12) only
//! takes up 4 bytes of memory.

const std = @import("std");
const builtin = @import("builtin");
const debug = std.debug;
const testing = std.testing;
const native_endian = builtin.target.cpu.arch.endian();
const Endian = std.builtin.Endian;

/// Provides a set of functions for reading and writing packed integers from a
/// slice of bytes.
pub fn PackedIntIo(comptime Int: type, comptime endian: Endian) type {
    // The general technique employed here is to cast bytes in the array to a container
    // integer (having bits % 8 == 0) large enough to contain the number of bits we want,
    // then we can retrieve or store the new value with a relative minimum of masking
    // and shifting. In this worst case, this means that we'll need an integer that's
    // actually 1 byte larger than the minimum required to store the bits, because it
    // is possible that the bits start at the end of the first byte, continue through
    // zero or more, then end in the beginning of the last. But, if we try to access
    // a value in the very last byte of memory with that integer size, that extra byte
    // will be out of bounds. Depending on the circumstances of the memory, that might
    // mean the OS fatally kills the program. Thus, we use a larger container (MaxIo)
    // most of the time, but a smaller container (MinIo) when touching the last byte
    // of the memory.
    const int_bits = @bitSizeOf(Int);

    // In the best case, this is the number of bytes we need to touch
    // to read or write a value, as bits.
    const min_io_bits = ((int_bits + 7) / 8) * 8;

    // In the worst case, this is the number of bytes we need to touch
    // to read or write a value, as bits. To calculate for int_bits > 1,
    // set aside 2 bits to touch the first and last bytes, then divide
    // by 8 to see how many bytes can be filled up in between.
    const max_io_bits = switch (int_bits) {
        0 => 0,
        1 => 8,
        else => ((int_bits - 2) / 8 + 2) * 8,
    };

    // We bitcast the desired Int type to an unsigned version of itself
    // to avoid issues with shifting signed ints.
    const UnInt = std.meta.Int(.unsigned, int_bits);

    // The maximum container int type
    const MinIo = std.meta.Int(.unsigned, min_io_bits);

    // The minimum container int type
    const MaxIo = std.meta.Int(.unsigned, max_io_bits);

    return struct {
        /// Retrieves the integer at `index` from the packed data beginning at `bit_offset`
        /// within `bytes`.
        pub fn get(bytes: []const u8, index: usize, bit_offset: u7) Int {
            if (int_bits == 0) return 0;

            const bit_index = (index * int_bits) + bit_offset;
            const max_end_byte = (bit_index + max_io_bits) / 8;

            //using the larger container size will potentially read out of bounds
            if (max_end_byte > bytes.len) return getBits(bytes, MinIo, bit_index);
            return getBits(bytes, MaxIo, bit_index);
        }

        fn getBits(bytes: []const u8, comptime Container: type, bit_index: usize) Int {
            const container_bits = @bitSizeOf(Container);

            const start_byte = bit_index / 8;
            const head_keep_bits = bit_index - (start_byte * 8);
            const tail_keep_bits = container_bits - (int_bits + head_keep_bits);

            //read bytes as container
            const value_ptr: *align(1) const Container = @ptrCast(&bytes[start_byte]);
            var value = value_ptr.*;

            if (endian != native_endian) value = @byteSwap(value);

            switch (endian) {
                .big => {
                    value <<= @intCast(head_keep_bits);
                    value >>= @intCast(head_keep_bits);
                    value >>= @intCast(tail_keep_bits);
                },
                .little => {
                    value <<= @intCast(tail_keep_bits);
                    value >>= @intCast(tail_keep_bits);
                    value >>= @intCast(head_keep_bits);
                },
            }

            return @bitCast(@as(UnInt, @truncate(value)));
        }

        /// Sets the integer at `index` to `val` within the packed data beginning
        /// at `bit_offset` into `bytes`.
        pub fn set(bytes: []u8, index: usize, bit_offset: u3, int: Int) void {
            if (int_bits == 0) return;

            const bit_index = (index * int_bits) + bit_offset;
            const max_end_byte = (bit_index + max_io_bits) / 8;

            //using the larger container size will potentially write out of bounds
            if (max_end_byte > bytes.len) return setBits(bytes, MinIo, bit_index, int);
            setBits(bytes, MaxIo, bit_index, int);
        }

        fn setBits(bytes: []u8, comptime Container: type, bit_index: usize, int: Int) void {
            const container_bits = @bitSizeOf(Container);
            const Shift = std.math.Log2Int(Container);

            const start_byte = bit_index / 8;
            const head_keep_bits = bit_index - (start_byte * 8);
            const tail_keep_bits = container_bits - (int_bits + head_keep_bits);
            const keep_shift: Shift = switch (endian) {
                .big => @intCast(tail_keep_bits),
                .little => @intCast(head_keep_bits),
            };

            //position the bits where they need to be in the container
            const value = @as(Container, @intCast(@as(UnInt, @bitCast(int)))) << keep_shift;

            //read existing bytes
            const target_ptr: *align(1) Container = @ptrCast(&bytes[start_byte]);
            var target = target_ptr.*;

            if (endian != native_endian) target = @byteSwap(target);

            //zero the bits we want to replace in the existing bytes
            const inv_mask = @as(Container, @intCast(std.math.maxInt(UnInt))) << keep_shift;
            const mask = ~inv_mask;
            target &= mask;

            //merge the new value
            target |= value;

            if (endian != native_endian) target = @byteSwap(target);

            //save it back
            target_ptr.* = target;
        }

        /// Provides a PackedIntSlice of the packed integers in `bytes` (which begins at `bit_offset`)
        /// from the element specified by `start` to the element specified by `end`.
        pub fn slice(bytes: []u8, bit_offset: u3, start: usize, end: usize) PackedIntSliceEndian(Int, endian) {
            debug.assert(end >= start);

            const length = end - start;
            const bit_index = (start * int_bits) + bit_offset;
            const start_byte = bit_index / 8;
            const end_byte = (bit_index + (length * int_bits) + 7) / 8;
            const new_bytes = bytes[start_byte..end_byte];

            if (length == 0) return PackedIntSliceEndian(Int, endian).init(new_bytes[0..0], 0);

            var new_slice = PackedIntSliceEndian(Int, endian).init(new_bytes, length);
            new_slice.bit_offset = @intCast((bit_index - (start_byte * 8)));
            return new_slice;
        }

        /// Recasts a packed slice to a version with elements of type `NewInt` and endianness `new_endian`.
        /// Slice will begin at `bit_offset` within `bytes` and the new length will be automatically
        /// calculated from `old_len` using the sizes of the current integer type and `NewInt`.
        pub fn sliceCast(bytes: []u8, comptime NewInt: type, comptime new_endian: Endian, bit_offset: u3, old_len: usize) PackedIntSliceEndian(NewInt, new_endian) {
            const new_int_bits = @bitSizeOf(NewInt);
            const New = PackedIntSliceEndian(NewInt, new_endian);

            const total_bits = (old_len * int_bits);
            const new_int_count = total_bits / new_int_bits;

            debug.assert(total_bits == new_int_count * new_int_bits);

            var new = New.init(bytes, new_int_count);
            new.bit_offset = bit_offset;

            return new;
        }
    };
}

/// Creates a bit-packed array of `Int`. Non-byte-multiple integers
/// will take up less memory in PackedIntArray than in a normal array.
/// Elements are packed using native endianness and without storing any
/// meta data. PackedArray(i3, 8) will occupy exactly 3 bytes
/// of memory.
pub fn PackedIntArray(comptime Int: type, comptime int_count: usize) type {
    return PackedIntArrayEndian(Int, native_endian, int_count);
}

/// Creates a bit-packed array of `Int` with bit order specified by `endian`.
/// Non-byte-multiple integers will take up less memory in PackedIntArrayEndian
/// than in a normal array. Elements are packed without storing any meta data.
/// PackedIntArrayEndian(i3, 8) will occupy exactly 3 bytes of memory.
pub fn PackedIntArrayEndian(comptime Int: type, comptime endian: Endian, comptime int_count: usize) type {
    const int_bits = @bitSizeOf(Int);
    const total_bits = int_bits * int_count;
    const total_bytes = (total_bits + 7) / 8;

    const Io = PackedIntIo(Int, endian);

    return struct {
        const Self = @This();

        /// The byte buffer containing the packed data.
        bytes: [total_bytes]u8,
        /// The number of elements in the packed array.
        comptime len: usize = int_count,

        /// The integer type of the packed array.
        pub const Child = Int;

        /// Initialize a packed array using an unpacked array
        /// or, more likely, an array literal.
        pub fn init(ints: [int_count]Int) Self {
            var self: Self = undefined;
            for (ints, 0..) |int, i| self.set(i, int);
            return self;
        }

        /// Initialize all entries of a packed array to the same value.
        pub fn initAllTo(int: Int) Self {
            var self: Self = undefined;
            self.setAll(int);
            return self;
        }

        /// Return the integer stored at `index`.
        pub fn get(self: Self, index: usize) Int {
            debug.assert(index < int_count);
            return Io.get(&self.bytes, index, 0);
        }

        /// Copy the value of `int` into the array at `index`.
        pub fn set(self: *Self, index: usize, int: Int) void {
            debug.assert(index < int_count);
            return Io.set(&self.bytes, index, 0, int);
        }

        /// Set all entries of a packed array to the value of `int`.
        pub fn setAll(self: *Self, int: Int) void {
            var i: usize = 0;
            while (i < int_count) : (i += 1) {
                self.set(i, int);
            }
        }

        /// Create a PackedIntSlice of the array from `start` to `end`.
        pub fn slice(self: *Self, start: usize, end: usize) PackedIntSliceEndian(Int, endian) {
            debug.assert(start < int_count);
            debug.assert(end <= int_count);
            return Io.slice(&self.bytes, 0, start, end);
        }

        /// Create a PackedIntSlice of the array using `NewInt` as the integer type.
        /// `NewInt`'s bit width must fit evenly within the array's `Int`'s total bits.
        pub fn sliceCast(self: *Self, comptime NewInt: type) PackedIntSlice(NewInt) {
            return self.sliceCastEndian(NewInt, endian);
        }

        /// Create a PackedIntSliceEndian of the array using `NewInt` as the integer type
        /// and `new_endian` as the new endianness. `NewInt`'s bit width must fit evenly
        /// within the array's `Int`'s total bits.
        pub fn sliceCastEndian(self: *Self, comptime NewInt: type, comptime new_endian: Endian) PackedIntSliceEndian(NewInt, new_endian) {
            return Io.sliceCast(&self.bytes, NewInt, new_endian, 0, int_count);
        }
    };
}

/// A type representing a sub range of a PackedIntArray.
pub fn PackedIntSlice(comptime Int: type) type {
    return PackedIntSliceEndian(Int, native_endian);
}

/// A type representing a sub range of a PackedIntArrayEndian.
pub fn PackedIntSliceEndian(comptime Int: type, comptime endian: Endian) type {
    const int_bits = @bitSizeOf(Int);
    const Io = PackedIntIo(Int, endian);

    return struct {
        const Self = @This();

        bytes: []u8,
        bit_offset: u3,
        len: usize,

        /// The integer type of the packed slice.
        pub const Child = Int;

        /// Calculates the number of bytes required to store a desired count
        /// of `Int`s.
        pub fn bytesRequired(int_count: usize) usize {
            const total_bits = int_bits * int_count;
            const total_bytes = (total_bits + 7) / 8;
            return total_bytes;
        }

        /// Initialize a packed slice using the memory at `bytes`, with `int_count`
        /// elements. `bytes` must be large enough to accommodate the requested
        /// count.
        pub fn init(bytes: []u8, int_count: usize) Self {
            debug.assert(bytes.len >= bytesRequired(int_count));

            return Self{
                .bytes = bytes,
                .len = int_count,
                .bit_offset = 0,
            };
        }

        /// Return the integer stored at `index`.
        pub fn get(self: Self, index: usize) Int {
            debug.assert(index < self.len);
            return Io.get(self.bytes, index, self.bit_offset);
        }

        /// Copy `int` into the slice at `index`.
        pub fn set(self: *Self, index: usize, int: Int) void {
            debug.assert(index < self.len);
            return Io.set(self.bytes, index, self.bit_offset, int);
        }

        /// Create a PackedIntSlice of this slice from `start` to `end`.
        pub fn slice(self: Self, start: usize, end: usize) PackedIntSliceEndian(Int, endian) {
            debug.assert(start < self.len);
            debug.assert(end <= self.len);
            return Io.slice(self.bytes, self.bit_offset, start, end);
        }

        /// Create a PackedIntSlice of the sclice using `NewInt` as the integer type.
        /// `NewInt`'s bit width must fit evenly within the slice's `Int`'s total bits.
        pub fn sliceCast(self: Self, comptime NewInt: type) PackedIntSliceEndian(NewInt, endian) {
            return self.sliceCastEndian(NewInt, endian);
        }

        /// Create a PackedIntSliceEndian of the slice using `NewInt` as the integer type
        /// and `new_endian` as the new endianness. `NewInt`'s bit width must fit evenly
        /// within the slice's `Int`'s total bits.
        pub fn sliceCastEndian(self: Self, comptime NewInt: type, comptime new_endian: Endian) PackedIntSliceEndian(NewInt, new_endian) {
            return Io.sliceCast(self.bytes, NewInt, new_endian, self.bit_offset, self.len);
        }
    };
}

test "PackedIntArray" {
    // TODO @setEvalBranchQuota generates panics in wasm32. Investigate.
    if (builtin.target.cpu.arch == .wasm32) return error.SkipZigTest;

    // TODO: enable this test
    if (true) return error.SkipZigTest;

    @setEvalBranchQuota(10000);
    const max_bits = 256;
    const int_count = 19;

    comptime var bits = 0;
    inline while (bits <= max_bits) : (bits += 1) {
        //alternate unsigned and signed
        const sign: std.builtin.Signedness = if (bits % 2 == 0) .signed else .unsigned;
        const I = std.meta.Int(sign, bits);

        const PackedArray = PackedIntArray(I, int_count);
        const expected_bytes = ((bits * int_count) + 7) / 8;
        try testing.expect(@sizeOf(PackedArray) == expected_bytes);

        var data: PackedArray = undefined;

        //write values, counting up
        var i: usize = 0;
        var count: I = 0;
        while (i < data.len) : (i += 1) {
            data.set(i, count);
            if (bits > 0) count +%= 1;
        }

        //read and verify values
        i = 0;
        count = 0;
        while (i < data.len) : (i += 1) {
            const val = data.get(i);
            try testing.expect(val == count);
            if (bits > 0) count +%= 1;
        }
    }
}

test "PackedIntIo" {
    const bytes = [_]u8{ 0b01101_000, 0b01011_110, 0b00011_101 };
    try testing.expectEqual(@as(u15, 0x2bcd), PackedIntIo(u15, .little).get(&bytes, 0, 3));
    try testing.expectEqual(@as(u16, 0xabcd), PackedIntIo(u16, .little).get(&bytes, 0, 3));
    try testing.expectEqual(@as(u17, 0x1abcd), PackedIntIo(u17, .little).get(&bytes, 0, 3));
    try testing.expectEqual(@as(u18, 0x3abcd), PackedIntIo(u18, .little).get(&bytes, 0, 3));
}

test "PackedIntArray init" {
    const S = struct {
        fn doTheTest() !void {
            const PackedArray = PackedIntArray(u3, 8);
            var packed_array = PackedArray.init([_]u3{ 0, 1, 2, 3, 4, 5, 6, 7 });
            var i: usize = 0;
            while (i < packed_array.len) : (i += 1) try testing.expectEqual(@as(u3, @intCast(i)), packed_array.get(i));
        }
    };
    try S.doTheTest();
    try comptime S.doTheTest();
}

test "PackedIntArray initAllTo" {
    const S = struct {
        fn doTheTest() !void {
            const PackedArray = PackedIntArray(u3, 8);
            var packed_array = PackedArray.initAllTo(5);
            var i: usize = 0;
            while (i < packed_array.len) : (i += 1) try testing.expectEqual(@as(u3, 5), packed_array.get(i));
        }
    };
    try S.doTheTest();
    try comptime S.doTheTest();
}

test "PackedIntSlice" {
    // TODO @setEvalBranchQuota generates panics in wasm32. Investigate.
    if (builtin.target.cpu.arch == .wasm32) return error.SkipZigTest;

    // TODO enable this test
    if (true) return error.SkipZigTest;

    @setEvalBranchQuota(10000);
    const max_bits = 256;
    const int_count = 19;
    const total_bits = max_bits * int_count;
    const total_bytes = (total_bits + 7) / 8;

    var buffer: [total_bytes]u8 = undefined;

    comptime var bits = 0;
    inline while (bits <= max_bits) : (bits += 1) {
        //alternate unsigned and signed
        const sign: std.builtin.Signedness = if (bits % 2 == 0) .signed else .unsigned;
        const I = std.meta.Int(sign, bits);
        const P = PackedIntSlice(I);

        var data = P.init(&buffer, int_count);

        //write values, counting up
        var i: usize = 0;
        var count: I = 0;
        while (i < data.len) : (i += 1) {
            data.set(i, count);
            if (bits > 0) count +%= 1;
        }

        //read and verify values
        i = 0;
        count = 0;
        while (i < data.len) : (i += 1) {
            const val = data.get(i);
            try testing.expect(val == count);
            if (bits > 0) count +%= 1;
        }
    }
}

test "PackedIntSlice of PackedInt(Array/Slice)" {
    // TODO enable this test
    if (true) return error.SkipZigTest;

    const max_bits = 16;
    const int_count = 19;

    comptime var bits = 0;
    inline while (bits <= max_bits) : (bits += 1) {
        const Int = std.meta.Int(.unsigned, bits);

        const PackedArray = PackedIntArray(Int, int_count);
        var packed_array: PackedArray = undefined;

        const limit = (1 << bits);

        var i: usize = 0;
        while (i < packed_array.len) : (i += 1) {
            packed_array.set(i, @intCast(i % limit));
        }

        //slice of array
        var packed_slice = packed_array.slice(2, 5);
        try testing.expect(packed_slice.len == 3);
        const ps_bit_count = (bits * packed_slice.len) + packed_slice.bit_offset;
        const ps_expected_bytes = (ps_bit_count + 7) / 8;
        try testing.expect(packed_slice.bytes.len == ps_expected_bytes);
        try testing.expect(packed_slice.get(0) == 2 % limit);
        try testing.expect(packed_slice.get(1) == 3 % limit);
        try testing.expect(packed_slice.get(2) == 4 % limit);
        packed_slice.set(1, 7 % limit);
        try testing.expect(packed_slice.get(1) == 7 % limit);

        //write through slice
        try testing.expect(packed_array.get(3) == 7 % limit);

        //slice of a slice
        const packed_slice_two = packed_slice.slice(0, 3);
        try testing.expect(packed_slice_two.len == 3);
        const ps2_bit_count = (bits * packed_slice_two.len) + packed_slice_two.bit_offset;
        const ps2_expected_bytes = (ps2_bit_count + 7) / 8;
        try testing.expect(packed_slice_two.bytes.len == ps2_expected_bytes);
        try testing.expect(packed_slice_two.get(1) == 7 % limit);
        try testing.expect(packed_slice_two.get(2) == 4 % limit);

        //size one case
        const packed_slice_three = packed_slice_two.slice(1, 2);
        try testing.expect(packed_slice_three.len == 1);
        const ps3_bit_count = (bits * packed_slice_three.len) + packed_slice_three.bit_offset;
        const ps3_expected_bytes = (ps3_bit_count + 7) / 8;
        try testing.expect(packed_slice_three.bytes.len == ps3_expected_bytes);
        try testing.expect(packed_slice_three.get(0) == 7 % limit);

        //empty slice case
        const packed_slice_empty = packed_slice.slice(0, 0);
        try testing.expect(packed_slice_empty.len == 0);
        try testing.expect(packed_slice_empty.bytes.len == 0);

        //slicing at byte boundaries
        const packed_slice_edge = packed_array.slice(8, 16);
        try testing.expect(packed_slice_edge.len == 8);
        const pse_bit_count = (bits * packed_slice_edge.len) + packed_slice_edge.bit_offset;
        const pse_expected_bytes = (pse_bit_count + 7) / 8;
        try testing.expect(packed_slice_edge.bytes.len == pse_expected_bytes);
        try testing.expect(packed_slice_edge.bit_offset == 0);
    }
}

test "PackedIntSlice accumulating bit offsets" {
    //bit_offset is u3, so standard debugging asserts should catch
    // anything
    {
        const PackedArray = PackedIntArray(u3, 16);
        var packed_array: PackedArray = undefined;

        var packed_slice = packed_array.slice(0, packed_array.len);
        var i: usize = 0;
        while (i < packed_array.len - 1) : (i += 1) {
            packed_slice = packed_slice.slice(1, packed_slice.len);
        }
    }
    {
        const PackedArray = PackedIntArray(u11, 88);
        var packed_array: PackedArray = undefined;

        var packed_slice = packed_array.slice(0, packed_array.len);
        var i: usize = 0;
        while (i < packed_array.len - 1) : (i += 1) {
            packed_slice = packed_slice.slice(1, packed_slice.len);
        }
    }
}

test "PackedInt(Array/Slice) sliceCast" {
    const PackedArray = PackedIntArray(u1, 16);
    var packed_array = PackedArray.init([_]u1{ 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1 });
    const packed_slice_cast_2 = packed_array.sliceCast(u2);
    const packed_slice_cast_4 = packed_slice_cast_2.sliceCast(u4);
    var packed_slice_cast_9 = packed_array.slice(0, (packed_array.len / 9) * 9).sliceCast(u9);
    const packed_slice_cast_3 = packed_slice_cast_9.sliceCast(u3);

    var i: usize = 0;
    while (i < packed_slice_cast_2.len) : (i += 1) {
        const val = switch (native_endian) {
            .big => 0b01,
            .little => 0b10,
        };
        try testing.expect(packed_slice_cast_2.get(i) == val);
    }
    i = 0;
    while (i < packed_slice_cast_4.len) : (i += 1) {
        const val = switch (native_endian) {
            .big => 0b0101,
            .little => 0b1010,
        };
        try testing.expect(packed_slice_cast_4.get(i) == val);
    }
    i = 0;
    while (i < packed_slice_cast_9.len) : (i += 1) {
        const val = 0b010101010;
        try testing.expect(packed_slice_cast_9.get(i) == val);
        packed_slice_cast_9.set(i, 0b111000111);
    }
    i = 0;
    while (i < packed_slice_cast_3.len) : (i += 1) {
        const val: u3 = switch (native_endian) {
            .big => if (i % 2 == 0) 0b111 else 0b000,
            .little => if (i % 2 == 0) 0b111 else 0b000,
        };
        try testing.expect(packed_slice_cast_3.get(i) == val);
    }
}

test "PackedInt(Array/Slice)Endian" {
    {
        const PackedArrayBe = PackedIntArrayEndian(u4, .big, 8);
        var packed_array_be = PackedArrayBe.init([_]u4{ 0, 1, 2, 3, 4, 5, 6, 7 });
        try testing.expect(packed_array_be.bytes[0] == 0b00000001);
        try testing.expect(packed_array_be.bytes[1] == 0b00100011);

        var i: usize = 0;
        while (i < packed_array_be.len) : (i += 1) {
            try testing.expect(packed_array_be.get(i) == i);
        }

        var packed_slice_le = packed_array_be.sliceCastEndian(u4, .little);
        i = 0;
        while (i < packed_slice_le.len) : (i += 1) {
            const val = if (i % 2 == 0) i + 1 else i - 1;
            try testing.expect(packed_slice_le.get(i) == val);
        }

        var packed_slice_le_shift = packed_array_be.slice(1, 5).sliceCastEndian(u4, .little);
        i = 0;
        while (i < packed_slice_le_shift.len) : (i += 1) {
            const val = if (i % 2 == 0) i else i + 2;
            try testing.expect(packed_slice_le_shift.get(i) == val);
        }
    }

    {
        const PackedArrayBe = PackedIntArrayEndian(u11, .big, 8);
        var packed_array_be = PackedArrayBe.init([_]u11{ 0, 1, 2, 3, 4, 5, 6, 7 });
        try testing.expect(packed_array_be.bytes[0] == 0b00000000);
        try testing.expect(packed_array_be.bytes[1] == 0b00000000);
        try testing.expect(packed_array_be.bytes[2] == 0b00000100);
        try testing.expect(packed_array_be.bytes[3] == 0b00000001);
        try testing.expect(packed_array_be.bytes[4] == 0b00000000);

        var i: usize = 0;
        while (i < packed_array_be.len) : (i += 1) {
            try testing.expect(packed_array_be.get(i) == i);
        }

        var packed_slice_le = packed_array_be.sliceCastEndian(u11, .little);
        try testing.expect(packed_slice_le.get(0) == 0b00000000000);
        try testing.expect(packed_slice_le.get(1) == 0b00010000000);
        try testing.expect(packed_slice_le.get(2) == 0b00000000100);
        try testing.expect(packed_slice_le.get(3) == 0b00000000000);
        try testing.expect(packed_slice_le.get(4) == 0b00010000011);
        try testing.expect(packed_slice_le.get(5) == 0b00000000010);
        try testing.expect(packed_slice_le.get(6) == 0b10000010000);
        try testing.expect(packed_slice_le.get(7) == 0b00000111001);

        var packed_slice_le_shift = packed_array_be.slice(1, 5).sliceCastEndian(u11, .little);
        try testing.expect(packed_slice_le_shift.get(0) == 0b00010000000);
        try testing.expect(packed_slice_le_shift.get(1) == 0b00000000100);
        try testing.expect(packed_slice_le_shift.get(2) == 0b00000000000);
        try testing.expect(packed_slice_le_shift.get(3) == 0b00010000011);
    }
}

//@NOTE: Need to manually update this list as more posix os's get
// added to DirectAllocator.

// These tests prove we aren't accidentally accessing memory past
// the end of the array/slice by placing it at the end of a page
// and reading the last element. The assumption is that the page
// after this one is not mapped and will cause a segfault if we
// don't account for the bounds.
test "PackedIntArray at end of available memory" {
    switch (builtin.target.os.tag) {
        .linux, .macos, .ios, .freebsd, .netbsd, .openbsd, .windows => {},
        else => return,
    }
    const PackedArray = PackedIntArray(u3, 8);

    const Padded = struct {
        _: [std.mem.page_size - @sizeOf(PackedArray)]u8,
        p: PackedArray,
    };

    const allocator = std.testing.allocator;

    var pad = try allocator.create(Padded);
    defer allocator.destroy(pad);
    pad.p.set(7, std.math.maxInt(u3));
}

test "PackedIntSlice at end of available memory" {
    switch (builtin.target.os.tag) {
        .linux, .macos, .ios, .freebsd, .netbsd, .openbsd, .windows => {},
        else => return,
    }
    const PackedSlice = PackedIntSlice(u11);

    const allocator = std.testing.allocator;

    var page = try allocator.alloc(u8, std.mem.page_size);
    defer allocator.free(page);

    var p = PackedSlice.init(page[std.mem.page_size - 2 ..], 1);
    p.set(0, std.math.maxInt(u11));
}
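Much of what the deleted packed_int_array.zig provided (bit-level get and set over a byte buffer) is available through the packed-integer helpers in std.mem, which is how the other files in this pull request were migrated. As a hedged sketch (the names and sizes below are illustrative, not from the patch), a former PackedIntArray(u3, 8) can be kept as a plain 3-byte buffer, matching the footprint the old doc comment advertised:

    const std = @import("std");

    test "eight u3 values in three bytes via std.mem" {
        // 8 * 3 bits = 24 bits = 3 bytes.
        var bytes = [_]u8{0} ** 3;

        var i: usize = 0;
        while (i < 8) : (i += 1) {
            // Element i starts at bit offset i * 3, native bit order.
            std.mem.writePackedIntNative(u3, &bytes, i * 3, @intCast(i));
        }
        i = 0;
        while (i < 8) : (i += 1) {
            try std.testing.expectEqual(@as(u3, @intCast(i)), std.mem.readPackedIntNative(u3, &bytes, i * 3));
        }
    }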
@@ -26,10 +26,6 @@ pub const EnumSet = enums.EnumSet;
 pub const HashMap = hash_map.HashMap;
 pub const HashMapUnmanaged = hash_map.HashMapUnmanaged;
 pub const MultiArrayList = @import("multi_array_list.zig").MultiArrayList;
-pub const PackedIntArray = @import("packed_int_array.zig").PackedIntArray;
-pub const PackedIntArrayEndian = @import("packed_int_array.zig").PackedIntArrayEndian;
-pub const PackedIntSlice = @import("packed_int_array.zig").PackedIntSlice;
-pub const PackedIntSliceEndian = @import("packed_int_array.zig").PackedIntSliceEndian;
 pub const PriorityQueue = @import("priority_queue.zig").PriorityQueue;
 pub const PriorityDequeue = @import("priority_dequeue.zig").PriorityDequeue;
 pub const Progress = @import("Progress.zig");
@@ -82,7 +78,6 @@ pub const meta = @import("meta.zig");
 pub const net = @import("net.zig");
 pub const os = @import("os.zig");
 pub const once = @import("once.zig").once;
-pub const packed_int_array = @import("packed_int_array.zig");
 pub const pdb = @import("pdb.zig");
 pub const posix = @import("posix.zig");
 pub const process = @import("process.zig");