Support Reader for InStream

Jonathan Marler 2020-06-08 22:34:50 -06:00 committed by Andrew Kelley
parent 4302f276ed
commit a282ac7a91
18 changed files with 757 additions and 681 deletions

View File

@ -5,7 +5,6 @@ const os = std.os;
const math = std.math;
const mem = std.mem;
const debug = std.debug;
const InStream = std.stream.InStream;
const File = std.fs.File;
pub const AT_NULL = 0;

View File

@ -216,11 +216,15 @@ pub fn LinearFifo(
}
/// Same as `read` except it returns an error union
/// The purpose of this function existing is to match `std.io.InStream` API.
/// The purpose of this function existing is to match `std.io.Reader` API.
fn readFn(self: *Self, dest: []u8) error{}!usize {
return self.read(dest);
}
pub fn reader(self: *Self) std.io.Reader(*Self, error{}, readFn) {
return .{ .context = self };
}
/// Deprecated: use `reader`
pub fn inStream(self: *Self) std.io.InStream(*Self, error{}, readFn) {
return .{ .context = self };
}
@ -431,10 +435,10 @@ test "LinearFifo(u8, .Dynamic)" {
{
try fifo.outStream().writeAll("This is a test");
var result: [30]u8 = undefined;
testing.expectEqualSlices(u8, "This", (try fifo.inStream().readUntilDelimiterOrEof(&result, ' ')).?);
testing.expectEqualSlices(u8, "is", (try fifo.inStream().readUntilDelimiterOrEof(&result, ' ')).?);
testing.expectEqualSlices(u8, "a", (try fifo.inStream().readUntilDelimiterOrEof(&result, ' ')).?);
testing.expectEqualSlices(u8, "test", (try fifo.inStream().readUntilDelimiterOrEof(&result, ' ')).?);
testing.expectEqualSlices(u8, "This", (try fifo.reader().readUntilDelimiterOrEof(&result, ' ')).?);
testing.expectEqualSlices(u8, "is", (try fifo.reader().readUntilDelimiterOrEof(&result, ' ')).?);
testing.expectEqualSlices(u8, "a", (try fifo.reader().readUntilDelimiterOrEof(&result, ' ')).?);
testing.expectEqualSlices(u8, "test", (try fifo.reader().readUntilDelimiterOrEof(&result, ' ')).?);
}
}
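As an aside, a minimal sketch of the new fifo.reader() interface in the same spirit as the test above (not taken from the commit; assumes std is in scope and uses the testing allocator):

var fifo = std.fifo.LinearFifo(u8, .Dynamic).init(std.testing.allocator);
defer fifo.deinit();
try fifo.outStream().writeAll("hello");
var buf: [8]u8 = undefined;
const n = try fifo.reader().readAll(&buf);
std.testing.expect(n == 5);
std.testing.expectEqualSlices(u8, "hello", buf[0..n]);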

View File

@ -642,8 +642,14 @@ pub const File = struct {
}
}
pub const InStream = io.InStream(File, ReadError, read);
pub const Reader = io.Reader(File, ReadError, read);
/// Deprecated: use `Reader`
pub const InStream = Reader;
pub fn reader(file: File) io.Reader(File, ReadError, read) {
return .{ .context = file };
}
/// Deprecated: use `reader`
pub fn inStream(file: File) io.InStream(File, ReadError, read) {
return .{ .context = file };
}
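For orientation, a hedged sketch of the new File.reader() in a small program (the file path is hypothetical):

const std = @import("std");

pub fn main() !void {
    const file = try std.fs.cwd().openFile("example.txt", .{}); // hypothetical path
    defer file.close();
    var buf: [256]u8 = undefined;
    const n = try file.reader().readAll(&buf);
    std.debug.warn("read {} bytes\n", .{n});
}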

View File

@ -101,7 +101,9 @@ pub fn getStdIn() File {
};
}
pub const InStream = @import("io/in_stream.zig").InStream;
pub const Reader = @import("io/reader.zig").Reader;
/// Deprecated: use `Reader`
pub const InStream = Reader;
pub const Writer = @import("io/writer.zig").Writer;
/// Deprecated: use `Writer`
pub const OutStream = Writer;
@ -114,8 +116,12 @@ pub const BufferedOutStream = BufferedWriter;
/// Deprecated: use `bufferedWriter`
pub const bufferedOutStream = bufferedWriter;
pub const BufferedInStream = @import("io/buffered_in_stream.zig").BufferedInStream;
pub const bufferedInStream = @import("io/buffered_in_stream.zig").bufferedInStream;
pub const BufferedReader = @import("io/buffered_reader.zig").BufferedReader;
pub const bufferedReader = @import("io/buffered_reader.zig").bufferedReader;
/// Deprecated: use `BufferedReader`
pub const BufferedInStream = BufferedReader;
/// Deprecated: use `bufferedReader`
pub const bufferedInStream = bufferedReader;
pub const PeekStream = @import("io/peek_stream.zig").PeekStream;
pub const peekStream = @import("io/peek_stream.zig").peekStream;
@ -144,8 +150,12 @@ pub const MultiOutStream = MultiWriter;
/// Deprecated: use `multiWriter`
pub const multiOutStream = multiWriter;
pub const BitInStream = @import("io/bit_in_stream.zig").BitInStream;
pub const bitInStream = @import("io/bit_in_stream.zig").bitInStream;
pub const BitReader = @import("io/bit_reader.zig").BitReader;
pub const bitReader = @import("io/bit_reader.zig").bitReader;
/// Deprecated: use `BitReader`
pub const BitInStream = BitReader;
/// Deprecated: use `bitReader`
pub const bitInStream = bitReader;
pub const BitWriter = @import("io/bit_writer.zig").BitWriter;
pub const bitWriter = @import("io/bit_writer.zig").bitWriter;
@ -184,15 +194,15 @@ test "null_writer" {
}
test "" {
_ = @import("io/bit_in_stream.zig");
_ = @import("io/bit_reader.zig");
_ = @import("io/bit_writer.zig");
_ = @import("io/buffered_atomic_file.zig");
_ = @import("io/buffered_in_stream.zig");
_ = @import("io/buffered_reader.zig");
_ = @import("io/buffered_writer.zig");
_ = @import("io/c_writer.zig");
_ = @import("io/counting_writer.zig");
_ = @import("io/fixed_buffer_stream.zig");
_ = @import("io/in_stream.zig");
_ = @import("io/reader.zig");
_ = @import("io/writer.zig");
_ = @import("io/peek_stream.zig");
_ = @import("io/seekable_stream.zig");

View File

@ -1,243 +1,5 @@
const std = @import("../std.zig");
const builtin = std.builtin;
const io = std.io;
const assert = std.debug.assert;
const testing = std.testing;
const trait = std.meta.trait;
const meta = std.meta;
const math = std.math;
/// Deprecated: use `std.io.bit_reader.BitReader`
pub const BitInStream = @import("./bit_reader.zig").BitReader;
/// Creates a stream which allows for reading bit fields from another stream
pub fn BitInStream(endian: builtin.Endian, comptime InStreamType: type) type {
return struct {
in_stream: InStreamType,
bit_buffer: u7,
bit_count: u3,
pub const Error = InStreamType.Error;
pub const InStream = io.InStream(*Self, Error, read);
const Self = @This();
const u8_bit_count = comptime meta.bitCount(u8);
const u7_bit_count = comptime meta.bitCount(u7);
const u4_bit_count = comptime meta.bitCount(u4);
pub fn init(in_stream: InStreamType) Self {
return Self{
.in_stream = in_stream,
.bit_buffer = 0,
.bit_count = 0,
};
}
/// Reads `bits` bits from the stream and returns a specified unsigned int type
/// containing them in the least significant end, returning an error if the
/// specified number of bits could not be read.
pub fn readBitsNoEof(self: *Self, comptime U: type, bits: usize) !U {
var n: usize = undefined;
const result = try self.readBits(U, bits, &n);
if (n < bits) return error.EndOfStream;
return result;
}
/// Reads `bits` bits from the stream and returns a specified unsigned int type
/// containing them in the least significant end. The number of bits successfully
/// read is placed in `out_bits`, as reaching the end of the stream is not an error.
pub fn readBits(self: *Self, comptime U: type, bits: usize, out_bits: *usize) Error!U {
comptime assert(trait.isUnsignedInt(U));
//by extending the buffer to a minimum of u8 we can cover a number of edge cases
// related to shifting and casting.
const u_bit_count = comptime meta.bitCount(U);
const buf_bit_count = bc: {
assert(u_bit_count >= bits);
break :bc if (u_bit_count <= u8_bit_count) u8_bit_count else u_bit_count;
};
const Buf = std.meta.Int(false, buf_bit_count);
const BufShift = math.Log2Int(Buf);
out_bits.* = @as(usize, 0);
if (U == u0 or bits == 0) return 0;
var out_buffer = @as(Buf, 0);
if (self.bit_count > 0) {
const n = if (self.bit_count >= bits) @intCast(u3, bits) else self.bit_count;
const shift = u7_bit_count - n;
switch (endian) {
.Big => {
out_buffer = @as(Buf, self.bit_buffer >> shift);
if (n >= u7_bit_count)
self.bit_buffer = 0
else
self.bit_buffer <<= n;
},
.Little => {
const value = (self.bit_buffer << shift) >> shift;
out_buffer = @as(Buf, value);
if (n >= u7_bit_count)
self.bit_buffer = 0
else
self.bit_buffer >>= n;
},
}
self.bit_count -= n;
out_bits.* = n;
}
//at this point we know bit_buffer is empty
//copy bytes until we have enough bits, then leave the rest in bit_buffer
while (out_bits.* < bits) {
const n = bits - out_bits.*;
const next_byte = self.in_stream.readByte() catch |err| {
if (err == error.EndOfStream) {
return @intCast(U, out_buffer);
}
//@BUG: See #1810. Not sure if the bug is that I have to do this for some
// streams, or that I don't for streams with empty errorsets.
return @errSetCast(Error, err);
};
switch (endian) {
.Big => {
if (n >= u8_bit_count) {
out_buffer <<= @intCast(u3, u8_bit_count - 1);
out_buffer <<= 1;
out_buffer |= @as(Buf, next_byte);
out_bits.* += u8_bit_count;
continue;
}
const shift = @intCast(u3, u8_bit_count - n);
out_buffer <<= @intCast(BufShift, n);
out_buffer |= @as(Buf, next_byte >> shift);
out_bits.* += n;
self.bit_buffer = @truncate(u7, next_byte << @intCast(u3, n - 1));
self.bit_count = shift;
},
.Little => {
if (n >= u8_bit_count) {
out_buffer |= @as(Buf, next_byte) << @intCast(BufShift, out_bits.*);
out_bits.* += u8_bit_count;
continue;
}
const shift = @intCast(u3, u8_bit_count - n);
const value = (next_byte << shift) >> shift;
out_buffer |= @as(Buf, value) << @intCast(BufShift, out_bits.*);
out_bits.* += n;
self.bit_buffer = @truncate(u7, next_byte >> @intCast(u3, n));
self.bit_count = shift;
},
}
}
return @intCast(U, out_buffer);
}
pub fn alignToByte(self: *Self) void {
self.bit_buffer = 0;
self.bit_count = 0;
}
pub fn read(self: *Self, buffer: []u8) Error!usize {
var out_bits: usize = undefined;
var out_bits_total = @as(usize, 0);
//@NOTE: I'm not sure this is a good idea, maybe alignToByte should be forced
if (self.bit_count > 0) {
for (buffer) |*b, i| {
b.* = try self.readBits(u8, u8_bit_count, &out_bits);
out_bits_total += out_bits;
}
const incomplete_byte = @boolToInt(out_bits_total % u8_bit_count > 0);
return (out_bits_total / u8_bit_count) + incomplete_byte;
}
return self.in_stream.read(buffer);
}
pub fn inStream(self: *Self) InStream {
return .{ .context = self };
}
};
}
pub fn bitInStream(
comptime endian: builtin.Endian,
underlying_stream: var,
) BitInStream(endian, @TypeOf(underlying_stream)) {
return BitInStream(endian, @TypeOf(underlying_stream)).init(underlying_stream);
}
test "api coverage" {
const mem_be = [_]u8{ 0b11001101, 0b00001011 };
const mem_le = [_]u8{ 0b00011101, 0b10010101 };
var mem_in_be = io.fixedBufferStream(&mem_be);
var bit_stream_be = bitInStream(.Big, mem_in_be.inStream());
var out_bits: usize = undefined;
const expect = testing.expect;
const expectError = testing.expectError;
expect(1 == try bit_stream_be.readBits(u2, 1, &out_bits));
expect(out_bits == 1);
expect(2 == try bit_stream_be.readBits(u5, 2, &out_bits));
expect(out_bits == 2);
expect(3 == try bit_stream_be.readBits(u128, 3, &out_bits));
expect(out_bits == 3);
expect(4 == try bit_stream_be.readBits(u8, 4, &out_bits));
expect(out_bits == 4);
expect(5 == try bit_stream_be.readBits(u9, 5, &out_bits));
expect(out_bits == 5);
expect(1 == try bit_stream_be.readBits(u1, 1, &out_bits));
expect(out_bits == 1);
mem_in_be.pos = 0;
bit_stream_be.bit_count = 0;
expect(0b110011010000101 == try bit_stream_be.readBits(u15, 15, &out_bits));
expect(out_bits == 15);
mem_in_be.pos = 0;
bit_stream_be.bit_count = 0;
expect(0b1100110100001011 == try bit_stream_be.readBits(u16, 16, &out_bits));
expect(out_bits == 16);
_ = try bit_stream_be.readBits(u0, 0, &out_bits);
expect(0 == try bit_stream_be.readBits(u1, 1, &out_bits));
expect(out_bits == 0);
expectError(error.EndOfStream, bit_stream_be.readBitsNoEof(u1, 1));
var mem_in_le = io.fixedBufferStream(&mem_le);
var bit_stream_le = bitInStream(.Little, mem_in_le.inStream());
expect(1 == try bit_stream_le.readBits(u2, 1, &out_bits));
expect(out_bits == 1);
expect(2 == try bit_stream_le.readBits(u5, 2, &out_bits));
expect(out_bits == 2);
expect(3 == try bit_stream_le.readBits(u128, 3, &out_bits));
expect(out_bits == 3);
expect(4 == try bit_stream_le.readBits(u8, 4, &out_bits));
expect(out_bits == 4);
expect(5 == try bit_stream_le.readBits(u9, 5, &out_bits));
expect(out_bits == 5);
expect(1 == try bit_stream_le.readBits(u1, 1, &out_bits));
expect(out_bits == 1);
mem_in_le.pos = 0;
bit_stream_le.bit_count = 0;
expect(0b001010100011101 == try bit_stream_le.readBits(u15, 15, &out_bits));
expect(out_bits == 15);
mem_in_le.pos = 0;
bit_stream_le.bit_count = 0;
expect(0b1001010100011101 == try bit_stream_le.readBits(u16, 16, &out_bits));
expect(out_bits == 16);
_ = try bit_stream_le.readBits(u0, 0, &out_bits);
expect(0 == try bit_stream_le.readBits(u1, 1, &out_bits));
expect(out_bits == 0);
expectError(error.EndOfStream, bit_stream_le.readBitsNoEof(u1, 1));
}
/// Deprecated: use `std.io.bit_reader.bitReader`
pub const bitInStream = @import("./bit_reader.zig").bitReader;

lib/std/io/bit_reader.zig Normal file (250 lines)
View File

@ -0,0 +1,250 @@
const std = @import("../std.zig");
const builtin = std.builtin;
const io = std.io;
const assert = std.debug.assert;
const testing = std.testing;
const trait = std.meta.trait;
const meta = std.meta;
const math = std.math;
/// Creates a stream which allows for reading bit fields from another stream
pub fn BitReader(endian: builtin.Endian, comptime ReaderType: type) type {
return struct {
forward_reader: ReaderType,
bit_buffer: u7,
bit_count: u3,
pub const Error = ReaderType.Error;
pub const Reader = io.Reader(*Self, Error, read);
/// Deprecated: use `Reader`
pub const InStream = io.InStream(*Self, Error, read);
const Self = @This();
const u8_bit_count = comptime meta.bitCount(u8);
const u7_bit_count = comptime meta.bitCount(u7);
const u4_bit_count = comptime meta.bitCount(u4);
pub fn init(forward_reader: ReaderType) Self {
return Self{
.forward_reader = forward_reader,
.bit_buffer = 0,
.bit_count = 0,
};
}
/// Reads `bits` bits from the stream and returns a specified unsigned int type
/// containing them in the least significant end, returning an error if the
/// specified number of bits could not be read.
pub fn readBitsNoEof(self: *Self, comptime U: type, bits: usize) !U {
var n: usize = undefined;
const result = try self.readBits(U, bits, &n);
if (n < bits) return error.EndOfStream;
return result;
}
/// Reads `bits` bits from the stream and returns a specified unsigned int type
/// containing them in the least significant end. The number of bits successfully
/// read is placed in `out_bits`, as reaching the end of the stream is not an error.
pub fn readBits(self: *Self, comptime U: type, bits: usize, out_bits: *usize) Error!U {
comptime assert(trait.isUnsignedInt(U));
//by extending the buffer to a minimum of u8 we can cover a number of edge cases
// related to shifting and casting.
const u_bit_count = comptime meta.bitCount(U);
const buf_bit_count = bc: {
assert(u_bit_count >= bits);
break :bc if (u_bit_count <= u8_bit_count) u8_bit_count else u_bit_count;
};
const Buf = std.meta.Int(false, buf_bit_count);
const BufShift = math.Log2Int(Buf);
out_bits.* = @as(usize, 0);
if (U == u0 or bits == 0) return 0;
var out_buffer = @as(Buf, 0);
if (self.bit_count > 0) {
const n = if (self.bit_count >= bits) @intCast(u3, bits) else self.bit_count;
const shift = u7_bit_count - n;
switch (endian) {
.Big => {
out_buffer = @as(Buf, self.bit_buffer >> shift);
if (n >= u7_bit_count)
self.bit_buffer = 0
else
self.bit_buffer <<= n;
},
.Little => {
const value = (self.bit_buffer << shift) >> shift;
out_buffer = @as(Buf, value);
if (n >= u7_bit_count)
self.bit_buffer = 0
else
self.bit_buffer >>= n;
},
}
self.bit_count -= n;
out_bits.* = n;
}
//at this point we know bit_buffer is empty
//copy bytes until we have enough bits, then leave the rest in bit_buffer
while (out_bits.* < bits) {
const n = bits - out_bits.*;
const next_byte = self.forward_reader.readByte() catch |err| {
if (err == error.EndOfStream) {
return @intCast(U, out_buffer);
}
//@BUG: See #1810. Not sure if the bug is that I have to do this for some
// streams, or that I don't for streams with empty errorsets.
return @errSetCast(Error, err);
};
switch (endian) {
.Big => {
if (n >= u8_bit_count) {
out_buffer <<= @intCast(u3, u8_bit_count - 1);
out_buffer <<= 1;
out_buffer |= @as(Buf, next_byte);
out_bits.* += u8_bit_count;
continue;
}
const shift = @intCast(u3, u8_bit_count - n);
out_buffer <<= @intCast(BufShift, n);
out_buffer |= @as(Buf, next_byte >> shift);
out_bits.* += n;
self.bit_buffer = @truncate(u7, next_byte << @intCast(u3, n - 1));
self.bit_count = shift;
},
.Little => {
if (n >= u8_bit_count) {
out_buffer |= @as(Buf, next_byte) << @intCast(BufShift, out_bits.*);
out_bits.* += u8_bit_count;
continue;
}
const shift = @intCast(u3, u8_bit_count - n);
const value = (next_byte << shift) >> shift;
out_buffer |= @as(Buf, value) << @intCast(BufShift, out_bits.*);
out_bits.* += n;
self.bit_buffer = @truncate(u7, next_byte >> @intCast(u3, n));
self.bit_count = shift;
},
}
}
return @intCast(U, out_buffer);
}
pub fn alignToByte(self: *Self) void {
self.bit_buffer = 0;
self.bit_count = 0;
}
pub fn read(self: *Self, buffer: []u8) Error!usize {
var out_bits: usize = undefined;
var out_bits_total = @as(usize, 0);
//@NOTE: I'm not sure this is a good idea, maybe alignToByte should be forced
if (self.bit_count > 0) {
for (buffer) |*b, i| {
b.* = try self.readBits(u8, u8_bit_count, &out_bits);
out_bits_total += out_bits;
}
const incomplete_byte = @boolToInt(out_bits_total % u8_bit_count > 0);
return (out_bits_total / u8_bit_count) + incomplete_byte;
}
return self.forward_reader.read(buffer);
}
pub fn reader(self: *Self) Reader {
return .{ .context = self };
}
/// Deprecated: use `reader`
pub fn inStream(self: *Self) InStream {
return .{ .context = self };
}
};
}
pub fn bitReader(
comptime endian: builtin.Endian,
underlying_stream: var,
) BitReader(endian, @TypeOf(underlying_stream)) {
return BitReader(endian, @TypeOf(underlying_stream)).init(underlying_stream);
}
test "api coverage" {
const mem_be = [_]u8{ 0b11001101, 0b00001011 };
const mem_le = [_]u8{ 0b00011101, 0b10010101 };
var mem_in_be = io.fixedBufferStream(&mem_be);
var bit_stream_be = bitReader(.Big, mem_in_be.reader());
var out_bits: usize = undefined;
const expect = testing.expect;
const expectError = testing.expectError;
expect(1 == try bit_stream_be.readBits(u2, 1, &out_bits));
expect(out_bits == 1);
expect(2 == try bit_stream_be.readBits(u5, 2, &out_bits));
expect(out_bits == 2);
expect(3 == try bit_stream_be.readBits(u128, 3, &out_bits));
expect(out_bits == 3);
expect(4 == try bit_stream_be.readBits(u8, 4, &out_bits));
expect(out_bits == 4);
expect(5 == try bit_stream_be.readBits(u9, 5, &out_bits));
expect(out_bits == 5);
expect(1 == try bit_stream_be.readBits(u1, 1, &out_bits));
expect(out_bits == 1);
mem_in_be.pos = 0;
bit_stream_be.bit_count = 0;
expect(0b110011010000101 == try bit_stream_be.readBits(u15, 15, &out_bits));
expect(out_bits == 15);
mem_in_be.pos = 0;
bit_stream_be.bit_count = 0;
expect(0b1100110100001011 == try bit_stream_be.readBits(u16, 16, &out_bits));
expect(out_bits == 16);
_ = try bit_stream_be.readBits(u0, 0, &out_bits);
expect(0 == try bit_stream_be.readBits(u1, 1, &out_bits));
expect(out_bits == 0);
expectError(error.EndOfStream, bit_stream_be.readBitsNoEof(u1, 1));
var mem_in_le = io.fixedBufferStream(&mem_le);
var bit_stream_le = bitReader(.Little, mem_in_le.reader());
expect(1 == try bit_stream_le.readBits(u2, 1, &out_bits));
expect(out_bits == 1);
expect(2 == try bit_stream_le.readBits(u5, 2, &out_bits));
expect(out_bits == 2);
expect(3 == try bit_stream_le.readBits(u128, 3, &out_bits));
expect(out_bits == 3);
expect(4 == try bit_stream_le.readBits(u8, 4, &out_bits));
expect(out_bits == 4);
expect(5 == try bit_stream_le.readBits(u9, 5, &out_bits));
expect(out_bits == 5);
expect(1 == try bit_stream_le.readBits(u1, 1, &out_bits));
expect(out_bits == 1);
mem_in_le.pos = 0;
bit_stream_le.bit_count = 0;
expect(0b001010100011101 == try bit_stream_le.readBits(u15, 15, &out_bits));
expect(out_bits == 15);
mem_in_le.pos = 0;
bit_stream_le.bit_count = 0;
expect(0b1001010100011101 == try bit_stream_le.readBits(u16, 16, &out_bits));
expect(out_bits == 16);
_ = try bit_stream_le.readBits(u0, 0, &out_bits);
expect(0 == try bit_stream_le.readBits(u1, 1, &out_bits));
expect(out_bits == 0);
expectError(error.EndOfStream, bit_stream_le.readBitsNoEof(u1, 1));
}
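Beyond the API-coverage test, a minimal sketch of pulling two bit fields out of a single byte, most significant bits first (the byte value is arbitrary; io, testing, and bitReader are the declarations already in this file):

const data = [_]u8{0b10110010};
var fbs = io.fixedBufferStream(&data);
var bits = bitReader(.Big, fbs.reader());
const top3 = try bits.readBitsNoEof(u3, 3); // 0b101
const rest = try bits.readBitsNoEof(u5, 5); // 0b10010
testing.expect(top3 == 0b101);
testing.expect(rest == 0b10010);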

View File

@ -1,86 +1,5 @@
const std = @import("../std.zig");
const io = std.io;
const assert = std.debug.assert;
const testing = std.testing;
/// Deprecated: use `std.io.buffered_reader.BufferedReader`
pub const BufferedInStream = @import("./buffered_reader.zig").BufferedReader;
pub fn BufferedInStream(comptime buffer_size: usize, comptime InStreamType: type) type {
return struct {
unbuffered_in_stream: InStreamType,
fifo: FifoType = FifoType.init(),
pub const Error = InStreamType.Error;
pub const InStream = io.InStream(*Self, Error, read);
const Self = @This();
const FifoType = std.fifo.LinearFifo(u8, std.fifo.LinearFifoBufferType{ .Static = buffer_size });
pub fn read(self: *Self, dest: []u8) Error!usize {
var dest_index: usize = 0;
while (dest_index < dest.len) {
const written = self.fifo.read(dest[dest_index..]);
if (written == 0) {
// fifo empty, fill it
const writable = self.fifo.writableSlice(0);
assert(writable.len > 0);
const n = try self.unbuffered_in_stream.read(writable);
if (n == 0) {
// reading from the unbuffered stream returned nothing
// so we have nothing left to read.
return dest_index;
}
self.fifo.update(n);
}
dest_index += written;
}
return dest.len;
}
pub fn inStream(self: *Self) InStream {
return .{ .context = self };
}
};
}
pub fn bufferedInStream(underlying_stream: var) BufferedInStream(4096, @TypeOf(underlying_stream)) {
return .{ .unbuffered_in_stream = underlying_stream };
}
test "io.BufferedInStream" {
const OneByteReadInStream = struct {
str: []const u8,
curr: usize,
const Error = error{NoError};
const Self = @This();
const InStream = io.InStream(*Self, Error, read);
fn init(str: []const u8) Self {
return Self{
.str = str,
.curr = 0,
};
}
fn read(self: *Self, dest: []u8) Error!usize {
if (self.str.len <= self.curr or dest.len == 0)
return 0;
dest[0] = self.str[self.curr];
self.curr += 1;
return 1;
}
fn inStream(self: *Self) InStream {
return .{ .context = self };
}
};
const str = "This is a test";
var one_byte_stream = OneByteReadInStream.init(str);
var buf_in_stream = bufferedInStream(one_byte_stream.inStream());
const stream = buf_in_stream.inStream();
const res = try stream.readAllAlloc(testing.allocator, str.len + 1);
defer testing.allocator.free(res);
testing.expectEqualSlices(u8, str, res);
}
/// Deprecated: use `std.io.buffered_reader.bufferedReader`
pub const bufferedInStream = @import("./buffered_reader.zig").bufferedReader;

View File

@ -0,0 +1,93 @@
const std = @import("../std.zig");
const io = std.io;
const assert = std.debug.assert;
const testing = std.testing;
pub fn BufferedReader(comptime buffer_size: usize, comptime ReaderType: type) type {
return struct {
unbuffered_reader: ReaderType,
fifo: FifoType = FifoType.init(),
pub const Error = ReaderType.Error;
pub const Reader = io.Reader(*Self, Error, read);
/// Deprecated: use `Reader`
pub const InStream = Reader;
const Self = @This();
const FifoType = std.fifo.LinearFifo(u8, std.fifo.LinearFifoBufferType{ .Static = buffer_size });
pub fn read(self: *Self, dest: []u8) Error!usize {
var dest_index: usize = 0;
while (dest_index < dest.len) {
const written = self.fifo.read(dest[dest_index..]);
if (written == 0) {
// fifo empty, fill it
const writable = self.fifo.writableSlice(0);
assert(writable.len > 0);
const n = try self.unbuffered_reader.read(writable);
if (n == 0) {
// reading from the unbuffered stream returned nothing
// so we have nothing left to read.
return dest_index;
}
self.fifo.update(n);
}
dest_index += written;
}
return dest.len;
}
pub fn reader(self: *Self) Reader {
return .{ .context = self };
}
/// Deprecated: use `reader`
pub fn inStream(self: *Self) InStream {
return .{ .context = self };
}
};
}
pub fn bufferedReader(underlying_stream: var) BufferedReader(4096, @TypeOf(underlying_stream)) {
return .{ .unbuffered_reader = underlying_stream };
}
test "io.BufferedReader" {
const OneByteReadReader = struct {
str: []const u8,
curr: usize,
const Error = error{NoError};
const Self = @This();
const Reader = io.Reader(*Self, Error, read);
fn init(str: []const u8) Self {
return Self{
.str = str,
.curr = 0,
};
}
fn read(self: *Self, dest: []u8) Error!usize {
if (self.str.len <= self.curr or dest.len == 0)
return 0;
dest[0] = self.str[self.curr];
self.curr += 1;
return 1;
}
fn reader(self: *Self) Reader {
return .{ .context = self };
}
};
const str = "This is a test";
var one_byte_stream = OneByteReadReader.init(str);
var buf_reader = bufferedReader(one_byte_stream.reader());
const stream = buf_reader.reader();
const res = try stream.readAllAlloc(testing.allocator, str.len + 1);
defer testing.allocator.free(res);
testing.expectEqualSlices(u8, str, res);
}
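In practice the wrapper usually sits in front of a File; a hedged sketch of that pattern (hypothetical path, assumes std is in scope):

var file = try std.fs.cwd().openFile("data.bin", .{}); // hypothetical path
defer file.close();
var buffered = std.io.bufferedReader(file.reader());
const in = buffered.reader();
const first_byte = try in.readByte();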

View File

@ -4,8 +4,8 @@ const testing = std.testing;
const mem = std.mem;
const assert = std.debug.assert;
/// This turns a byte buffer into an `io.OutStream`, `io.InStream`, or `io.SeekableStream`.
/// If the supplied byte buffer is const, then `io.OutStream` is not available.
/// This turns a byte buffer into an `io.Writer`, `io.Reader`, or `io.SeekableStream`.
/// If the supplied byte buffer is const, then `io.Writer` is not available.
pub fn FixedBufferStream(comptime Buffer: type) type {
return struct {
/// `Buffer` is either a `[]u8` or `[]const u8`.
@ -17,6 +17,8 @@ pub fn FixedBufferStream(comptime Buffer: type) type {
pub const SeekError = error{};
pub const GetSeekPosError = error{};
pub const Reader = io.Reader(*Self, ReadError, read);
/// Deprecated: use `Reader`
pub const InStream = io.InStream(*Self, ReadError, read);
pub const Writer = io.Writer(*Self, WriteError, write);
/// Deprecated: use `Writer`
@ -34,6 +36,11 @@ pub fn FixedBufferStream(comptime Buffer: type) type {
const Self = @This();
pub fn reader(self: *Self) Reader {
return .{ .context = self };
}
/// Deprecated: use `reader`
pub fn inStream(self: *Self) InStream {
return .{ .context = self };
}
@ -165,14 +172,14 @@ test "FixedBufferStream input" {
var dest: [4]u8 = undefined;
var read = try fbs.inStream().read(dest[0..4]);
var read = try fbs.reader().read(dest[0..4]);
testing.expect(read == 4);
testing.expect(mem.eql(u8, dest[0..4], bytes[0..4]));
read = try fbs.inStream().read(dest[0..4]);
read = try fbs.reader().read(dest[0..4]);
testing.expect(read == 3);
testing.expect(mem.eql(u8, dest[0..3], bytes[4..7]));
read = try fbs.inStream().read(dest[0..4]);
read = try fbs.reader().read(dest[0..4]);
testing.expect(read == 0);
}
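A minimal sketch of the renamed reader() pulling a typed value out of a fixed buffer (bytes chosen arbitrarily; io and testing are this file's imports):

const bytes = [_]u8{ 0x12, 0x34 };
var fbs = io.fixedBufferStream(&bytes);
const value = try fbs.reader().readIntBig(u16);
testing.expect(value == 0x1234);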

View File

@ -1,296 +1,2 @@
const std = @import("../std.zig");
const builtin = std.builtin;
const math = std.math;
const assert = std.debug.assert;
const mem = std.mem;
const testing = std.testing;
pub fn InStream(
comptime Context: type,
comptime ReadError: type,
/// Returns the number of bytes read. It may be less than buffer.len.
/// If the number of bytes read is 0, it means end of stream.
/// End of stream is not an error condition.
comptime readFn: fn (context: Context, buffer: []u8) ReadError!usize,
) type {
return struct {
pub const Error = ReadError;
context: Context,
const Self = @This();
/// Returns the number of bytes read. It may be less than buffer.len.
/// If the number of bytes read is 0, it means end of stream.
/// End of stream is not an error condition.
pub fn read(self: Self, buffer: []u8) Error!usize {
return readFn(self.context, buffer);
}
/// Returns the number of bytes read. If the number read is smaller than `buffer.len`, it
/// means the stream reached the end. Reaching the end of a stream is not an error
/// condition.
pub fn readAll(self: Self, buffer: []u8) Error!usize {
var index: usize = 0;
while (index != buffer.len) {
const amt = try self.read(buffer[index..]);
if (amt == 0) return index;
index += amt;
}
return index;
}
/// Returns the number of bytes read. If the number read would be smaller than buf.len,
/// error.EndOfStream is returned instead.
pub fn readNoEof(self: Self, buf: []u8) !void {
const amt_read = try self.readAll(buf);
if (amt_read < buf.len) return error.EndOfStream;
}
pub const readAllBuffer = @compileError("deprecated; use readAllArrayList()");
/// Appends to the `std.ArrayList` contents by reading from the stream until end of stream is found.
/// If the number of bytes appended would exceed `max_append_size`, `error.StreamTooLong` is returned
/// and the `std.ArrayList` has exactly `max_append_size` bytes appended.
pub fn readAllArrayList(self: Self, array_list: *std.ArrayList(u8), max_append_size: usize) !void {
try array_list.ensureCapacity(math.min(max_append_size, 4096));
const original_len = array_list.items.len;
var start_index: usize = original_len;
while (true) {
array_list.expandToCapacity();
const dest_slice = array_list.span()[start_index..];
const bytes_read = try self.readAll(dest_slice);
start_index += bytes_read;
if (start_index - original_len > max_append_size) {
array_list.shrink(original_len + max_append_size);
return error.StreamTooLong;
}
if (bytes_read != dest_slice.len) {
array_list.shrink(start_index);
return;
}
// This will trigger ArrayList to expand superlinearly at whatever its growth rate is.
try array_list.ensureCapacity(start_index + 1);
}
}
/// Allocates enough memory to hold all the contents of the stream. If the allocated
/// memory would be greater than `max_size`, returns `error.StreamTooLong`.
/// Caller owns returned memory.
/// If this function returns an error, the contents from the stream read so far are lost.
pub fn readAllAlloc(self: Self, allocator: *mem.Allocator, max_size: usize) ![]u8 {
var array_list = std.ArrayList(u8).init(allocator);
defer array_list.deinit();
try self.readAllArrayList(&array_list, max_size);
return array_list.toOwnedSlice();
}
/// Replaces the `std.ArrayList` contents by reading from the stream until `delimiter` is found.
/// Does not include the delimiter in the result.
/// If the `std.ArrayList` length would exceed `max_size`, `error.StreamTooLong` is returned and the
/// `std.ArrayList` is populated with `max_size` bytes from the stream.
pub fn readUntilDelimiterArrayList(
self: Self,
array_list: *std.ArrayList(u8),
delimiter: u8,
max_size: usize,
) !void {
array_list.shrink(0);
while (true) {
var byte: u8 = try self.readByte();
if (byte == delimiter) {
return;
}
if (array_list.items.len == max_size) {
return error.StreamTooLong;
}
try array_list.append(byte);
}
}
/// Allocates enough memory to read until `delimiter`. If the allocated
/// memory would be greater than `max_size`, returns `error.StreamTooLong`.
/// Caller owns returned memory.
/// If this function returns an error, the contents from the stream read so far are lost.
pub fn readUntilDelimiterAlloc(
self: Self,
allocator: *mem.Allocator,
delimiter: u8,
max_size: usize,
) ![]u8 {
var array_list = std.ArrayList(u8).init(allocator);
defer array_list.deinit();
try self.readUntilDelimiterArrayList(&array_list, delimiter, max_size);
return array_list.toOwnedSlice();
}
/// Reads from the stream until specified byte is found. If the buffer is not
/// large enough to hold the entire contents, `error.StreamTooLong` is returned.
/// If end-of-stream is found, returns the rest of the stream. If this
/// function is called again after that, returns null.
/// Returns a slice of the stream data, with ptr equal to `buf.ptr`. The
/// delimiter byte is not included in the returned slice.
pub fn readUntilDelimiterOrEof(self: Self, buf: []u8, delimiter: u8) !?[]u8 {
var index: usize = 0;
while (true) {
const byte = self.readByte() catch |err| switch (err) {
error.EndOfStream => {
if (index == 0) {
return null;
} else {
return buf[0..index];
}
},
else => |e| return e,
};
if (byte == delimiter) return buf[0..index];
if (index >= buf.len) return error.StreamTooLong;
buf[index] = byte;
index += 1;
}
}
/// Reads from the stream until specified byte is found, discarding all data,
/// including the delimiter.
/// If end-of-stream is found, this function succeeds.
pub fn skipUntilDelimiterOrEof(self: Self, delimiter: u8) !void {
while (true) {
const byte = self.readByte() catch |err| switch (err) {
error.EndOfStream => return,
else => |e| return e,
};
if (byte == delimiter) return;
}
}
/// Reads 1 byte from the stream or returns `error.EndOfStream`.
pub fn readByte(self: Self) !u8 {
var result: [1]u8 = undefined;
const amt_read = try self.read(result[0..]);
if (amt_read < 1) return error.EndOfStream;
return result[0];
}
/// Same as `readByte` except the returned byte is signed.
pub fn readByteSigned(self: Self) !i8 {
return @bitCast(i8, try self.readByte());
}
/// Reads exactly `num_bytes` bytes and returns as an array.
/// `num_bytes` must be comptime-known
pub fn readBytesNoEof(self: Self, comptime num_bytes: usize) ![num_bytes]u8 {
var bytes: [num_bytes]u8 = undefined;
try self.readNoEof(&bytes);
return bytes;
}
/// Reads a native-endian integer
pub fn readIntNative(self: Self, comptime T: type) !T {
const bytes = try self.readBytesNoEof((T.bit_count + 7) / 8);
return mem.readIntNative(T, &bytes);
}
/// Reads a foreign-endian integer
pub fn readIntForeign(self: Self, comptime T: type) !T {
const bytes = try self.readBytesNoEof((T.bit_count + 7) / 8);
return mem.readIntForeign(T, &bytes);
}
pub fn readIntLittle(self: Self, comptime T: type) !T {
const bytes = try self.readBytesNoEof((T.bit_count + 7) / 8);
return mem.readIntLittle(T, &bytes);
}
pub fn readIntBig(self: Self, comptime T: type) !T {
const bytes = try self.readBytesNoEof((T.bit_count + 7) / 8);
return mem.readIntBig(T, &bytes);
}
pub fn readInt(self: Self, comptime T: type, endian: builtin.Endian) !T {
const bytes = try self.readBytesNoEof((T.bit_count + 7) / 8);
return mem.readInt(T, &bytes, endian);
}
pub fn readVarInt(self: Self, comptime ReturnType: type, endian: builtin.Endian, size: usize) !ReturnType {
assert(size <= @sizeOf(ReturnType));
var bytes_buf: [@sizeOf(ReturnType)]u8 = undefined;
const bytes = bytes_buf[0..size];
try self.readNoEof(bytes);
return mem.readVarInt(ReturnType, bytes, endian);
}
pub fn skipBytes(self: Self, num_bytes: u64) !void {
var i: u64 = 0;
while (i < num_bytes) : (i += 1) {
_ = try self.readByte();
}
}
/// Reads `slice.len` bytes from the stream and returns if they are the same as the passed slice
pub fn isBytes(self: Self, slice: []const u8) !bool {
var i: usize = 0;
var matches = true;
while (i < slice.len) : (i += 1) {
if (slice[i] != try self.readByte()) {
matches = false;
}
}
return matches;
}
pub fn readStruct(self: Self, comptime T: type) !T {
// Only extern and packed structs have defined in-memory layout.
comptime assert(@typeInfo(T).Struct.layout != builtin.TypeInfo.ContainerLayout.Auto);
var res: [1]T = undefined;
try self.readNoEof(mem.sliceAsBytes(res[0..]));
return res[0];
}
/// Reads an integer with the same size as the given enum's tag type. If the integer matches
/// an enum tag, casts the integer to the enum tag and returns it. Otherwise, returns an error.
/// TODO optimization taking advantage of most fields being in order
pub fn readEnum(self: Self, comptime Enum: type, endian: builtin.Endian) !Enum {
const E = error{
/// An integer was read, but it did not match any of the tags in the supplied enum.
InvalidValue,
};
const type_info = @typeInfo(Enum).Enum;
const tag = try self.readInt(type_info.tag_type, endian);
inline for (std.meta.fields(Enum)) |field| {
if (tag == field.value) {
return @field(Enum, field.name);
}
}
return E.InvalidValue;
}
};
}
test "InStream" {
var buf = "a\x02".*;
const in_stream = std.io.fixedBufferStream(&buf).inStream();
testing.expect((try in_stream.readByte()) == 'a');
testing.expect((try in_stream.readEnum(enum(u8) {
a = 0,
b = 99,
c = 2,
d = 3,
}, undefined)) == .c);
testing.expectError(error.EndOfStream, in_stream.readByte());
}
test "InStream.isBytes" {
const in_stream = std.io.fixedBufferStream("foobar").inStream();
testing.expectEqual(true, try in_stream.isBytes("foo"));
testing.expectEqual(false, try in_stream.isBytes("qux"));
}
/// Deprecated: use `std.io.reader.Reader`
pub const InStream = @import("./reader.zig").Reader;

View File

@ -5,24 +5,26 @@ const testing = std.testing;
/// Creates a stream which supports 'un-reading' data, so that it can be read again.
/// This makes look-ahead style parsing much easier.
/// TODO merge this with `std.io.BufferedInStream`: https://github.com/ziglang/zig/issues/4501
/// TODO merge this with `std.io.BufferedReader`: https://github.com/ziglang/zig/issues/4501
pub fn PeekStream(
comptime buffer_type: std.fifo.LinearFifoBufferType,
comptime InStreamType: type,
comptime ReaderType: type,
) type {
return struct {
unbuffered_in_stream: InStreamType,
unbuffered_in_stream: ReaderType,
fifo: FifoType,
pub const Error = InStreamType.Error;
pub const InStream = io.InStream(*Self, Error, read);
pub const Error = ReaderType.Error;
pub const Reader = io.Reader(*Self, Error, read);
/// Deprecated: use `Reader`
pub const InStream = Reader;
const Self = @This();
const FifoType = std.fifo.LinearFifo(u8, buffer_type);
pub usingnamespace switch (buffer_type) {
.Static => struct {
pub fn init(base: InStreamType) Self {
pub fn init(base: ReaderType) Self {
return .{
.unbuffered_in_stream = base,
.fifo = FifoType.init(),
@ -30,7 +32,7 @@ pub fn PeekStream(
}
},
.Slice => struct {
pub fn init(base: InStreamType, buf: []u8) Self {
pub fn init(base: ReaderType, buf: []u8) Self {
return .{
.unbuffered_in_stream = base,
.fifo = FifoType.init(buf),
@ -38,7 +40,7 @@ pub fn PeekStream(
}
},
.Dynamic => struct {
pub fn init(base: InStreamType, allocator: *mem.Allocator) Self {
pub fn init(base: ReaderType, allocator: *mem.Allocator) Self {
return .{
.unbuffered_in_stream = base,
.fifo = FifoType.init(allocator),
@ -65,6 +67,11 @@ pub fn PeekStream(
return dest_index;
}
pub fn reader(self: *Self) Reader {
return .{ .context = self };
}
/// Deprecated: use `reader`
pub fn inStream(self: *Self) InStream {
return .{ .context = self };
}
@ -81,31 +88,31 @@ pub fn peekStream(
test "PeekStream" {
const bytes = [_]u8{ 1, 2, 3, 4, 5, 6, 7, 8 };
var fbs = io.fixedBufferStream(&bytes);
var ps = peekStream(2, fbs.inStream());
var ps = peekStream(2, fbs.reader());
var dest: [4]u8 = undefined;
try ps.putBackByte(9);
try ps.putBackByte(10);
var read = try ps.inStream().read(dest[0..4]);
var read = try ps.reader().read(dest[0..4]);
testing.expect(read == 4);
testing.expect(dest[0] == 10);
testing.expect(dest[1] == 9);
testing.expect(mem.eql(u8, dest[2..4], bytes[0..2]));
read = try ps.inStream().read(dest[0..4]);
read = try ps.reader().read(dest[0..4]);
testing.expect(read == 4);
testing.expect(mem.eql(u8, dest[0..4], bytes[2..6]));
read = try ps.inStream().read(dest[0..4]);
read = try ps.reader().read(dest[0..4]);
testing.expect(read == 2);
testing.expect(mem.eql(u8, dest[0..2], bytes[6..8]));
try ps.putBackByte(11);
try ps.putBackByte(12);
read = try ps.inStream().read(dest[0..4]);
read = try ps.reader().read(dest[0..4]);
testing.expect(read == 2);
testing.expect(dest[0] == 12);
testing.expect(dest[1] == 11);

lib/std/io/reader.zig Normal file (296 lines)
View File

@ -0,0 +1,296 @@
const std = @import("../std.zig");
const builtin = std.builtin;
const math = std.math;
const assert = std.debug.assert;
const mem = std.mem;
const testing = std.testing;
pub fn Reader(
comptime Context: type,
comptime ReadError: type,
/// Returns the number of bytes read. It may be less than buffer.len.
/// If the number of bytes read is 0, it means end of stream.
/// End of stream is not an error condition.
comptime readFn: fn (context: Context, buffer: []u8) ReadError!usize,
) type {
return struct {
pub const Error = ReadError;
context: Context,
const Self = @This();
/// Returns the number of bytes read. It may be less than buffer.len.
/// If the number of bytes read is 0, it means end of stream.
/// End of stream is not an error condition.
pub fn read(self: Self, buffer: []u8) Error!usize {
return readFn(self.context, buffer);
}
/// Returns the number of bytes read. If the number read is smaller than `buffer.len`, it
/// means the stream reached the end. Reaching the end of a stream is not an error
/// condition.
pub fn readAll(self: Self, buffer: []u8) Error!usize {
var index: usize = 0;
while (index != buffer.len) {
const amt = try self.read(buffer[index..]);
if (amt == 0) return index;
index += amt;
}
return index;
}
/// Returns the number of bytes read. If the number read would be smaller than buf.len,
/// error.EndOfStream is returned instead.
pub fn readNoEof(self: Self, buf: []u8) !void {
const amt_read = try self.readAll(buf);
if (amt_read < buf.len) return error.EndOfStream;
}
pub const readAllBuffer = @compileError("deprecated; use readAllArrayList()");
/// Appends to the `std.ArrayList` contents by reading from the stream until end of stream is found.
/// If the number of bytes appended would exceed `max_append_size`, `error.StreamTooLong` is returned
/// and the `std.ArrayList` has exactly `max_append_size` bytes appended.
pub fn readAllArrayList(self: Self, array_list: *std.ArrayList(u8), max_append_size: usize) !void {
try array_list.ensureCapacity(math.min(max_append_size, 4096));
const original_len = array_list.items.len;
var start_index: usize = original_len;
while (true) {
array_list.expandToCapacity();
const dest_slice = array_list.span()[start_index..];
const bytes_read = try self.readAll(dest_slice);
start_index += bytes_read;
if (start_index - original_len > max_append_size) {
array_list.shrink(original_len + max_append_size);
return error.StreamTooLong;
}
if (bytes_read != dest_slice.len) {
array_list.shrink(start_index);
return;
}
// This will trigger ArrayList to expand superlinearly at whatever its growth rate is.
try array_list.ensureCapacity(start_index + 1);
}
}
/// Allocates enough memory to hold all the contents of the stream. If the allocated
/// memory would be greater than `max_size`, returns `error.StreamTooLong`.
/// Caller owns returned memory.
/// If this function returns an error, the contents from the stream read so far are lost.
pub fn readAllAlloc(self: Self, allocator: *mem.Allocator, max_size: usize) ![]u8 {
var array_list = std.ArrayList(u8).init(allocator);
defer array_list.deinit();
try self.readAllArrayList(&array_list, max_size);
return array_list.toOwnedSlice();
}
/// Replaces the `std.ArrayList` contents by reading from the stream until `delimiter` is found.
/// Does not include the delimiter in the result.
/// If the `std.ArrayList` length would exceed `max_size`, `error.StreamTooLong` is returned and the
/// `std.ArrayList` is populated with `max_size` bytes from the stream.
pub fn readUntilDelimiterArrayList(
self: Self,
array_list: *std.ArrayList(u8),
delimiter: u8,
max_size: usize,
) !void {
array_list.shrink(0);
while (true) {
var byte: u8 = try self.readByte();
if (byte == delimiter) {
return;
}
if (array_list.items.len == max_size) {
return error.StreamTooLong;
}
try array_list.append(byte);
}
}
/// Allocates enough memory to read until `delimiter`. If the allocated
/// memory would be greater than `max_size`, returns `error.StreamTooLong`.
/// Caller owns returned memory.
/// If this function returns an error, the contents from the stream read so far are lost.
pub fn readUntilDelimiterAlloc(
self: Self,
allocator: *mem.Allocator,
delimiter: u8,
max_size: usize,
) ![]u8 {
var array_list = std.ArrayList(u8).init(allocator);
defer array_list.deinit();
try self.readUntilDelimiterArrayList(&array_list, delimiter, max_size);
return array_list.toOwnedSlice();
}
/// Reads from the stream until specified byte is found. If the buffer is not
/// large enough to hold the entire contents, `error.StreamTooLong` is returned.
/// If end-of-stream is found, returns the rest of the stream. If this
/// function is called again after that, returns null.
/// Returns a slice of the stream data, with ptr equal to `buf.ptr`. The
/// delimiter byte is not included in the returned slice.
pub fn readUntilDelimiterOrEof(self: Self, buf: []u8, delimiter: u8) !?[]u8 {
var index: usize = 0;
while (true) {
const byte = self.readByte() catch |err| switch (err) {
error.EndOfStream => {
if (index == 0) {
return null;
} else {
return buf[0..index];
}
},
else => |e| return e,
};
if (byte == delimiter) return buf[0..index];
if (index >= buf.len) return error.StreamTooLong;
buf[index] = byte;
index += 1;
}
}
/// Reads from the stream until specified byte is found, discarding all data,
/// including the delimiter.
/// If end-of-stream is found, this function succeeds.
pub fn skipUntilDelimiterOrEof(self: Self, delimiter: u8) !void {
while (true) {
const byte = self.readByte() catch |err| switch (err) {
error.EndOfStream => return,
else => |e| return e,
};
if (byte == delimiter) return;
}
}
/// Reads 1 byte from the stream or returns `error.EndOfStream`.
pub fn readByte(self: Self) !u8 {
var result: [1]u8 = undefined;
const amt_read = try self.read(result[0..]);
if (amt_read < 1) return error.EndOfStream;
return result[0];
}
/// Same as `readByte` except the returned byte is signed.
pub fn readByteSigned(self: Self) !i8 {
return @bitCast(i8, try self.readByte());
}
/// Reads exactly `num_bytes` bytes and returns as an array.
/// `num_bytes` must be comptime-known
pub fn readBytesNoEof(self: Self, comptime num_bytes: usize) ![num_bytes]u8 {
var bytes: [num_bytes]u8 = undefined;
try self.readNoEof(&bytes);
return bytes;
}
/// Reads a native-endian integer
pub fn readIntNative(self: Self, comptime T: type) !T {
const bytes = try self.readBytesNoEof((T.bit_count + 7) / 8);
return mem.readIntNative(T, &bytes);
}
/// Reads a foreign-endian integer
pub fn readIntForeign(self: Self, comptime T: type) !T {
const bytes = try self.readBytesNoEof((T.bit_count + 7) / 8);
return mem.readIntForeign(T, &bytes);
}
pub fn readIntLittle(self: Self, comptime T: type) !T {
const bytes = try self.readBytesNoEof((T.bit_count + 7) / 8);
return mem.readIntLittle(T, &bytes);
}
pub fn readIntBig(self: Self, comptime T: type) !T {
const bytes = try self.readBytesNoEof((T.bit_count + 7) / 8);
return mem.readIntBig(T, &bytes);
}
pub fn readInt(self: Self, comptime T: type, endian: builtin.Endian) !T {
const bytes = try self.readBytesNoEof((T.bit_count + 7) / 8);
return mem.readInt(T, &bytes, endian);
}
pub fn readVarInt(self: Self, comptime ReturnType: type, endian: builtin.Endian, size: usize) !ReturnType {
assert(size <= @sizeOf(ReturnType));
var bytes_buf: [@sizeOf(ReturnType)]u8 = undefined;
const bytes = bytes_buf[0..size];
try self.readNoEof(bytes);
return mem.readVarInt(ReturnType, bytes, endian);
}
pub fn skipBytes(self: Self, num_bytes: u64) !void {
var i: u64 = 0;
while (i < num_bytes) : (i += 1) {
_ = try self.readByte();
}
}
/// Reads `slice.len` bytes from the stream and returns if they are the same as the passed slice
pub fn isBytes(self: Self, slice: []const u8) !bool {
var i: usize = 0;
var matches = true;
while (i < slice.len) : (i += 1) {
if (slice[i] != try self.readByte()) {
matches = false;
}
}
return matches;
}
pub fn readStruct(self: Self, comptime T: type) !T {
// Only extern and packed structs have defined in-memory layout.
comptime assert(@typeInfo(T).Struct.layout != builtin.TypeInfo.ContainerLayout.Auto);
var res: [1]T = undefined;
try self.readNoEof(mem.sliceAsBytes(res[0..]));
return res[0];
}
/// Reads an integer with the same size as the given enum's tag type. If the integer matches
/// an enum tag, casts the integer to the enum tag and returns it. Otherwise, returns an error.
/// TODO optimization taking advantage of most fields being in order
pub fn readEnum(self: Self, comptime Enum: type, endian: builtin.Endian) !Enum {
const E = error{
/// An integer was read, but it did not match any of the tags in the supplied enum.
InvalidValue,
};
const type_info = @typeInfo(Enum).Enum;
const tag = try self.readInt(type_info.tag_type, endian);
inline for (std.meta.fields(Enum)) |field| {
if (tag == field.value) {
return @field(Enum, field.name);
}
}
return E.InvalidValue;
}
};
}
test "Reader" {
var buf = "a\x02".*;
const reader = std.io.fixedBufferStream(&buf).reader();
testing.expect((try reader.readByte()) == 'a');
testing.expect((try reader.readEnum(enum(u8) {
a = 0,
b = 99,
c = 2,
d = 3,
}, undefined)) == .c);
testing.expectError(error.EndOfStream, reader.readByte());
}
test "Reader.isBytes" {
const reader = std.io.fixedBufferStream("foobar").reader();
testing.expectEqual(true, try reader.isBytes("foo"));
testing.expectEqual(false, try reader.isBytes("qux"));
}
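To show a couple of the helpers above working together, a hedged sketch of delimiter-based reading (input and buffer size chosen arbitrarily; testing is this file's import):

var fbs = std.io.fixedBufferStream("one two");
const r = fbs.reader();
var word_buf: [8]u8 = undefined;
testing.expectEqualSlices(u8, "one", (try r.readUntilDelimiterOrEof(&word_buf, ' ')).?);
testing.expectEqualSlices(u8, "two", (try r.readUntilDelimiterOrEof(&word_buf, ' ')).?);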

View File

@ -1,5 +1,4 @@
const std = @import("../std.zig");
const InStream = std.io.InStream;
pub fn SeekableStream(
comptime Context: type,

View File

@ -24,16 +24,16 @@ pub const Packing = enum {
/// which will be called when the deserializer is used to deserialize
/// that type. It will pass a pointer to the type instance to deserialize
/// into and a pointer to the deserializer struct.
pub fn Deserializer(comptime endian: builtin.Endian, comptime packing: Packing, comptime InStreamType: type) type {
pub fn Deserializer(comptime endian: builtin.Endian, comptime packing: Packing, comptime ReaderType: type) type {
return struct {
in_stream: if (packing == .Bit) io.BitInStream(endian, InStreamType) else InStreamType,
in_stream: if (packing == .Bit) io.BitReader(endian, ReaderType) else ReaderType,
const Self = @This();
pub fn init(in_stream: InStreamType) Self {
pub fn init(in_stream: ReaderType) Self {
return Self{
.in_stream = switch (packing) {
.Bit => io.bitInStream(endian, in_stream),
.Bit => io.bitReader(endian, in_stream),
.Byte => in_stream,
},
};
@ -45,7 +45,7 @@ pub fn Deserializer(comptime endian: builtin.Endian, comptime packing: Packing,
}
//@BUG: inferred error issue. See: #1386
fn deserializeInt(self: *Self, comptime T: type) (InStreamType.Error || error{EndOfStream})!T {
fn deserializeInt(self: *Self, comptime T: type) (ReaderType.Error || error{EndOfStream})!T {
comptime assert(trait.is(.Int)(T) or trait.is(.Float)(T));
const u8_bit_count = 8;
@ -368,7 +368,7 @@ fn testIntSerializerDeserializer(comptime endian: builtin.Endian, comptime packi
var _serializer = serializer(endian, packing, out.outStream());
var in = io.fixedBufferStream(&data_mem);
var _deserializer = deserializer(endian, packing, in.inStream());
var _deserializer = deserializer(endian, packing, in.reader());
comptime var i = 0;
inline while (i <= max_test_bitsize) : (i += 1) {
@ -425,7 +425,7 @@ fn testIntSerializerDeserializerInfNaN(
var _serializer = serializer(endian, packing, out.outStream());
var in = io.fixedBufferStream(&data_mem);
var _deserializer = deserializer(endian, packing, in.inStream());
var _deserializer = deserializer(endian, packing, in.reader());
//@TODO: isInf/isNan not currently implemented for f128.
try _serializer.serialize(std.math.nan(f16));
@ -554,7 +554,7 @@ fn testSerializerDeserializer(comptime endian: builtin.Endian, comptime packing:
var _serializer = serializer(endian, packing, out.outStream());
var in = io.fixedBufferStream(&data_mem);
var _deserializer = deserializer(endian, packing, in.inStream());
var _deserializer = deserializer(endian, packing, in.reader());
try _serializer.serialize(my_inst);
@ -589,7 +589,7 @@ fn testBadData(comptime endian: builtin.Endian, comptime packing: io.Packing) !v
var _serializer = serializer(endian, packing, out.outStream());
var in = io.fixedBufferStream(&data_mem);
var _deserializer = deserializer(endian, packing, in.inStream());
var _deserializer = deserializer(endian, packing, in.reader());
try _serializer.serialize(@as(u14, 3));
testing.expectError(error.InvalidEnumTag, _deserializer.deserialize(A));
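A sketch mirroring the test setup above, with the new spelling for wiring a byte-packed deserializer to a reader (the buffer contents and target type are placeholders):

var data_mem = [_]u8{ 0x01, 0x00, 0x00, 0x00 };
var in = io.fixedBufferStream(&data_mem);
var _deserializer = deserializer(.Little, .Byte, in.reader());
const value = try _deserializer.deserialize(u32); // 1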

View File

@ -2,7 +2,7 @@ const std = @import("../std.zig");
const io = std.io;
const testing = std.testing;
/// Provides `io.InStream`, `io.OutStream`, and `io.SeekableStream` for in-memory buffers as
/// Provides `io.Reader`, `io.Writer`, and `io.SeekableStream` for in-memory buffers as
/// well as files.
/// For memory sources, if the supplied byte buffer is const, then `io.OutStream` is not available.
/// The error set of the stream functions is the error set of the corresponding file functions.
@ -16,8 +16,12 @@ pub const StreamSource = union(enum) {
pub const SeekError = std.fs.File.SeekError;
pub const GetSeekPosError = std.fs.File.GetPosError;
pub const InStream = io.InStream(*StreamSource, ReadError, read);
pub const OutStream = io.OutStream(*StreamSource, WriteError, write);
pub const Reader = io.Reader(*StreamSource, ReadError, read);
/// Deprecated: use `Reader`
pub const InStream = Reader;
pub const Writer = io.Writer(*StreamSource, WriteError, write);
/// Deprecated: use `Writer`
pub const OutStream = Writer;
pub const SeekableStream = io.SeekableStream(
*StreamSource,
SeekError,
@ -76,10 +80,20 @@ pub const StreamSource = union(enum) {
}
}
pub fn reader(self: *StreamSource) Reader {
return .{ .context = self };
}
/// Deprecated: use `reader`
pub fn inStream(self: *StreamSource) InStream {
return .{ .context = self };
}
pub fn writer(self: *StreamSource) Writer {
return .{ .context = self };
}
/// Deprecated: use `writer`
pub fn outStream(self: *StreamSource) OutStream {
return .{ .context = self };
}
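A minimal sketch of round-tripping through the union with the new names, assuming source is an already-initialized std.io.StreamSource backed by a mutable buffer and that the union's seekTo forwarding (not shown in this excerpt) is available:

try source.writer().writeAll("hi");
try source.seekTo(0);
var out: [2]u8 = undefined;
try source.reader().readNoEof(&out);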

View File

@ -50,8 +50,8 @@ test "write a file, read it, then delete it" {
const expected_file_size: u64 = "begin".len + data.len + "end".len;
expectEqual(expected_file_size, file_size);
var buf_stream = io.bufferedInStream(file.inStream());
const st = buf_stream.inStream();
var buf_stream = io.bufferedReader(file.reader());
const st = buf_stream.reader();
const contents = try st.readAllAlloc(std.testing.allocator, 2 * 1024);
defer std.testing.allocator.free(contents);
@ -85,7 +85,7 @@ test "BitStreams with File Stream" {
var file = try tmp.dir.openFile(tmp_file_name, .{});
defer file.close();
var bit_stream = io.bitInStream(builtin.endian, file.inStream());
var bit_stream = io.bitReader(builtin.endian, file.reader());
var out_bits: usize = undefined;

View File

@ -495,7 +495,7 @@ const Msf = struct {
streams: []MsfStream,
fn openFile(self: *Msf, allocator: *mem.Allocator, file: File) !void {
const in = file.inStream();
const in = file.reader();
const superblock = try in.readStruct(SuperBlock);
@ -528,7 +528,7 @@ const Msf = struct {
);
const begin = self.directory.pos;
const stream_count = try self.directory.inStream().readIntLittle(u32);
const stream_count = try self.directory.reader().readIntLittle(u32);
const stream_sizes = try allocator.alloc(u32, stream_count);
defer allocator.free(stream_sizes);
@ -537,7 +537,7 @@ const Msf = struct {
// and must be taken into account when resolving stream indices.
const Nil = 0xFFFFFFFF;
for (stream_sizes) |*s, i| {
const size = try self.directory.inStream().readIntLittle(u32);
const size = try self.directory.reader().readIntLittle(u32);
s.* = if (size == Nil) 0 else blockCountFromSize(size, superblock.BlockSize);
}
@ -552,7 +552,7 @@ const Msf = struct {
var blocks = try allocator.alloc(u32, size);
var j: u32 = 0;
while (j < size) : (j += 1) {
const block_id = try self.directory.inStream().readIntLittle(u32);
const block_id = try self.directory.reader().readIntLittle(u32);
const n = (block_id % superblock.BlockSize);
// 0 is for SuperBlock, 1 and 2 for FPMs.
if (block_id == 0 or n == 1 or n == 2 or block_id * superblock.BlockSize > try file.getEndPos())
@ -647,7 +647,7 @@ const MsfStream = struct {
pub fn readNullTermString(self: *MsfStream, allocator: *mem.Allocator) ![]u8 {
var list = ArrayList(u8).init(allocator);
while (true) {
const byte = try self.inStream().readByte();
const byte = try self.reader().readByte();
if (byte == 0) {
return list.span();
}
@ -661,7 +661,7 @@ const MsfStream = struct {
var offset = self.pos % self.block_size;
try self.in_file.seekTo(block * self.block_size + offset);
const in = self.in_file.inStream();
const in = self.in_file.reader();
var size: usize = 0;
var rem_buffer = buffer;
@ -708,6 +708,10 @@ const MsfStream = struct {
return block * self.block_size + offset;
}
pub fn reader(self: *MsfStream) std.io.Reader(*MsfStream, Error, read) {
return .{ .context = self };
}
/// Deprecated: use `reader`
pub fn inStream(self: *MsfStream) std.io.InStream(*MsfStream, Error, read) {
return .{ .context = self };
}

View File

@ -615,8 +615,8 @@ pub fn getUserInfo(name: []const u8) !UserInfo {
/// TODO this reads /etc/passwd. But sometimes the user/id mapping is in something else
/// like NIS, AD, etc. See `man nss` or look at an strace for `id myuser`.
pub fn posixGetUserInfo(name: []const u8) !UserInfo {
var in_stream = try io.InStream.open("/etc/passwd", null);
defer in_stream.close();
var reader = try io.Reader.open("/etc/passwd", null);
defer reader.close();
const State = enum {
Start,
@ -633,7 +633,7 @@ pub fn posixGetUserInfo(name: []const u8) !UserInfo {
var gid: u32 = 0;
while (true) {
const amt_read = try in_stream.read(buf[0..]);
const amt_read = try reader.read(buf[0..]);
for (buf[0..amt_read]) |byte| {
switch (state) {
.Start => switch (byte) {