dep tokenizer: run zig fmt and move exports to canonical location

Andrew Kelley 2019-05-30 12:07:55 -04:00
parent 5954d5235f
commit 7878f9660f
GPG Key ID: 7C5F548F728501A9
2 changed files with 75 additions and 102 deletions

src-self-hosted/dep_tokenizer.zig

@@ -359,63 +359,6 @@ pub const Tokenizer = struct {
     };
 };
 
-export fn stage2_DepTokenizer_init(input: [*]const u8, len: usize) stage2_DepTokenizer {
-    const t = std.heap.c_allocator.create(Tokenizer) catch @panic("failed to create .d tokenizer");
-    t.* = Tokenizer.init(std.heap.c_allocator, input[0..len]);
-    return stage2_DepTokenizer{
-        .handle = t,
-    };
-}
-
-export fn stage2_DepTokenizer_deinit(self: *stage2_DepTokenizer) void {
-    self.handle.deinit();
-}
-
-export fn stage2_DepTokenizer_next(self: *stage2_DepTokenizer) stage2_DepNextResult {
-    const otoken = self.handle.next() catch {
-        const textz = std.Buffer.init(&self.handle.arena.allocator, self.handle.error_text) catch @panic("failed to create .d tokenizer error text");
-        return stage2_DepNextResult{
-            .type_id = .error_,
-            .textz = textz.toSlice().ptr,
-        };
-    };
-    const token = otoken orelse {
-        return stage2_DepNextResult{
-            .type_id = .null_,
-            .textz = undefined,
-        };
-    };
-    const textz = std.Buffer.init(&self.handle.arena.allocator, token.bytes) catch @panic("failed to create .d tokenizer token text");
-    return stage2_DepNextResult{
-        .type_id = switch (token.id) {
-            .target => stage2_DepNextResult.TypeId.target,
-            .prereq => stage2_DepNextResult.TypeId.prereq,
-        },
-        .textz = textz.toSlice().ptr,
-    };
-}
-
-export const stage2_DepTokenizer = extern struct {
-    handle: *Tokenizer,
-};
-
-export const stage2_DepNextResult = extern struct {
-    type_id: TypeId,
-
-    // when type_id == error --> error text
-    // when type_id == null --> undefined
-    // when type_id == target --> target pathname
-    // when type_id == prereq --> prereq pathname
-    textz: [*]const u8,
-
-    export const TypeId = extern enum {
-        error_,
-        null_,
-        target,
-        prereq,
-    };
-};
-
 test "empty file" {
     try depTokenizer("", "");
 }
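The removed block is only the C ABI shim; the Tokenizer itself stays in this file, and the same shim reappears in stage2.zig below with `Tokenizer` spelled through the new `DepTokenizer` import. For contrast, a minimal sketch of driving the tokenizer natively from Zig, assuming (as the shim's catch/orelse handling implies) that next() returns an error-union optional token with `id` and `bytes` fields; 2019-era std, where std.debug.warn is variadic:

const std = @import("std");
const Tokenizer = @import("dep_tokenizer.zig").Tokenizer;

test "drive Tokenizer directly" {
    var t = Tokenizer.init(std.heap.c_allocator, "foo.o: foo.c foo.h");
    defer t.deinit();
    // next() returns null at end of input; on error, t.error_text describes it.
    while (try t.next()) |token| {
        switch (token.id) {
            .target => std.debug.warn("target: {}\n", token.bytes),
            .prereq => std.debug.warn("prereq: {}\n", token.bytes),
        }
    }
}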
@@ -469,78 +412,54 @@ test "empty target linefeeds" {
     const expect = "target = {foo.o}";
     try depTokenizer(
         \\foo.o:
-    ,
-        expect
-    );
+    , expect);
     try depTokenizer(
         \\foo.o:
         \\
-    ,
-        expect
-    );
+    , expect);
     try depTokenizer(
         \\foo.o:
-    ,
-        expect
-    );
+    , expect);
     try depTokenizer(
         \\foo.o:
         \\
-    ,
-        expect
-    );
+    , expect);
 }
 
 test "empty target linefeeds + continuations" {
     const expect = "target = {foo.o}";
     try depTokenizer(
         \\foo.o:\
-    ,
-        expect
-    );
+    , expect);
     try depTokenizer(
         \\foo.o:\
         \\
-    ,
-        expect
-    );
+    , expect);
     try depTokenizer(
         \\foo.o:\
-    ,
-        expect
-    );
+    , expect);
     try depTokenizer(
         \\foo.o:\
         \\
-    ,
-        expect
-    );
+    , expect);
 }
 
 test "empty target linefeeds + hspace + continuations" {
     const expect = "target = {foo.o}";
     try depTokenizer(
         \\foo.o: \
-    ,
-        expect
-    );
+    , expect);
     try depTokenizer(
         \\foo.o: \
         \\
-    ,
-        expect
-    );
+    , expect);
     try depTokenizer(
         \\foo.o: \
-    ,
-        expect
-    );
+    , expect);
     try depTokenizer(
         \\foo.o: \
         \\
-    ,
-        expect
-    );
+    , expect);
 }
 
 test "prereq" {
@@ -572,15 +491,11 @@ test "prereq continuation" {
     try depTokenizer(
         \\foo.o: foo.h\
         \\bar.h
-    ,
-        expect
-    );
+    , expect);
     try depTokenizer(
         \\foo.o: foo.h\
         \\bar.h
-    ,
-        expect
-    );
+    , expect);
 }
 
 test "multiple prereqs" {
@@ -907,14 +822,14 @@ test "error target - continuation expecting end-of-line" {
         \\target = {foo.o}
         \\ERROR: illegal char 'x' at position 8: continuation expecting end-of-line
     );
-    try depTokenizer("foo.o: \\ x",
+    try depTokenizer("foo.o: \\\x0dx",
         \\target = {foo.o}
        \\ERROR: illegal char 'x' at position 9: continuation expecting end-of-line
     );
 }
 
 test "error prereq - continuation expecting end-of-line" {
-    try depTokenizer("foo.o: foo.h\\ x",
+    try depTokenizer("foo.o: foo.h\\\x0dx",
         \\target = {foo.o}
         \\ERROR: illegal char 'x' at position 14: continuation expecting end-of-line
     );
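These two rewritten calls are the only visible non-whitespace edits in the test file: each old string literal evidently carried a raw carriage-return byte (rendered above as a space), which canonical formatting re-spells as the \x0d escape, the same byte written out explicitly. A small sanity check of that equivalence, as a hypothetical test not present in the commit:

const std = @import("std");

test "\\x0d spells a carriage return" {
    // "\x0d" and "\r" are two spellings of the single byte 0x0D.
    std.debug.assert(std.mem.eql(u8, "\x0d", "\r"));
}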

src-self-hosted/stage2.zig

@@ -15,13 +15,14 @@ const self_hosted_main = @import("main.zig");
 const Args = arg.Args;
 const Flag = arg.Flag;
 const errmsg = @import("errmsg.zig");
+const DepTokenizer = @import("dep_tokenizer.zig").Tokenizer;
 
 var stderr_file: fs.File = undefined;
 var stderr: *io.OutStream(fs.File.WriteError) = undefined;
 var stdout: *io.OutStream(fs.File.WriteError) = undefined;
 
 comptime {
-    _ = @import("dep_tokenizer.zig");
+    _ = @import("dep_tokenizer.zig");
 }
 
 // ABI warning
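The comptime block kept at the end of this hunk is the standard force-analysis idiom: assigning the import to `_` makes the compiler semantically analyze dep_tokenizer.zig even though nothing at stage2.zig's top level otherwise names that file, so its `test` blocks still run when this file is the test root. The idiom in isolation, with a hypothetical sibling file other.zig:

comptime {
    // Nothing else references other.zig; this discarded import forces the
    // compiler to analyze it, pulling in its `test` and `export` declarations.
    _ = @import("other.zig");
}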
@@ -397,3 +398,60 @@ fn printErrMsgToFile(
     try stream.writeByteNTimes('~', last_token.end - first_token.start);
     try stream.write("\n");
 }
+
+export fn stage2_DepTokenizer_init(input: [*]const u8, len: usize) stage2_DepTokenizer {
+    const t = std.heap.c_allocator.create(DepTokenizer) catch @panic("failed to create .d tokenizer");
+    t.* = DepTokenizer.init(std.heap.c_allocator, input[0..len]);
+    return stage2_DepTokenizer{
+        .handle = t,
+    };
+}
+
+export fn stage2_DepTokenizer_deinit(self: *stage2_DepTokenizer) void {
+    self.handle.deinit();
+}
+
+export fn stage2_DepTokenizer_next(self: *stage2_DepTokenizer) stage2_DepNextResult {
+    const otoken = self.handle.next() catch {
+        const textz = std.Buffer.init(&self.handle.arena.allocator, self.handle.error_text) catch @panic("failed to create .d tokenizer error text");
+        return stage2_DepNextResult{
+            .type_id = .error_,
+            .textz = textz.toSlice().ptr,
+        };
+    };
+    const token = otoken orelse {
+        return stage2_DepNextResult{
+            .type_id = .null_,
+            .textz = undefined,
+        };
+    };
+    const textz = std.Buffer.init(&self.handle.arena.allocator, token.bytes) catch @panic("failed to create .d tokenizer token text");
+    return stage2_DepNextResult{
+        .type_id = switch (token.id) {
+            .target => stage2_DepNextResult.TypeId.target,
+            .prereq => stage2_DepNextResult.TypeId.prereq,
+        },
+        .textz = textz.toSlice().ptr,
+    };
+}
+
+export const stage2_DepTokenizer = extern struct {
+    handle: *DepTokenizer,
+};
+
+export const stage2_DepNextResult = extern struct {
+    type_id: TypeId,
+
+    // when type_id == error --> error text
+    // when type_id == null --> undefined
+    // when type_id == target --> target pathname
+    // when type_id == prereq --> prereq pathname
+    textz: [*]const u8,
+
+    export const TypeId = extern enum {
+        error_,
+        null_,
+        target,
+        prereq,
+    };
+};
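stage2.zig is the canonical location because it collects the stage2_* symbols that the stage1 C++ compiler links against (hence the `// ABI warning` comment visible above). The protocol the struct pair encodes: keep calling next() until type_id is null_; on error_, textz carries the error text; otherwise it carries a pathname, and the z suffix plus the std.Buffer origin suggest NUL-terminated storage. A hypothetical consumer loop, sketched in Zig for illustration even though the real caller is C++:

fn dumpDeps(dep_file: []const u8) void {
    var t = stage2_DepTokenizer_init(dep_file.ptr, dep_file.len);
    defer stage2_DepTokenizer_deinit(&t);
    while (true) {
        const r = stage2_DepTokenizer_next(&t);
        switch (r.type_id) {
            .null_ => return, // end of stream; r.textz is undefined here
            .error_ => return, // r.textz: NUL-terminated error text
            .target, .prereq => {
                // r.textz: NUL-terminated target or prereq pathname
            },
        }
    }
}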