2018-01-19 08:03:20 +00:00
|
|
|
const builtin = @import("builtin");
|
2017-11-07 08:22:27 +00:00
|
|
|
const std = @import("std");
|
|
|
|
const io = std.io;
|
|
|
|
const os = std.os;
|
2018-01-17 04:19:05 +00:00
|
|
|
const warn = std.debug.warn;
|
|
|
|
const mem = std.mem;
|
2018-01-19 08:03:20 +00:00
|
|
|
const assert = std.debug.assert;
|
2018-01-17 04:19:05 +00:00
|
|
|
|
2018-01-17 06:50:35 +00:00
|
|
|
const max_doc_file_size = 10 * 1024 * 1024;
|
|
|
|
|
|
|
|
const exe_ext = std.build.Target(std.build.Target.Native).exeFileExt();
|
2018-01-19 08:03:20 +00:00
|
|
|
const obj_ext = std.build.Target(std.build.Target.Native).oFileExt();
|
|
|
|
const tmp_dir_name = "docgen_tmp";
|
2017-11-07 08:22:27 +00:00
|
|
|
|
|
|
|
// Entry point. Usage: docgen <zig_exe> <input.html.in> <output.html>
// Reads the input template, tokenizes it, builds a table of contents,
// then renders the final HTML (compiling/running embedded code samples
// in a temporary directory along the way).
pub fn main() -> %void {
    // TODO use a more general purpose allocator here
    var inc_allocator = try std.heap.IncrementingAllocator.init(max_doc_file_size);
    defer inc_allocator.deinit();
    const allocator = &inc_allocator.allocator;

    var args_it = os.args();

    // skip argv[0] (the path to this executable)
    if (!args_it.skip()) @panic("expected self arg");

    const zig_exe = try (args_it.next(allocator) ?? @panic("expected zig exe arg"));
    defer allocator.free(zig_exe);

    const in_file_name = try (args_it.next(allocator) ?? @panic("expected input arg"));
    defer allocator.free(in_file_name);

    const out_file_name = try (args_it.next(allocator) ?? @panic("expected output arg"));
    defer allocator.free(out_file_name);

    var in_file = try io.File.openRead(in_file_name, allocator);
    defer in_file.close();

    var out_file = try io.File.openWrite(out_file_name, allocator);
    defer out_file.close();

    var file_in_stream = io.FileInStream.init(&in_file);

    // slurp the whole template into memory; max_doc_file_size bounds it
    const input_file_bytes = try file_in_stream.stream.readAllAlloc(allocator, max_doc_file_size);

    var file_out_stream = io.FileOutStream.init(&out_file);
    var buffered_out_stream = io.BufferedOutStream.init(&file_out_stream.stream);

    var tokenizer = Tokenizer.init(in_file_name, input_file_bytes);
    // first pass: parse tags and build the table of contents
    var toc = try genToc(allocator, &tokenizer);

    // scratch directory for compiling/running the embedded code samples
    try os.makePath(allocator, tmp_dir_name);
    defer {
        // TODO issue #709
        // disabled to pass CI tests, but obviously we want to implement this
        // and then remove this workaround
        if (builtin.os == builtin.Os.linux) {
            os.deleteTree(allocator, tmp_dir_name) catch {};
        }
    }

    // second pass: emit the HTML, executing code samples as needed
    try genHtml(allocator, &tokenizer, &toc, &buffered_out_stream.stream, zig_exe);
    try buffered_out_stream.flush();
}
|
|
|
|
|
2018-01-17 04:19:05 +00:00
|
|
|
// One lexical token of the docgen markup: either a run of raw document
// content passed through verbatim, or a piece of a {#...#} tag.
const Token = struct {
    id: Id,
    // byte offset into Tokenizer.buffer where the token begins
    start: usize,
    // byte offset one past the last byte of the token
    end: usize,

    const Id = enum {
        Invalid,
        // raw document text outside of any tag
        Content,
        // the "{#" that opens a tag
        BracketOpen,
        // text inside a tag (tag name or an argument)
        TagContent,
        // the "|" separating tag arguments
        Separator,
        // the "#}" that closes a tag
        BracketClose,
        Eof,
    };
};
|
|
|
|
|
2018-01-17 04:19:05 +00:00
|
|
|
// Streaming tokenizer over a docgen template. Splits the buffer into
// Content runs and the pieces of {#tag|arg#} markers. State persists
// across calls to next() so a token may end mid-tag.
const Tokenizer = struct {
    buffer: []const u8,
    // current scan position (byte offset into buffer)
    index: usize,
    state: State,
    // used only for error reporting
    source_file_name: []const u8,
    // number of code nodes seen; genHtml uses it for progress output
    code_node_count: usize,

    const State = enum {
        // scanning raw content, watching for "{"
        Start,
        // saw "{", deciding whether "#" follows
        LBracket,
        // saw "#" inside a tag, deciding whether "}" follows
        Hash,
        // scanning the inside of a {#...#} tag
        TagName,
        // input exhausted
        Eof,
    };

    fn init(source_file_name: []const u8, buffer: []const u8) -> Tokenizer {
        return Tokenizer {
            .buffer = buffer,
            .index = 0,
            .state = State.Start,
            .source_file_name = source_file_name,
            .code_node_count = 0,
        };
    }

    // Return the next token. The token starts where the previous one
    // ended; result.id stays Eof until the first byte classifies it.
    fn next(self: &Tokenizer) -> Token {
        var result = Token {
            .id = Token.Id.Eof,
            .start = self.index,
            .end = undefined,
        };
        while (self.index < self.buffer.len) : (self.index += 1) {
            const c = self.buffer[self.index];
            switch (self.state) {
                State.Start => switch (c) {
                    '{' => {
                        // possible tag opener; don't classify yet
                        self.state = State.LBracket;
                    },
                    else => {
                        result.id = Token.Id.Content;
                    },
                },
                State.LBracket => switch (c) {
                    '#' => {
                        if (result.id != Token.Id.Eof) {
                            // a Content token is in progress; end it just
                            // before "{#" and re-scan the "{" next call
                            self.index -= 1;
                            self.state = State.Start;
                            break;
                        } else {
                            // token starts exactly at "{#": emit BracketOpen
                            result.id = Token.Id.BracketOpen;
                            self.index += 1;
                            self.state = State.TagName;
                            break;
                        }
                    },
                    else => {
                        // lone "{" is just content
                        result.id = Token.Id.Content;
                        self.state = State.Start;
                    },
                },
                State.TagName => switch (c) {
                    '|' => {
                        if (result.id != Token.Id.Eof) {
                            // end the TagContent token before the separator
                            break;
                        } else {
                            result.id = Token.Id.Separator;
                            self.index += 1;
                            break;
                        }
                    },
                    '#' => {
                        // possible "#}" closer
                        self.state = State.Hash;
                    },
                    else => {
                        result.id = Token.Id.TagContent;
                    },
                },
                State.Hash => switch (c) {
                    '}' => {
                        if (result.id != Token.Id.Eof) {
                            // end the TagContent token before "#}"
                            self.index -= 1;
                            self.state = State.TagName;
                            break;
                        } else {
                            result.id = Token.Id.BracketClose;
                            self.index += 1;
                            self.state = State.Start;
                            break;
                        }
                    },
                    else => {
                        // the "#" was part of the tag content after all
                        result.id = Token.Id.TagContent;
                        self.state = State.TagName;
                    },
                },
                State.Eof => unreachable,
            }
        } else {
            // ran off the end of the buffer
            switch (self.state) {
                State.Start, State.LBracket, State.Eof => {},
                else => {
                    // EOF in the middle of a tag is malformed input
                    result.id = Token.Id.Invalid;
                },
            }
            self.state = State.Eof;
        }
        result.end = self.index;
        return result;
    }

    // Line/column info for a token, for diagnostics.
    const Location = struct {
        line: usize,       // 0-based line number of token start
        column: usize,     // 0-based column of token start
        line_start: usize, // byte offset of the start of that line
        line_end: usize,   // byte offset of the end of that line (at '\n' or EOF)
    };

    // Compute the source location of `token` by scanning the buffer
    // from the beginning, counting newlines.
    fn getTokenLocation(self: &Tokenizer, token: &const Token) -> Location {
        var loc = Location {
            .line = 0,
            .column = 0,
            .line_start = 0,
            .line_end = 0,
        };
        for (self.buffer) |c, i| {
            if (i == token.start) {
                // found the token; extend line_end to the end of this line
                loc.line_end = i;
                while (loc.line_end < self.buffer.len and self.buffer[loc.line_end] != '\n') : (loc.line_end += 1) {}
                return loc;
            }
            if (c == '\n') {
                loc.line += 1;
                loc.column = 0;
                loc.line_start = i + 1;
            } else {
                loc.column += 1;
            }
        }
        return loc;
    }
};
|
|
|
|
|
|
|
|
error ParseError;
|
|
|
|
|
|
|
|
// Print a compiler-style diagnostic (file:line:col, the offending source
// line, and a run of '~' underlining the token), then return
// error.ParseError so callers can `return parseError(...)`.
fn parseError(tokenizer: &Tokenizer, token: &const Token, comptime fmt: []const u8, args: ...) -> error {
    const loc = tokenizer.getTokenLocation(token);
    warn("{}:{}:{}: error: " ++ fmt ++ "\n", tokenizer.source_file_name, loc.line + 1, loc.column + 1, args);
    if (loc.line_start <= loc.line_end) {
        // echo the source line containing the token
        warn("{}\n", tokenizer.buffer[loc.line_start..loc.line_end]);
        {
            // pad up to the token's column
            var i: usize = 0;
            while (i < loc.column) : (i += 1) {
                warn(" ");
            }
        }
        {
            // underline the token with one '~' per byte
            const caret_count = token.end - token.start;
            var i: usize = 0;
            while (i < caret_count) : (i += 1) {
                warn("~");
            }
        }
        warn("\n");
    }
    return error.ParseError;
}
|
|
|
|
|
|
|
|
// Verify that `token` has the expected id; on mismatch, report a
// diagnostic via parseError and propagate error.ParseError.
fn assertToken(tokenizer: &Tokenizer, token: &const Token, id: Token.Id) -> %void {
    if (token.id == id) return;
    return parseError(tokenizer, token, "expected {}, found {}", @tagName(id), @tagName(token.id));
}
|
|
|
|
|
|
|
|
// Consume the next token from the stream, requiring it to have the
// given id; returns the token or error.ParseError on mismatch.
fn eatToken(tokenizer: &Tokenizer, id: Token.Id) -> %Token {
    const tok = tokenizer.next();
    try assertToken(tokenizer, tok, id);
    return tok;
}
|
|
|
|
|
|
|
|
// Payload of a {#header_open|Name#} tag: one section heading.
const HeaderOpen = struct {
    // heading text exactly as written in the template
    name: []const u8,
    // urlize()d anchor fragment used in href="#..." links
    url: []const u8,
    // nesting depth (1-based); rendered as <h{n}>
    n: usize,
};
|
|
|
|
|
2018-01-17 05:22:33 +00:00
|
|
|
// One entry of a {#see_also|A|B|...#} list: a cross-reference by
// heading name, with the token kept for error reporting if the
// referenced heading does not exist.
const SeeAlsoItem = struct {
    name: []const u8,
    token: Token,
};
|
|
|
|
|
2018-01-19 08:03:20 +00:00
|
|
|
// Whether an executable code sample is expected to run successfully
// (exe) or to fail at runtime (exe_err).
const ExpectedOutcome = enum {
    Succeed,
    Fail,
};
|
|
|
|
|
2018-01-17 06:50:35 +00:00
|
|
|
// A code sample parsed from {#code_begin|kind|name#} ... {#code_end#}.
// genHtml compiles and/or runs it according to `id` and embeds the
// output in the generated documentation.
const Code = struct {
    id: Id,
    // base file name (without extension) for the temp source file
    name: []const u8,
    // Content token holding the sample's source text
    source_token: Token,
    // "syntax" samples: rendered but never compiled as a standalone file
    is_inline: bool,
    // build mode, switchable via {#code_release_fast#}
    mode: builtin.Mode,
    // extra object names added via {#code_link_object|name#}
    link_objects: []const []const u8,
    // cross-compile for windows via {#target_windows#}
    target_windows: bool,
    // pass --library c via {#link_libc#}
    link_libc: bool,

    const Id = union(enum) {
        // zig test, expected to pass
        Test,
        // zig test, expected to fail compiling; payload = expected error text
        TestError: []const u8,
        // zig test, expected to trip a safety check; payload = expected text
        TestSafety: []const u8,
        // build-exe then run; payload = expected run outcome
        Exe: ExpectedOutcome,
        // build-obj only (also used for inline "syntax" samples)
        Obj,
    };
};
|
|
|
|
|
2018-01-17 04:19:05 +00:00
|
|
|
// One node of the parsed document, in source order.
const Node = union(enum) {
    // raw text copied to the output verbatim
    Content: []const u8,
    // {#nav#}: placeholder replaced by the generated table of contents
    Nav,
    // a section heading
    HeaderOpen: HeaderOpen,
    // a list of cross-references
    SeeAlso: []const SeeAlsoItem,
    // an embedded code sample
    Code: Code,
};
|
|
|
|
|
|
|
|
// Result of the first parsing pass over the template.
const Toc = struct {
    // the document as a flat list of nodes, in source order
    nodes: []Node,
    // pre-rendered table-of-contents HTML (nested <ul>/<li>)
    toc: []u8,
    // anchor url -> defining tag token; used to validate see_also links
    urls: std.HashMap([]const u8, Token, mem.hash_slice_u8, mem.eql_slice_u8),
};
|
|
|
|
|
|
|
|
// Tracks whether the previous header tag opened or closed a section,
// which decides the <ul> nesting emitted into the table of contents.
const Action = enum {
    Open,
    Close,
};
|
|
|
|
|
2018-01-17 05:22:33 +00:00
|
|
|
// First pass: walk the token stream, building the node list, the
// table-of-contents HTML, and the url map. Returns error.ParseError
// (with a printed diagnostic) on malformed input. Caller owns the
// returned Toc's slices and map.
fn genToc(allocator: &mem.Allocator, tokenizer: &Tokenizer) -> %Toc {
    var urls = std.HashMap([]const u8, Token, mem.hash_slice_u8, mem.eql_slice_u8).init(allocator);
    %defer urls.deinit();

    // current header nesting depth; must be 0 again at EOF
    var header_stack_size: usize = 0;
    var last_action = Action.Open;

    var toc_buf = try std.Buffer.initSize(allocator, 0);
    defer toc_buf.deinit();

    var toc_buf_adapter = io.BufferOutStream.init(&toc_buf);
    var toc = &toc_buf_adapter.stream;

    var nodes = std.ArrayList(Node).init(allocator);
    defer nodes.deinit();

    try toc.writeByte('\n');

    while (true) {
        const token = tokenizer.next();
        switch (token.id) {
            Token.Id.Eof => {
                if (header_stack_size != 0) {
                    return parseError(tokenizer, token, "unbalanced headers");
                }
                try toc.write(" </ul>\n");
                break;
            },
            Token.Id.Content => {
                try nodes.append(Node {.Content = tokenizer.buffer[token.start..token.end] });
            },
            Token.Id.BracketOpen => {
                // all tags start with their name as TagContent
                const tag_token = try eatToken(tokenizer, Token.Id.TagContent);
                const tag_name = tokenizer.buffer[tag_token.start..tag_token.end];

                if (mem.eql(u8, tag_name, "nav")) {
                    _ = try eatToken(tokenizer, Token.Id.BracketClose);

                    try nodes.append(Node.Nav);
                } else if (mem.eql(u8, tag_name, "header_open")) {
                    _ = try eatToken(tokenizer, Token.Id.Separator);
                    const content_token = try eatToken(tokenizer, Token.Id.TagContent);
                    const content = tokenizer.buffer[content_token.start..content_token.end];
                    _ = try eatToken(tokenizer, Token.Id.BracketClose);

                    header_stack_size += 1;

                    const urlized = try urlize(allocator, content);
                    try nodes.append(Node{.HeaderOpen = HeaderOpen {
                        .name = content,
                        .url = urlized,
                        .n = header_stack_size,
                    }});
                    // put returns the previous value when the key existed:
                    // two headers urlizing to the same anchor is an error
                    if (try urls.put(urlized, tag_token)) |other_tag_token| {
                        parseError(tokenizer, tag_token, "duplicate header url: #{}", urlized) catch {};
                        parseError(tokenizer, other_tag_token, "other tag here") catch {};
                        return error.ParseError;
                    }
                    if (last_action == Action.Open) {
                        // deeper nesting: open a new <ul>
                        try toc.writeByte('\n');
                        try toc.writeByteNTimes(' ', header_stack_size * 4);
                        try toc.write("<ul>\n");
                    } else {
                        last_action = Action.Open;
                    }
                    try toc.writeByteNTimes(' ', 4 + header_stack_size * 4);
                    try toc.print("<li><a href=\"#{}\">{}</a>", urlized, content);
                } else if (mem.eql(u8, tag_name, "header_close")) {
                    if (header_stack_size == 0) {
                        return parseError(tokenizer, tag_token, "unbalanced close header");
                    }
                    header_stack_size -= 1;
                    _ = try eatToken(tokenizer, Token.Id.BracketClose);

                    if (last_action == Action.Close) {
                        // closing out a nested <ul>
                        try toc.writeByteNTimes(' ', 8 + header_stack_size * 4);
                        try toc.write("</ul></li>\n");
                    } else {
                        try toc.write("</li>\n");
                        last_action = Action.Close;
                    }
                } else if (mem.eql(u8, tag_name, "see_also")) {
                    var list = std.ArrayList(SeeAlsoItem).init(allocator);
                    %defer list.deinit();

                    // arguments alternate TagContent / Separator until "#}"
                    while (true) {
                        const see_also_tok = tokenizer.next();
                        switch (see_also_tok.id) {
                            Token.Id.TagContent => {
                                const content = tokenizer.buffer[see_also_tok.start..see_also_tok.end];
                                try list.append(SeeAlsoItem {
                                    .name = content,
                                    .token = see_also_tok,
                                });
                            },
                            Token.Id.Separator => {},
                            Token.Id.BracketClose => {
                                try nodes.append(Node {.SeeAlso = list.toOwnedSlice() } );
                                break;
                            },
                            else => return parseError(tokenizer, see_also_tok, "invalid see_also token"),
                        }
                    }
                } else if (mem.eql(u8, tag_name, "code_begin")) {
                    _ = try eatToken(tokenizer, Token.Id.Separator);
                    const code_kind_tok = try eatToken(tokenizer, Token.Id.TagContent);
                    // the name argument is optional; default is "test"
                    var name: []const u8 = "test";
                    const maybe_sep = tokenizer.next();
                    switch (maybe_sep.id) {
                        Token.Id.Separator => {
                            const name_tok = try eatToken(tokenizer, Token.Id.TagContent);
                            name = tokenizer.buffer[name_tok.start..name_tok.end];
                            _ = try eatToken(tokenizer, Token.Id.BracketClose);
                        },
                        Token.Id.BracketClose => {},
                        else => return parseError(tokenizer, token, "invalid token"),
                    }
                    const code_kind_str = tokenizer.buffer[code_kind_tok.start..code_kind_tok.end];
                    var code_kind_id: Code.Id = undefined;
                    var is_inline = false;
                    if (mem.eql(u8, code_kind_str, "exe")) {
                        code_kind_id = Code.Id { .Exe = ExpectedOutcome.Succeed };
                    } else if (mem.eql(u8, code_kind_str, "exe_err")) {
                        code_kind_id = Code.Id { .Exe = ExpectedOutcome.Fail };
                    } else if (mem.eql(u8, code_kind_str, "test")) {
                        code_kind_id = Code.Id.Test;
                    } else if (mem.eql(u8, code_kind_str, "test_err")) {
                        // for test_err/test_safety the "name" argument is the
                        // expected error text, not a file name
                        code_kind_id = Code.Id { .TestError = name};
                        name = "test";
                    } else if (mem.eql(u8, code_kind_str, "test_safety")) {
                        code_kind_id = Code.Id { .TestSafety = name};
                        name = "test";
                    } else if (mem.eql(u8, code_kind_str, "obj")) {
                        code_kind_id = Code.Id.Obj;
                    } else if (mem.eql(u8, code_kind_str, "syntax")) {
                        code_kind_id = Code.Id.Obj;
                        is_inline = true;
                    } else {
                        return parseError(tokenizer, code_kind_tok, "unrecognized code kind: {}", code_kind_str);
                    }

                    var mode = builtin.Mode.Debug;
                    var link_objects = std.ArrayList([]const u8).init(allocator);
                    defer link_objects.deinit();
                    var target_windows = false;
                    var link_libc = false;

                    // consume content and modifier tags until {#code_end#};
                    // the while-else is unreachable because code_end breaks
                    const source_token = while (true) {
                        const content_tok = try eatToken(tokenizer, Token.Id.Content);
                        _ = try eatToken(tokenizer, Token.Id.BracketOpen);
                        const end_code_tag = try eatToken(tokenizer, Token.Id.TagContent);
                        const end_tag_name = tokenizer.buffer[end_code_tag.start..end_code_tag.end];
                        if (mem.eql(u8, end_tag_name, "code_release_fast")) {
                            mode = builtin.Mode.ReleaseFast;
                        } else if (mem.eql(u8, end_tag_name, "code_link_object")) {
                            _ = try eatToken(tokenizer, Token.Id.Separator);
                            const obj_tok = try eatToken(tokenizer, Token.Id.TagContent);
                            try link_objects.append(tokenizer.buffer[obj_tok.start..obj_tok.end]);
                        } else if (mem.eql(u8, end_tag_name, "target_windows")) {
                            target_windows = true;
                        } else if (mem.eql(u8, end_tag_name, "link_libc")) {
                            link_libc = true;
                        } else if (mem.eql(u8, end_tag_name, "code_end")) {
                            _ = try eatToken(tokenizer, Token.Id.BracketClose);
                            break content_tok;
                        } else {
                            return parseError(tokenizer, end_code_tag, "invalid token inside code_begin: {}", end_tag_name);
                        }
                        _ = try eatToken(tokenizer, Token.Id.BracketClose);
                    } else unreachable; // TODO issue #707

                    try nodes.append(Node {.Code = Code {
                        .id = code_kind_id,
                        .name = name,
                        .source_token = source_token,
                        .is_inline = is_inline,
                        .mode = mode,
                        .link_objects = link_objects.toOwnedSlice(),
                        .target_windows = target_windows,
                        .link_libc = link_libc,
                    }});
                    tokenizer.code_node_count += 1;
                } else {
                    return parseError(tokenizer, tag_token, "unrecognized tag name: {}", tag_name);
                }
            },
            else => return parseError(tokenizer, token, "invalid token"),
        }
    }

    return Toc {
        .nodes = nodes.toOwnedSlice(),
        .toc = toc_buf.toOwnedSlice(),
        .urls = urls,
    };
}
|
|
|
|
|
|
|
|
// Convert a header name into an anchor-friendly URL fragment.
// Letters, digits, '_' and '-' pass through unchanged; spaces become
// '-'; every other byte is dropped. Caller owns the returned slice.
fn urlize(allocator: &mem.Allocator, input: []const u8) -> %[]u8 {
    var buf = try std.Buffer.initSize(allocator, 0);
    defer buf.deinit();

    var buf_adapter = io.BufferOutStream.init(&buf);
    var out = &buf_adapter.stream;
    for (input) |c| {
        switch (c) {
            // fix: digits were previously dropped, mangling anchors for
            // headings that contain numbers (e.g. "u32")
            'a'...'z', 'A'...'Z', '0'...'9', '_', '-' => {
                try out.writeByte(c);
            },
            ' ' => {
                try out.writeByte('-');
            },
            else => {},
        }
    }
    return buf.toOwnedSlice();
}
|
|
|
|
|
2018-01-17 06:50:35 +00:00
|
|
|
// HTML-escape `input` so it can be embedded in the generated page.
// Replaces the four HTML-significant bytes with their named character
// references; everything else is copied through. Caller owns the
// returned slice.
fn escapeHtml(allocator: &mem.Allocator, input: []const u8) -> %[]u8 {
    var buf = try std.Buffer.initSize(allocator, 0);
    defer buf.deinit();

    var buf_adapter = io.BufferOutStream.init(&buf);
    var out = &buf_adapter.stream;
    for (input) |c| {
        // fix: the escape table was a no-op (it wrote the raw character
        // back); emit the proper entity references instead
        try switch (c) {
            '&' => out.write("&amp;"),
            '<' => out.write("&lt;"),
            '>' => out.write("&gt;"),
            '"' => out.write("&quot;"),
            else => out.writeByte(c),
        };
    }
    return buf.toOwnedSlice();
}
|
|
|
|
|
2018-01-19 08:03:20 +00:00
|
|
|
//#define VT_RED "\x1b[31;1m"
|
|
|
|
//#define VT_GREEN "\x1b[32;1m"
|
|
|
|
//#define VT_CYAN "\x1b[36;1m"
|
|
|
|
//#define VT_WHITE "\x1b[37;1m"
|
|
|
|
//#define VT_BOLD "\x1b[0;1m"
|
|
|
|
//#define VT_RESET "\x1b[0m"
|
|
|
|
|
|
|
|
// Parser state for termColor's scan over ANSI "ESC [ n ; m m"
// (SGR) escape sequences.
const TermState = enum {
    // copying plain bytes, watching for ESC
    Start,
    // saw ESC, expecting '['
    Escape,
    // saw "ESC[", expecting the first digit
    LBracket,
    // reading the first number's digits
    Number,
    // after the first number: ';' means another arg follows
    AfterNumber,
    // saw ';', expecting the second number's first digit
    Arg,
    // reading the second number's digits
    ArgNumber,
    // expecting the terminating 'm'
    ExpectEnd,
};
|
|
|
|
|
|
|
|
error UnsupportedEscape;
|
|
|
|
|
|
|
|
test "term color" {
    const input_bytes = "A\x1b[32;1mgreen\x1b[0mB";
    const result = try termColor(std.debug.global_allocator, input_bytes);
    // termColor renders "ESC[32;1m" as class "t{first}_{second}",
    // i.e. "t32_1" — the previous expectation of "t32" could never match.
    assert(mem.eql(u8, result, "A<span class=\"t32_1\">green</span>B"));
}
|
|
|
|
|
|
|
|
// Translate ANSI SGR escape sequences ("ESC[n;mm") in `input` into HTML
// <span class="t{n}_{m}"> markup, so colored compiler output can be
// shown in the docs. "ESC[0m" (reset) closes any open spans. Returns
// error.UnsupportedEscape on any escape it does not understand.
// Caller owns the returned slice.
fn termColor(allocator: &mem.Allocator, input: []const u8) -> %[]u8 {
    var buf = try std.Buffer.initSize(allocator, 0);
    defer buf.deinit();

    var buf_adapter = io.BufferOutStream.init(&buf);
    var out = &buf_adapter.stream;
    // slice bounds of the number currently being scanned
    var number_start_index: usize = undefined;
    var first_number: usize = undefined;
    var second_number: usize = undefined;
    var i: usize = 0;
    var state = TermState.Start;
    // spans opened but not yet closed by a reset sequence
    var open_span_count: usize = 0;
    while (i < input.len) : (i += 1) {
        const c = input[i];
        switch (state) {
            TermState.Start => switch (c) {
                '\x1b' => state = TermState.Escape,
                else => try out.writeByte(c),
            },
            TermState.Escape => switch (c) {
                '[' => state = TermState.LBracket,
                else => return error.UnsupportedEscape,
            },
            TermState.LBracket => switch (c) {
                '0'...'9' => {
                    number_start_index = i;
                    state = TermState.Number;
                },
                else => return error.UnsupportedEscape,
            },
            TermState.Number => switch (c) {
                '0'...'9' => {},
                else => {
                    // digits were validated above, so parseInt cannot fail
                    first_number = std.fmt.parseInt(usize, input[number_start_index..i], 10) catch unreachable;
                    second_number = 0;
                    state = TermState.AfterNumber;
                    // re-process this byte in the new state
                    i -= 1;
                },
            },

            TermState.AfterNumber => switch (c) {
                ';' => state = TermState.Arg,
                else => {
                    state = TermState.ExpectEnd;
                    i -= 1;
                },
            },
            TermState.Arg => switch (c) {
                '0'...'9' => {
                    number_start_index = i;
                    state = TermState.ArgNumber;
                },
                else => return error.UnsupportedEscape,
            },
            TermState.ArgNumber => switch (c) {
                '0'...'9' => {},
                else => {
                    second_number = std.fmt.parseInt(usize, input[number_start_index..i], 10) catch unreachable;
                    state = TermState.ExpectEnd;
                    i -= 1;
                },
            },
            TermState.ExpectEnd => switch (c) {
                'm' => {
                    state = TermState.Start;
                    // any new sequence first closes everything that is open
                    while (open_span_count != 0) : (open_span_count -= 1) {
                        try out.write("</span>");
                    }
                    // "0;0" / "0" is a reset: close only, open nothing
                    if (first_number != 0 or second_number != 0) {
                        try out.print("<span class=\"t{}_{}\">", first_number, second_number);
                        open_span_count += 1;
                    }
                },
                else => return error.UnsupportedEscape,
            },
        }
    }
    return buf.toOwnedSlice();
}
|
|
|
|
|
2018-01-17 06:50:35 +00:00
|
|
|
error ExampleFailedToCompile;
|
|
|
|
|
|
|
|
fn genHtml(allocator: &mem.Allocator, tokenizer: &Tokenizer, toc: &Toc, out: &io.OutStream, zig_exe: []const u8) -> %void {
|
2018-01-19 08:03:20 +00:00
|
|
|
var code_progress_index: usize = 0;
|
2018-01-17 04:19:05 +00:00
|
|
|
for (toc.nodes) |node| {
|
|
|
|
switch (node) {
|
|
|
|
Node.Content => |data| {
|
|
|
|
try out.write(data);
|
|
|
|
},
|
|
|
|
Node.Nav => {
|
|
|
|
try out.write(toc.toc);
|
|
|
|
},
|
|
|
|
Node.HeaderOpen => |info| {
|
|
|
|
try out.print("<h{} id=\"{}\">{}</h{}>\n", info.n, info.url, info.name, info.n);
|
2017-11-07 08:22:27 +00:00
|
|
|
},
|
2018-01-17 05:22:33 +00:00
|
|
|
Node.SeeAlso => |items| {
|
|
|
|
try out.write("<p>See also:</p><ul>\n");
|
|
|
|
for (items) |item| {
|
|
|
|
const url = try urlize(allocator, item.name);
|
|
|
|
if (!toc.urls.contains(url)) {
|
|
|
|
return parseError(tokenizer, item.token, "url not found: {}", url);
|
|
|
|
}
|
|
|
|
try out.print("<li><a href=\"#{}\">{}</a></li>\n", url, item.name);
|
|
|
|
}
|
|
|
|
try out.write("</ul>\n");
|
|
|
|
},
|
2018-01-17 06:50:35 +00:00
|
|
|
Node.Code => |code| {
|
2018-01-19 08:03:20 +00:00
|
|
|
code_progress_index += 1;
|
|
|
|
warn("docgen example code {}/{}...", code_progress_index, tokenizer.code_node_count);
|
|
|
|
|
2018-01-17 06:50:35 +00:00
|
|
|
const raw_source = tokenizer.buffer[code.source_token.start..code.source_token.end];
|
|
|
|
const trimmed_raw_source = mem.trim(u8, raw_source, " \n");
|
|
|
|
const escaped_source = try escapeHtml(allocator, trimmed_raw_source);
|
2018-01-19 08:03:20 +00:00
|
|
|
if (!code.is_inline) {
|
|
|
|
try out.print("<p class=\"file\">{}.zig</p>", code.name);
|
|
|
|
}
|
2018-01-17 06:50:35 +00:00
|
|
|
try out.print("<pre><code class=\"zig\">{}</code></pre>", escaped_source);
|
|
|
|
const name_plus_ext = try std.fmt.allocPrint(allocator, "{}.zig", code.name);
|
|
|
|
const tmp_source_file_name = try os.path.join(allocator, tmp_dir_name, name_plus_ext);
|
|
|
|
try io.writeFile(tmp_source_file_name, trimmed_raw_source, null);
|
|
|
|
|
|
|
|
switch (code.id) {
|
2018-01-19 08:03:20 +00:00
|
|
|
Code.Id.Exe => |expected_outcome| {
|
|
|
|
const name_plus_bin_ext = try std.fmt.allocPrint(allocator, "{}{}", code.name, exe_ext);
|
|
|
|
const tmp_bin_file_name = try os.path.join(allocator, tmp_dir_name, name_plus_bin_ext);
|
|
|
|
var build_args = std.ArrayList([]const u8).init(allocator);
|
|
|
|
defer build_args.deinit();
|
|
|
|
try build_args.appendSlice([][]const u8 {zig_exe,
|
|
|
|
"build-exe", tmp_source_file_name,
|
|
|
|
"--output", tmp_bin_file_name,
|
|
|
|
});
|
|
|
|
try out.print("<pre><code class=\"shell\">$ zig build-exe {}.zig", code.name);
|
|
|
|
switch (code.mode) {
|
|
|
|
builtin.Mode.Debug => {},
|
|
|
|
builtin.Mode.ReleaseSafe => {
|
|
|
|
try build_args.append("--release-safe");
|
|
|
|
try out.print(" --release-safe");
|
|
|
|
},
|
|
|
|
builtin.Mode.ReleaseFast => {
|
|
|
|
try build_args.append("--release-fast");
|
|
|
|
try out.print(" --release-fast");
|
|
|
|
},
|
|
|
|
}
|
|
|
|
for (code.link_objects) |link_object| {
|
|
|
|
const name_with_ext = try std.fmt.allocPrint(allocator, "{}{}", link_object, obj_ext);
|
|
|
|
const full_path_object = try os.path.join(allocator, tmp_dir_name, name_with_ext);
|
|
|
|
try build_args.append("--object");
|
|
|
|
try build_args.append(full_path_object);
|
|
|
|
try out.print(" --object {}", name_with_ext);
|
|
|
|
}
|
|
|
|
if (code.link_libc) {
|
|
|
|
try build_args.append("--library");
|
|
|
|
try build_args.append("c");
|
|
|
|
try out.print(" --library c");
|
|
|
|
}
|
|
|
|
_ = exec(allocator, build_args.toSliceConst()) catch return parseError(
|
|
|
|
tokenizer, code.source_token, "example failed to compile");
|
|
|
|
|
|
|
|
const run_args = [][]const u8 {tmp_bin_file_name};
|
|
|
|
|
|
|
|
const result = if (expected_outcome == ExpectedOutcome.Fail) blk: {
|
|
|
|
const result = try os.ChildProcess.exec(allocator, run_args, null, null, max_doc_file_size);
|
2018-01-17 06:50:35 +00:00
|
|
|
switch (result.term) {
|
|
|
|
os.ChildProcess.Term.Exited => |exit_code| {
|
2018-01-19 08:03:20 +00:00
|
|
|
if (exit_code == 0) {
|
|
|
|
warn("{}\nThe following command incorrectly succeeded:\n", result.stderr);
|
|
|
|
for (run_args) |arg| warn("{} ", arg) else warn("\n");
|
|
|
|
return parseError(tokenizer, code.source_token, "example incorrectly compiled");
|
2018-01-17 06:50:35 +00:00
|
|
|
}
|
|
|
|
},
|
2018-01-19 08:03:20 +00:00
|
|
|
else => {},
|
2018-01-17 06:50:35 +00:00
|
|
|
}
|
2018-01-19 08:03:20 +00:00
|
|
|
break :blk result;
|
|
|
|
} else blk: {
|
|
|
|
break :blk exec(allocator, run_args) catch return parseError(
|
|
|
|
tokenizer, code.source_token, "example crashed");
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
|
|
const escaped_stderr = try escapeHtml(allocator, result.stderr);
|
|
|
|
const escaped_stdout = try escapeHtml(allocator, result.stdout);
|
|
|
|
|
|
|
|
const colored_stderr = try termColor(allocator, escaped_stderr);
|
|
|
|
const colored_stdout = try termColor(allocator, escaped_stdout);
|
|
|
|
|
|
|
|
try out.print("\n$ ./{}\n{}{}</code></pre>\n", code.name, colored_stdout, colored_stderr);
|
|
|
|
},
|
|
|
|
Code.Id.Test => {
|
|
|
|
var test_args = std.ArrayList([]const u8).init(allocator);
|
|
|
|
defer test_args.deinit();
|
|
|
|
|
|
|
|
try test_args.appendSlice([][]const u8 {zig_exe, "test", tmp_source_file_name});
|
|
|
|
try out.print("<pre><code class=\"shell\">$ zig test {}.zig", code.name);
|
|
|
|
switch (code.mode) {
|
|
|
|
builtin.Mode.Debug => {},
|
|
|
|
builtin.Mode.ReleaseSafe => {
|
|
|
|
try test_args.append("--release-safe");
|
|
|
|
try out.print(" --release-safe");
|
|
|
|
},
|
|
|
|
builtin.Mode.ReleaseFast => {
|
|
|
|
try test_args.append("--release-fast");
|
|
|
|
try out.print(" --release-fast");
|
|
|
|
},
|
|
|
|
}
|
|
|
|
if (code.target_windows) {
|
|
|
|
try test_args.appendSlice([][]const u8{
|
|
|
|
"--target-os", "windows",
|
|
|
|
"--target-arch", "x86_64",
|
|
|
|
"--target-environ", "msvc",
|
|
|
|
});
|
|
|
|
}
|
|
|
|
const result = exec(allocator, test_args.toSliceConst()) catch return parseError(
|
|
|
|
tokenizer, code.source_token, "test failed");
|
|
|
|
const escaped_stderr = try escapeHtml(allocator, result.stderr);
|
|
|
|
const escaped_stdout = try escapeHtml(allocator, result.stdout);
|
|
|
|
try out.print("\n{}{}</code></pre>\n", escaped_stderr, escaped_stdout);
|
|
|
|
},
|
|
|
|
Code.Id.TestError => |error_match| {
|
|
|
|
var test_args = std.ArrayList([]const u8).init(allocator);
|
|
|
|
defer test_args.deinit();
|
|
|
|
|
|
|
|
try test_args.appendSlice([][]const u8 {zig_exe, "test", "--color", "on", tmp_source_file_name});
|
|
|
|
try out.print("<pre><code class=\"shell\">$ zig test {}.zig", code.name);
|
|
|
|
switch (code.mode) {
|
|
|
|
builtin.Mode.Debug => {},
|
|
|
|
builtin.Mode.ReleaseSafe => {
|
|
|
|
try test_args.append("--release-safe");
|
|
|
|
try out.print(" --release-safe");
|
|
|
|
},
|
|
|
|
builtin.Mode.ReleaseFast => {
|
|
|
|
try test_args.append("--release-fast");
|
|
|
|
try out.print(" --release-fast");
|
|
|
|
},
|
2018-01-17 06:50:35 +00:00
|
|
|
}
|
2018-01-19 08:03:20 +00:00
|
|
|
const result = try os.ChildProcess.exec(allocator, test_args.toSliceConst(), null, null, max_doc_file_size);
|
2018-01-17 06:50:35 +00:00
|
|
|
switch (result.term) {
|
|
|
|
os.ChildProcess.Term.Exited => |exit_code| {
|
2018-01-19 08:03:20 +00:00
|
|
|
if (exit_code == 0) {
|
|
|
|
warn("{}\nThe following command incorrectly succeeded:\n", result.stderr);
|
|
|
|
for (test_args.toSliceConst()) |arg| warn("{} ", arg) else warn("\n");
|
|
|
|
return parseError(tokenizer, code.source_token, "example incorrectly compiled");
|
2018-01-17 06:50:35 +00:00
|
|
|
}
|
|
|
|
},
|
|
|
|
else => {
|
2018-01-19 08:03:20 +00:00
|
|
|
warn("{}\nThe following command crashed:\n", result.stderr);
|
|
|
|
for (test_args.toSliceConst()) |arg| warn("{} ", arg) else warn("\n");
|
|
|
|
return parseError(tokenizer, code.source_token, "example compile crashed");
|
2018-01-17 06:50:35 +00:00
|
|
|
},
|
|
|
|
}
|
2018-01-19 08:03:20 +00:00
|
|
|
if (mem.indexOf(u8, result.stderr, error_match) == null) {
|
|
|
|
warn("{}\nExpected to find '{}' in stderr", result.stderr, error_match);
|
|
|
|
return parseError(tokenizer, code.source_token, "example did not have expected compile error");
|
|
|
|
}
|
|
|
|
const escaped_stderr = try escapeHtml(allocator, result.stderr);
|
|
|
|
const colored_stderr = try termColor(allocator, escaped_stderr);
|
|
|
|
try out.print("\n{}</code></pre>\n", colored_stderr);
|
2018-01-17 06:50:35 +00:00
|
|
|
},
|
2018-01-19 08:03:20 +00:00
|
|
|
|
|
|
|
Code.Id.TestSafety => |error_match| {
|
|
|
|
var test_args = std.ArrayList([]const u8).init(allocator);
|
|
|
|
defer test_args.deinit();
|
|
|
|
|
|
|
|
try test_args.appendSlice([][]const u8 {zig_exe, "test", tmp_source_file_name});
|
|
|
|
switch (code.mode) {
|
|
|
|
builtin.Mode.Debug => {},
|
|
|
|
builtin.Mode.ReleaseSafe => try test_args.append("--release-safe"),
|
|
|
|
builtin.Mode.ReleaseFast => try test_args.append("--release-fast"),
|
|
|
|
}
|
|
|
|
|
|
|
|
const result = try os.ChildProcess.exec(allocator, test_args.toSliceConst(), null, null, max_doc_file_size);
|
|
|
|
switch (result.term) {
|
|
|
|
os.ChildProcess.Term.Exited => |exit_code| {
|
|
|
|
if (exit_code == 0) {
|
|
|
|
warn("{}\nThe following command incorrectly succeeded:\n", result.stderr);
|
|
|
|
for (test_args.toSliceConst()) |arg| warn("{} ", arg) else warn("\n");
|
|
|
|
return parseError(tokenizer, code.source_token, "example test incorrectly succeeded");
|
|
|
|
}
|
|
|
|
},
|
|
|
|
else => {
|
|
|
|
warn("{}\nThe following command crashed:\n", result.stderr);
|
|
|
|
for (test_args.toSliceConst()) |arg| warn("{} ", arg) else warn("\n");
|
|
|
|
return parseError(tokenizer, code.source_token, "example compile crashed");
|
|
|
|
},
|
|
|
|
}
|
|
|
|
if (mem.indexOf(u8, result.stderr, error_match) == null) {
|
|
|
|
warn("{}\nExpected to find '{}' in stderr", result.stderr, error_match);
|
|
|
|
return parseError(tokenizer, code.source_token, "example did not have expected debug safety error message");
|
|
|
|
}
|
|
|
|
const escaped_stderr = try escapeHtml(allocator, result.stderr);
|
|
|
|
const colored_stderr = try termColor(allocator, escaped_stderr);
|
|
|
|
try out.print("<pre><code class=\"shell\">$ zig test {}.zig\n{}</code></pre>\n", code.name, colored_stderr);
|
2018-01-17 06:50:35 +00:00
|
|
|
},
|
2018-01-19 08:03:20 +00:00
|
|
|
Code.Id.Obj => {
|
|
|
|
const name_plus_obj_ext = try std.fmt.allocPrint(allocator, "{}{}", code.name, obj_ext);
|
|
|
|
const tmp_obj_file_name = try os.path.join(allocator, tmp_dir_name, name_plus_obj_ext);
|
|
|
|
var build_args = std.ArrayList([]const u8).init(allocator);
|
|
|
|
defer build_args.deinit();
|
|
|
|
|
|
|
|
try build_args.appendSlice([][]const u8 {zig_exe, "build-obj", tmp_source_file_name,
|
|
|
|
"--output", tmp_obj_file_name});
|
|
|
|
|
|
|
|
if (!code.is_inline) {
|
|
|
|
try out.print("<pre><code class=\"shell\">$ zig build-obj {}.zig", code.name);
|
|
|
|
}
|
|
|
|
|
|
|
|
switch (code.mode) {
|
|
|
|
builtin.Mode.Debug => {},
|
|
|
|
builtin.Mode.ReleaseSafe => {
|
|
|
|
try build_args.append("--release-safe");
|
|
|
|
if (!code.is_inline) {
|
|
|
|
try out.print(" --release-safe");
|
|
|
|
}
|
|
|
|
},
|
|
|
|
builtin.Mode.ReleaseFast => {
|
|
|
|
try build_args.append("--release-fast");
|
|
|
|
if (!code.is_inline) {
|
|
|
|
try out.print(" --release-fast");
|
|
|
|
}
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
_ = exec(allocator, build_args.toSliceConst()) catch return parseError(
|
|
|
|
tokenizer, code.source_token, "example failed to compile");
|
|
|
|
if (!code.is_inline) {
|
|
|
|
try out.print("</code></pre>\n");
|
|
|
|
}
|
2018-01-17 06:50:35 +00:00
|
|
|
},
|
|
|
|
}
|
2018-01-19 08:03:20 +00:00
|
|
|
warn("OK\n");
|
2018-01-17 06:50:35 +00:00
|
|
|
},
|
2017-11-07 08:22:27 +00:00
|
|
|
}
|
|
|
|
}
|
2018-01-17 04:19:05 +00:00
|
|
|
|
2017-11-07 08:22:27 +00:00
|
|
|
}
|
2018-01-19 08:03:20 +00:00
|
|
|
|
|
|
|
// The child process terminated abnormally (e.g. killed by a signal) —
// reported by `exec` below for any non-`Exited` termination.
error ChildCrashed;
// The child process ran to completion but exited with a nonzero status code.
error ChildExitError;
|
|
|
|
|
|
|
|
/// Runs `args` as a child process and returns its captured output.
/// On a nonzero exit code or abnormal termination, prints the child's
/// stderr plus the offending command line to our stderr and returns
/// error.ChildExitError or error.ChildCrashed respectively.
/// Output is capped at max_doc_file_size bytes.
fn exec(allocator: &mem.Allocator, args: []const []const u8) -> %os.ChildProcess.ExecResult {
    const exec_result = try os.ChildProcess.exec(allocator, args, null, null, max_doc_file_size);
    switch (exec_result.term) {
        os.ChildProcess.Term.Exited => |code| {
            if (code != 0) {
                // Clean exit but failure status: show diagnostics and the command.
                warn("{}\nThe following command exited with code {}:\n", exec_result.stderr, code);
                for (args) |arg| {
                    warn("{} ", arg);
                } else {
                    warn("\n");
                }
                return error.ChildExitError;
            }
        },
        else => {
            // Signaled / stopped / unknown termination — treat as a crash.
            warn("{}\nThe following command crashed:\n", exec_result.stderr);
            for (args) |arg| {
                warn("{} ", arg);
            } else {
                warn("\n");
            }
            return error.ChildCrashed;
        },
    }
    return exec_result;
}
|