Mirror of https://github.com/ziglang/zig.git
parent 345f8db1c4
commit 21328e0036
@@ -469,6 +469,7 @@ pub const Node = struct {
         doc_comments: ?*DocComment,
         decls: DeclList,
         eof_token: TokenIndex,
+        shebang: ?TokenIndex,

         pub const DeclList = SegmentedList(*Node, 4);

@@ -480,6 +481,7 @@ pub const Node = struct {
         }

         pub fn firstToken(self: *const Root) TokenIndex {
+            if (self.shebang) |shebang| return shebang;
             return if (self.decls.len == 0) self.eof_token else (self.decls.at(0).*).firstToken();
         }

@@ -21,6 +21,7 @@ pub fn parse(allocator: *mem.Allocator, source: []const u8) !ast.Tree {
         .base = ast.Node{ .id = ast.Node.Id.Root },
         .decls = ast.Node.Root.DeclList.init(arena),
         .doc_comments = null,
+        .shebang = null,
         // initialized when we get the eof token
         .eof_token = undefined,
     });
@@ -41,6 +42,15 @@ pub fn parse(allocator: *mem.Allocator, source: []const u8) !ast.Tree {
     }
     var tok_it = tree.tokens.iterator(0);

+    // skip over shebang line
+    shebang: {
+        const shebang_tok_index = tok_it.index;
+        const shebang_tok_ptr = tok_it.peek() orelse break :shebang;
+        if (shebang_tok_ptr.id != Token.Id.ShebangLine) break :shebang;
+        root_node.shebang = shebang_tok_index;
+        _ = tok_it.next();
+    }
+
     // skip over line comments at the top of the file
     while (true) {
         const next_tok = tok_it.peek() orelse break;
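The new block above peeks at the first token, records its index in root_node.shebang when it is a ShebangLine, and consumes it before the existing leading-line-comment skip runs; the labeled block lets either check bail out early with break :shebang. Below is a standalone sketch of that skip pattern in present-day Zig syntax. Id, skipShebang, and the slice-plus-cursor shape are illustrative stand-ins, not the parser's actual TokenIterator API.

const std = @import("std");

const Id = enum { ShebangLine, LineComment, Eof };

// Returns the index of a leading shebang token and advances the cursor past it,
// or returns null and leaves the cursor untouched.
fn skipShebang(ids: []const Id, index: *usize) ?usize {
    shebang: {
        if (index.* >= ids.len) break :shebang;
        if (ids[index.*] != .ShebangLine) break :shebang;
        const shebang_index = index.*;
        index.* += 1; // consume the shebang token
        return shebang_index;
    }
    return null;
}

test "skip an optional leading shebang token" {
    var i: usize = 0;
    try std.testing.expectEqual(@as(?usize, 0), skipShebang(&.{ .ShebangLine, .Eof }, &i));
    try std.testing.expectEqual(@as(usize, 1), i);

    i = 0;
    try std.testing.expectEqual(@as(?usize, null), skipShebang(&.{ .LineComment, .Eof }, &i));
    try std.testing.expectEqual(@as(usize, 0), i);
}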
@@ -1,3 +1,11 @@
+test "zig fmt: shebang line" {
+    try testCanonical(
+        \\#!/usr/bin/env zig
+        \\pub fn main() void {}
+        \\
+    );
+}
+
 test "zig fmt: correctly move doc comments on struct fields" {
     try testTransform(
         \\pub const section_64 = extern struct {
@@ -67,8 +67,14 @@ fn renderRoot(
     stream: var,
     tree: *ast.Tree,
 ) (@typeOf(stream).Child.Error || Error)!void {
-    // render all the line comments at the beginning of the file
     var tok_it = tree.tokens.iterator(0);
+
+    // render the shebang line
+    if (tree.root_node.shebang) |shebang| {
+        try stream.write(tree.tokenSlice(shebang));
+    }
+
+    // render all the line comments at the beginning of the file
     while (tok_it.next()) |token| {
         if (token.id != Token.Id.LineComment) break;
         try stream.print("{}\n", mem.trimRight(u8, tree.tokenSlicePtr(token), " "));
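With this change renderRoot writes the shebang token's slice first and verbatim; since the tokenizer gives that token a span running up to and including the first newline, no extra newline needs to be printed for it, and the leading line comments are rendered afterwards as before. A sketch of that ordering in present-day Zig syntax follows; renderPrefix and its parameters are hypothetical stand-ins rather than the real renderRoot signature.

const std = @import("std");

// Write an optional shebang slice untouched, then each leading line comment
// with trailing spaces trimmed and a newline appended.
fn renderPrefix(writer: anytype, shebang_slice: ?[]const u8, line_comments: []const []const u8) !void {
    if (shebang_slice) |slice| try writer.writeAll(slice); // already ends with '\n'
    for (line_comments) |comment| {
        try writer.print("{s}\n", .{std.mem.trimRight(u8, comment, " ")});
    }
}

test "shebang is rendered before leading line comments" {
    var buf: [128]u8 = undefined;
    var fbs = std.io.fixedBufferStream(&buf);
    try renderPrefix(fbs.writer(), "#!/usr/bin/env zig\n", &.{"// top comment   "});
    try std.testing.expectEqualStrings("#!/usr/bin/env zig\n// top comment\n", fbs.getWritten());
}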
@@ -145,6 +145,7 @@ pub const Token = struct {
         LineComment,
         DocComment,
         BracketStarBracket,
+        ShebangLine,
         Keyword_align,
         Keyword_and,
         Keyword_asm,
@@ -208,12 +209,25 @@ pub const Tokenizer = struct {
     }

     pub fn init(buffer: []const u8) Tokenizer {
-        return Tokenizer{
-            .buffer = buffer,
-            .index = 0,
-            .pending_invalid_token = null,
-        };
+        if (mem.startsWith(u8, buffer, "#!")) {
+            const src_start = if (mem.indexOfScalar(u8, buffer, '\n')) |i| i + 1 else buffer.len;
+            return Tokenizer{
+                .buffer = buffer,
+                .index = src_start,
+                .pending_invalid_token = Token{
+                    .id = Token.Id.ShebangLine,
+                    .start = 0,
+                    .end = src_start,
+                },
+            };
+        } else {
+            return Tokenizer{
+                .buffer = buffer,
+                .index = 0,
+                .pending_invalid_token = null,
+            };
+        }
     }

     const State = enum {
         Start,
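Tokenizer.init now special-cases a buffer that starts with "#!": the whole first line, newline included, becomes a single pre-built ShebangLine token (apparently parked in the pending_invalid_token slot so it is emitted before normal scanning), and regular tokenization begins at src_start. Below is a standalone sketch of just that span calculation, in present-day Zig syntax; shebangEnd is a hypothetical helper, not part of the compiler or standard library.

const std = @import("std");

// Length of the shebang line (including its newline) at the start of `buffer`,
// or 0 if the buffer does not begin with "#!".
fn shebangEnd(buffer: []const u8) usize {
    if (!std.mem.startsWith(u8, buffer, "#!")) return 0;
    return if (std.mem.indexOfScalar(u8, buffer, '\n')) |i| i + 1 else buffer.len;
}

test "shebang span detection" {
    try std.testing.expectEqual(@as(usize, 19), shebangEnd("#!/usr/bin/env zig\npub fn main() void {}\n"));
    try std.testing.expectEqual(@as(usize, 0), shebangEnd("pub fn main() void {}\n"));
    try std.testing.expectEqual(@as(usize, 5), shebangEnd("#!zig")); // no trailing newline
}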