implement new async syntax in self-hosted compiler

This commit is contained in:
Vexu 2019-08-16 12:24:06 +03:00 committed by Andrew Kelley
parent 1e3b6816a8
commit 2151f84d59
8 changed files with 41 additions and 145 deletions
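
The user-facing change: the `async<allocator>` prefix on calls is replaced by a
bare `async` keyword. A before/after sketch adapted from the updated zig fmt
tests further down (`a` and `b` are placeholder functions, not part of this
commit):

    comptime {
        // before: if (async<a> b()) { ... }
        // after:
        if (async b()) {
            a();
        }
    }

The angle-bracket form on fn prototypes is dropped as well; see the FnCC hunk
in the grammar below.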

View File

@@ -9987,7 +9987,7 @@ TypeExpr <- PrefixTypeOp* ErrorUnionExpr
ErrorUnionExpr <- SuffixExpr (EXCLAMATIONMARK TypeExpr)?
SuffixExpr
<- AsyncPrefix PrimaryTypeExpr SuffixOp* FnCallArguments
<- KEYWORD_async PrimaryTypeExpr SuffixOp* FnCallArguments
/ PrimaryTypeExpr (SuffixOp / FnCallArguments)*
PrimaryTypeExpr
@@ -10063,7 +10063,7 @@ FnCC
<- KEYWORD_nakedcc
/ KEYWORD_stdcallcc
/ KEYWORD_extern
/ KEYWORD_async (LARROW TypeExpr RARROW)?
/ KEYWORD_async
ParamDecl <- (KEYWORD_noalias / KEYWORD_comptime)? (IDENTIFIER COLON)? ParamType
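
For the FnCC rule above, the optional angle-bracket payload (an allocator type
expression in the old AST) is dropped from function prototypes. Illustration
only; the function name is a placeholder:

    // before: async<*std.mem.Allocator> fn fetch() void {}
    // after:
    async fn fetch() void {}
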
@@ -10168,8 +10168,6 @@ SuffixOp
/ DOTASTERISK
/ DOTQUESTIONMARK
AsyncPrefix <- KEYWORD_async (LARROW PrefixExpr RARROW)?
FnCallArguments <- LPAREN ExprList RPAREN
# Ptr specific

View File

@@ -1181,7 +1181,6 @@ pub const Builder = struct {
ast.Node.Id.ErrorTag => return error.Unimplemented,
ast.Node.Id.AsmInput => return error.Unimplemented,
ast.Node.Id.AsmOutput => return error.Unimplemented,
ast.Node.Id.AsyncAttribute => return error.Unimplemented,
ast.Node.Id.ParamDecl => return error.Unimplemented,
ast.Node.Id.FieldInitializer => return error.Unimplemented,
ast.Node.Id.EnumLiteral => return error.Unimplemented,

View File

@@ -1037,7 +1037,7 @@ fn transCreateNodeFnCall(c: *Context, fn_expr: *ast.Node) !*ast.Node.SuffixOp {
.op = ast.Node.SuffixOp.Op{
.Call = ast.Node.SuffixOp.Op.Call{
.params = ast.Node.SuffixOp.Op.Call.ParamList.init(c.a()),
.async_attr = null,
.async_token = null,
},
},
.rtoken = undefined, // set after appending args
@@ -1355,7 +1355,6 @@ fn finishTransFnProto(
.var_args_token = null, // TODO this field is broken in the AST data model
.extern_export_inline_token = extern_export_inline_tok,
.cc_token = cc_tok,
.async_attr = null,
.body_node = null,
.lib_name = null,
.align_expr = null,

View File

@@ -113,7 +113,6 @@ static AstNode *ast_parse_multiply_op(ParseContext *pc);
static AstNode *ast_parse_prefix_op(ParseContext *pc);
static AstNode *ast_parse_prefix_type_op(ParseContext *pc);
static AstNode *ast_parse_suffix_op(ParseContext *pc);
static AstNode *ast_parse_async_prefix(ParseContext *pc);
static AstNode *ast_parse_fn_call_argumnets(ParseContext *pc);
static AstNode *ast_parse_array_type_start(ParseContext *pc);
static AstNode *ast_parse_ptr_type_start(ParseContext *pc);
@@ -1389,22 +1388,18 @@ static AstNode *ast_parse_error_union_expr(ParseContext *pc) {
}
// SuffixExpr
// <- AsyncPrefix PrimaryTypeExpr SuffixOp* FnCallArguments
// <- KEYWORD_async PrimaryTypeExpr SuffixOp* FnCallArguments
// / PrimaryTypeExpr (SuffixOp / FnCallArguments)*
static AstNode *ast_parse_suffix_expr(ParseContext *pc) {
AstNode *async_call = ast_parse_async_prefix(pc);
if (async_call != nullptr) {
Token *async_token = eat_token_if(pc, TokenIdKeywordAsync);
if (async_token != nullptr) {
if (eat_token_if(pc, TokenIdKeywordFn) != nullptr) {
// HACK: If we see the keyword `fn`, then we assume that
// we are parsing an async fn proto, and not a call.
// We therefore put back all tokens consumed by the async
// prefix...
// HACK: This loop is not actually enough to put back all the
// tokens. Let's hope this is fine for most code right now
// and wait till we get the async rework for a syntax update.
do {
put_back_token(pc);
} while (peek_token(pc)->id != TokenIdKeywordAsync);
put_back_token(pc);
put_back_token(pc);
return ast_parse_primary_type_expr(pc);
}
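
The old AsyncPrefix could carry `<expr>`, so the deleted loop had to unwind an
unknown number of tokens (and, per its own comment, could not always do so
correctly). With a bare `async` keyword, exactly two tokens (`async` and `fn`)
ever need to be put back, which is what the two put_back_token calls above do.
The two surface forms the parser distinguishes look roughly like this
(placeholder identifiers, illustration only):

    // `async` + primary type expr + call arguments: handled here as an async call.
    const frame = async b();

    // `async` immediately followed by `fn`: both tokens are put back and the
    // whole expression is re-parsed as a fn proto type by ast_parse_primary_type_expr.
    const Callback = async fn () void;
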
@@ -1446,10 +1441,14 @@ static AstNode *ast_parse_suffix_expr(ParseContext *pc) {
ast_invalid_token_error(pc, peek_token(pc));
assert(args->type == NodeTypeFnCallExpr);
async_call->data.fn_call_expr.fn_ref_expr = child;
async_call->data.fn_call_expr.params = args->data.fn_call_expr.params;
async_call->data.fn_call_expr.is_builtin = false;
return async_call;
AstNode *res = ast_create_node(pc, NodeTypeFnCallExpr, async_token);
res->data.fn_call_expr.is_async = true;
res->data.fn_call_expr.seen = false;
res->data.fn_call_expr.fn_ref_expr = child;
res->data.fn_call_expr.params = args->data.fn_call_expr.params;
res->data.fn_call_expr.is_builtin = false;
return res;
}
AstNode *res = ast_parse_primary_type_expr(pc);
@@ -1501,7 +1500,7 @@ static AstNode *ast_parse_suffix_expr(ParseContext *pc) {
// <- BUILTINIDENTIFIER FnCallArguments
// / CHAR_LITERAL
// / ContainerDecl
// / DOT IDENTIFIER
// / DOT IDENTIFIER
// / ErrorSetDecl
// / FLOAT
// / FnProto
@@ -2016,7 +2015,7 @@ static AstNode *ast_parse_link_section(ParseContext *pc) {
// <- KEYWORD_nakedcc
// / KEYWORD_stdcallcc
// / KEYWORD_extern
// / KEYWORD_async (LARROW TypeExpr RARROW)?
// / KEYWORD_async
static Optional<AstNodeFnProto> ast_parse_fn_cc(ParseContext *pc) {
AstNodeFnProto res = {};
if (eat_token_if(pc, TokenIdKeywordNakedCC) != nullptr) {
@@ -2657,19 +2656,6 @@ static AstNode *ast_parse_suffix_op(ParseContext *pc) {
return nullptr;
}
// AsyncPrefix <- KEYWORD_async (LARROW PrefixExpr RARROW)?
static AstNode *ast_parse_async_prefix(ParseContext *pc) {
Token *async = eat_token_if(pc, TokenIdKeywordAsync);
if (async == nullptr)
return nullptr;
AstNode *res = ast_create_node(pc, NodeTypeFnCallExpr, async);
res->data.fn_call_expr.is_async = true;
res->data.fn_call_expr.seen = false;
return res;
}
// FnCallArguments <- LPAREN ExprList RPAREN
static AstNode *ast_parse_fn_call_argumnets(ParseContext *pc) {
Token *paren = eat_token_if(pc, TokenIdLParen);

View File

@@ -434,7 +434,6 @@ pub const Node = struct {
ErrorTag,
AsmInput,
AsmOutput,
AsyncAttribute,
ParamDecl,
FieldInitializer,
};
@@ -838,36 +837,6 @@ pub const Node = struct {
}
};
pub const AsyncAttribute = struct {
base: Node,
async_token: TokenIndex,
allocator_type: ?*Node,
rangle_bracket: ?TokenIndex,
pub fn iterate(self: *AsyncAttribute, index: usize) ?*Node {
var i = index;
if (self.allocator_type) |allocator_type| {
if (i < 1) return allocator_type;
i -= 1;
}
return null;
}
pub fn firstToken(self: *const AsyncAttribute) TokenIndex {
return self.async_token;
}
pub fn lastToken(self: *const AsyncAttribute) TokenIndex {
if (self.rangle_bracket) |rangle_bracket| {
return rangle_bracket;
}
return self.async_token;
}
};
pub const FnProto = struct {
base: Node,
doc_comments: ?*DocComment,
@@ -879,7 +848,6 @@ pub const Node = struct {
var_args_token: ?TokenIndex,
extern_export_inline_token: ?TokenIndex,
cc_token: ?TokenIndex,
async_attr: ?*AsyncAttribute,
body_node: ?*Node,
lib_name: ?*Node, // populated if this is an extern declaration
align_expr: ?*Node, // populated if align(A) is present
@@ -935,7 +903,6 @@ pub const Node = struct {
pub fn firstToken(self: *const FnProto) TokenIndex {
if (self.visib_token) |visib_token| return visib_token;
if (self.async_attr) |async_attr| return async_attr.firstToken();
if (self.extern_export_inline_token) |extern_export_inline_token| return extern_export_inline_token;
assert(self.lib_name == null);
if (self.cc_token) |cc_token| return cc_token;
@@ -1699,7 +1666,7 @@ pub const Node = struct {
pub const Call = struct {
params: ParamList,
async_attr: ?*AsyncAttribute,
async_token: ?TokenIndex,
pub const ParamList = SegmentedList(*Node, 2);
};
@@ -1752,7 +1719,7 @@ pub const Node = struct {
pub fn firstToken(self: *const SuffixOp) TokenIndex {
switch (self.op) {
.Call => |*call_info| if (call_info.async_attr) |async_attr| return async_attr.firstToken(),
.Call => |*call_info| if (call_info.async_token) |async_token| return async_token,
else => {},
}
return self.lhs.firstToken();
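
With `async_attr` replaced by a plain `async_token`, consumers of the AST check
the optional token instead of a separate AsyncAttribute node. A hypothetical
helper (not part of this commit) showing the new shape:

    const std = @import("std");
    const ast = std.zig.ast;

    // True if `node` is a call expression written with the `async` keyword.
    fn isAsyncCall(node: *ast.Node) bool {
        const suffix_op = node.cast(ast.Node.SuffixOp) orelse return false;
        switch (suffix_op.op) {
            .Call => |*call| return call.async_token != null,
            else => return false,
        }
    }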

View File

@@ -277,7 +277,7 @@ fn parseTopLevelDecl(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
/// FnProto <- FnCC? KEYWORD_fn IDENTIFIER? LPAREN ParamDeclList RPAREN ByteAlign? LinkSection? EXCLAMATIONMARK? (KEYWORD_var / TypeExpr)
fn parseFnProto(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
const cc = try parseFnCC(arena, it, tree);
const cc = parseFnCC(arena, it, tree);
const fn_token = eatToken(it, .Keyword_fn) orelse {
if (cc == null) return null else return error.ParseError;
};
@@ -320,7 +320,6 @@ fn parseFnProto(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
.var_args_token = var_args_token,
.extern_export_inline_token = null,
.cc_token = null,
.async_attr = null,
.body_node = null,
.lib_name = null,
.align_expr = align_expr,
@@ -331,7 +330,6 @@ fn parseFnProto(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
switch (kind) {
.CC => |token| fn_proto_node.cc_token = token,
.Extern => |token| fn_proto_node.extern_export_inline_token = token,
.Async => |node| fn_proto_node.async_attr = node,
}
}
@@ -1092,10 +1090,19 @@ fn parseErrorUnionExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*No
}
/// SuffixExpr
/// <- AsyncPrefix PrimaryTypeExpr SuffixOp* FnCallArguments
/// <- KEYWORD_async PrimaryTypeExpr SuffixOp* FnCallArguments
/// / PrimaryTypeExpr (SuffixOp / FnCallArguments)*
fn parseSuffixExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
if (try parseAsyncPrefix(arena, it, tree)) |async_node| {
if (eatToken(it, .Keyword_async)) |async_token| {
if (eatToken(it, .Keyword_fn)) |token_fn| {
// HACK: If we see the keyword `fn`, then we assume that
// we are parsing an async fn proto, and not a call.
// We therefore put back all tokens consumed by the async
// prefix...
putBackToken(it, token_fn);
putBackToken(it, async_token);
return parsePrimaryTypeExpr(arena, it, tree);
}
// TODO: Implement hack for parsing `async fn ...` in ast_parse_suffix_expr
var res = try expectNode(arena, it, tree, parsePrimaryTypeExpr, AstError{
.ExpectedPrimaryTypeExpr = AstError.ExpectedPrimaryTypeExpr{ .token = it.index },
@@ -1116,7 +1123,6 @@ fn parseSuffixExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
});
return null;
};
const node = try arena.create(Node.SuffixOp);
node.* = Node.SuffixOp{
.base = Node{ .id = .SuffixOp },
@@ -1124,14 +1130,13 @@ fn parseSuffixExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
.op = Node.SuffixOp.Op{
.Call = Node.SuffixOp.Op.Call{
.params = params.list,
.async_attr = async_node.cast(Node.AsyncAttribute).?,
.async_token = async_token,
},
},
.rtoken = params.rparen,
};
return &node.base;
}
if (try parsePrimaryTypeExpr(arena, it, tree)) |expr| {
var res = expr;
@@ -1153,7 +1158,7 @@ fn parseSuffixExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
.op = Node.SuffixOp.Op{
.Call = Node.SuffixOp.Op.Call{
.params = params.list,
.async_attr = null,
.async_token = null,
},
},
.rtoken = params.rparen,
@@ -1653,36 +1658,18 @@ fn parseLinkSection(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
/// <- KEYWORD_nakedcc
/// / KEYWORD_stdcallcc
/// / KEYWORD_extern
/// / KEYWORD_async (LARROW TypeExpr RARROW)?
fn parseFnCC(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?FnCC {
/// / KEYWORD_async
fn parseFnCC(arena: *Allocator, it: *TokenIterator, tree: *Tree) ?FnCC {
if (eatToken(it, .Keyword_nakedcc)) |token| return FnCC{ .CC = token };
if (eatToken(it, .Keyword_stdcallcc)) |token| return FnCC{ .CC = token };
if (eatToken(it, .Keyword_extern)) |token| return FnCC{ .Extern = token };
if (eatToken(it, .Keyword_async)) |token| {
const node = try arena.create(Node.AsyncAttribute);
node.* = Node.AsyncAttribute{
.base = Node{ .id = .AsyncAttribute },
.async_token = token,
.allocator_type = null,
.rangle_bracket = null,
};
if (eatToken(it, .AngleBracketLeft)) |_| {
const type_expr = try expectNode(arena, it, tree, parseTypeExpr, AstError{
.ExpectedTypeExpr = AstError.ExpectedTypeExpr{ .token = it.index },
});
const rarrow = try expectToken(it, tree, .AngleBracketRight);
node.allocator_type = type_expr;
node.rangle_bracket = rarrow;
}
return FnCC{ .Async = node };
}
if (eatToken(it, .Keyword_async)) |token| return FnCC{ .CC = token };
return null;
}
const FnCC = union(enum) {
CC: TokenIndex,
Extern: TokenIndex,
Async: *Node.AsyncAttribute,
};
/// ParamDecl <- (KEYWORD_noalias / KEYWORD_comptime)? (IDENTIFIER COLON)? ParamType
@@ -2409,28 +2396,6 @@ fn parseSuffixOp(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
return &node.base;
}
/// AsyncPrefix <- KEYWORD_async (LARROW PrefixExpr RARROW)?
fn parseAsyncPrefix(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
const async_token = eatToken(it, .Keyword_async) orelse return null;
var rangle_bracket: ?TokenIndex = null;
const expr_node = if (eatToken(it, .AngleBracketLeft)) |_| blk: {
const prefix_expr = try expectNode(arena, it, tree, parsePrefixExpr, AstError{
.ExpectedPrefixExpr = AstError.ExpectedPrefixExpr{ .token = it.index },
});
rangle_bracket = try expectToken(it, tree, .AngleBracketRight);
break :blk prefix_expr;
} else null;
const node = try arena.create(Node.AsyncAttribute);
node.* = Node.AsyncAttribute{
.base = Node{ .id = .AsyncAttribute },
.async_token = async_token,
.allocator_type = expr_node,
.rangle_bracket = rangle_bracket,
};
return &node.base;
}
/// FnCallArguments <- LPAREN ExprList RPAREN
/// ExprList <- (Expr COMMA)* Expr?
fn parseFnCallArguments(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?AnnotatedParamList {

View File

@@ -210,7 +210,7 @@ test "zig fmt: spaces around slice operator" {
test "zig fmt: async call in if condition" {
try testCanonical(
\\comptime {
\\ if (async<a> b()) {
\\ if (async b()) {
\\ a();
\\ }
\\}
@@ -1118,7 +1118,7 @@ test "zig fmt: first line comment in struct initializer" {
\\pub async fn acquire(self: *Self) HeldLock {
\\ return HeldLock{
\\ // guaranteed allocation elision
\\ .held = await (async self.lock.acquire() catch unreachable),
\\ .held = self.lock.acquire(),
\\ .value = &self.private_data,
\\ };
\\}

View File

@@ -284,20 +284,6 @@ fn renderExpression(
return renderExpression(allocator, stream, tree, indent, start_col, comptime_node.expr, space);
},
ast.Node.Id.AsyncAttribute => {
const async_attr = @fieldParentPtr(ast.Node.AsyncAttribute, "base", base);
if (async_attr.allocator_type) |allocator_type| {
try renderToken(tree, stream, async_attr.async_token, indent, start_col, Space.None); // async
try renderToken(tree, stream, tree.nextToken(async_attr.async_token), indent, start_col, Space.None); // <
try renderExpression(allocator, stream, tree, indent, start_col, allocator_type, Space.None); // allocator
return renderToken(tree, stream, tree.nextToken(allocator_type.lastToken()), indent, start_col, space); // >
} else {
return renderToken(tree, stream, async_attr.async_token, indent, start_col, space); // async
}
},
ast.Node.Id.Suspend => {
const suspend_node = @fieldParentPtr(ast.Node.Suspend, "base", base);
@@ -459,8 +445,8 @@ fn renderExpression(
switch (suffix_op.op) {
@TagType(ast.Node.SuffixOp.Op).Call => |*call_info| {
if (call_info.async_attr) |async_attr| {
try renderExpression(allocator, stream, tree, indent, start_col, &async_attr.base, Space.Space);
if (call_info.async_token) |async_token| {
try renderToken(tree, stream, async_token, indent, start_col, Space.Space);
}
try renderExpression(allocator, stream, tree, indent, start_col, suffix_op.lhs, Space.None);
@@ -1121,10 +1107,6 @@ fn renderExpression(
try renderToken(tree, stream, cc_token, indent, start_col, Space.Space); // stdcallcc
}
if (fn_proto.async_attr) |async_attr| {
try renderExpression(allocator, stream, tree, indent, start_col, &async_attr.base, Space.Space);
}
const lparen = if (fn_proto.name_token) |name_token| blk: {
try renderToken(tree, stream, fn_proto.fn_token, indent, start_col, Space.Space); // fn
try renderToken(tree, stream, name_token, indent, start_col, Space.None); // name